From ea4f4c7aea7a105485cf670839643f50133cbc3b Mon Sep 17 00:00:00 2001
From: aperez
Date: Mon, 15 Sep 2025 11:10:29 +0300
Subject: [PATCH] Updated from external repo on ansible deploy

---
 .gitignore | 2 +
 .vault_pass | 1 +
 README.md | 79 +
 airflow/.dockerignore | 1 +
 airflow/Dockerfile | 5 +-
 airflow/Dockerfile.caddy | 30 +-
 airflow/README-proxy.RU.md | 249 --
 airflow/bgutil-diff.txt | 407 --
 airflow/camoufox/Dockerfile | 1 +
 airflow/camoufox/camoufox_server.py | 2 +-
 airflow/config/.DS_Store | Bin 0 -> 6148 bytes
 airflow/config/airflow.cfg | 3167 +++++++++++++++
 airflow/configs/.env.master | 4 +
 airflow/configs/.env.worker | 4 +
 airflow/configs/docker-compose-dl.yaml.j2 | 16 +-
 airflow/configs/docker-compose-master.yaml.j2 | 24 +-
 .../configs/docker-compose-ytdlp-ops.yaml.j2 | 14 +-
 .../configs/docker-compose.camoufox.yaml.j2 | 13 +-
 .../docker-compose.config-generate.yaml | 7 +-
 airflow/dags/.DS_Store | Bin 0 -> 6148 bytes
 airflow/dags/README.ru.md | 88 +
 .../ytdlp_service_deploy.cpython-312.pyc | Bin 3339 -> 0 bytes
 .../ytdlp_service_test.cpython-312.pyc | Bin 2269 -> 0 bytes
 .../ytdlp_token_dag.cpython-312.pyc | Bin 5700 -> 0 bytes
 airflow/dags/ytdlp_mgmt_queues.py | 2 +-
 airflow/dags/ytdlp_ops_worker_per_url.py | 103 +-
 airflow/generate_envoy_config.py | 7 +-
 airflow/requirements.txt | 9 -
 airflow/roles/airflow-master/tasks/main.yml | 277 +-
 airflow/roles/airflow-worker/tasks/main.yml | 103 -
 airflow/scripts/minio-init.sh | 17 -
 .../thrift_exceptions_patch.cpython-311.pyc | Bin 3682 -> 0 bytes
 .../build/lib/pangramia/__init__.py | 0
 .../lib/pangramia/base_service/BaseService.py | 564 ---
 .../lib/pangramia/base_service/__init__.py | 1 -
 .../lib/pangramia/base_service/constants.py | 14 -
 .../lib/pangramia/base_service/ttypes.py | 20 -
 .../build/lib/pangramia/yt/__init__.py | 0
 .../yt/admin_ops/YTAccountsOpService.py | 3491 -----------------
 .../lib/pangramia/yt/admin_ops/__init__.py | 1 -
 .../lib/pangramia/yt/admin_ops/constants.py | 14 -
 .../lib/pangramia/yt/admin_ops/ttypes.py | 21 -
 .../build/lib/pangramia/yt/common/__init__.py | 1 -
 .../lib/pangramia/yt/common/constants.py | 14 -
 .../build/lib/pangramia/yt/common/ttypes.py | 905 -----
 .../lib/pangramia/yt/exceptions/__init__.py | 1 -
 .../lib/pangramia/yt/exceptions/constants.py | 14 -
 .../lib/pangramia/yt/exceptions/ttypes.py | 254 --
 .../yt/tokens_ops/YTTokenOpService.py | 1360 -------
 .../lib/pangramia/yt/tokens_ops/__init__.py | 1 -
 .../lib/pangramia/yt/tokens_ops/constants.py | 14 -
 .../lib/pangramia/yt/tokens_ops/ttypes.py | 21 -
 .../build/lib/thrift_model/__init__.py | 0
 .../build/lib/thrift_model/gen_py/__init__.py | 0
 .../thrift_model/gen_py/pangramia/__init__.py | 0
 .../pangramia/base_service/BaseService.py | 564 ---
 .../gen_py/pangramia/base_service/__init__.py | 1 -
 .../pangramia/base_service/constants.py | 14 -
 .../gen_py/pangramia/base_service/ttypes.py | 20 -
 .../gen_py/pangramia/yt/__init__.py | 0
 .../yt/admin_ops/YTAccountsOpService.py | 3491 -----------------
 .../gen_py/pangramia/yt/admin_ops/__init__.py | 1 -
 .../pangramia/yt/admin_ops/constants.py | 14 -
 .../gen_py/pangramia/yt/admin_ops/ttypes.py | 21 -
 .../gen_py/pangramia/yt/common/__init__.py | 1 -
 .../gen_py/pangramia/yt/common/constants.py | 14 -
 .../gen_py/pangramia/yt/common/ttypes.py | 905 -----
 .../pangramia/yt/exceptions/__init__.py | 1 -
 .../pangramia/yt/exceptions/constants.py | 14 -
 .../gen_py/pangramia/yt/exceptions/ttypes.py | 254 --
 .../yt/tokens_ops/YTTokenOpService.py | 1360 -------
 .../pangramia/yt/tokens_ops/__init__.py | 1 -
 .../pangramia/yt/tokens_ops/constants.py | 14 -
 .../gen_py/pangramia/yt/tokens_ops/ttypes.py | 21 -
 .../yt_ops_services-1.6.2.dev0-py3.11.egg | Bin 187877 -> 0 bytes
 airflow/ytdlp-ops-auth/pangramia | 1 -
 airflow/ytdlp-ops-auth/requirements.txt | 11 -
 airflow/ytdlp-ops-auth/setup.py | 2 -
 .../yt_ops_services.egg-info/PKG-INFO | 9 -
 .../yt_ops_services.egg-info/SOURCES.txt | 48 -
 .../dependency_links.txt | 1 -
 .../yt_ops_services.egg-info/requires.txt | 3 -
 .../yt_ops_services.egg-info/top_level.txt | 2 -
 airflow/ytdlp-ops-auth/ytdlp_ops_client.log | 261 --
 airflow/ytdlp-ops-auth/ytdlp_ops_client.py | 721 ----
 airflow/ytdlp-ops-auth/ytdlp_utils.py | 60 -
 ansible/.gitignore | 1 +
 ansible/.vault_pass | 1 +
 ansible/MIGRATION.md | 9 +
 ansible/README-yt.md | 2 +-
 ansible/ansible.cfg | 1 +
 ansible/configs/etc/ssh/sshd_config | 42 +
 .../etc/sysctl.d/99-system-limits.conf | 18 +
 ansible/group_vars/all.yml | 52 -
 ansible/group_vars/all/generated_vars.yml | 41 +-
 ansible/group_vars/all/vault.yml | 6 +-
 ansible/host_vars/af-green.yml | 4 -
 ansible/host_vars/af-test.yml | 23 +
 ansible/host_vars/dl002.yml | 23 +
 ansible/host_vars/dl003.yml | 6 -
 ansible/inventory.ini | 4 +-
 ansible/playbook-dags.yml | 2 -
 ansible/playbook-depricated.dl.yml | 73 +
 ansible/playbook-dl.yml | 73 -
 ansible/playbook-full-with-proxies.yml | 6 +
 ansible/playbook-full.yml | 95 +-
 ansible/playbook-master.yml | 74 +-
 ansible/playbook-proxies.yml | 151 +
 ansible/playbook-worker.yml | 61 +-
 ansible/roles/airflow-master/tasks/main.yml | 96 +-
 ansible/roles/airflow-worker/tasks/main.yml | 30 +-
 ansible/roles/fail2ban/handlers/main.yml | 6 +
 ansible/roles/fail2ban/tasks/main.yml | 24 +
 .../roles/fail2ban/templates/jail.local.j2 | 16 +
 .../roles/shadowsocks-deploy/tasks/main.yml | 60 +
 .../templates/config.json.j2 | 11 +
 .../templates/docker-compose.proxies.yaml.j2 | 22 +
 ansible/roles/ytdlp-master/tasks/main.yml | 38 +-
 ansible/roles/ytdlp-worker/tasks/main.yml | 48 +-
 ansible/scripts/verify_camoufox_services.py | 242 ++
 ansible/tasks/docker_health_check.yml | 35 +
 ansible/tasks/verify_camoufox.yml | 38 +
 ansible/templates/.env.j2 | 9 +-
 ansible/templates/shadowsocks-compose.yml.j2 | 21 +
 cluster.green.yml | 105 +
 cluster.test.yml | 101 +
 cluster.yml | 8 -
 get_info_json_client.py | 150 +
 proxy_manager_client.py | 192 +
 thrift_exceptions_patch.py | 58 +
 thrift_model/data/common.thrift | 131 -
 thrift_model/data/exceptions.thrift | 14 -
 thrift_model/services/base_service.thrift | 19 -
 thrift_model/services/yt_admin_ops.thrift | 63 -
 thrift_model/services/yt_management.thrift | 27 -
 thrift_model/services/yt_tokens_ops.thrift | 40 -
 tools/create-deployment-bundle.sh | 99 +
 tools/generate-inventory.py | 67 +-
 tools/host_vars/dl-master.yml | 19 -
 tools/host_vars/dl-worker-001.yml | 26 -
 tools/sync-to-tower.sh | 63 +
 yt_ops_services/__init__.py | 3 +
 yt_ops_services/client_utils.py | 36 +
 yt_ops_services/version.py | 9 +
 144 files changed, 5868 insertions(+), 15999 deletions(-)
 create mode 100644 .gitignore
 create mode 100644 .vault_pass
 create mode 100644 README.md
 delete mode 100644 airflow/README-proxy.RU.md
 delete mode 100644 airflow/bgutil-diff.txt
 create mode 100644 airflow/config/.DS_Store
 create mode 100644 airflow/config/airflow.cfg
 create mode 100644 airflow/configs/.env.master
 create mode 100644 airflow/configs/.env.worker
 create mode 100644 airflow/dags/.DS_Store
 create mode 100644 airflow/dags/README.ru.md
 delete mode 100644 airflow/dags/__pycache__/ytdlp_service_deploy.cpython-312.pyc
 delete mode 100644 airflow/dags/__pycache__/ytdlp_service_test.cpython-312.pyc
 delete mode 100644 airflow/dags/__pycache__/ytdlp_token_dag.cpython-312.pyc
 delete mode 100644 airflow/requirements.txt
 delete mode 100644 airflow/roles/airflow-worker/tasks/main.yml
 delete mode 100644 airflow/scripts/minio-init.sh
 delete mode 100644 airflow/ytdlp-ops-auth/__pycache__/thrift_exceptions_patch.cpython-311.pyc
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py
 delete mode 100644 airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py
 delete mode 100644 airflow/ytdlp-ops-auth/dist/yt_ops_services-1.6.2.dev0-py3.11.egg
 delete mode 120000 airflow/ytdlp-ops-auth/pangramia
 delete mode 100644 airflow/ytdlp-ops-auth/requirements.txt
 delete mode 100644 airflow/ytdlp-ops-auth/setup.py
 delete mode 100644 airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO
 delete mode 100644 airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt
 delete mode 100644 airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt
 delete mode 100644 airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt
 delete mode 100644 airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt
 delete mode 100644 airflow/ytdlp-ops-auth/ytdlp_ops_client.log
 delete mode 100644 airflow/ytdlp-ops-auth/ytdlp_ops_client.py
 delete mode 100644 airflow/ytdlp-ops-auth/ytdlp_utils.py
 create mode 100644 ansible/.gitignore
 create mode 100644 ansible/.vault_pass
 create mode 100644 ansible/MIGRATION.md
 create mode 100644 ansible/configs/etc/ssh/sshd_config
 create mode 100644 ansible/configs/etc/sysctl.d/99-system-limits.conf
 delete mode 100644 ansible/group_vars/all.yml
 delete mode 100644 ansible/host_vars/af-green.yml
 create mode 100644 ansible/host_vars/af-test.yml
 create mode 100644 ansible/host_vars/dl002.yml
 delete mode 100644 ansible/host_vars/dl003.yml
 create mode 100644 ansible/playbook-depricated.dl.yml
 create mode 100644 ansible/playbook-full-with-proxies.yml
 create mode 100644 ansible/playbook-proxies.yml
 create mode 100644 ansible/roles/fail2ban/handlers/main.yml
 create mode 100644 ansible/roles/fail2ban/tasks/main.yml
 create mode 100644 ansible/roles/fail2ban/templates/jail.local.j2
 create mode 100644 ansible/roles/shadowsocks-deploy/tasks/main.yml
 create mode 100644 ansible/roles/shadowsocks-deploy/templates/config.json.j2
 create mode 100644 ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2
 create mode 100644 ansible/scripts/verify_camoufox_services.py
 create mode 100644 ansible/tasks/docker_health_check.yml
 create mode 100644 ansible/tasks/verify_camoufox.yml
 create mode 100644 ansible/templates/shadowsocks-compose.yml.j2
 create mode 100644 cluster.green.yml
 create mode 100644 cluster.test.yml
 delete mode 100644 cluster.yml
 create mode 100644 get_info_json_client.py
 create mode 100644 proxy_manager_client.py
 create mode 100644 thrift_exceptions_patch.py
 delete mode 100644 thrift_model/data/common.thrift
 delete mode 100644 thrift_model/data/exceptions.thrift
 delete mode 100644 thrift_model/services/base_service.thrift
 delete mode 100644 thrift_model/services/yt_admin_ops.thrift
 delete mode 100644 thrift_model/services/yt_management.thrift
 delete mode 100644 thrift_model/services/yt_tokens_ops.thrift
 create mode 100755 tools/create-deployment-bundle.sh
 delete mode 100644 tools/host_vars/dl-master.yml
 delete mode 100644 tools/host_vars/dl-worker-001.yml
 create mode 100755 tools/sync-to-tower.sh
 create mode 100644 yt_ops_services/__init__.py
 create mode 100644 yt_ops_services/client_utils.py
 create mode 100644 yt_ops_services/version.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..fda177f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+.aider*
+*/.DS_Store
diff --git a/.vault_pass b/.vault_pass
new file mode 100644
index 0000000..89a0cd2
--- /dev/null
+++ b/.vault_pass
@@ -0,0 +1 @@
+ytdlp-ops
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..22f80b4
--- /dev/null
+++ b/README.md
@@ -0,0 +1,79 @@
+# Airflow/YT-DLP Cluster Deployment
+
+This repository contains Ansible playbooks and configuration files for deploying a distributed Airflow cluster with YT-DLP services.
+
+## Prerequisites
+
+1. Install Ansible on your control machine
+2. Ensure SSH access to all target nodes
+3. Put your vault password in the `.vault_pass` file
+
+## Initial Setup
+
+Generate the inventory and configuration files from your cluster definition:
+
+```bash
+./tools/generate-inventory.py cluster.test.yml
+cd ansible
+```
+
+## Full Deployment
+
+### Deploy the entire cluster with proxies (recommended for new setups):
+
+```bash
+ansible-playbook playbook-full-with-proxies.yml
+```
+
+### Deploy the cluster without proxies:
+
+```bash
+ansible-playbook playbook-full.yml
+```
+
+## Targeted Deployments
+
+### Deploy only to the master node:
+
+```bash
+ansible-playbook playbook-master.yml --limit="af-test"
+```
+
+### Deploy only to worker nodes:
+
+```bash
+ansible-playbook playbook-worker.yml
+```
+
+## Deploy Specific Steps
+
+To start at a specific task (useful for debugging or partial deployments):
+
+```bash
+ansible-playbook playbook-master.yml --limit="af-test" --start-at-task="Prepare Caddy asset extraction directory"
+```
+
+## Debug Deployments
+
+Run with dry-run and verbose output for debugging:
+
+```bash
+ansible-playbook playbook-full.yml --check --diff -vv
+```
+
+## DAGs-Only Deployment
+
+To update only DAG files and configurations:
+
+```bash
+ansible-playbook playbook-dags.yml
+```
+
+## Vault Management
+
+All sensitive data is encrypted with Ansible Vault. The vault password should be stored in the `.vault_pass` file in the project root.
+
+To edit vault files:
+
+```bash
+ansible-vault edit group_vars/all/vault.yml
+```
diff --git a/airflow/.dockerignore b/airflow/.dockerignore
index 312f1f7..6037927 100644
--- a/airflow/.dockerignore
+++ b/airflow/.dockerignore
@@ -1,5 +1,6 @@
 redis-data
 minio-data
+postgres-data
 logs
 downloadfiles
 addfiles
diff --git a/airflow/Dockerfile b/airflow/Dockerfile
index 8ea5aec..9a48fc6 100644
--- a/airflow/Dockerfile
+++ b/airflow/Dockerfile
@@ -58,7 +58,10 @@ RUN pip install --no-cache-dir \
     apache-airflow-providers-http \
     apache-airflow-providers-amazon \
     "botocore>=1.34.118" \
-    psycopg2-binary "gunicorn==20.1.0"
+    psycopg2-binary \
+    "gunicorn==20.1.0" \
+    "python-ffmpeg==2.0.12" \
+    "ffprobe3"

 # --- Install the custom yt_ops_services package ---
 # Copy all the necessary source code for the package.
diff --git a/airflow/Dockerfile.caddy b/airflow/Dockerfile.caddy
index 83e1076..5d37c33 100644
--- a/airflow/Dockerfile.caddy
+++ b/airflow/Dockerfile.caddy
@@ -1,30 +1,10 @@
-# Stage 1: Extract static assets from the Airflow image
-FROM pangramia/ytdlp-ops-airflow:latest AS asset-extractor
-
-# Switch to root to create and write to the /assets directory
-USER root
-
-# Create a temporary directory for extracted assets
-WORKDIR /assets
-
-# Copy static assets from the Airflow image.
-# This dynamically finds the paths to flask_appbuilder and airflow static assets
-# to be resilient to version changes.
-RUN cp -R $(python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static"))') ./appbuilder && \
-    cp -R $(python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))') ./dist
-
-# Pre-compress the static assets using gzip
-# This improves performance by allowing Caddy to serve compressed files directly.
-RUN find ./appbuilder -type f -print0 | xargs -0 gzip -k -9 && \
-    find ./dist -type f -print0 | xargs -0 gzip -k -9
-
-
-# Stage 2: Build the final Caddy image
+# Build the final Caddy image
 FROM caddy:2-alpine

-# Copy the pre-compressed static assets from the first stage
-COPY --from=asset-extractor /assets/appbuilder /usr/share/caddy/static/appbuilder
-COPY --from=asset-extractor /assets/dist /usr/share/caddy/static/dist
+# Copy the pre-compressed static assets from the build context.
+# These assets are extracted from the main Airflow image by the Ansible playbook.
+COPY caddy_build_assets/appbuilder /usr/share/caddy/static/appbuilder
+COPY caddy_build_assets/dist /usr/share/caddy/static/dist

 # Copy the Caddyfile configuration. The build context is the project root,
 # so the path is relative to that.
diff --git a/airflow/README-proxy.RU.md b/airflow/README-proxy.RU.md
deleted file mode 100644
index e7ed166..0000000
--- a/airflow/README-proxy.RU.md
+++ /dev/null
@@ -1,249 +0,0 @@
-# Proxy and Account Management Strategy
-
-This document describes the intelligent resource-management strategy (for proxies and accounts) used by `ytdlp-ops-server`. The goal of this system is to maximize the success rate of operations, minimize bans, and provide fault tolerance.
-
-The server can run in different roles to support a distributed architecture, separating management tasks from token-generation tasks.
-
----
-
-## Service Roles and Architecture
-
-The server is designed to run in one of three roles, selected with the `--service-role` flag:
-
-- **`management`**: A single lightweight service instance responsible for all management API calls.
-  - **Purpose**: Provides a centralized entry point for monitoring and managing the state of all proxies and accounts in the system.
-  - **Behavior**: Exposes only the management functions (`getProxyStatus`, `banAccount`, etc.). Calls to token-generation functions will fail.
-  - **Deployment**: Runs as a single container (`ytdlp-ops-management`) and exposes its port directly on the host (e.g., port `9091`), bypassing Envoy.
-
-- **`worker`**: The main workhorse for generating tokens and `info.json`.
-  - **Purpose**: Handles all token-generation requests.
-  - **Behavior**: Implements the full API, but its management functions are scoped to its own `server_identity`.
-  - **Deployment**: Runs as a scalable service (`ytdlp-ops-worker`) behind an Envoy load balancer (e.g., port `9080`).
-
-- **`all-in-one`** (default): A single instance that performs both management and worker functions. Ideal for local development or small deployments.
-
-This architecture makes it possible to build a reliable, federated system in which workers manage their resources locally while a central service provides a global view for management and monitoring.
-
----
-
-## 1. Account Lifecycle Management (Cooldown / Resting)
-
-**Goal:** Prevent accounts from being overused and subsequently banned by giving them "rest" periods after stretches of intensive work.
-
-### How it works:
-The account lifecycle consists of three states:
-- **`ACTIVE`**: The account is active and used for tasks. Its activity timer starts on the first successful use.
-- **`RESTING`**: If an account has been `ACTIVE` for longer than the configured limit, the `AccountManager` automatically moves it into the "resting" state. While resting, the Airflow worker will not pick it for new tasks.
-- **Return to `ACTIVE`**: Once the rest period is over, the `AccountManager` automatically returns the account to `ACTIVE`, making it available again.
-
-### Configuration:
-These parameters are set when `ytdlp-ops-server` is started.
-
-- `--account-active-duration-min`: The "work time" in **minutes** an account may stay continuously active before moving to `RESTING`.
-  - **Default:** `30` (minutes).
-- `--account-cooldown-duration-min`: The "rest time" in **minutes** an account must spend in the `RESTING` state.
-  - **Default:** `60` (minutes).
-
-**Where to configure:**
-The parameters are passed as command-line arguments when the server starts. With Docker Compose this is done in `airflow/docker-compose-ytdlp-ops.yaml`:
-```yaml
-    command:
-      # ... other parameters
-      - "--account-active-duration-min"
-      - "${ACCOUNT_ACTIVE_DURATION_MIN:-30}"
-      - "--account-cooldown-duration-min"
-      - "${ACCOUNT_COOLDOWN_DURATION_MIN:-60}"
-```
-You can change the defaults by setting the `ACCOUNT_ACTIVE_DURATION_MIN` and `ACCOUNT_COOLDOWN_DURATION_MIN` environment variables in your `.env` file.
-
-**Related files:**
-- `server_fix/account_manager.py`: Contains the core state-switching logic.
-- `ytdlp_ops_server_fix.py`: Parses the command-line arguments.
-- `airflow/docker-compose-ytdlp-ops.yaml`: Passes the arguments into the server container.
-
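For illustration, a minimal in-memory sketch of the `ACTIVE` -> `RESTING` -> `ACTIVE` cycle described above. The class and attribute names are hypothetical; the real logic lives in `server_fix/account_manager.py` and persists state in Redis.

```python
import time

# Hypothetical constants mirroring the CLI defaults described above.
ACTIVE_DURATION_S = 30 * 60    # --account-active-duration-min
COOLDOWN_DURATION_S = 60 * 60  # --account-cooldown-duration-min


class AccountState:
    """Tracks one account's ACTIVE/RESTING cycle (in-memory sketch)."""

    def __init__(self) -> None:
        self.status = "ACTIVE"
        self.active_since: float | None = None   # set on first successful use
        self.resting_since: float | None = None

    def mark_success(self) -> None:
        # The activity timer starts on the first successful use.
        if self.status == "ACTIVE" and self.active_since is None:
            self.active_since = time.time()

    def refresh(self) -> str:
        now = time.time()
        if (self.status == "ACTIVE" and self.active_since is not None
                and now - self.active_since > ACTIVE_DURATION_S):
            self.status, self.resting_since = "RESTING", now
        elif (self.status == "RESTING"
                and now - self.resting_since > COOLDOWN_DURATION_S):
            self.status, self.active_since = "ACTIVE", None
        return self.status
```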
----
-
-## 2. Smart Ban Strategy
-
-**Goal:** Avoid banning good proxies without cause. The problem is often the account, not the proxy it works through.
-
-### How it works:
-
-#### Stage 1: Ban the Account First
-- When a serious, ban-worthy error occurs (e.g., `BOT_DETECTED` or `SOCKS5_CONNECTION_FAILED`), the system penalizes **only the account** that triggered the error.
-- For the proxy, the error is merely recorded as a single failure; the proxy itself is **not banned** and stays in rotation.
-
-#### Stage 2: Ban the Proxy via a Sliding Window
-- A proxy is banned automatically only when it shows **systematic failures with DIFFERENT accounts** within a short period of time.
-- That pattern is a reliable indicator that the proxy itself is the problem. The server-side `ProxyManager` tracks this and automatically bans such a proxy.
-
-### Configuration:
-These parameters are **hard-coded** as constants; changing them requires editing the file.
-
-**Where to configure:**
-- **File:** `server_fix/proxy_manager.py`
-- **Constants** in the `ProxyManager` class:
-  - `FAILURE_WINDOW_SECONDS`: The time window, in seconds, over which failures are analyzed.
-    - **Default:** `3600` (1 hour).
-  - `FAILURE_THRESHOLD_COUNT`: The minimum total number of failures before the check is triggered.
-    - **Default:** `3`.
-  - `FAILURE_THRESHOLD_UNIQUE_ACCOUNTS`: The minimum number of **unique accounts** that must have failed on a proxy for it to be banned.
-    - **Default:** `3`.
-
-**Related files:**
-- `server_fix/proxy_manager.py`: Contains the sliding-window logic and the constants.
-- `airflow/dags/ytdlp_ops_worker_per_url.py`: The `handle_bannable_error_callable` function implements the "account-only" ban policy.
-
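A minimal sketch of the sliding-window check described above, using the constants just listed. The storage layout is hypothetical (in-memory here); the real `ProxyManager` keeps its records server-side.

```python
import time
from collections import defaultdict

FAILURE_WINDOW_SECONDS = 3600          # analysis window (1 hour)
FAILURE_THRESHOLD_COUNT = 3            # minimum total failures
FAILURE_THRESHOLD_UNIQUE_ACCOUNTS = 3  # minimum distinct failing accounts

# proxy_url -> list of (timestamp, account_id) failure records
_failures: dict[str, list[tuple[float, str]]] = defaultdict(list)


def record_failure(proxy_url: str, account_id: str) -> None:
    _failures[proxy_url].append((time.time(), account_id))


def should_ban_proxy(proxy_url: str) -> bool:
    """Ban only if many recent failures came from many DIFFERENT accounts."""
    cutoff = time.time() - FAILURE_WINDOW_SECONDS
    recent = [(ts, acc) for ts, acc in _failures[proxy_url] if ts >= cutoff]
    _failures[proxy_url] = recent  # drop records outside the window
    unique_accounts = {acc for _, acc in recent}
    return (len(recent) >= FAILURE_THRESHOLD_COUNT
            and len(unique_accounts) >= FAILURE_THRESHOLD_UNIQUE_ACCOUNTS)
```

A proxy that fails three times with one stuck account is left alone; three failures across three accounts within the hour bans it.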
----
-
-### Account Status Reference
-
-You can inspect the status of all accounts with the `ytdlp_mgmt_proxy_account` DAG. The statuses mean the following:
-
-- **`ACTIVE`**: The account is healthy and available for use. By default, an account is considered `ACTIVE` when no explicit status is set for it.
-- **`BANNED`**: The account is temporarily disabled because of repeated failures (e.g., `BOT_DETECTED` errors) or was banned manually. The status shows the time remaining until it automatically returns to `ACTIVE` (e.g., `BANNED (active in 55m)`).
-- **`RESTING`**: The account was in use for a long stretch and is in a mandatory "rest" period to prevent burnout. The status shows the time remaining until it returns to `ACTIVE` (e.g., `RESTING (active in 25m)`).
-- **(Empty status)**: In older versions, an account that had only failures (and no successes) could show an empty status. This has been fixed; such accounts are now correctly reported as `ACTIVE`.
-
----
-
-## 3. The End-to-End Rotation Flow: How It All Works Together
-
-This section walks through how a worker obtains an account and a proxy for a single task, tying together all of the management strategies described above.
-
-1. **Worker initialization (`ytdlp_ops_worker_per_url`)**
-   - The DAG run starts, triggered either by the orchestrator or by a previous successful run of itself.
-   - The `pull_url_from_redis` task pulls a URL from the `_inbox` queue in Redis.
-
-2. **Account selection (the Airflow worker)**
-   - The `assign_account` task runs.
-   - It generates the full list of candidate account IDs from the `account_pool` parameter (e.g., `my_prefix_01` through `my_prefix_50`).
-   - It connects to Redis and checks the status of every account on that list.
-   - It builds a new temporary list containing only the accounts that are **not** in the `BANNED` or `RESTING` state.
-   - If the resulting list of active accounts is empty, the worker fails (unless auto-creation is enabled).
-   - It then picks one account from the filtered active list using **`random.choice()`**.
-   - The chosen `account_id` is passed to the next task.
-
-3. **Proxy selection (`ytdlp-ops-server`)**
-   - The `get_token` task runs and sends the randomly chosen `account_id` in a Thrift RPC call to `ytdlp-ops-server`.
-   - On the server, the `ProxyManager` is asked for a proxy.
-   - The `ProxyManager`:
-     a. Refreshes its internal state by loading all proxy statuses from Redis.
-     b. Filters the list down to proxies with the `ACTIVE` status.
-     c. Applies the sliding-window ban policy, potentially banning proxies that have failed too often recently.
-     d. Picks the next available proxy from the active list using a **round-robin** index.
-     e. Returns the chosen `proxy_url`.
-
-4. **Execution and reporting**
-   - The server now has both the `account_id` (from Airflow) and the `proxy_url` (from its `ProxyManager`).
-   - It proceeds with token generation using those resources.
-   - On completion (success or failure), it reports the outcome to Redis, updating the statuses of the specific account and proxy that were used. This feeds their failure counters, rest timers, and so on for the next run.
-
-This separation of responsibilities is key:
-- **The Airflow worker (the `assign_account` task)** is responsible for **randomly selecting an active account**, while preserving "affinity" (reusing the same account after a success).
-- **The `ytdlp-ops-server`** is responsible for the **round-robin selection of an active proxy**.
-
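A sketch of the `assign_account` filtering step described in point 2, assuming the standard redis-py client. The key layout (`account:{id}` hashes with a `status` field) is hypothetical; the DAG's actual bookkeeping may differ.

```python
import random

import redis  # redis-py

r = redis.Redis(host="redis", port=6379, decode_responses=True)


def assign_account(prefix: str = "my_prefix", pool_size: int = 50) -> str:
    """Pick a random account that is neither BANNED nor RESTING (sketch)."""
    candidates = [f"{prefix}_{i:02d}" for i in range(1, pool_size + 1)]
    active = [
        acc for acc in candidates
        # An account with no explicit status is considered ACTIVE.
        if (r.hget(f"account:{acc}", "status") or "ACTIVE")
        not in ("BANNED", "RESTING")
    ]
    if not active:
        # The DAG raises AirflowException here (unless auto-creation is on).
        raise RuntimeError("Active account pool is empty")
    return random.choice(active)
```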
----
-
-## 4. Automatic Account Bans Based on Failure Count
-
-**Goal:** Automatically rotate out accounts that keep producing errors unrelated to bans (e.g., a wrong password or authorization problems).
-
-### How it works:
-- The `AccountManager` tracks the number of **consecutive** failures for each account.
-- A successful operation resets the counter.
-- When the number of consecutive failures reaches the configured threshold, the account is automatically banned for a set duration.
-
-### Configuration:
-These parameters are set in the `AccountManager` constructor.
-
-**Where to configure:**
-- **File:** `server_fix/account_manager.py`
-- **Parameters** of `AccountManager.__init__`:
-  - `failure_threshold`: The number of consecutive failures before a ban.
-    - **Default:** `5`.
-  - `ban_duration_s`: The ban duration in seconds.
-    - **Default:** `3600` (1 hour).
-
----
-
-## 5. Monitoring and Recovery
-
-### How to Check Statuses
-The **`ytdlp_mgmt_proxy_account`** DAG is the primary tool for monitoring the health of your resources. It connects directly to the **management service** to perform actions.
-
-- **DAG ID:** `ytdlp_mgmt_proxy_account`
-- **How to use:** Trigger the DAG from the Airflow UI. Make sure the `management_host` and `management_port` parameters point to your `ytdlp-ops-management` service instance. For a full overview, set the parameters:
-  - `entity`: `all`
-  - `action`: `list`
-- **Result:** The DAG log shows tables with the current status of every account and proxy. Accounts in the `BANNED` or `RESTING` state show the time remaining until they become active again (e.g., `RESTING (active in 45m)`). For proxies, the one that is `(next)` in rotation for each worker is highlighted.
-
-### What Happens if All Accounts Are Banned or Resting?
-If the entire account pool becomes unavailable (`BANNED` or `RESTING`), the system pauses by default.
-- The `ytdlp_ops_worker_per_url` DAG fails with an `AirflowException` at the `assign_account` step, because the pool of active accounts is empty.
-- This stops the processing loops. The system stays paused until accounts are unbanned manually or their ban/rest timers expire. After that, you can restart the processing loops with the `ytdlp_ops_orchestrator` DAG.
-- The `ytdlp_ops_worker_per_url` DAG graph now explicitly shows tasks such as `assign_account`, `get_token`, `ban_account`, `retry_get_token`, etc., which makes the execution flow and the failure points easier to see.
-
-The system can be configured to create new accounts automatically so that processing never stops completely.
-
-#### Automatic Account Creation on Exhaustion
-- **Goal**: Keep the processing pipeline running even when every account in the main pool is temporarily banned or resting.
-- **How it works**: If the `auto_create_new_accounts_on_exhaustion` parameter is `True` and the account pool is defined by a prefix (rather than an explicit list), the system generates a new, unique account ID whenever it finds the active pool empty.
-- **Naming**: New accounts are created as `{prefix}-auto-{unique_id}`.
-- **Configuration**:
-  - **Parameter**: `auto_create_new_accounts_on_exhaustion`
-  - **Where to configure**: In the `ytdlp_ops_orchestrator` DAG configuration at trigger time.
-  - **Default**: `True`.
-
----
-
-## 6. Failure Handling and the Retry Policy
-
-**Goal:** Provide flexible control over how the system behaves when a worker hits a ban-worthy error (e.g., `BOT_DETECTED`).
-
-### How it works
-When a worker's `get_token` task fails with a ban-worthy error, the behavior is governed by the `on_bannable_failure` policy, configurable when `ytdlp_ops_orchestrator` is triggered.
-
-### Configuration
-- **Parameter**: `on_bannable_failure`
-- **Where to configure**: In the `ytdlp_ops_orchestrator` DAG configuration.
-- **Options**:
-  - `stop_loop` (strictest):
-    - The account that was used is banned.
-    - The URL is marked as failed in the `_fail` hash in Redis.
-    - The worker's processing loop **stops**. That processing "lane" goes idle.
-  - `retry_with_new_account` (default, most fault-tolerant):
-    - The account that caused the failure is banned.
-    - The worker immediately retries **the same URL** with a fresh, unused account from the pool.
-    - If the retry succeeds, the worker continues its loop with the next URL.
-    - If the retry fails as well, the second account **and the proxy that was used** are banned too, and the worker's loop stops.
-  - `retry_and_ban_account_only`:
-    - Like `retry_with_new_account`, but on the second failure **only the second account** is banned, not the proxy.
-    - Useful when you trust your proxies but want to churn through failing accounts aggressively.
-  - `retry_without_ban` (most lenient):
-    - The worker retries with a new account, but **neither accounts nor proxies are ever banned**.
-    - Useful for debugging, or when you are confident the failures are transient and not caused by the resources.
-
-This policy lets the system tolerate individual account failures without losing URLs, while giving granular control over when to ban accounts and/or proxies if the problem persists.
-
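A hypothetical helper showing how the `on_bannable_failure` policy could map onto the downstream task IDs that the worker DAG exposes (the task names come from section 7 below; `ban_second_account` is an invented name for the account-only variant).

```python
def next_task_after_bannable_error(policy: str, is_second_failure: bool) -> str:
    """Return the next task_id after a ban-worthy get_token failure (sketch)."""
    if policy == "stop_loop":
        return "stop_loop"  # account banned, URL written to _fail, lane stops
    if not is_second_failure:
        if policy == "retry_without_ban":
            return "retry_get_token"  # retry, ban nothing
        # retry_with_new_account / retry_and_ban_account_only
        return "ban_account_and_prepare_for_retry"
    # Second failure: what gets banned differs by policy.
    if policy == "retry_with_new_account":
        return "ban_second_account_and_proxy"
    if policy == "retry_and_ban_account_only":
        return "ban_second_account"  # hypothetical: ban account, spare proxy
    return "stop_loop"  # retry_without_ban: stop without banning anything
```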
----
-
-## 7. How the Worker DAG Works (`ytdlp_ops_worker_per_url`)
-
-This DAG is the workhorse of the system. It is designed as a self-perpetuating loop that processes one URL per run. The failure-handling and retry logic is now explicitly visible in the DAG's task graph.
-
-### Tasks and their purpose:
-
-- **`pull_url_from_redis`**: Pulls a single URL from the `_inbox` queue in Redis. If the queue is empty, the DAG ends with the `skipped` status, stopping this processing "lane".
-- **`assign_account`**: Selects an account for the task. It supports **account affinity**, reusing the account from the previous successful run in its "lane". On a first run, or after a failure, it picks a random active account.
-- **`get_token`**: The primary attempt to obtain tokens and `info.json` by calling `ytdlp-ops-server`.
-- **`handle_bannable_error_branch`**: A branch task that runs when `get_token` fails. It inspects the error and decides the next step based on the `on_bannable_failure` policy.
-- **`ban_account_and_prepare_for_retry`**: If a retry is allowed, this task bans the failed account and selects a new one.
-- **`retry_get_token`**: The second attempt to obtain a token, using the new account.
-- **`ban_second_account_and_proxy`**: If the retry also fails, this task bans the second account and the proxy that was used.
-- **`download_and_probe`**: If `get_token` or `retry_get_token` succeeds, this task downloads the media with `yt-dlp` and verifies file integrity with `ffmpeg`.
-- **`mark_url_as_success`**: If `download_and_probe` succeeds, this task writes the successful result to the `_result` hash in Redis.
-- **`handle_generic_failure`**: If any task fails with an unrecoverable error, this task writes detailed error information to the `_fail` hash in Redis.
-- **`decide_what_to_do_next`**: The final branch task, which decides whether to continue the loop (`trigger_self_run`), stop it gracefully (`stop_loop`), or mark it failed (`fail_loop`).
-- **`trigger_self_run`**: The task that actually triggers the next DAG run, creating the continuous loop.
-
diff --git a/airflow/bgutil-diff.txt b/airflow/bgutil-diff.txt
deleted file mode 100644
index 6647512..0000000
--- a/airflow/bgutil-diff.txt
+++ /dev/null
@@ -1,407 +0,0 @@
-Diff to getpot_bgutil_http
-
-    def _validate_get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs):
-        if client != 'ios':
-            raise UnsupportedRequest(f'Client {client} is not supported')
-
-        base_url = ydl.get_info_extractor('Youtube')._configuration_arg(
-            'getpot_bgutil_baseurl', ['http://127.0.0.1:4416'], casesense=True)[0]
-
-        # Validate visitor data format for ios client
-        if visitor_data and not visitor_data.startswith('Cg'):
-            raise UnsupportedRequest('Invalid visitor data format for ios client')
-
-        if not data_sync_id and not visitor_data:
-            raise UnsupportedRequest(
-                'One of [data_sync_id, visitor_data] must be passed')
->>>>>>> 559b875 (feat: Add support for pre-provided ios PO tokens and client-specific validation)
-        try:
-            self.logger.trace(
-                f'Checking server availability at {self._base_url}/ping')
-            response = json.load(self._request_webpage(Request(
-                f'{self._base_url}/ping', extensions={'timeout': self._GET_SERVER_VSN_TIMEOUT}, proxies={'all': None}),
-                note=False))
-        except TransportError as e:
-            # the server may be down
-            script_path_provided = self.ie._configuration_arg(
-                ie_key='youtubepot-bgutilscript', key='script_path', default=[None])[0] is not None
-
-            warning_base = f'Error reaching GET {self._base_url}/ping (caused by {e.__class__.__name__}). '
-            if script_path_provided:  # server down is expected, log info
-                self._info_and_raise(
-                    warning_base + 'This is expected if you are using the script method.')
-            else:
-                self._warn_and_raise(
-                    warning_base + f'Please make sure that the server is reachable at {self._base_url}.')
-
-            return
-        except HTTPError as e:
-            # may be an old server, don't raise
-            self.logger.warning(
-                f'HTTP Error reaching GET /ping (caused by {e!r})', once=True)
-            return
-        except json.JSONDecodeError as e:
-            # invalid server
-            self._warn_and_raise(
-                f'Error parsing ping response JSON (caused by {e!r})')
-            return
-        except Exception as e:
-            self._warn_and_raise(
-                f'Unknown error reaching GET /ping (caused by {e!r})', raise_from=e)
-            return
-        else:
-            self._check_version(response.get('version', ''), name='HTTP server')
-            self._server_available = True
-            return True
-        finally:
-            self._last_server_check = time.time()
-
-<<<<<<< HEAD
-    def is_available(self):
-        return self._server_available or self._last_server_check + 60 < int(time.time())
-
-    def _real_request_pot(
-        self,
-        request: PoTokenRequest,
-    ) -> PoTokenResponse:
-        if not self._check_server_availability(request):
-            raise PoTokenProviderRejectedRequest(
-                f'{self.PROVIDER_NAME} server is not available')
-
-        # used for CI check
-        self.logger.trace('Generating POT via HTTP server')
-=======
-    def _validate_get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs):
-        if client != 'ios':
-            raise UnsupportedRequest(f'Client {client} is not supported')
-
-        base_url = ydl.get_info_extractor('Youtube')._configuration_arg(
-            'getpot_bgutil_baseurl', ['http://127.0.0.1:4416'], casesense=True)[0]
-
-        # Validate visitor data format for ios client
-        if visitor_data and not visitor_data.startswith('Cg'):
-            raise UnsupportedRequest('Invalid visitor data format for ios client')
-
-        if not data_sync_id and not visitor_data:
-            raise UnsupportedRequest(
-                'One of [data_sync_id, visitor_data] must be passed')
->>>>>>> 559b875 (feat: Add support for pre-provided ios PO tokens and client-specific validation)
-        try:
-            self.logger.trace(
-                f'Checking server availability at {self._base_url}/ping')
-            response = json.load(self._request_webpage(Request(
-                f'{self._base_url}/ping', extensions={'timeout': self._GET_SERVER_VSN_TIMEOUT}, proxies={'all': None}),
-                note=False))
-        except TransportError as e:
-            # the server may be down
-            script_path_provided = self.ie._configuration_arg(
-                ie_key='youtubepot-bgutilscript', key='script_path', default=[None])[0] is not None
-
-            warning_base = f'Error reaching GET {self._base_url}/ping (caused by {e.__class__.__name__}). '
-            if script_path_provided:  # server down is expected, log info
-                self._info_and_raise(
-                    warning_base + 'This is expected if you are using the script method.')
-            else:
-                self._warn_and_raise(
-                    warning_base + f'Please make sure that the server is reachable at {self._base_url}.')
-
-            return
-        except HTTPError as e:
-            # may be an old server, don't raise
-            self.logger.warning(
-                f'HTTP Error reaching GET /ping (caused by {e!r})', once=True)
-            return
-        except json.JSONDecodeError as e:
-            # invalid server
-            self._warn_and_raise(
-                f'Error parsing ping response JSON (caused by {e!r})')
-            return
-        except Exception as e:
-            self._warn_and_raise(
-                f'Unknown error reaching GET /ping (caused by {e!r})', raise_from=e)
-            return
-        else:
-            self._check_version(response.get('version', ''), name='HTTP server')
-            self._server_available = True
-            return True
-        finally:
-            self._last_server_check = time.time()
-
-<<<<<<< HEAD
-    def is_available(self):
-        return self._server_available or self._last_server_check + 60 < int(time.time())
-
-    def _real_request_pot(
-        self,
-        request: PoTokenRequest,
-    ) -> PoTokenResponse:
-        if not self._check_server_availability(request):
-            raise PoTokenProviderRejectedRequest(
-                f'{self.PROVIDER_NAME} server is not available')
-
-        # used for CI check
-        self.logger.trace('Generating POT via HTTP server')
-=======
-    def _get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs) -> str:
-        # Check if we have a pre-provided token
-        if client == 'ios' and kwargs.get('po_token'):
-            self._logger.info('Using provided ios PO token')
-            return kwargs['po_token']
-
-        self._logger.info(f'Generating POT via HTTP server for {client} client')
-        if ((proxy := select_proxy('https://jnn-pa.googleapis.com', self.proxies))
-                != select_proxy('https://youtube.com', self.proxies)):
-            self._logger.warning(
-                'Proxies for https://youtube.com and https://jnn-pa.googleapis.com are different. '
-                'This is likely to cause subsequent errors.')
->>>>>>> 559b875 (feat: Add support for pre-provided ios PO tokens and client-specific validation)
-
-        try:
-            response = self._request_webpage(
-                request=Request(
-                    f'{self._base_url}/get_pot', data=json.dumps({
-                        'content_binding': get_webpo_content_binding(request)[0],
-                        'proxy': request.request_proxy,
-                        'bypass_cache': request.bypass_cache,
-                        'source_address': request.request_source_address,
-                        'disable_tls_verification': not request.request_verify_tls,
-                    }).encode(), headers={'Content-Type': 'application/json'},
-                    extensions={'timeout': self._GETPOT_TIMEOUT}, proxies={'all': None}),
-                note=f'Generating a {request.context.value} PO Token for '
-                     f'{request.internal_client_name} client via bgutil HTTP server',
-            )
-        except Exception as e:
-            raise PoTokenProviderError(
-                f'Error reaching POST /get_pot (caused by {e!r})') from e
-
-        try:
-            response_json = json.load(response)
-        except Exception as e:
-            raise PoTokenProviderError(
-                f'Error parsing response JSON (caused by {e!r}). response = {response.read().decode()}') from e
-
-        if error_msg := response_json.get('error'):
-            raise PoTokenProviderError(error_msg)
-        if 'poToken' not in response_json:
-            raise PoTokenProviderError(
-                f'Server did not respond with a poToken. Received response: {json.dumps(response_json)}')
-
-        po_token = response_json['poToken']
-        self.logger.trace(f'Generated POT: {po_token}')
-        return PoTokenResponse(po_token=po_token)
-
-
-@register_preference(BgUtilHTTPPTP)
-def bgutil_HTTP_getpot_preference(provider, request):
-    return 100
-
-
-__all__ = [BgUtilHTTPPTP.__name__,
-           bgutil_HTTP_getpot_preference.__name__]
-
-
--------------------------
-Diff to getpot_bgutil_script.py
-
-
-
-from __future__ import annotations
-
-import contextlib
-import functools
-import json
-import os.path
-import re
-import shutil
-import subprocess
-
-from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding
-from yt_dlp.utils import Popen
-
-with contextlib.suppress(ImportError):
-    from yt_dlp_plugins.extractor.getpot_bgutil import BgUtilPTPBase
-
-from yt_dlp.extractor.youtube.pot.provider import (
-    PoTokenProviderError,
-    PoTokenRequest,
-    PoTokenResponse,
-    register_preference,
-    register_provider,
-)
-
-
-@register_provider
-class BgUtilScriptPTP(BgUtilPTPBase):
-    PROVIDER_NAME = 'bgutil:script'
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self._check_script = functools.cache(self._check_script_impl)
-
-    @functools.cached_property
-    def _script_path(self):
-        script_path = self._configuration_arg(
-            'script_path', casesense=True, default=[None])[0]
-
-        if script_path:
-            return os.path.expandvars(script_path)
-
-        # check deprecated arg
-        deprecated_script_path = self.ie._configuration_arg(
-            ie_key='youtube', key='getpot_bgutil_script', default=[None])[0]
-
-        if deprecated_script_path:
-            self._warn_and_raise(
-                "'youtube:getpot_bgutil_script' extractor arg is deprecated, use 'youtubepot-bgutilscript:script_path' instead")
-
-        # default if no arg was passed
-        home = os.path.expanduser('~')
-        default_path = os.path.join(
-            home, 'bgutil-ytdlp-pot-provider', 'server', 'build', 'generate_once.js')
-        self.logger.debug(
-            f'No script path passed, defaulting to {default_path}')
-        return default_path
-
-<<<<<<< HEAD
-    def is_available(self):
-        return self._check_script(self._script_path)
-
-    @functools.cached_property
-    def _node_path(self):
-        node_path = shutil.which('node')
-        if node_path is None:
-            self.logger.trace('node is not in PATH')
-        vsn = self._check_node_version(node_path)
-        if vsn:
-            self.logger.trace(f'Node version: {vsn}')
-            return node_path
-
-    def _check_script_impl(self, script_path):
-=======
-    def _validate_get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs):
-        script_path = ydl.get_info_extractor('Youtube')._configuration_arg(
-            'getpot_bgutil_script', [self._default_script_path], casesense=True)[0]
-
-        # If a specific client is requested, validate it's supported
-        requested_client = ydl.params.get('extractor_args', {}).get('youtube', {}).get('formats')
-        if requested_client and client != requested_client:
-            raise UnsupportedRequest(f'Skipping {client} as {requested_client} was specifically requested')
-
-        if not data_sync_id and not visitor_data:
-            raise UnsupportedRequest(
-                'One of [data_sync_id, visitor_data] must be passed')
->>>>>>> 046a994 (refactor: support client-specific requests via extractor_args in POT providers)
-        if not os.path.isfile(script_path):
-            self.logger.debug(
-                f"Script path doesn't exist: {script_path}")
-            return False
-        if os.path.basename(script_path) != 'generate_once.js':
-            self.logger.warning(
-                'Incorrect script passed to extractor args. Path to generate_once.js required', once=True)
-            return False
-        node_path = self._node_path
-        if not node_path:
-            return False
-        stdout, stderr, returncode = Popen.run(
-            [self._node_path, script_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
-            timeout=self._GET_SERVER_VSN_TIMEOUT)
-        if returncode:
-            self.logger.warning(
-                f'Failed to check script version. '
-                f'Script returned {returncode} exit status. '
-                f'Script stdout: {stdout}; Script stderr: {stderr}',
-                once=True)
-            return False
-        else:
-            self._check_version(stdout.strip(), name='script')
-            return True
-
-    def _check_node_version(self, node_path):
-        try:
-            stdout, stderr, returncode = Popen.run(
-                [node_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
-                timeout=self._GET_SERVER_VSN_TIMEOUT)
-            stdout = stdout.strip()
-            mobj = re.match(r'v(\d+)\.(\d+)\.(\d+)', stdout)
-            if returncode or not mobj:
-                raise ValueError
-            node_vsn = tuple(map(int, mobj.groups()))
-            if node_vsn >= self._MIN_NODE_VSN:
-                return node_vsn
-            raise RuntimeError
-        except RuntimeError:
-            min_vsn_str = 'v' + '.'.join(str(v) for v in self._MIN_NODE_VSN)
-            self.logger.warning(
-                f'Node version too low. '
-                f'(got {stdout}, but at least {min_vsn_str} is required)')
-        except (subprocess.TimeoutExpired, ValueError):
-            self.logger.warning(
-                f'Failed to check node version. '
-                f'Node returned {returncode} exit status. '
-                f'Node stdout: {stdout}; Node stderr: {stderr}')
-
-    def _real_request_pot(
-        self,
-        request: PoTokenRequest,
-    ) -> PoTokenResponse:
-        # used for CI check
-        self.logger.trace(
-            f'Generating POT via script: {self._script_path}')
-
-        command_args = [self._node_path, self._script_path]
-        if proxy := request.request_proxy:
-            command_args.extend(['-p', proxy])
-        command_args.extend(['-c', get_webpo_content_binding(request)[0]])
-        if request.bypass_cache:
-            command_args.append('--bypass-cache')
-        if request.request_source_address:
-            command_args.extend(
-                ['--source-address', request.request_source_address])
-        if request.request_verify_tls is False:
-            command_args.append('--disable-tls-verification')
-
-        self.logger.info(
-            f'Generating a {request.context.value} PO Token for '
-            f'{request.internal_client_name} client via bgutil script',
-        )
-        self.logger.debug(
-            f'Executing command to get POT via script: {" ".join(command_args)}')
-
-        try:
-            stdout, stderr, returncode = Popen.run(
-                command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
-                timeout=self._GETPOT_TIMEOUT)
-        except subprocess.TimeoutExpired as e:
-            raise PoTokenProviderError(
-                f'_get_pot_via_script failed: Timeout expired when trying to run script (caused by {e!r})')
-        except Exception as e:
-            raise PoTokenProviderError(
-                f'_get_pot_via_script failed: Unable to run script (caused by {e!r})') from e
-
-        msg = f'stdout:\n{stdout.strip()}'
-        if stderr.strip():  # Empty strings are falsy
-            msg += f'\nstderr:\n{stderr.strip()}'
-        self.logger.trace(msg)
-        if returncode:
-            raise PoTokenProviderError(
-                f'_get_pot_via_script failed with returncode {returncode}')
-
-        try:
-            # The JSON response is always the last line
-            script_data_resp = json.loads(stdout.splitlines()[-1])
-        except json.JSONDecodeError as e:
-            raise PoTokenProviderError(
-                f'Error parsing JSON response from _get_pot_via_script (caused by {e!r})') from e
-        if 'poToken' not in script_data_resp:
-            raise PoTokenProviderError(
-                'The script did not respond with a po_token')
-        return PoTokenResponse(po_token=script_data_resp['poToken'])
-
-
-@register_preference(BgUtilScriptPTP)
-def bgutil_script_getpot_preference(provider, request):
-    return 1
-
-
-__all__ = [BgUtilScriptPTP.__name__,
-           bgutil_script_getpot_preference.__name__]
diff --git a/airflow/camoufox/Dockerfile b/airflow/camoufox/Dockerfile
index f65f13c..207faca 100644
--- a/airflow/camoufox/Dockerfile
+++ b/airflow/camoufox/Dockerfile
@@ -28,6 +28,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     libnss3 libnspr4 libdbus-1-3 libatk1.0-0 libatk-bridge2.0-0 libcups2 libdrm2 libxkbcommon0 libxcomposite1 libxdamage1 libxfixes3 libxrandr2 libgbm1 libpango-1.0-0 libcairo2 libasound2 \
     libgtk-3-0 libx11-xcb1 fonts-liberation tzdata \
     xauth util-linux x11-xserver-utils \
+    curl \
     && \
     # Configure timezone
     ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \
diff --git a/airflow/camoufox/camoufox_server.py b/airflow/camoufox/camoufox_server.py
index 1fe2528..c4c9b4d 100644
--- a/airflow/camoufox/camoufox_server.py
+++ b/airflow/camoufox/camoufox_server.py
@@ -80,7 +80,7 @@ def monitor_resources(server_ports, proxy_url):
         process_cpu = current_process.cpu_percent()

         # Update active connections using psutil
-        all_connections = current_process.net_connections(kind='inet')
+        all_connections = psutil.net_connections(kind='inet')
         new_active_connections = defaultdict(int)
         for conn in all_connections:
            if conn.status == psutil.CONN_ESTABLISHED and conn.laddr.port in server_ports:
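The camoufox_server.py change above swaps per-process connection enumeration for a system-wide one, so connections held by child browser processes are also counted. A minimal sketch of the difference, using the public psutil API (port numbers are hypothetical; on some platforms system-wide enumeration requires elevated privileges, and `Process.net_connections()` was named `Process.connections()` before psutil 6):

```python
import psutil

SERVER_PORTS = {9222, 9223}  # hypothetical Camoufox listener ports

# Process.net_connections() only sees sockets owned by this process;
# psutil.net_connections() enumerates sockets system-wide, which also
# covers connections held by child/worker processes.
proc_conns = psutil.Process().net_connections(kind="inet")
sys_conns = psutil.net_connections(kind="inet")

established = [
    c for c in sys_conns
    if c.status == psutil.CONN_ESTABLISHED and c.laddr.port in SERVER_PORTS
]
print(f"process-local: {len(proc_conns)}, system-wide established: {len(established)}")
```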
+# +# Variable: AIRFLOW__CORE__AUTH_MANAGER +# +auth_manager = airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager + +# This defines the maximum number of task instances that can run concurrently per scheduler in +# Airflow, regardless of the worker count. Generally this value, multiplied by the number of +# schedulers in your cluster, is the maximum number of task instances with the running +# state in the metadata database. Setting this value to zero allows unlimited parallelism. +# +# Variable: AIRFLOW__CORE__PARALLELISM +# +parallelism = 32 + +# The maximum number of task instances allowed to run concurrently in each DAG. To calculate +# the number of tasks that is running concurrently for a DAG, add up the number of running +# tasks for all DAG runs of the DAG. This is configurable at the DAG level with ``max_active_tasks``, +# which is defaulted as ``[core] max_active_tasks_per_dag``. +# +# An example scenario when this would be useful is when you want to stop a new dag with an early +# start date from stealing all the executor slots in a cluster. +# +# Variable: AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG +# +max_active_tasks_per_dag = 16 + +# Are DAGs paused by default at creation +# +# Variable: AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION +# +dags_are_paused_at_creation = True + +# The maximum number of active DAG runs per DAG. The scheduler will not create more DAG runs +# if it reaches the limit. This is configurable at the DAG level with ``max_active_runs``, +# which is defaulted as ``[core] max_active_runs_per_dag``. +# +# Variable: AIRFLOW__CORE__MAX_ACTIVE_RUNS_PER_DAG +# +max_active_runs_per_dag = 16 + +# (experimental) The maximum number of consecutive DAG failures before DAG is automatically paused. +# This is also configurable per DAG level with ``max_consecutive_failed_dag_runs``, +# which is defaulted as ``[core] max_consecutive_failed_dag_runs_per_dag``. +# If not specified, then the value is considered as 0, +# meaning that the dags are never paused out by default. +# +# Variable: AIRFLOW__CORE__MAX_CONSECUTIVE_FAILED_DAG_RUNS_PER_DAG +# +max_consecutive_failed_dag_runs_per_dag = 0 + +# The name of the method used in order to start Python processes via the multiprocessing module. +# This corresponds directly with the options available in the Python docs: +# `multiprocessing.set_start_method +# `__ +# must be one of the values returned by `multiprocessing.get_all_start_methods() +# `__. +# +# Example: mp_start_method = fork +# +# Variable: AIRFLOW__CORE__MP_START_METHOD +# +# mp_start_method = + +# Whether to load the DAG examples that ship with Airflow. 
It's good to +# get started, but you probably want to set this to ``False`` in a production +# environment +# +# Variable: AIRFLOW__CORE__LOAD_EXAMPLES +# +load_examples = False + +# Path to the folder containing Airflow plugins +# +# Variable: AIRFLOW__CORE__PLUGINS_FOLDER +# +plugins_folder = /opt/airflow/plugins + +# Should tasks be executed via forking of the parent process +# +# * ``False``: Execute via forking of the parent process +# * ``True``: Spawning a new python process, slower than fork, but means plugin changes picked +# up by tasks straight away +# +# Variable: AIRFLOW__CORE__EXECUTE_TASKS_NEW_PYTHON_INTERPRETER +# +execute_tasks_new_python_interpreter = False + +# Secret key to save connection passwords in the db +# +# Variable: AIRFLOW__CORE__FERNET_KEY +# +fernet_key = + +# Whether to disable pickling dags +# +# Variable: AIRFLOW__CORE__DONOT_PICKLE +# +donot_pickle = True + +# How long before timing out a python file import +# +# Variable: AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT +# +dagbag_import_timeout = 30.0 + +# Should a traceback be shown in the UI for dagbag import errors, +# instead of just the exception message +# +# Variable: AIRFLOW__CORE__DAGBAG_IMPORT_ERROR_TRACEBACKS +# +dagbag_import_error_tracebacks = True + +# If tracebacks are shown, how many entries from the traceback should be shown +# +# Variable: AIRFLOW__CORE__DAGBAG_IMPORT_ERROR_TRACEBACK_DEPTH +# +dagbag_import_error_traceback_depth = 2 + +# How long before timing out a DagFileProcessor, which processes a dag file +# +# Variable: AIRFLOW__CORE__DAG_FILE_PROCESSOR_TIMEOUT +# +dag_file_processor_timeout = 50 + +# The class to use for running task instances in a subprocess. +# Choices include StandardTaskRunner, CgroupTaskRunner or the full import path to the class +# when using a custom task runner. +# +# Variable: AIRFLOW__CORE__TASK_RUNNER +# +task_runner = StandardTaskRunner + +# If set, tasks without a ``run_as_user`` argument will be run with this user +# Can be used to de-elevate a sudo user running Airflow when executing tasks +# +# Variable: AIRFLOW__CORE__DEFAULT_IMPERSONATION +# +default_impersonation = + +# What security module to use (for example kerberos) +# +# Variable: AIRFLOW__CORE__SECURITY +# +security = + +# Turn unit test mode on (overwrites many configuration options with test +# values at runtime) +# +# Variable: AIRFLOW__CORE__UNIT_TEST_MODE +# +unit_test_mode = False + +# Whether to enable pickling for xcom (note that this is insecure and allows for +# RCE exploits). +# +# Variable: AIRFLOW__CORE__ENABLE_XCOM_PICKLING +# +enable_xcom_pickling = False + +# What classes can be imported during deserialization. This is a multi line value. +# The individual items will be parsed as a pattern to a glob function. +# Python built-in classes (like dict) are always allowed. +# +# Variable: AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES +# +allowed_deserialization_classes = airflow.* + +# What classes can be imported during deserialization. This is a multi line value. +# The individual items will be parsed as regexp patterns. +# This is a secondary option to ``[core] allowed_deserialization_classes``. 
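+#
+# For illustration: the glob form above accepts space-separated patterns, so
+# ``allowed_deserialization_classes = airflow.* my_company.*`` would allow
+# ``airflow.models.param.Param`` as well as anything under a hypothetical ``my_company``
+# package, while an equivalent regexp entry here could be ``^(airflow|my_company)\.``.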
+# +# Variable: AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES_REGEXP +# +allowed_deserialization_classes_regexp = + +# When a task is killed forcefully, this is the amount of time in seconds that +# it has to cleanup after it is sent a SIGTERM, before it is SIGKILLED +# +# Variable: AIRFLOW__CORE__KILLED_TASK_CLEANUP_TIME +# +killed_task_cleanup_time = 60 + +# Whether to override params with dag_run.conf. If you pass some key-value pairs +# through ``airflow dags backfill -c`` or +# ``airflow dags trigger -c``, the key-value pairs will override the existing ones in params. +# +# Variable: AIRFLOW__CORE__DAG_RUN_CONF_OVERRIDES_PARAMS +# +dag_run_conf_overrides_params = True + +# If enabled, Airflow will only scan files containing both ``DAG`` and ``airflow`` (case-insensitive). +# +# Variable: AIRFLOW__CORE__DAG_DISCOVERY_SAFE_MODE +# +dag_discovery_safe_mode = True + +# The pattern syntax used in the +# `.airflowignore +# `__ +# files in the DAG directories. Valid values are ``regexp`` or ``glob``. +# +# Variable: AIRFLOW__CORE__DAG_IGNORE_FILE_SYNTAX +# +dag_ignore_file_syntax = regexp + +# The number of retries each task is going to have by default. Can be overridden at dag or task level. +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_RETRIES +# +default_task_retries = 3 + +# The number of seconds each task is going to wait by default between retries. Can be overridden at +# dag or task level. +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_RETRY_DELAY +# +default_task_retry_delay = 300 + +# The maximum delay (in seconds) each task is going to wait by default between retries. +# This is a global setting and cannot be overridden at task or DAG level. +# +# Variable: AIRFLOW__CORE__MAX_TASK_RETRY_DELAY +# +max_task_retry_delay = 86400 + +# The weighting method used for the effective total priority weight of the task +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_WEIGHT_RULE +# +default_task_weight_rule = downstream + +# Maximum possible time (in seconds) that task will have for execution of auxiliary processes +# (like listeners, mini scheduler...) after task is marked as success.. +# +# Variable: AIRFLOW__CORE__TASK_SUCCESS_OVERTIME +# +task_success_overtime = 20 + +# The default task execution_timeout value for the operators. Expected an integer value to +# be passed into timedelta as seconds. If not specified, then the value is considered as None, +# meaning that the operators are never timed out by default. +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_EXECUTION_TIMEOUT +# +default_task_execution_timeout = 3600 + +# Updating serialized DAG can not be faster than a minimum interval to reduce database write rate. +# +# Variable: AIRFLOW__CORE__MIN_SERIALIZED_DAG_UPDATE_INTERVAL +# +min_serialized_dag_update_interval = 30 + +# If ``True``, serialized DAGs are compressed before writing to DB. +# +# .. note:: +# +# This will disable the DAG dependencies view +# +# Variable: AIRFLOW__CORE__COMPRESS_SERIALIZED_DAGS +# +compress_serialized_dags = False + +# Fetching serialized DAG can not be faster than a minimum interval to reduce database +# read rate. This config controls when your DAGs are updated in the Webserver +# +# Variable: AIRFLOW__CORE__MIN_SERIALIZED_DAG_FETCH_INTERVAL +# +min_serialized_dag_fetch_interval = 10 + +# Maximum number of Rendered Task Instance Fields (Template Fields) per task to store +# in the Database. +# All the template_fields for each of Task Instance are stored in the Database. 
+# Keeping this number small may cause an error when you try to view ``Rendered`` tab in +# TaskInstance view for older tasks. +# +# Variable: AIRFLOW__CORE__MAX_NUM_RENDERED_TI_FIELDS_PER_TASK +# +max_num_rendered_ti_fields_per_task = 30 + +# On each dagrun check against defined SLAs +# +# Variable: AIRFLOW__CORE__CHECK_SLAS +# +check_slas = True + +# Path to custom XCom class that will be used to store and resolve operators results +# +# Example: xcom_backend = path.to.CustomXCom +# +# Variable: AIRFLOW__CORE__XCOM_BACKEND +# +xcom_backend = airflow.models.xcom.BaseXCom + +# By default Airflow plugins are lazily-loaded (only loaded when required). Set it to ``False``, +# if you want to load plugins whenever 'airflow' is invoked via cli or loaded from module. +# +# Variable: AIRFLOW__CORE__LAZY_LOAD_PLUGINS +# +lazy_load_plugins = True + +# By default Airflow providers are lazily-discovered (discovery and imports happen only when required). +# Set it to ``False``, if you want to discover providers whenever 'airflow' is invoked via cli or +# loaded from module. +# +# Variable: AIRFLOW__CORE__LAZY_DISCOVER_PROVIDERS +# +lazy_discover_providers = True + +# Hide sensitive **Variables** or **Connection extra json keys** from UI +# and task logs when set to ``True`` +# +# .. note:: +# +# Connection passwords are always hidden in logs +# +# Variable: AIRFLOW__CORE__HIDE_SENSITIVE_VAR_CONN_FIELDS +# +hide_sensitive_var_conn_fields = False + +# A comma-separated list of extra sensitive keywords to look for in variables names or connection's +# extra JSON. +# +# Variable: AIRFLOW__CORE__SENSITIVE_VAR_CONN_NAMES +# +sensitive_var_conn_names = + +# Task Slot counts for ``default_pool``. This setting would not have any effect in an existing +# deployment where the ``default_pool`` is already created. For existing deployments, users can +# change the number of slots using Webserver, API or the CLI +# +# Variable: AIRFLOW__CORE__DEFAULT_POOL_TASK_SLOT_COUNT +# +default_pool_task_slot_count = 128 + +# The maximum list/dict length an XCom can push to trigger task mapping. If the pushed list/dict has a +# length exceeding this value, the task pushing the XCom will be failed automatically to prevent the +# mapped tasks from clogging the scheduler. +# +# Variable: AIRFLOW__CORE__MAX_MAP_LENGTH +# +max_map_length = 1024 + +# The default umask to use for process when run in daemon mode (scheduler, worker, etc.) +# +# This controls the file-creation mode mask which determines the initial value of file permission bits +# for newly created files. +# +# This value is treated as an octal-integer. +# +# Variable: AIRFLOW__CORE__DAEMON_UMASK +# +daemon_umask = 0o002 + +# Class to use as dataset manager. +# +# Example: dataset_manager_class = airflow.datasets.manager.DatasetManager +# +# Variable: AIRFLOW__CORE__DATASET_MANAGER_CLASS +# +# dataset_manager_class = + +# Kwargs to supply to dataset manager. +# +# Example: dataset_manager_kwargs = {"some_param": "some_value"} +# +# Variable: AIRFLOW__CORE__DATASET_MANAGER_KWARGS +# +# dataset_manager_kwargs = + +# Dataset URI validation should raise an exception if it is not compliant with AIP-60. +# By default this configuration is false, meaning that Airflow 2.x only warns the user. +# In Airflow 3, this configuration will be removed, unconditionally enabling strict validation. +# +# Variable: AIRFLOW__CORE__STRICT_DATASET_URI_VALIDATION +# +strict_dataset_uri_validation = False + +# (experimental) Whether components should use Airflow Internal API for DB connectivity. 
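+#
+# For illustration: ``xcom_backend`` earlier in this section accepts a custom subclass of
+# ``BaseXCom``. A minimal sketch; the exact ``serialize_value``/``deserialize_value``
+# signatures vary between Airflow releases, so treat them as an assumption to verify:
+#
+# .. code-block:: python
+#
+#     # my_airflow_settings.py (hypothetical module)
+#     import json
+#
+#     from airflow.models.xcom import BaseXCom
+#
+#     class CompactXCom(BaseXCom):
+#         @staticmethod
+#         def serialize_value(value, **kwargs):
+#             # Store XCom values as compact JSON.
+#             return json.dumps(value, separators=(",", ":")).encode("utf-8")
+#
+#         @staticmethod
+#         def deserialize_value(result):
+#             # ``result.value`` holds the bytes written by serialize_value.
+#             return json.loads(result.value)
+#
+# enabled via ``xcom_backend = my_airflow_settings.CompactXCom``.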
+# +# Variable: AIRFLOW__CORE__DATABASE_ACCESS_ISOLATION +# +database_access_isolation = False + +# (experimental) Airflow Internal API url. +# Only used if ``[core] database_access_isolation`` is ``True``. +# +# Example: internal_api_url = http://localhost:8080 +# +# Variable: AIRFLOW__CORE__INTERNAL_API_URL +# +# internal_api_url = + +# Secret key used to authenticate internal API clients to core. It should be as random as possible. +# However, when running more than 1 instances of webserver / internal API services, make sure all +# of them use the same ``secret_key`` otherwise calls will fail on authentication. +# The authentication token generated using the secret key has a short expiry time though - make +# sure that time on ALL the machines that you run airflow components on is synchronized +# (for example using ntpd) otherwise you might get "forbidden" errors when the logs are accessed. +# +# Variable: AIRFLOW__CORE__INTERNAL_API_SECRET_KEY +# +internal_api_secret_key = tCnTbEabdFBDLHWoT/LxLw== + +# The ability to allow testing connections across Airflow UI, API and CLI. +# Supported options: ``Disabled``, ``Enabled``, ``Hidden``. Default: Disabled +# Disabled - Disables the test connection functionality and disables the Test Connection button in UI. +# Enabled - Enables the test connection functionality and shows the Test Connection button in UI. +# Hidden - Disables the test connection functionality and hides the Test Connection button in UI. +# Before setting this to Enabled, make sure that you review the users who are able to add/edit +# connections and ensure they are trusted. Connection testing can be done maliciously leading to +# undesired and insecure outcomes. +# See `Airflow Security Model: Capabilities of authenticated UI users +# `__ +# for more details. +# +# Variable: AIRFLOW__CORE__TEST_CONNECTION +# +test_connection = Disabled + +# The maximum length of the rendered template field. If the value to be stored in the +# rendered template field exceeds this size, it's redacted. +# +# Variable: AIRFLOW__CORE__MAX_TEMPLATED_FIELD_LENGTH +# +max_templated_field_length = 4096 + +host_docker_socket = /var/run/docker.sock + +[database] +# Path to the ``alembic.ini`` file. You can either provide the file path relative +# to the Airflow home directory or the absolute path if it is located elsewhere. +# +# Variable: AIRFLOW__DATABASE__ALEMBIC_INI_FILE_PATH +# +alembic_ini_file_path = alembic.ini + +# The SQLAlchemy connection string to the metadata database. +# SQLAlchemy supports many different database engines. +# See: `Set up a Database Backend: Database URI +# `__ +# for more details. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN +# +# This is configured via the AIRFLOW__DATABASE__SQL_ALCHEMY_CONN environment variable +# in the docker-compose files, as it differs between master and workers. +# A dummy value is set here to ensure the env var override is picked up. +sql_alchemy_conn = postgresql://dummy:dummy@dummy/dummy + +# Extra engine specific keyword args passed to SQLAlchemy's create_engine, as a JSON-encoded value +# +# Example: sql_alchemy_engine_args = {"arg1": true} +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_ENGINE_ARGS +# +# sql_alchemy_engine_args = + +# The encoding for the databases +# +# Variable: AIRFLOW__DATABASE__SQL_ENGINE_ENCODING +# +sql_engine_encoding = utf-8 + +# Collation for ``dag_id``, ``task_id``, ``key``, ``external_executor_id`` columns +# in case they have different encoding. 
+# By default this collation is the same as the database collation, however for ``mysql`` and ``mariadb`` +# the default is ``utf8mb3_bin`` so that the index sizes of our index keys will not exceed +# the maximum size of allowed index when collation is set to ``utf8mb4`` variant, see +# `GitHub Issue Comment `__ +# for more details. +# +# Variable: AIRFLOW__DATABASE__SQL_ENGINE_COLLATION_FOR_IDS +# +# sql_engine_collation_for_ids = + +# If SQLAlchemy should pool database connections. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_ENABLED +# +sql_alchemy_pool_enabled = True + +# The SQLAlchemy pool size is the maximum number of database connections +# in the pool. 0 indicates no limit. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_SIZE +# +sql_alchemy_pool_size = 20 + +# The maximum overflow size of the pool. +# When the number of checked-out connections reaches the size set in pool_size, +# additional connections will be returned up to this limit. +# When those additional connections are returned to the pool, they are disconnected and discarded. +# It follows then that the total number of simultaneous connections the pool will allow +# is **pool_size** + **max_overflow**, +# and the total number of "sleeping" connections the pool will allow is pool_size. +# max_overflow can be set to ``-1`` to indicate no overflow limit; +# no limit will be placed on the total number of concurrent connections. Defaults to ``10``. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_MAX_OVERFLOW +# +sql_alchemy_max_overflow = 30 + +# The SQLAlchemy pool recycle is the number of seconds a connection +# can be idle in the pool before it is invalidated. This config does +# not apply to sqlite. If the number of DB connections is ever exceeded, +# a lower config value will allow the system to recover faster. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_RECYCLE +# +sql_alchemy_pool_recycle = 1800 + +# Check connection at the start of each connection pool checkout. +# Typically, this is a simple statement like "SELECT 1". +# See `SQLAlchemy Pooling: Disconnect Handling - Pessimistic +# `__ +# for more details. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_PRE_PING +# +sql_alchemy_pool_pre_ping = True + +# The schema to use for the metadata database. +# SQLAlchemy supports databases with the concept of multiple schemas. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_SCHEMA +# +sql_alchemy_schema = + +# Import path for connect args in SQLAlchemy. Defaults to an empty dict. +# This is useful when you want to configure db engine args that SQLAlchemy won't parse +# in connection string. This can be set by passing a dictionary containing the create engine parameters. +# For more details about passing create engine parameters (keepalives variables, timeout etc) +# in Postgres DB Backend see `Setting up a PostgreSQL Database +# `__ +# e.g ``connect_args={"timeout":30}`` can be defined in ``airflow_local_settings.py`` and +# can be imported as shown below +# +# Example: sql_alchemy_connect_args = airflow_local_settings.connect_args +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_CONNECT_ARGS +# +# sql_alchemy_connect_args = + +# Important Warning: Use of sql_alchemy_session_maker Highly Discouraged +# Import path for function which returns 'sqlalchemy.orm.sessionmaker'. +# Improper configuration of sql_alchemy_session_maker can lead to serious issues, +# including data corruption, unrecoverable application crashes. 
Please review the SQLAlchemy +# documentation for detailed guidance on proper configuration and best practices. +# +# Example: sql_alchemy_session_maker = airflow_local_settings._sessionmaker +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_SESSION_MAKER +# +# sql_alchemy_session_maker = + +# Whether to load the default connections that ship with Airflow when ``airflow db init`` is called. +# It's good to get started, but you probably want to set this to ``False`` in a production environment. +# +# Variable: AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS +# +load_default_connections = True + +# Number of times the code should be retried in case of DB Operational Errors. +# Not all transactions will be retried as it can cause undesired state. +# Currently it is only used in ``DagFileProcessor.process_file`` to retry ``dagbag.sync_to_db``. +# +# Variable: AIRFLOW__DATABASE__MAX_DB_RETRIES +# +max_db_retries = 3 + +# Whether to run alembic migrations during Airflow start up. Sometimes this operation can be expensive, +# and the users can assert the correct version through other means (e.g. through a Helm chart). +# Accepts ``True`` or ``False``. +# +# Variable: AIRFLOW__DATABASE__CHECK_MIGRATIONS +# +check_migrations = True + +[logging] +# The folder where airflow should store its log files. +# This path must be absolute. +# There are a few existing configurations that assume this is set to the default. +# If you choose to override this you may need to update the +# ``[logging] dag_processor_manager_log_location`` and +# ``[logging] child_process_log_directory settings`` as well. +# +# Variable: AIRFLOW__LOGGING__BASE_LOG_FOLDER +# +base_log_folder = /opt/airflow/logs + +# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. +# Set this to ``True`` if you want to enable remote logging. +# +# Variable: AIRFLOW__LOGGING__REMOTE_LOGGING +# +remote_logging = True + +# Users must supply an Airflow connection id that provides access to the storage +# location. Depending on your remote logging service, this may only be used for +# reading logs, not writing them. +# +# Variable: AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID +# +remote_log_conn_id = minio_default + +# Whether the local log files for GCS, S3, WASB and OSS remote logging should be deleted after +# they are uploaded to the remote location. +# +# Variable: AIRFLOW__LOGGING__DELETE_LOCAL_LOGS +# +delete_local_logs = False + +# Path to Google Credential JSON file. If omitted, authorization based on `the Application Default +# Credentials +# `__ will +# be used. +# +# Variable: AIRFLOW__LOGGING__GOOGLE_KEY_PATH +# +google_key_path = + +# Storage bucket URL for remote logging +# S3 buckets should start with **s3://** +# Cloudwatch log groups should start with **cloudwatch://** +# GCS buckets should start with **gs://** +# WASB buckets should start with **wasb** just to help Airflow select correct handler +# Stackdriver logs should start with **stackdriver://** +# +# Variable: AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER +# +remote_base_log_folder = s3://airflow-logs/ + +# The remote_task_handler_kwargs param is loaded into a dictionary and passed to the ``__init__`` +# of remote task handler and it overrides the values provided by Airflow config. For example if you set +# ``delete_local_logs=False`` and you provide ``{"delete_local_copy": true}``, then the local +# log files will be deleted after they are uploaded to remote location. 
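+#
+# For illustration: with ``remote_log_conn_id = minio_default`` above, a connection with
+# that id must exist. A sketch of generating its URI for an S3-compatible MinIO endpoint
+# (host and credentials are placeholders, not values from this deployment):
+#
+# .. code-block:: python
+#
+#     import json
+#
+#     from airflow.models.connection import Connection
+#
+#     conn = Connection(
+#         conn_id="minio_default",
+#         conn_type="aws",
+#         login="MINIO_ACCESS_KEY",      # placeholder
+#         password="MINIO_SECRET_KEY",   # placeholder
+#         extra=json.dumps({"endpoint_url": "http://minio:9000"}),  # assumed endpoint
+#     )
+#     # The printed URI can be supplied as the AIRFLOW_CONN_MINIO_DEFAULT env var.
+#     print(conn.get_uri())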
+# +# Example: remote_task_handler_kwargs = {"delete_local_copy": true} +# +# Variable: AIRFLOW__LOGGING__REMOTE_TASK_HANDLER_KWARGS +# +remote_task_handler_kwargs = + +# Use server-side encryption for logs stored in S3 +# +# Variable: AIRFLOW__LOGGING__ENCRYPT_S3_LOGS +# +encrypt_s3_logs = False + +# Logging level. +# +# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``. +# +# Variable: AIRFLOW__LOGGING__LOGGING_LEVEL +# +logging_level = INFO + +# Logging level for celery. If not set, it uses the value of logging_level +# +# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``. +# +# Variable: AIRFLOW__LOGGING__CELERY_LOGGING_LEVEL +# +celery_logging_level = + +# Logging level for Flask-appbuilder UI. +# +# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``. +# +# Variable: AIRFLOW__LOGGING__FAB_LOGGING_LEVEL +# +fab_logging_level = WARNING + +# Logging class +# Specify the class that will specify the logging configuration +# This class has to be on the python classpath +# +# Example: logging_config_class = my.path.default_local_settings.LOGGING_CONFIG +# +# Variable: AIRFLOW__LOGGING__LOGGING_CONFIG_CLASS +# +logging_config_class = airflow_local_settings.LOGGING_CONFIG + +# Flag to enable/disable Colored logs in Console +# Colour the logs when the controlling terminal is a TTY. +# +# Variable: AIRFLOW__LOGGING__COLORED_CONSOLE_LOG +# +colored_console_log = True + +# Log format for when Colored logs is enabled +# +# Variable: AIRFLOW__LOGGING__COLORED_LOG_FORMAT +# +colored_log_format = [%%(blue)s%%(asctime)s%%(reset)s] {%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s + +# Specifies the class utilized by Airflow to implement colored logging +# +# Variable: AIRFLOW__LOGGING__COLORED_FORMATTER_CLASS +# +colored_formatter_class = airflow.utils.log.colored_log.CustomTTYColoredFormatter + +# Format of Log line +# +# Variable: AIRFLOW__LOGGING__LOG_FORMAT +# +log_format = [%%(asctime)s] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s + +# Defines the format of log messages for simple logging configuration +# +# Variable: AIRFLOW__LOGGING__SIMPLE_LOG_FORMAT +# +simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s + +# Where to send dag parser logs. If "file", logs are sent to log files defined by child_process_log_directory. +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_LOG_TARGET +# +dag_processor_log_target = file + +# Format of Dag Processor Log line +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_LOG_FORMAT +# +dag_processor_log_format = [%%(asctime)s] [SOURCE:DAG_PROCESSOR] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s + +# Determines the formatter class used by Airflow for structuring its log messages +# The default formatter class is timezone-aware, which means that timestamps attached to log entries +# will be adjusted to reflect the local timezone of the Airflow instance +# +# Variable: AIRFLOW__LOGGING__LOG_FORMATTER_CLASS +# +log_formatter_class = airflow.utils.log.timezone_aware.TimezoneAware + +# An import path to a function to add adaptations of each secret added with +# ``airflow.utils.log.secrets_masker.mask_secret`` to be masked in log messages. The given function +# is expected to require a single parameter: the secret to be adapted. It may return a +# single adaptation of the secret or an iterable of adaptations to each be masked as secrets. 
+# The original secret will be masked as well as any adaptations returned. +# +# Example: secret_mask_adapter = urllib.parse.quote +# +# Variable: AIRFLOW__LOGGING__SECRET_MASK_ADAPTER +# +secret_mask_adapter = + +secret_mask_exception_args = False + +# Specify prefix pattern like mentioned below with stream handler ``TaskHandlerWithCustomFormatter`` +# +# Example: task_log_prefix_template = {{ti.dag_id}}-{{ti.task_id}}-{{execution_date}}-{{ti.try_number}} +# +# Variable: AIRFLOW__LOGGING__TASK_LOG_PREFIX_TEMPLATE +# +task_log_prefix_template = {{ ti.dag_id }}-{{ ti.task_id }}-{{ ti.run_id }} + +# Formatting for how airflow generates file names/paths for each task run. +# +# Variable: AIRFLOW__LOGGING__LOG_FILENAME_TEMPLATE +# +log_filename_template = dag_id={{ ti.dag_id }}/run_id={{ ti.run_id }}/task_id={{ ti.task_id }}/{%% if ti.map_index >= 0 %%}map_index={{ ti.map_index }}/{%% endif %%}attempt={{ try_number }}.log + +# Formatting for how airflow generates file names for log +# +# Variable: AIRFLOW__LOGGING__LOG_PROCESSOR_FILENAME_TEMPLATE +# +log_processor_filename_template = {{ filename }}.log + +# Full path of dag_processor_manager logfile. +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_MANAGER_LOG_LOCATION +# +dag_processor_manager_log_location = /opt/airflow/logs/dag_processor_manager/dag_processor_manager.log + +# Whether DAG processor manager will write logs to stdout +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_MANAGER_LOG_STDOUT +# +dag_processor_manager_log_stdout = False + +# Name of handler to read task instance logs. +# Defaults to use ``task`` handler. +# +# Variable: AIRFLOW__LOGGING__TASK_LOG_READER +# +task_log_reader = task + +# A comma\-separated list of third-party logger names that will be configured to print messages to +# consoles\. +# +# Example: extra_logger_names = connexion,sqlalchemy +# +# Variable: AIRFLOW__LOGGING__EXTRA_LOGGER_NAMES +# +extra_logger_names = + +# When you start an Airflow worker, Airflow starts a tiny web server +# subprocess to serve the workers local log files to the airflow main +# web server, who then builds pages and sends them to users. This defines +# the port on which the logs are served. It needs to be unused, and open +# visible from the main web server to connect into the workers. +# +# Variable: AIRFLOW__LOGGING__WORKER_LOG_SERVER_PORT +# +worker_log_server_port = 8793 + +# Port to serve logs from for triggerer. +# See ``[logging] worker_log_server_port`` description for more info. +# +# Variable: AIRFLOW__LOGGING__TRIGGER_LOG_SERVER_PORT +# +trigger_log_server_port = 8794 + +# We must parse timestamps to interleave logs between trigger and task. To do so, +# we need to parse timestamps in log files. In case your log format is non-standard, +# you may provide import path to callable which takes a string log line and returns +# the timestamp (datetime.datetime compatible). +# +# Example: interleave_timestamp_parser = path.to.my_func +# +# Variable: AIRFLOW__LOGGING__INTERLEAVE_TIMESTAMP_PARSER +# +# interleave_timestamp_parser = + +# Permissions in the form or of octal string as understood by chmod. The permissions are important +# when you use impersonation, when logs are written by a different user than airflow. The most secure +# way of configuring it in this case is to add both users to the same group and make it the default +# group of both users. Group-writeable logs are default in airflow, but you might decide that you are +# OK with having the logs other-writeable, in which case you should set it to ``0o777``. 
You might +# decide to add more security if you do not use impersonation and change it to ``0o755`` to make it +# only owner-writeable. You can also make it just readable only for owner by changing it to ``0o700`` +# if all the access (read/write) for your logs happens from the same user. +# +# Example: file_task_handler_new_folder_permissions = 0o775 +# +# Variable: AIRFLOW__LOGGING__FILE_TASK_HANDLER_NEW_FOLDER_PERMISSIONS +# +file_task_handler_new_folder_permissions = 0o775 + +# Permissions in the form or of octal string as understood by chmod. The permissions are important +# when you use impersonation, when logs are written by a different user than airflow. The most secure +# way of configuring it in this case is to add both users to the same group and make it the default +# group of both users. Group-writeable logs are default in airflow, but you might decide that you are +# OK with having the logs other-writeable, in which case you should set it to ``0o666``. You might +# decide to add more security if you do not use impersonation and change it to ``0o644`` to make it +# only owner-writeable. You can also make it just readable only for owner by changing it to ``0o600`` +# if all the access (read/write) for your logs happens from the same user. +# +# Example: file_task_handler_new_file_permissions = 0o664 +# +# Variable: AIRFLOW__LOGGING__FILE_TASK_HANDLER_NEW_FILE_PERMISSIONS +# +file_task_handler_new_file_permissions = 0o664 + +# By default Celery sends all logs into stderr. +# If enabled any previous logging handlers will get *removed*. +# With this option AirFlow will create new handlers +# and send low level logs like INFO and WARNING to stdout, +# while sending higher severity logs to stderr. +# +# Variable: AIRFLOW__LOGGING__CELERY_STDOUT_STDERR_SEPARATION +# +celery_stdout_stderr_separation = False + +# If enabled, Airflow may ship messages to task logs from outside the task run context, e.g. from +# the scheduler, executor, or callback execution context. This can help in circumstances such as +# when there's something blocking the execution of the task and ordinarily there may be no task +# logs at all. +# This is set to ``True`` by default. If you encounter issues with this feature +# (e.g. scheduler performance issues) it can be disabled. +# +# Variable: AIRFLOW__LOGGING__ENABLE_TASK_CONTEXT_LOGGER +# +enable_task_context_logger = True + +# A comma separated list of keywords related to errors whose presence should display the line in red +# color in UI +# +# Variable: AIRFLOW__LOGGING__COLOR_LOG_ERROR_KEYWORDS +# +color_log_error_keywords = error,exception + +# A comma separated list of keywords related to warning whose presence should display the line in yellow +# color in UI +# +# Variable: AIRFLOW__LOGGING__COLOR_LOG_WARNING_KEYWORDS +# +color_log_warning_keywords = warn + +[metrics] +# `StatsD `__ integration settings. + +# If true, ``[metrics] metrics_allow_list`` and ``[metrics] metrics_block_list`` will use +# regex pattern matching anywhere within the metric name instead of only prefix matching +# at the start of the name. +# +# Variable: AIRFLOW__METRICS__METRICS_USE_PATTERN_MATCH +# +metrics_use_pattern_match = False + +# Configure an allow list (comma separated string) to send only certain metrics. +# If ``[metrics] metrics_use_pattern_match`` is ``false``, match only the exact metric name prefix. +# If ``[metrics] metrics_use_pattern_match`` is ``true``, provide regex patterns to match. 
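+#
+# For illustration: with pattern matching disabled, an entry such as ``scheduler`` is a
+# prefix match, so ``scheduler.heartbeat`` is kept while ``airflow.scheduler`` is not;
+# with pattern matching enabled, a regex such as ``heartbeat$`` keeps any metric whose
+# name ends in ``heartbeat``.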
+# +# Example: metrics_allow_list = "scheduler,executor,dagrun,pool,triggerer,celery" or "^scheduler,^executor,heartbeat|timeout" +# +# Variable: AIRFLOW__METRICS__METRICS_ALLOW_LIST +# +metrics_allow_list = + +# Configure a block list (comma separated string) to block certain metrics from being emitted. +# If ``[metrics] metrics_allow_list`` and ``[metrics] metrics_block_list`` are both configured, +# ``[metrics] metrics_block_list`` is ignored. +# +# If ``[metrics] metrics_use_pattern_match`` is ``false``, match only the exact metric name prefix. +# +# If ``[metrics] metrics_use_pattern_match`` is ``true``, provide regex patterns to match. +# +# Example: metrics_block_list = "scheduler,executor,dagrun,pool,triggerer,celery" or "^scheduler,^executor,heartbeat|timeout" +# +# Variable: AIRFLOW__METRICS__METRICS_BLOCK_LIST +# +metrics_block_list = + +# Enables sending metrics to StatsD. +# +# Variable: AIRFLOW__METRICS__STATSD_ON +# +statsd_on = False + +# Specifies the host address where the StatsD daemon (or server) is running +# +# Variable: AIRFLOW__METRICS__STATSD_HOST +# +statsd_host = localhost + +# Specifies the port on which the StatsD daemon (or server) is listening to +# +# Variable: AIRFLOW__METRICS__STATSD_PORT +# +statsd_port = 8125 + +# Defines the namespace for all metrics sent from Airflow to StatsD +# +# Variable: AIRFLOW__METRICS__STATSD_PREFIX +# +statsd_prefix = airflow + +# A function that validate the StatsD stat name, apply changes to the stat name if necessary and return +# the transformed stat name. +# +# The function should have the following signature +# +# .. code-block:: python +# +# def func_name(stat_name: str) -> str: ... +# +# Variable: AIRFLOW__METRICS__STAT_NAME_HANDLER +# +stat_name_handler = + +# To enable datadog integration to send airflow metrics. +# +# Variable: AIRFLOW__METRICS__STATSD_DATADOG_ENABLED +# +statsd_datadog_enabled = False + +# List of datadog tags attached to all metrics(e.g: ``key1:value1,key2:value2``) +# +# Variable: AIRFLOW__METRICS__STATSD_DATADOG_TAGS +# +statsd_datadog_tags = + +# Set to ``False`` to disable metadata tags for some of the emitted metrics +# +# Variable: AIRFLOW__METRICS__STATSD_DATADOG_METRICS_TAGS +# +statsd_datadog_metrics_tags = True + +# If you want to utilise your own custom StatsD client set the relevant +# module path below. +# Note: The module path must exist on your +# `PYTHONPATH ` +# for Airflow to pick it up +# +# Variable: AIRFLOW__METRICS__STATSD_CUSTOM_CLIENT_PATH +# +# statsd_custom_client_path = + +# If you want to avoid sending all the available metrics tags to StatsD, +# you can configure a block list of prefixes (comma separated) to filter out metric tags +# that start with the elements of the list (e.g: ``job_id,run_id``) +# +# Example: statsd_disabled_tags = job_id,run_id,dag_id,task_id +# +# Variable: AIRFLOW__METRICS__STATSD_DISABLED_TAGS +# +statsd_disabled_tags = job_id,run_id + +# To enable sending Airflow metrics with StatsD-Influxdb tagging convention. +# +# Variable: AIRFLOW__METRICS__STATSD_INFLUXDB_ENABLED +# +statsd_influxdb_enabled = False + +# Enables sending metrics to OpenTelemetry. +# +# Variable: AIRFLOW__METRICS__OTEL_ON +# +otel_on = False + +# Specifies the hostname or IP address of the OpenTelemetry Collector to which Airflow sends +# metrics and traces. +# +# Variable: AIRFLOW__METRICS__OTEL_HOST +# +otel_host = localhost + +# Specifies the port of the OpenTelemetry Collector that is listening to. 
+# +# Variable: AIRFLOW__METRICS__OTEL_PORT +# +otel_port = 8889 + +# The prefix for the Airflow metrics. +# +# Variable: AIRFLOW__METRICS__OTEL_PREFIX +# +otel_prefix = airflow + +# Defines the interval, in milliseconds, at which Airflow sends batches of metrics and traces +# to the configured OpenTelemetry Collector. +# +# Variable: AIRFLOW__METRICS__OTEL_INTERVAL_MILLISECONDS +# +otel_interval_milliseconds = 60000 + +# If ``True``, all metrics are also emitted to the console. Defaults to ``False``. +# +# Variable: AIRFLOW__METRICS__OTEL_DEBUGGING_ON +# +otel_debugging_on = False + +# The default service name of traces. +# +# Variable: AIRFLOW__METRICS__OTEL_SERVICE +# +otel_service = Airflow + +# If ``True``, SSL will be enabled. Defaults to ``False``. +# To establish an HTTPS connection to the OpenTelemetry collector, +# you need to configure the SSL certificate and key within the OpenTelemetry collector's +# ``config.yml`` file. +# +# Variable: AIRFLOW__METRICS__OTEL_SSL_ACTIVE +# +otel_ssl_active = False + +[traces] +# Distributed traces integration settings. + +# Enables sending traces to OpenTelemetry. +# +# Variable: AIRFLOW__TRACES__OTEL_ON +# +otel_on = False + +# Specifies the hostname or IP address of the OpenTelemetry Collector to which Airflow sends +# traces. +# +# Variable: AIRFLOW__TRACES__OTEL_HOST +# +otel_host = localhost + +# Specifies the port of the OpenTelemetry Collector that is listening to. +# +# Variable: AIRFLOW__TRACES__OTEL_PORT +# +otel_port = 8889 + +# The default service name of traces. +# +# Variable: AIRFLOW__TRACES__OTEL_SERVICE +# +otel_service = Airflow + +# If True, all traces are also emitted to the console. Defaults to False. +# +# Variable: AIRFLOW__TRACES__OTEL_DEBUGGING_ON +# +otel_debugging_on = False + +# If True, SSL will be enabled. Defaults to False. +# To establish an HTTPS connection to the OpenTelemetry collector, +# you need to configure the SSL certificate and key within the OpenTelemetry collector's +# config.yml file. +# +# Variable: AIRFLOW__TRACES__OTEL_SSL_ACTIVE +# +otel_ssl_active = False + +# If True, after the task is complete, the full task log messages will be added as the +# span events, chunked by 64k size. defaults to False. +# +# Variable: AIRFLOW__TRACES__OTEL_TASK_LOG_EVENT +# +otel_task_log_event = False + +[secrets] +# Full class name of secrets backend to enable (will precede env vars and metastore in search path) +# +# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend +# +# Variable: AIRFLOW__SECRETS__BACKEND +# +backend = + +# The backend_kwargs param is loaded into a dictionary and passed to ``__init__`` +# of secrets backend class. See documentation for the secrets backend you are using. +# JSON is expected. +# +# Example for AWS Systems Manager ParameterStore: +# ``{"connections_prefix": "/airflow/connections", "profile_name": "default"}`` +# +# Variable: AIRFLOW__SECRETS__BACKEND_KWARGS +# +backend_kwargs = + +# .. note:: |experimental| +# +# Enables local caching of Variables, when parsing DAGs only. +# Using this option can make dag parsing faster if Variables are used in top level code, at the expense +# of longer propagation time for changes. +# Please note that this cache concerns only the DAG parsing step. There is no caching in place when DAG +# tasks are run. +# +# Variable: AIRFLOW__SECRETS__USE_CACHE +# +use_cache = False + +# .. 
note:: |experimental| +# +# When the cache is enabled, this is the duration for which we consider an entry in the cache to be +# valid. Entries are refreshed if they are older than this many seconds. +# It means that when the cache is enabled, this is the maximum amount of time you need to wait to see a +# Variable change take effect. +# +# Variable: AIRFLOW__SECRETS__CACHE_TTL_SECONDS +# +cache_ttl_seconds = 900 + +[cli] +# In what way should the cli access the API. The LocalClient will use the +# database directly, while the json_client will use the api running on the +# webserver +# +# Variable: AIRFLOW__CLI__API_CLIENT +# +api_client = airflow.api.client.local_client + +# If you set web_server_url_prefix, do NOT forget to append it here, ex: +# ``endpoint_url = http://localhost:8080/myroot`` +# So api will look like: ``http://localhost:8080/myroot/api/experimental/...`` +# +# Variable: AIRFLOW__CLI__ENDPOINT_URL +# +endpoint_url = http://localhost:8080 + +[debug] +# Used only with ``DebugExecutor``. If set to ``True`` DAG will fail with first +# failed task. Helpful for debugging purposes. +# +# Variable: AIRFLOW__DEBUG__FAIL_FAST +# +fail_fast = False + +[api] +# Enables the deprecated experimental API. Please note that these API endpoints do not have +# access control. An authenticated user has full access. +# +# .. warning:: +# +# This `Experimental REST API +# `__ is +# deprecated since version 2.0. Please consider using +# `the Stable REST API +# `__. +# For more information on migration, see +# `RELEASE_NOTES.rst `_ +# +# Variable: AIRFLOW__API__ENABLE_EXPERIMENTAL_API +# +enable_experimental_api = False + +# Comma separated list of auth backends to authenticate users of the API. See +# `Security: API +# `__ for possible values. +# ("airflow.api.auth.backend.default" allows all requests for historic reasons) +# +# Variable: AIRFLOW__API__AUTH_BACKENDS +# +auth_backends = airflow.api.auth.backend.session + +# Used to set the maximum page limit for API requests. If limit passed as param +# is greater than maximum page limit, it will be ignored and maximum page limit value +# will be set as the limit +# +# Variable: AIRFLOW__API__MAXIMUM_PAGE_LIMIT +# +maximum_page_limit = 100 + +# Used to set the default page limit when limit param is zero or not provided in API +# requests. Otherwise if positive integer is passed in the API requests as limit, the +# smallest number of user given limit or maximum page limit is taken as limit. +# +# Variable: AIRFLOW__API__FALLBACK_PAGE_LIMIT +# +fallback_page_limit = 100 + +# The intended audience for JWT token credentials used for authorization. This value must match on the client and server sides. If empty, audience will not be tested. +# +# Example: google_oauth2_audience = project-id-random-value.apps.googleusercontent.com +# +# Variable: AIRFLOW__API__GOOGLE_OAUTH2_AUDIENCE +# +google_oauth2_audience = + +# Path to Google Cloud Service Account key file (JSON). If omitted, authorization based on +# `the Application Default Credentials +# `__ will +# be used. +# +# Example: google_key_path = /files/service-account-json +# +# Variable: AIRFLOW__API__GOOGLE_KEY_PATH +# +google_key_path = + +# Used in response to a preflight request to indicate which HTTP +# headers can be used when making the actual request. This header is +# the server side response to the browser's +# Access-Control-Request-Headers header. 
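+#
+# For illustration: ``auth_backends`` above uses the session backend, so API calls share
+# the web UI login. If ``airflow.api.auth.backend.basic_auth`` were appended to that list,
+# the stable REST API could be exercised like this (host and credentials are placeholders):
+#
+# .. code-block:: python
+#
+#     import requests
+#
+#     resp = requests.get(
+#         "http://localhost:8080/api/v1/dags",
+#         auth=("admin", "admin"),  # placeholder credentials
+#         timeout=10,
+#     )
+#     resp.raise_for_status()
+#     print(resp.json()["total_entries"])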
+# +# Variable: AIRFLOW__API__ACCESS_CONTROL_ALLOW_HEADERS +# +access_control_allow_headers = + +# Specifies the method or methods allowed when accessing the resource. +# +# Variable: AIRFLOW__API__ACCESS_CONTROL_ALLOW_METHODS +# +access_control_allow_methods = + +# Indicates whether the response can be shared with requesting code from the given origins. +# Separate URLs with space. +# +# Variable: AIRFLOW__API__ACCESS_CONTROL_ALLOW_ORIGINS +# +access_control_allow_origins = + +# Indicates whether the **xcomEntries** endpoint supports the **deserialize** +# flag. If set to ``False``, setting this flag in a request would result in a +# 400 Bad Request error. +# +# Variable: AIRFLOW__API__ENABLE_XCOM_DESERIALIZE_SUPPORT +# +enable_xcom_deserialize_support = False + +[lineage] +# what lineage backend to use +# +# Variable: AIRFLOW__LINEAGE__BACKEND +# +backend = + +[operators] +# The default owner assigned to each new operator, unless +# provided explicitly or passed via ``default_args`` +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_OWNER +# +default_owner = airflow + +# The default value of attribute "deferrable" in operators and sensors. +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_DEFERRABLE +# +default_deferrable = false + +# Indicates the default number of CPU units allocated to each operator when no specific CPU request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_CPUS +# +default_cpus = 1 + +# Indicates the default number of RAM allocated to each operator when no specific RAM request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_RAM +# +default_ram = 512 + +# Indicates the default number of disk storage allocated to each operator when no specific disk request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_DISK +# +default_disk = 512 + +# Indicates the default number of GPUs allocated to each operator when no specific GPUs request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_GPUS +# +default_gpus = 0 + +# Default queue that tasks get assigned to and that worker listen on. +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_QUEUE +# +default_queue = default + +# Is allowed to pass additional/unused arguments (args, kwargs) to the BaseOperator operator. +# If set to ``False``, an exception will be thrown, +# otherwise only the console message will be displayed. +# +# Variable: AIRFLOW__OPERATORS__ALLOW_ILLEGAL_ARGUMENTS +# +allow_illegal_arguments = False + +[webserver] +# The message displayed when a user attempts to execute actions beyond their authorised privileges. +# +# Variable: AIRFLOW__WEBSERVER__ACCESS_DENIED_MESSAGE +# +access_denied_message = Access is Denied + +# Path of webserver config file used for configuring the webserver parameters +# +# Variable: AIRFLOW__WEBSERVER__CONFIG_FILE +# +config_file = /opt/airflow/webserver_config.py + +# The base url of your website: Airflow cannot guess what domain or CNAME you are using. +# This is used to create links in the Log Url column in the Browse - Task Instances menu, +# as well as in any automated emails sent by Airflow that contain links to your webserver. +# +# Variable: AIRFLOW__WEBSERVER__BASE_URL +# +base_url = http://localhost:8080 + +# Default timezone to display all dates in the UI, can be UTC, system, or +# any IANA timezone string (e.g. **Europe/Amsterdam**). 
If left empty the +# default value of core/default_timezone will be used +# +# Example: default_ui_timezone = America/New_York +# +# Variable: AIRFLOW__WEBSERVER__DEFAULT_UI_TIMEZONE +# +default_ui_timezone = UTC + +# The ip specified when starting the web server +# +# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_HOST +# +web_server_host = 0.0.0.0 + +# The port on which to run the web server +# +# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_PORT +# +web_server_port = 8080 + +# Paths to the SSL certificate and key for the web server. When both are +# provided SSL will be enabled. This does not change the web server port. +# +# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_SSL_CERT +# +web_server_ssl_cert = + +# Paths to the SSL certificate and key for the web server. When both are +# provided SSL will be enabled. This does not change the web server port. +# +# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_SSL_KEY +# +web_server_ssl_key = + +# The type of backend used to store web session data, can be ``database`` or ``securecookie``. For the +# ``database`` backend, sessions are store in the database and they can be +# managed there (for example when you reset password of the user, all sessions for that user are +# deleted). For the ``securecookie`` backend, sessions are stored in encrypted cookies on the client +# side. The ``securecookie`` mechanism is 'lighter' than database backend, but sessions are not deleted +# when you reset password of the user, which means that other than waiting for expiry time, the only +# way to invalidate all sessions for a user is to change secret_key and restart webserver (which +# also invalidates and logs out all other user's sessions). +# +# When you are using ``database`` backend, make sure to keep your database session table small +# by periodically running ``airflow db clean --table session`` command, especially if you have +# automated API calls that will create a new session for each call rather than reuse the sessions +# stored in browser cookies. +# +# Example: session_backend = securecookie +# +# Variable: AIRFLOW__WEBSERVER__SESSION_BACKEND +# +session_backend = database + +# Number of seconds the webserver waits before killing gunicorn master that doesn't respond +# +# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_MASTER_TIMEOUT +# +web_server_master_timeout = 120 + +# Number of seconds the gunicorn webserver waits before timing out on a worker +# +# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_WORKER_TIMEOUT +# +web_server_worker_timeout = 120 + +# Number of workers to refresh at a time. When set to 0, worker refresh is +# disabled. When nonzero, airflow periodically refreshes webserver workers by +# bringing up new ones and killing old ones. +# +# Variable: AIRFLOW__WEBSERVER__WORKER_REFRESH_BATCH_SIZE +# +worker_refresh_batch_size = 1 + +# Number of seconds to wait before refreshing a batch of workers. +# +# Variable: AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL +# +worker_refresh_interval = 6000 + +# If set to ``True``, Airflow will track files in plugins_folder directory. When it detects changes, +# then reload the gunicorn. If set to ``True``, gunicorn starts without preloading, which is slower, +# uses more memory, and may cause race conditions. Avoid setting this to ``True`` in production. +# +# Variable: AIRFLOW__WEBSERVER__RELOAD_ON_PLUGIN_CHANGE +# +reload_on_plugin_change = False + +# Secret key used to run your flask app. It should be as random as possible. 
However, when running +# more than 1 instances of webserver, make sure all of them use the same ``secret_key`` otherwise +# one of them will error with "CSRF session token is missing". +# The webserver key is also used to authorize requests to Celery workers when logs are retrieved. +# The token generated using the secret key has a short expiry time though - make sure that time on +# ALL the machines that you run airflow components on is synchronized (for example using ntpd) +# otherwise you might get "forbidden" errors when the logs are accessed. +# +# Variable: AIRFLOW__WEBSERVER__SECRET_KEY +# +secret_key = tCnTbEabdFBDLHWoT/LxLw== + +# Number of workers to run the Gunicorn web server +# +# Variable: AIRFLOW__WEBSERVER__WORKERS +# +workers = 1 + +# The worker class gunicorn should use. Choices include +# ``sync`` (default), ``eventlet``, ``gevent``. +# +# .. warning:: +# +# When using ``gevent`` you might also want to set the ``_AIRFLOW_PATCH_GEVENT`` +# environment variable to ``"1"`` to make sure gevent patching is done as early as possible. +# +# Be careful to set ``_AIRFLOW_PATCH_GEVENT`` only on the web server as gevent patching may +# affect the scheduler behavior via the ``multiprocessing`` sockets module and cause crash. +# +# See related Issues / PRs for more details: +# +# * https://github.com/benoitc/gunicorn/issues/2796 +# * https://github.com/apache/airflow/issues/8212 +# * https://github.com/apache/airflow/pull/28283 +# +# Variable: AIRFLOW__WEBSERVER__WORKER_CLASS +# +worker_class = gevent + +# Log files for the gunicorn webserver. '-' means log to stderr. +# +# Variable: AIRFLOW__WEBSERVER__ACCESS_LOGFILE +# +access_logfile = - + +# Log files for the gunicorn webserver. '-' means log to stderr. +# +# Variable: AIRFLOW__WEBSERVER__ERROR_LOGFILE +# +error_logfile = - + +# Access log format for gunicorn webserver. +# default format is ``%%(h)s %%(l)s %%(u)s %%(t)s "%%(r)s" %%(s)s %%(b)s "%%(f)s" "%%(a)s"`` +# See `Gunicorn Settings: 'access_log_format' Reference +# `__ for more details +# +# Variable: AIRFLOW__WEBSERVER__ACCESS_LOGFORMAT +# +access_logformat = + +# Expose the configuration file in the web server. Set to ``non-sensitive-only`` to show all values +# except those that have security implications. ``True`` shows all values. ``False`` hides the +# configuration completely. +# +# Variable: AIRFLOW__WEBSERVER__EXPOSE_CONFIG +# +expose_config = False + +# Expose hostname in the web server +# +# Variable: AIRFLOW__WEBSERVER__EXPOSE_HOSTNAME +# +expose_hostname = False + +# Expose stacktrace in the web server +# +# Variable: AIRFLOW__WEBSERVER__EXPOSE_STACKTRACE +# +expose_stacktrace = False + +# Default DAG view. Valid values are: ``grid``, ``graph``, ``duration``, ``gantt``, ``landing_times`` +# +# Variable: AIRFLOW__WEBSERVER__DAG_DEFAULT_VIEW +# +dag_default_view = grid + +# Default DAG orientation. Valid values are: +# ``LR`` (Left->Right), ``TB`` (Top->Bottom), ``RL`` (Right->Left), ``BT`` (Bottom->Top) +# +# Variable: AIRFLOW__WEBSERVER__DAG_ORIENTATION +# +dag_orientation = LR + +# Sorting order in grid view. Valid values are: ``topological``, ``hierarchical_alphabetical`` +# +# Variable: AIRFLOW__WEBSERVER__GRID_VIEW_SORTING_ORDER +# +grid_view_sorting_order = topological + +# The amount of time (in secs) webserver will wait for initial handshake +# while fetching logs from other worker machine +# +# Variable: AIRFLOW__WEBSERVER__LOG_FETCH_TIMEOUT_SEC +# +log_fetch_timeout_sec = 10 + +# Time interval (in secs) to wait before next log fetching. 
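+#
+# For illustration: the ``secret_key`` above must be random and identical on every
+# webserver instance. A sketch for generating a fresh one:
+#
+# .. code-block:: python
+#
+#     import secrets
+#
+#     # 32 random url-safe bytes; paste the output into secret_key on all nodes.
+#     print(secrets.token_urlsafe(32))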
+# +# Variable: AIRFLOW__WEBSERVER__LOG_FETCH_DELAY_SEC +# +log_fetch_delay_sec = 5 + +# Distance away from page bottom to enable auto tailing. +# +# Variable: AIRFLOW__WEBSERVER__LOG_AUTO_TAILING_OFFSET +# +log_auto_tailing_offset = 30 + +# Animation speed for auto tailing log display. +# +# Variable: AIRFLOW__WEBSERVER__LOG_ANIMATION_SPEED +# +log_animation_speed = 1000 + +# By default, the webserver shows paused DAGs. Flip this to hide paused +# DAGs by default +# +# Variable: AIRFLOW__WEBSERVER__HIDE_PAUSED_DAGS_BY_DEFAULT +# +hide_paused_dags_by_default = False + +# Consistent page size across all listing views in the UI +# +# Variable: AIRFLOW__WEBSERVER__PAGE_SIZE +# +page_size = 100 + +# Define the color of navigation bar +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_COLOR +# +navbar_color = #fff + +# Define the color of text in the navigation bar +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_TEXT_COLOR +# +navbar_text_color = #51504f + +# Define the color of navigation bar links when hovered +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_HOVER_COLOR +# +navbar_hover_color = #eee + +# Define the color of text in the navigation bar when hovered +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_TEXT_HOVER_COLOR +# +navbar_text_hover_color = #51504f + +# Define the color of the logo text +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_LOGO_TEXT_COLOR +# +navbar_logo_text_color = #51504f + +# Default dagrun to show in UI +# +# Variable: AIRFLOW__WEBSERVER__DEFAULT_DAG_RUN_DISPLAY_NUMBER +# +default_dag_run_display_number = 25 + +# Enable werkzeug ``ProxyFix`` middleware for reverse proxy +# +# Variable: AIRFLOW__WEBSERVER__ENABLE_PROXY_FIX +# +enable_proxy_fix = True + +# Number of values to trust for ``X-Forwarded-For``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_FOR +# +proxy_fix_x_for = 1 + +# Number of values to trust for ``X-Forwarded-Proto``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_PROTO +# +proxy_fix_x_proto = 1 + +# Number of values to trust for ``X-Forwarded-Host``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_HOST +# +proxy_fix_x_host = 1 + +# Number of values to trust for ``X-Forwarded-Port``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_PORT +# +proxy_fix_x_port = 1 + +# Number of values to trust for ``X-Forwarded-Prefix``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_PREFIX +# +proxy_fix_x_prefix = 1 + +# Set secure flag on session cookie +# +# Variable: AIRFLOW__WEBSERVER__COOKIE_SECURE +# +cookie_secure = False + +# Set samesite policy on session cookie +# +# Variable: AIRFLOW__WEBSERVER__COOKIE_SAMESITE +# +cookie_samesite = Lax + +# Default setting for wrap toggle on DAG code and TI log views. 
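+#
+# For illustration: ``enable_proxy_fix`` and the ``proxy_fix_x_*`` options above
+# correspond to the arguments of Werkzeug's ProxyFix middleware; conceptually the
+# webserver is wrapped roughly like this (a sketch, not Airflow's literal code):
+#
+# .. code-block:: python
+#
+#     from werkzeug.middleware.proxy_fix import ProxyFix
+#
+#     # Trust one hop of X-Forwarded-* headers set by the reverse proxy.
+#     app.wsgi_app = ProxyFix(
+#         app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1, x_prefix=1
+#     )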
+# +# Variable: AIRFLOW__WEBSERVER__DEFAULT_WRAP +# +default_wrap = False + +# Allow the UI to be rendered in a frame +# +# Variable: AIRFLOW__WEBSERVER__X_FRAME_ENABLED +# +x_frame_enabled = True + +# Send anonymous user activity to your analytics tool +# choose from ``google_analytics``, ``segment``, ``metarouter``, or ``matomo`` +# +# Variable: AIRFLOW__WEBSERVER__ANALYTICS_TOOL +# +# analytics_tool = + +# Unique ID of your account in the analytics tool +# +# Variable: AIRFLOW__WEBSERVER__ANALYTICS_ID +# +# analytics_id = + +# Your instances url, only applicable to Matomo. +# +# Example: analytics_url = https://your.matomo.instance.com/ +# +# Variable: AIRFLOW__WEBSERVER__ANALYTICS_URL +# +# analytics_url = + +# 'Recent Tasks' stats will show for old DagRuns if set +# +# Variable: AIRFLOW__WEBSERVER__SHOW_RECENT_STATS_FOR_COMPLETED_RUNS +# +show_recent_stats_for_completed_runs = True + +# The UI cookie lifetime in minutes. User will be logged out from UI after +# ``[webserver] session_lifetime_minutes`` of non-activity +# +# Variable: AIRFLOW__WEBSERVER__SESSION_LIFETIME_MINUTES +# +session_lifetime_minutes = 43200 + +# Sets a custom page title for the DAGs overview page and site title for all pages +# +# Variable: AIRFLOW__WEBSERVER__INSTANCE_NAME +# +# instance_name = + +# Whether the custom page title for the DAGs overview page contains any Markup language +# +# Variable: AIRFLOW__WEBSERVER__INSTANCE_NAME_HAS_MARKUP +# +instance_name_has_markup = False + +# How frequently, in seconds, the DAG data will auto-refresh in graph or grid view +# when auto-refresh is turned on +# +# Variable: AIRFLOW__WEBSERVER__AUTO_REFRESH_INTERVAL +# +auto_refresh_interval = 3 + +# Boolean for displaying warning for publicly viewable deployment +# +# Variable: AIRFLOW__WEBSERVER__WARN_DEPLOYMENT_EXPOSURE +# +warn_deployment_exposure = True + +# Comma separated string of view events to exclude from dag audit view. +# All other events will be added minus the ones passed here. +# The audit logs in the db will not be affected by this parameter. +# +# Example: audit_view_excluded_events = cli_task_run,running,success +# +# Variable: AIRFLOW__WEBSERVER__AUDIT_VIEW_EXCLUDED_EVENTS +# +# audit_view_excluded_events = + +# Comma separated string of view events to include in dag audit view. +# If passed, only these events will populate the dag audit view. +# The audit logs in the db will not be affected by this parameter. +# +# Example: audit_view_included_events = dagrun_cleared,failed +# +# Variable: AIRFLOW__WEBSERVER__AUDIT_VIEW_INCLUDED_EVENTS +# +# audit_view_included_events = + +# Boolean for running SwaggerUI in the webserver. +# +# Variable: AIRFLOW__WEBSERVER__ENABLE_SWAGGER_UI +# +enable_swagger_ui = True + +# Boolean for running Internal API in the webserver. +# +# Variable: AIRFLOW__WEBSERVER__RUN_INTERNAL_API +# +run_internal_api = False + +# The caching algorithm used by the webserver. Must be a valid hashlib function name. +# +# Example: caching_hash_method = sha256 +# +# Variable: AIRFLOW__WEBSERVER__CACHING_HASH_METHOD +# +caching_hash_method = md5 + +# Behavior of the trigger DAG run button for DAGs without params. ``False`` to skip and trigger +# without displaying a form to add a **dag_run.conf**, ``True`` to always display the form. +# The form is displayed always if parameters are defined. +# +# Variable: AIRFLOW__WEBSERVER__SHOW_TRIGGER_FORM_IF_NO_PARAMS +# +show_trigger_form_if_no_params = False + +# Number of recent DAG run configurations in the selector on the trigger web form. 
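+#
+# For illustration: the entries shown in that selector are the ``dag_run.conf`` payloads
+# of earlier runs. A sketch of creating such a run through the local API client (the dag
+# id and payload are placeholders):
+#
+# .. code-block:: python
+#
+#     from airflow.api.client.local_client import Client
+#
+#     client = Client(None, None)  # works against the metadata DB directly
+#     client.trigger_dag(dag_id="example_dag", conf={"url": "https://example.com"})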
+# +# Example: num_recent_configurations_for_trigger = 10 +# +# Variable: AIRFLOW__WEBSERVER__NUM_RECENT_CONFIGURATIONS_FOR_TRIGGER +# +num_recent_configurations_for_trigger = 5 + +# A DAG author is able to provide any raw HTML into ``doc_md`` or params description in +# ``description_md`` for text formatting. This includes potentially unsafe JavaScript. +# Displaying the DAG or trigger form in web UI provides the DAG author the potential to +# inject malicious code into clients' browsers. To ensure the web UI is safe by default, +# raw HTML is disabled by default. If you trust your DAG authors, you can enable HTML +# support in markdown by setting this option to ``True``. +# +# This parameter also enables the deprecated fields ``description_html`` and +# ``custom_html_form`` in DAG params until the feature is removed in a future version. +# +# Example: allow_raw_html_descriptions = False +# +# Variable: AIRFLOW__WEBSERVER__ALLOW_RAW_HTML_DESCRIPTIONS +# +allow_raw_html_descriptions = False + +# The maximum size of the request payload (in MB) that can be sent. +# +# Variable: AIRFLOW__WEBSERVER__ALLOWED_PAYLOAD_SIZE +# +allowed_payload_size = 1.0 + +# Require confirmation when changing a DAG in the web UI. This is to prevent accidental changes +# to a DAG that may be running on sensitive environments like production. +# When set to ``True``, a confirmation dialog will be shown when a user tries to Pause/Unpause or +# Trigger a DAG. +# +# Variable: AIRFLOW__WEBSERVER__REQUIRE_CONFIRMATION_DAG_CHANGE +# +require_confirmation_dag_change = False + +# The maximum size in bytes any non-file form field may be in a multipart/form-data body. +# If this limit is exceeded, a 413 RequestEntityTooLarge error is raised by the webserver. +# +# Variable: AIRFLOW__WEBSERVER__MAX_FORM_MEMORY_SIZE +# +max_form_memory_size = 500000 + +# The maximum number of fields that may be present in a multipart/form-data body. +# If this limit is exceeded, a 413 RequestEntityTooLarge error is raised by the webserver. +# +# Variable: AIRFLOW__WEBSERVER__MAX_FORM_PARTS +# +max_form_parts = 1000 + +[email] +# Configuration of the email backend and whether to +# send email alerts on retry or failure + +# Email backend to use +# +# Variable: AIRFLOW__EMAIL__EMAIL_BACKEND +# +email_backend = airflow.utils.email.send_email_smtp + +# Email connection to use +# +# Variable: AIRFLOW__EMAIL__EMAIL_CONN_ID +# +email_conn_id = smtp_default + +# Whether email alerts should be sent when a task is retried +# +# Variable: AIRFLOW__EMAIL__DEFAULT_EMAIL_ON_RETRY +# +default_email_on_retry = True + +# Whether email alerts should be sent when a task fails +# +# Variable: AIRFLOW__EMAIL__DEFAULT_EMAIL_ON_FAILURE +# +default_email_on_failure = True + +# File that will be used as the template for Email subject (which will be rendered using Jinja2). +# If not set, Airflow uses a base template. +# +# Example: subject_template = /path/to/my_subject_template_file +# +# Variable: AIRFLOW__EMAIL__SUBJECT_TEMPLATE +# +# subject_template = + +# File that will be used as the template for Email content (which will be rendered using Jinja2). +# If not set, Airflow uses a base template. +# +# Example: html_content_template = /path/to/my_html_content_template_file +# +# Variable: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE +# +# html_content_template = + +# Email address that will be used as sender address.
+# It can either be a raw email or the complete address in a format ``Sender Name <sender@email.com>`` +# +# Example: from_email = Airflow <airflow@example.com> +# +# Variable: AIRFLOW__EMAIL__FROM_EMAIL +# +# from_email = + +# ssl context to use when using SMTP and IMAP SSL connections. By default, the context is "default" +# which sets it to ``ssl.create_default_context()``, which provides the right balance between +# compatibility and security; it does, however, require that the certificates in your operating system +# are up to date and that your SMTP/IMAP servers have valid certificates with corresponding public +# keys installed on your machines. You can switch it to "none" if you want to disable checking +# of the certificates, but it is not recommended as it allows MITM (man-in-the-middle) attacks +# if your infrastructure is not sufficiently secured. It should only be set temporarily while you +# are fixing your certificate configuration. This can typically be done by upgrading to a newer +# version of the operating system you run Airflow components on, by upgrading/refreshing the proper +# certificates in the OS, or by updating the certificates for your mail servers. +# +# Example: ssl_context = default +# +# Variable: AIRFLOW__EMAIL__SSL_CONTEXT +# +ssl_context = default + +[smtp] +# If you want airflow to send emails on retries and failures, and you want to use +# the airflow.utils.email.send_email_smtp function, you have to configure an +# smtp server here + +# Specifies the host server address used by Airflow when sending out email notifications via SMTP. +# +# Variable: AIRFLOW__SMTP__SMTP_HOST +# +smtp_host = localhost + +# Determines whether to use the STARTTLS command when connecting to the SMTP server. +# +# Variable: AIRFLOW__SMTP__SMTP_STARTTLS +# +smtp_starttls = True + +# Determines whether to use an SSL connection when talking to the SMTP server. +# +# Variable: AIRFLOW__SMTP__SMTP_SSL +# +smtp_ssl = False + +# Username to authenticate when connecting to smtp server. +# +# Example: smtp_user = airflow +# +# Variable: AIRFLOW__SMTP__SMTP_USER +# +# smtp_user = + +# Password to authenticate when connecting to smtp server. +# +# Example: smtp_password = airflow +# +# Variable: AIRFLOW__SMTP__SMTP_PASSWORD +# +# smtp_password = + +# Defines the port number on which Airflow connects to the SMTP server to send email notifications. +# +# Variable: AIRFLOW__SMTP__SMTP_PORT +# +smtp_port = 25 + +# Specifies the default **from** email address used when Airflow sends email notifications. +# +# Variable: AIRFLOW__SMTP__SMTP_MAIL_FROM +# +smtp_mail_from = airflow@example.com + +# Determines the maximum time (in seconds) the Apache Airflow system will wait for a +# connection to the SMTP server to be established. +# +# Variable: AIRFLOW__SMTP__SMTP_TIMEOUT +# +smtp_timeout = 30 + +# Defines the maximum number of times Airflow will attempt to connect to the SMTP server. +# +# Variable: AIRFLOW__SMTP__SMTP_RETRY_LIMIT +# +smtp_retry_limit = 5 + +[sentry] +# `Sentry <https://docs.sentry.io>`__ integration. Here you can supply +# additional configuration options based on the Python platform. +# See `Python / Configuration / Basic Options +# <https://docs.sentry.io/platforms/python/configuration/options/>`__ for more details. +# Unsupported options: ``integrations``, ``in_app_include``, ``in_app_exclude``, +# ``ignore_errors``, ``before_breadcrumb``, ``transport``. + +# Enable error reporting to Sentry +# +# Variable: AIRFLOW__SENTRY__SENTRY_ON +# +sentry_on = false + +# +# Variable: AIRFLOW__SENTRY__SENTRY_DSN +# +sentry_dsn = + +# Dotted path to a before_send function that the sentry SDK should be configured to use.
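For reference, a minimal sketch of the kind of hook this option points to, assuming a hypothetical module path such as my_pkg.sentry_hooks.scrub_event; the (event, hint) signature is the one the Sentry SDK calls:

    # Hypothetical before_send hook: returning None tells the Sentry SDK
    # to drop the event, returning the event reports it unchanged.
    def scrub_event(event, hint):
        if "password" in str(event.get("message", "")).lower():
            return None  # discard events that might leak credentials
        return event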
+# +# Variable: AIRFLOW__SENTRY__BEFORE_SEND +# +# before_send = + +[scheduler] +# Task instances listen for an external kill signal (when you clear tasks +# from the CLI or the UI); this defines the frequency at which they should +# listen (in seconds). +# +# Variable: AIRFLOW__SCHEDULER__JOB_HEARTBEAT_SEC +# +job_heartbeat_sec = 5 + +# The scheduler constantly tries to trigger new tasks (look at the +# scheduler section in the docs for more information). This defines +# how often the scheduler should run (in seconds). +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEARTBEAT_SEC +# +scheduler_heartbeat_sec = 5 + +# The frequency (in seconds) at which the LocalTaskJob should send heartbeat signals to the +# scheduler to notify it's still alive. If this value is set to 0, the heartbeat interval will default +# to the value of ``[scheduler] scheduler_zombie_task_threshold``. +# +# Variable: AIRFLOW__SCHEDULER__LOCAL_TASK_JOB_HEARTBEAT_SEC +# +local_task_job_heartbeat_sec = 0 + +# The number of times to try to schedule each DAG file. +# -1 indicates an unlimited number. +# +# Variable: AIRFLOW__SCHEDULER__NUM_RUNS +# +num_runs = -1 + +# Controls how long the scheduler will sleep between loops when there was nothing to do +# in the loop; if it scheduled something, it will start the next loop +# iteration straight away. +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_IDLE_SLEEP_TIME +# +scheduler_idle_sleep_time = 1 + +# Number of seconds after which a DAG file is parsed. The DAG file is parsed every +# ``[scheduler] min_file_process_interval`` number of seconds. Updates to DAGs are reflected after +# this interval. Keeping this number low will increase CPU usage. +# +# Variable: AIRFLOW__SCHEDULER__MIN_FILE_PROCESS_INTERVAL +# +min_file_process_interval = 60 + +# How often (in seconds) to check for stale DAGs (DAGs which are no longer present in +# the expected files) which should be deactivated, as well as datasets that are no longer +# referenced and should be marked as orphaned. +# +# Variable: AIRFLOW__SCHEDULER__PARSING_CLEANUP_INTERVAL +# +parsing_cleanup_interval = 60 + +# How long (in seconds) to wait after we have re-parsed a DAG file before deactivating stale +# DAGs (DAGs which are no longer present in the expected files). The reason why we need +# this threshold is to account for the time between when the file is parsed and when the +# DAG is loaded. The absolute maximum that this could take is ``[core] dag_file_processor_timeout``, +# but when you have a long timeout configured, it results in a significant delay in the +# deactivation of stale dags. +# +# Variable: AIRFLOW__SCHEDULER__STALE_DAG_THRESHOLD +# +stale_dag_threshold = 50 + +# How often (in seconds) to scan the DAGs directory for new files. Defaults to 5 minutes. +# +# Variable: AIRFLOW__SCHEDULER__DAG_DIR_LIST_INTERVAL +# +dag_dir_list_interval = 600 + +# How often should stats be printed to the logs. Setting this to 0 disables printing stats. +# +# Variable: AIRFLOW__SCHEDULER__PRINT_STATS_INTERVAL +# +print_stats_interval = 30 + +# How often (in seconds) should pool usage stats be sent to StatsD (if statsd_on is enabled) +# +# Variable: AIRFLOW__SCHEDULER__POOL_METRICS_INTERVAL +# +pool_metrics_interval = 5.0 + +# If the last scheduler heartbeat happened more than ``[scheduler] scheduler_health_check_threshold`` +# ago (in seconds), scheduler is considered unhealthy. +# This is used by the health check in the **/health** endpoint and in ``airflow jobs check`` CLI +# for SchedulerJob.
+# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEALTH_CHECK_THRESHOLD +# +scheduler_health_check_threshold = 30 + +# When you start a scheduler, airflow starts a tiny web server +# subprocess to serve a health check if this is set to ``True`` +# +# Variable: AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK +# +enable_health_check = True + +# When you start a scheduler, airflow starts a tiny web server +# subprocess to serve a health check on this host +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEALTH_CHECK_SERVER_HOST +# +scheduler_health_check_server_host = 0.0.0.0 + +# When you start a scheduler, airflow starts a tiny web server +# subprocess to serve a health check on this port +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEALTH_CHECK_SERVER_PORT +# +scheduler_health_check_server_port = 8974 + +# How often (in seconds) should the scheduler check for orphaned tasks and SchedulerJobs +# +# Variable: AIRFLOW__SCHEDULER__ORPHANED_TASKS_CHECK_INTERVAL +# +orphaned_tasks_check_interval = 300.0 + +# Determines the directory where logs for the child processes of the scheduler will be stored +# +# Variable: AIRFLOW__SCHEDULER__CHILD_PROCESS_LOG_DIRECTORY +# +child_process_log_directory = /opt/airflow/logs/scheduler + +# Local task jobs periodically heartbeat to the DB. If the job has +# not sent a heartbeat in this many seconds, the scheduler will mark the +# associated task instance as failed and will re-schedule the task. +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_ZOMBIE_TASK_THRESHOLD +# +scheduler_zombie_task_threshold = 300 + +# How often (in seconds) should the scheduler check for zombie tasks. +# +# Variable: AIRFLOW__SCHEDULER__ZOMBIE_DETECTION_INTERVAL +# +zombie_detection_interval = 10.0 + +# Turn off scheduler catchup by setting this to ``False``. +# Default behavior is unchanged and +# Command Line Backfills still work, but the scheduler +# will not do scheduler catchup if this is ``False``, +# however it can be set on a per DAG basis in the +# DAG definition (catchup) +# +# Variable: AIRFLOW__SCHEDULER__CATCHUP_BY_DEFAULT +# +catchup_by_default = True + +# Setting this to ``True`` will make the first task instance of a task +# ignore the depends_on_past setting. A task instance will be considered +# as the first task instance of a task when there is no task instance +# in the DB with an execution_date earlier than it, i.e. no manual marking +# of success will be needed for a newly added task to be scheduled. +# +# Variable: AIRFLOW__SCHEDULER__IGNORE_FIRST_DEPENDS_ON_PAST_BY_DEFAULT +# +ignore_first_depends_on_past_by_default = True + +# This changes the batch size of queries in the scheduling main loop. +# This should not be greater than ``[core] parallelism``. +# If this is too high, SQL query performance may be impacted by +# complexity of query predicate, and/or excessive locking. +# Additionally, you may hit the maximum allowable query length for your db. +# Set this to 0 to use the value of ``[core] parallelism`` +# +# Variable: AIRFLOW__SCHEDULER__MAX_TIS_PER_QUERY +# +max_tis_per_query = 16 + +# Should the scheduler issue ``SELECT ... FOR UPDATE`` in relevant queries. +# If this is set to ``False`` then you should not run more than a single +# scheduler at once +# +# Variable: AIRFLOW__SCHEDULER__USE_ROW_LEVEL_LOCKING +# +use_row_level_locking = True + +# Max number of DAGs to create DagRuns for per scheduler loop.
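The health rule stated above for scheduler_health_check_threshold reduces to a timestamp comparison; a simplified sketch of what the /health endpoint effectively evaluates (not Airflow's actual code):

    from datetime import datetime, timedelta, timezone

    # Simplified version of the /health check: the scheduler is healthy
    # while its latest heartbeat is newer than the configured threshold.
    def scheduler_healthy(last_heartbeat: datetime, threshold_sec: int = 30) -> bool:
        return datetime.now(timezone.utc) - last_heartbeat < timedelta(seconds=threshold_sec)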
+# +# Variable: AIRFLOW__SCHEDULER__MAX_DAGRUNS_TO_CREATE_PER_LOOP +# +max_dagruns_to_create_per_loop = 10 + +# How many DagRuns should a scheduler examine (and lock) when scheduling +# and queuing tasks. +# +# Variable: AIRFLOW__SCHEDULER__MAX_DAGRUNS_PER_LOOP_TO_SCHEDULE +# +max_dagruns_per_loop_to_schedule = 20 + +# Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the +# same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other +# dags in some circumstances +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULE_AFTER_TASK_EXECUTION +# +schedule_after_task_execution = True + +# The scheduler reads dag files to extract the airflow modules that are going to be used, +# and imports them ahead of time to avoid having to re-do it for each parsing process. +# This flag can be set to ``False`` to disable this behavior in case an airflow module needs +# to be freshly imported each time (at the cost of increased DAG parsing time). +# +# Variable: AIRFLOW__SCHEDULER__PARSING_PRE_IMPORT_MODULES +# +parsing_pre_import_modules = True + +# The scheduler can run multiple processes in parallel to parse dags. +# This defines how many processes will run. +# +# Variable: AIRFLOW__SCHEDULER__PARSING_PROCESSES +# +parsing_processes = 2 + +# One of ``modified_time``, ``random_seeded_by_host`` and ``alphabetical``. +# The scheduler will list and sort the dag files to decide the parsing order. +# +# * ``modified_time``: Sort by modified time of the files. This is useful on large scale to parse the +# recently modified DAGs first. +# * ``random_seeded_by_host``: Sort randomly across multiple Schedulers but with same order on the +# same host. This is useful when running with Scheduler in HA mode where each scheduler can +# parse different DAG files. +# * ``alphabetical``: Sort by filename +# +# Variable: AIRFLOW__SCHEDULER__FILE_PARSING_SORT_MODE +# +file_parsing_sort_mode = modified_time + +# Whether the dag processor is running as a standalone process or it is a subprocess of a scheduler +# job. +# +# Variable: AIRFLOW__SCHEDULER__STANDALONE_DAG_PROCESSOR +# +standalone_dag_processor = False + +# Only applicable if ``[scheduler] standalone_dag_processor`` is true and callbacks are stored +# in database. Contains maximum number of callbacks that are fetched during a single loop. +# +# Variable: AIRFLOW__SCHEDULER__MAX_CALLBACKS_PER_LOOP +# +max_callbacks_per_loop = 20 + +# Only applicable if ``[scheduler] standalone_dag_processor`` is true. +# Time in seconds after which dags, which were not updated by Dag Processor are deactivated. +# +# Variable: AIRFLOW__SCHEDULER__DAG_STALE_NOT_SEEN_DURATION +# +dag_stale_not_seen_duration = 600 + +# Turn off scheduler use of cron intervals by setting this to ``False``. +# DAGs submitted manually in the web UI or with trigger_dag will still run. +# +# Variable: AIRFLOW__SCHEDULER__USE_JOB_SCHEDULE +# +use_job_schedule = True + +# Allow externally triggered DagRuns for Execution Dates in the future +# Only has effect if schedule_interval is set to None in DAG +# +# Variable: AIRFLOW__SCHEDULER__ALLOW_TRIGGER_IN_FUTURE +# +allow_trigger_in_future = False + +# How often to check for expired trigger requests that have not run yet. +# +# Variable: AIRFLOW__SCHEDULER__TRIGGER_TIMEOUT_CHECK_INTERVAL +# +trigger_timeout_check_interval = 15 + +# Amount of time a task can be in the queued state before being retried or set to failed. 
+# +# Variable: AIRFLOW__SCHEDULER__TASK_QUEUED_TIMEOUT +# +task_queued_timeout = 300.0 + +# How often to check for tasks that have been in the queued state for +# longer than ``[scheduler] task_queued_timeout``. +# +# Variable: AIRFLOW__SCHEDULER__TASK_QUEUED_TIMEOUT_CHECK_INTERVAL +# +task_queued_timeout_check_interval = 120.0 + +# The run_id pattern used to verify the validity of user input to the run_id parameter when +# triggering a DAG. This pattern cannot change the pattern used by scheduler to generate run_id +# for scheduled DAG runs or DAG runs triggered without changing the run_id parameter. +# +# Variable: AIRFLOW__SCHEDULER__ALLOWED_RUN_ID_PATTERN +# +allowed_run_id_pattern = ^[A-Za-z0-9_.~:+-]+$ + +# Whether to create DAG runs that span an interval or one single point in time for cron schedules, when +# a cron string is provided to ``schedule`` argument of a DAG. +# +# * ``True``: **CronDataIntervalTimetable** is used, which is suitable +# for DAGs with well-defined data interval. You get contiguous intervals from the end of the previous +# interval up to the scheduled datetime. +# * ``False``: **CronTriggerTimetable** is used, which is closer to the behavior of cron itself. +# +# Notably, for **CronTriggerTimetable**, the logical date is the same as the time the DAG Run will +# try to schedule, while for **CronDataIntervalTimetable**, the logical date is the beginning of +# the data interval, but the DAG Run will try to schedule at the end of the data interval. +# +# Variable: AIRFLOW__SCHEDULER__CREATE_CRON_DATA_INTERVALS +# +create_cron_data_intervals = True + +[triggerer] +# How many triggers a single Triggerer will run at once, by default. +# +# Variable: AIRFLOW__TRIGGERER__DEFAULT_CAPACITY +# +default_capacity = 1000 + +# How often to heartbeat the Triggerer job to ensure it hasn't been killed. +# +# Variable: AIRFLOW__TRIGGERER__JOB_HEARTBEAT_SEC +# +job_heartbeat_sec = 5 + +# If the last triggerer heartbeat happened more than ``[triggerer] triggerer_health_check_threshold`` +# ago (in seconds), triggerer is considered unhealthy. +# This is used by the health check in the **/health** endpoint and in ``airflow jobs check`` CLI +# for TriggererJob. +# +# Variable: AIRFLOW__TRIGGERER__TRIGGERER_HEALTH_CHECK_THRESHOLD +# +triggerer_health_check_threshold = 30 + +[kerberos] +# Location of your ccache file once kinit has been performed. +# +# Variable: AIRFLOW__KERBEROS__CCACHE +# +ccache = /tmp/airflow_krb5_ccache + +# gets augmented with fqdn +# +# Variable: AIRFLOW__KERBEROS__PRINCIPAL +# +principal = airflow + +# Determines the frequency at which initialization or re-initialization processes occur. +# +# Variable: AIRFLOW__KERBEROS__REINIT_FREQUENCY +# +reinit_frequency = 3600 + +# Path to the kinit executable +# +# Variable: AIRFLOW__KERBEROS__KINIT_PATH +# +kinit_path = kinit + +# Designates the path to the Kerberos keytab file for the Airflow user +# +# Variable: AIRFLOW__KERBEROS__KEYTAB +# +keytab = airflow.keytab + +# Allow to disable ticket forwardability. +# +# Variable: AIRFLOW__KERBEROS__FORWARDABLE +# +forwardable = True + +# Allow to remove source IP from token, useful when using token behind NATted Docker host. +# +# Variable: AIRFLOW__KERBEROS__INCLUDE_IP +# +include_ip = True + +[sensors] +# Sensor default timeout, 7 days by default (7 * 24 * 60 * 60). +# +# Variable: AIRFLOW__SENSORS__DEFAULT_TIMEOUT +# +default_timeout = 604800 + +[aws] +# This section contains settings for Amazon Web Services (AWS) integration. 
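Two of the scheduler values above are easy to sanity-check in isolation: the allowed_run_id_pattern regex, and the interval semantics that create_cron_data_intervals = True selects. A small sketch (the dates are illustrative):

    import re
    from datetime import datetime, timedelta

    # allowed_run_id_pattern accepts scheduler-style run ids:
    assert re.fullmatch(r"[A-Za-z0-9_.~:+-]+", "scheduled__2025-01-02T00:00:00+00:00")

    # With create_cron_data_intervals = True, a daily cron tick at
    # 2025-01-02 00:00 runs with logical date 2025-01-01 00:00 -- the
    # start of the data interval that the run covers.
    tick = datetime(2025, 1, 2)
    logical_date = tick - timedelta(days=1)
    assert logical_date == datetime(2025, 1, 1)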
+ +# session_factory = +cloudwatch_task_handler_json_serializer = airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize_legacy + +[aws_batch_executor] +# This section only applies if you are using the AwsBatchExecutor in +# Airflow's ``[core]`` configuration. +# For more information on any of these execution parameters, see the link below: +# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.submit_job +# For boto3 credential management, see +# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html + +conn_id = aws_default +# region_name = +max_submit_job_attempts = 3 +check_health_on_startup = True +# job_name = +# job_queue = +# job_definition = +# submit_job_kwargs = + +[aws_ecs_executor] +# This section only applies if you are using the AwsEcsExecutor in +# Airflow's ``[core]`` configuration. +# For more information on any of these execution parameters, see the link below: +# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs/client/run_task.html +# For boto3 credential management, see +# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html + +conn_id = aws_default +# region_name = +assign_public_ip = False +# cluster = +# capacity_provider_strategy = +# container_name = +# launch_type = +platform_version = LATEST +# security_groups = +# subnets = +# task_definition = +max_run_task_attempts = 3 +# run_task_kwargs = +check_health_on_startup = True + +[aws_auth_manager] +# This section only applies if you are using the AwsAuthManager. In other words, if you set +# ``[core] auth_manager = airflow.providers.amazon.aws.auth_manager.aws_auth_manager.AwsAuthManager`` in +# Airflow's configuration. + +enable = False +conn_id = aws_default +# region_name = +# saml_metadata_url = +# avp_policy_store_id = + +[celery_kubernetes_executor] +# This section only applies if you are using the ``CeleryKubernetesExecutor`` in +# ``[core]`` section above + +# Define when to send a task to ``KubernetesExecutor`` when using ``CeleryKubernetesExecutor``. +# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``), +# the task is executed via ``KubernetesExecutor``, +# otherwise via ``CeleryExecutor`` +# +# Variable: AIRFLOW__CELERY_KUBERNETES_EXECUTOR__KUBERNETES_QUEUE +# +kubernetes_queue = kubernetes + +[celery] +# This section only applies if you are using the CeleryExecutor in +# ``[core]`` section above + +# The app name that will be used by celery +# +# Variable: AIRFLOW__CELERY__CELERY_APP_NAME +# +celery_app_name = airflow.providers.celery.executors.celery_executor + +# The concurrency that will be used when starting workers with the +# ``airflow celery worker`` command. This defines the number of task instances that +# a worker will take, so size up your workers based on the resources on +# your worker box and the nature of your tasks +# +# Variable: AIRFLOW__CELERY__WORKER_CONCURRENCY +# +worker_concurrency = 32 + +# The maximum and minimum number of pool processes that will be used to dynamically resize +# the pool based on load. Enable autoscaling by providing max_concurrency,min_concurrency +# with the ``airflow celery worker`` command (always keep minimum processes, +# but grow to maximum if necessary). +# Pick these numbers based on resources on the worker box and the nature of the task. +# If the autoscale option is available, worker_concurrency will be ignored.
+# https://docs.celeryq.dev/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale +# +# Example: worker_autoscale = 16,12 +# +# Variable: AIRFLOW__CELERY__WORKER_AUTOSCALE +# +# worker_autoscale = + +# Used to increase the number of tasks that a worker prefetches, which can improve performance. +# The number of processes multiplied by worker_prefetch_multiplier is the number of tasks +# that are prefetched by a worker. A value greater than 1 can result in tasks being unnecessarily +# blocked if there are multiple workers and one worker prefetches tasks that sit behind long +# running tasks while another worker has unutilized processes that are unable to process the already +# claimed blocked tasks. +# https://docs.celeryq.dev/en/stable/userguide/optimizing.html#prefetch-limits +# +# Variable: AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER +# +worker_prefetch_multiplier = 2 + +# Specify if remote control of the workers is enabled. +# In some cases when the broker does not support remote control, Celery creates lots of +# ``.*reply-celery-pidbox`` queues. You can prevent this by setting this to false. +# However, with this disabled Flower won't work. +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html#broker-overview +# +# Variable: AIRFLOW__CELERY__WORKER_ENABLE_REMOTE_CONTROL +# +worker_enable_remote_control = true + +# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally +# a sqlalchemy database. Refer to the Celery documentation for more information. +# +# Variable: AIRFLOW__CELERY__BROKER_URL +# +# This will be configured via environment variables, as it differs between master and workers. +# broker_url = + +# The Celery result_backend. When a job finishes, it needs to update the +# metadata of the job. Therefore it will post a message on a message bus, +# or insert it into a database (depending on the backend). +# This status is used by the scheduler to update the state of the task. +# The use of a database is highly recommended. +# When not specified, sql_alchemy_conn with a db+ scheme prefix will be used +# https://docs.celeryq.dev/en/latest/userguide/configuration.html#task-result-backend-settings +# +# Example: result_backend = db+postgresql://postgres:airflow@postgres/airflow +# +# Variable: AIRFLOW__CELERY__RESULT_BACKEND +# +# The result_backend is intentionally left blank. +# When blank, Airflow's CeleryExecutor defaults to using the value from +# `sql_alchemy_conn` as the result backend, which is the recommended setup. +result_backend = + +# Optional configuration dictionary to pass to the Celery result backend SQLAlchemy engine. +# +# Example: result_backend_sqlalchemy_engine_options = {"pool_recycle": 1800} +# +# Variable: AIRFLOW__CELERY__RESULT_BACKEND_SQLALCHEMY_ENGINE_OPTIONS +# +result_backend_sqlalchemy_engine_options = + +# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start +# it ``airflow celery flower``.
This defines the IP that Celery Flower runs on +# +# Variable: AIRFLOW__CELERY__FLOWER_HOST +# +flower_host = 0.0.0.0 + +# The root URL for Flower +# +# Example: flower_url_prefix = /flower +# +# Variable: AIRFLOW__CELERY__FLOWER_URL_PREFIX +# +flower_url_prefix = + +# This defines the port that Celery Flower runs on +# +# Variable: AIRFLOW__CELERY__FLOWER_PORT +# +flower_port = 5555 + +# Securing Flower with Basic Authentication +# Accepts user:password pairs separated by a comma +# +# Example: flower_basic_auth = user1:password1,user2:password2 +# +# Variable: AIRFLOW__CELERY__FLOWER_BASIC_AUTH +# +flower_basic_auth = + +# How many processes CeleryExecutor uses to sync task state. +# 0 means to use max(1, number of cores - 1) processes. +# +# Variable: AIRFLOW__CELERY__SYNC_PARALLELISM +# +sync_parallelism = 0 + +# Import path for celery configuration options +# +# Variable: AIRFLOW__CELERY__CELERY_CONFIG_OPTIONS +# +celery_config_options = airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG + +# +# Variable: AIRFLOW__CELERY__SSL_ACTIVE +# +ssl_active = False + +# Path to the client key. +# +# Variable: AIRFLOW__CELERY__SSL_KEY +# +ssl_key = + +# Path to the client certificate. +# +# Variable: AIRFLOW__CELERY__SSL_CERT +# +ssl_cert = + +# Path to the CA certificate. +# +# Variable: AIRFLOW__CELERY__SSL_CACERT +# +ssl_cacert = + +# Celery Pool implementation. +# Choices include: ``prefork`` (default), ``eventlet``, ``gevent`` or ``solo``. +# See: +# https://docs.celeryq.dev/en/latest/userguide/workers.html#concurrency +# https://docs.celeryq.dev/en/latest/userguide/concurrency/eventlet.html +# +# Variable: AIRFLOW__CELERY__POOL +# +pool = prefork + +# The number of seconds to wait before timing out ``send_task_to_executor`` or +# ``fetch_celery_task_state`` operations. +# +# Variable: AIRFLOW__CELERY__OPERATION_TIMEOUT +# +operation_timeout = 1.0 + +# Acknowledge task messages after the task has finished instead of just before it starts, +# so that tasks claimed by a worker that dies are re-delivered by the broker +# (pairs with the broker visibility timeout below). +task_acks_late = True + +# Celery task will report its status as 'started' when the task is executed by a worker. +# This is used in Airflow to keep track of the running tasks and if a Scheduler is restarted +# or run in HA mode, it can adopt the orphan tasks launched by previous SchedulerJob. +# +# Variable: AIRFLOW__CELERY__TASK_TRACK_STARTED +# +task_track_started = True + +# The maximum number of retries for publishing task messages to the broker when failing +# due to an ``AirflowTaskTimeout`` error before giving up and marking the task as failed. +# +# Variable: AIRFLOW__CELERY__TASK_PUBLISH_MAX_RETRIES +# +task_publish_max_retries = 3 + +# Worker initialisation check to validate Metadata Database connection +# +# Variable: AIRFLOW__CELERY__WORKER_PRECHECK +# +worker_precheck = False + +# Extra celery configs to include in the celery worker. +# Any of the celery config can be added to this config and it +# will be applied while starting the celery worker. e.g. {"worker_max_tasks_per_child": 10} +# See also: +# https://docs.celeryq.dev/en/stable/userguide/configuration.html#configuration-and-defaults +# +# Variable: AIRFLOW__CELERY__EXTRA_CELERY_CONFIG +# +extra_celery_config = {} + +[celery_broker_transport_options] +# This section is for specifying options which can be passed to the +# underlying celery broker transport. See: +# https://docs.celeryq.dev/en/latest/userguide/configuration.html#std:setting-broker_transport_options + +# The visibility timeout defines the number of seconds to wait for the worker +# to acknowledge the task before the message is redelivered to another worker.
+# Make sure to increase the visibility timeout to match the time of the longest +# ETA you're planning to use. +# visibility_timeout is only supported for Redis and SQS celery brokers. +# See: +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#visibility-timeout +# +# Example: visibility_timeout = 21600 +# +# Variable: AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__VISIBILITY_TIMEOUT +# +# visibility_timeout = + +# The sentinel_kwargs parameter allows passing additional options to the Sentinel client. +# In a typical scenario where Redis Sentinel is used as the broker and Redis servers are +# password-protected, the password needs to be passed through this parameter. Although its +# type is string, it is required to pass a string that conforms to the dictionary format. +# See: +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#configuration +# +# Example: sentinel_kwargs = {"password": "password_for_redis_server"} +# +# Variable: AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__SENTINEL_KWARGS +# +# sentinel_kwargs = + +[local_kubernetes_executor] +# This section only applies if you are using the ``LocalKubernetesExecutor`` in +# ``[core]`` section above + +# Define when to send a task to ``KubernetesExecutor`` when using ``LocalKubernetesExecutor``. +# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``), +# the task is executed via ``KubernetesExecutor``, +# otherwise via ``LocalExecutor`` +# +# Variable: AIRFLOW__LOCAL_KUBERNETES_EXECUTOR__KUBERNETES_QUEUE +# +kubernetes_queue = kubernetes + +[kubernetes_executor] +# Kwargs to override the default urllib3 Retry used in the kubernetes API client +# +# Example: api_client_retry_configuration = { "total": 3, "backoff_factor": 0.5 } +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__API_CLIENT_RETRY_CONFIGURATION +# +api_client_retry_configuration = + +# Flag to control the information added to kubernetes executor logs for better traceability +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__LOGS_TASK_METADATA +# +logs_task_metadata = False + +# Path to the YAML pod file that forms the basis for KubernetesExecutor workers. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__POD_TEMPLATE_FILE +# +pod_template_file = + +# The repository of the Kubernetes Image for the Worker to Run +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_CONTAINER_REPOSITORY +# +worker_container_repository = + +# The tag of the Kubernetes Image for the Worker to Run +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_CONTAINER_TAG +# +worker_container_tag = + +# The Kubernetes namespace where airflow workers should be created. Defaults to ``default`` +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__NAMESPACE +# +namespace = default + +# If True, all worker pods will be deleted upon termination +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_WORKER_PODS +# +delete_worker_pods = True + +# If False (and delete_worker_pods is True), +# failed worker pods will not be deleted so users can investigate them. +# This only prevents removal of worker pods where the worker itself failed, +# not when the task it ran failed. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_WORKER_PODS_ON_FAILURE +# +delete_worker_pods_on_failure = False + +# Pending worker pods whose containers report one of these state reasons are treated as fatal failures. +worker_pod_pending_fatal_container_state_reasons = CreateContainerConfigError,ErrImagePull,CreateContainerError,ImageInspectError,InvalidImageName +# Number of Kubernetes Worker Pod creation calls per scheduler loop.
+# Note that the current default of "1" will only launch a single pod +# per-heartbeat. It is HIGHLY recommended that users increase this +# number to match the tolerance of their kubernetes cluster for +# better performance. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_PODS_CREATION_BATCH_SIZE +# +worker_pods_creation_batch_size = 1 + +# Allows users to launch pods in multiple namespaces. +# This requires creating a cluster-role for the scheduler, +# or using the multi_namespace_mode_namespace_list configuration. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__MULTI_NAMESPACE_MODE +# +multi_namespace_mode = False + +# If multi_namespace_mode is True while the scheduler does not have a cluster-role, +# give the list of namespaces where the scheduler will schedule jobs. +# The scheduler needs to have the necessary permissions in these namespaces. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__MULTI_NAMESPACE_MODE_NAMESPACE_LIST +# +multi_namespace_mode_namespace_list = + +# Use the service account kubernetes gives to pods to connect to the kubernetes cluster. +# It's intended for clients that expect to be running inside a pod running on kubernetes. +# It will raise an exception if called from a process not running in a kubernetes environment. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__IN_CLUSTER +# +in_cluster = True + +# When running with in_cluster=False, change the default cluster_context or config_file +# options passed to the Kubernetes client. Leave these blank to use the default behaviour, as ``kubectl`` does. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__CLUSTER_CONTEXT +# +# cluster_context = + +# Path to the kubernetes configfile to be used when ``in_cluster`` is set to False +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__CONFIG_FILE +# +# config_file = + +# Keyword parameters to pass while calling kubernetes client core_v1_api methods +# from the Kubernetes Executor, provided as a single line formatted JSON dictionary string. +# The list of supported params is similar for all core_v1_apis, hence a single config +# variable for all apis. See: +# https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__KUBE_CLIENT_REQUEST_ARGS +# +kube_client_request_args = + +# Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client +# ``core_v1_api`` method when using the Kubernetes Executor. +# This should be an object and can contain any of the options listed in the ``v1DeleteOptions`` +# class defined here: +# https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19 +# +# Example: delete_option_kwargs = {"grace_period_seconds": 10} +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_OPTION_KWARGS +# +delete_option_kwargs = + +# Enables the TCP keepalive mechanism. This prevents Kubernetes API requests from hanging +# indefinitely when an idle connection is timed out by services like cloud load balancers or firewalls. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__ENABLE_TCP_KEEPALIVE +# +enable_tcp_keepalive = True + +# When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has +# been idle for `tcp_keep_idle` seconds. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_IDLE +# +tcp_keep_idle = 120 + +# When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond +# to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds.
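At the socket level these keepalive knobs map directly onto the Linux TCP keepalive options; a sketch of the equivalent raw-socket configuration (Linux-only constants, values mirror the settings above):

    import socket

    # Mirror of the settings above using raw Linux socket options:
    # probe after 120 s idle, retransmit every 30 s, give up after 6 misses.
    def apply_keepalive(sock: socket.socket) -> None:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 120)
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 30)
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 6)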
+# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_INTVL +# +tcp_keep_intvl = 30 + +# When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond +# to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt` number of times before +# a connection is considered to be broken. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_CNT +# +tcp_keep_cnt = 6 + +# Set this to false to skip verifying the SSL certificate of the Kubernetes Python client. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__VERIFY_SSL +# +verify_ssl = True + +# How often in seconds to check for task instances stuck in "queued" status without a pod +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_PODS_QUEUED_CHECK_INTERVAL +# +worker_pods_queued_check_interval = 60 + +# Path to a CA certificate to be used by the Kubernetes client to verify the server's SSL certificate. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__SSL_CA_CERT +# +ssl_ca_cert = + +# The maximum number of retries for queuing the task to the kubernetes scheduler when +# failing due to Kube API exceeded-quota errors before giving up and marking the task as failed. +# -1 for unlimited times. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TASK_PUBLISH_MAX_RETRIES +# +task_publish_max_retries = 0 + +[common.io] +# Common IO configuration section + +# Path to a location on object storage where XComs can be stored in URL format. +# +# Example: xcom_objectstorage_path = s3://conn_id@bucket/path +# +# Variable: AIRFLOW__COMMON.IO__XCOM_OBJECTSTORAGE_PATH +# +xcom_objectstorage_path = + +# Threshold in bytes for storing XComs in object storage. -1 means always store in the +# database. 0 means always store in object storage. Any positive number means +# it will be stored in object storage if the size of the value is greater than the threshold. +# +# Example: xcom_objectstorage_threshold = 1000000 +# +# Variable: AIRFLOW__COMMON.IO__XCOM_OBJECTSTORAGE_THRESHOLD +# +xcom_objectstorage_threshold = -1 + +# Compression algorithm to use when storing XComs in object storage. Supported algorithms +# include snappy, zip, gzip, bz2, and lzma. If not specified, no compression will be used. +# Note that the compression algorithm must be available in the Python installation (e.g. +# python-snappy for snappy). zip, gz and bz2 are available by default. +# +# Example: xcom_objectstorage_compression = gz +# +# Variable: AIRFLOW__COMMON.IO__XCOM_OBJECTSTORAGE_COMPRESSION +# +xcom_objectstorage_compression = + +[fab] +# This section contains configs specific to FAB provider. + +# Boolean for enabling rate limiting on authentication endpoints. +# +# Variable: AIRFLOW__FAB__AUTH_RATE_LIMITED +# +auth_rate_limited = True + +# Rate limit for authentication endpoints. +# +# Variable: AIRFLOW__FAB__AUTH_RATE_LIMIT +# +auth_rate_limit = 5 per 40 second + +# Update FAB permissions and sync security manager roles +# on webserver startup +# +# Variable: AIRFLOW__FAB__UPDATE_FAB_PERMS +# +update_fab_perms = True + +[imap] +# Options for IMAP provider. + +# ssl_context = + +[azure_remote_logging] +# Configuration that needs to be set for enabling remote logging in Azure Blob Storage + +remote_wasb_log_container = airflow-logs + +[openlineage] +# This section applies settings for OpenLineage integration. +# More about configuration and its precedence can be found at +# https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/guides/user.html#transport-setup + +# Disable sending events without uninstalling the OpenLineage Provider by setting this to true.
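The [common.io] xcom_objectstorage_threshold rule above reduces to a three-way decision; a simplified sketch (not the provider's actual code):

    # -1: always database; 0: always object storage; N>0: object storage
    # only when the serialized XCom value exceeds N bytes.
    def store_in_object_storage(size_bytes: int, threshold: int) -> bool:
        if threshold < 0:
            return False
        if threshold == 0:
            return True
        return size_bytes > threshold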
+# +# Variable: AIRFLOW__OPENLINEAGE__DISABLED +# +disabled = False + +# Exclude some Operators from emitting OpenLineage events by passing a string of semicolon separated +# full import paths of Operators to disable. +# +# Example: disabled_for_operators = airflow.providers.standard.operators.bash.BashOperator; airflow.providers.standard.operators.python.PythonOperator +# +# Variable: AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS +# +disabled_for_operators = + +# If this setting is enabled, OpenLineage integration won't collect and emit metadata, +# unless you explicitly enable it per `DAG` or `Task` using `enable_lineage` method. +# +# Variable: AIRFLOW__OPENLINEAGE__SELECTIVE_ENABLE +# +selective_enable = False + +# Set namespace that the lineage data belongs to, so that if you use multiple OpenLineage producers, +# events coming from them will be logically separated. +# +# Example: namespace = my_airflow_instance_1 +# +# Variable: AIRFLOW__OPENLINEAGE__NAMESPACE +# +# namespace = + +# Register custom OpenLineage Extractors by passing a string of semicolon separated full import paths. +# +# Example: extractors = full.path.to.ExtractorClass;full.path.to.AnotherExtractorClass +# +# Variable: AIRFLOW__OPENLINEAGE__EXTRACTORS +# +# extractors = + +# Register custom run facet functions by passing a string of semicolon separated full import paths. +# +# Example: custom_run_facets = full.path.to.custom_facet_function;full.path.to.another_custom_facet_function +# +# Variable: AIRFLOW__OPENLINEAGE__CUSTOM_RUN_FACETS +# +custom_run_facets = + +# Specify the path to the YAML configuration file. +# This ensures backwards compatibility with passing config through the `openlineage.yml` file. +# +# Example: config_path = full/path/to/openlineage.yml +# +# Variable: AIRFLOW__OPENLINEAGE__CONFIG_PATH +# +config_path = + +# Pass OpenLineage Client transport configuration as JSON string. It should contain type of the +# transport and additional options (different for each transport type). For more details see: +# https://openlineage.io/docs/client/python/#built-in-transport-types +# +# Currently supported types are: +# +# * HTTP +# * Kafka +# * Console +# * File +# +# Example: transport = {"type": "http", "url": "http://localhost:5000", "endpoint": "api/v1/lineage"} +# +# Variable: AIRFLOW__OPENLINEAGE__TRANSPORT +# +transport = + +# Disable the inclusion of source code in OpenLineage events by setting this to `true`. +# By default, several Operators (e.g. Python, Bash) will include their source code in the events +# unless disabled. +# +# Variable: AIRFLOW__OPENLINEAGE__DISABLE_SOURCE_CODE +# +disable_source_code = False + +# Number of processes to utilize for processing DAG state changes +# in an asynchronous manner within the scheduler process. +# +# Variable: AIRFLOW__OPENLINEAGE__DAG_STATE_CHANGE_PROCESS_POOL_SIZE +# +dag_state_change_process_pool_size = 1 + +# Maximum amount of time (in seconds) that OpenLineage can spend executing metadata extraction. +# +# Variable: AIRFLOW__OPENLINEAGE__EXECUTION_TIMEOUT +# +execution_timeout = 10 + +# If true, OpenLineage event will include full task info - potentially containing large fields. +# +# Variable: AIRFLOW__OPENLINEAGE__INCLUDE_FULL_TASK_INFO +# +include_full_task_info = False + +# If true, OpenLineage events will include information useful for debugging - potentially +# containing large fields e.g. all installed packages and their versions. 
+# +# Variable: AIRFLOW__OPENLINEAGE__DEBUG_MODE +# +debug_mode = False + +# Automatically inject OpenLineage's parent job (namespace, job name, run id) information into Spark +# application properties for supported Operators. +# +# Variable: AIRFLOW__OPENLINEAGE__SPARK_INJECT_PARENT_JOB_INFO +# +spark_inject_parent_job_info = False + +[smtp_provider] +# Options for SMTP provider. + +# ssl context to use when using SMTP and IMAP SSL connections. By default, the context is "default" +# which sets it to ``ssl.create_default_context()``, which provides the right balance between +# compatibility and security; it does, however, require that the certificates in your operating system +# are up to date and that your SMTP/IMAP servers have valid certificates with corresponding public +# keys installed on your machines. You can switch it to "none" if you want to disable checking +# of the certificates, but it is not recommended as it allows MITM (man-in-the-middle) attacks +# if your infrastructure is not sufficiently secured. It should only be set temporarily while you +# are fixing your certificate configuration. This can typically be done by upgrading to a newer +# version of the operating system you run Airflow components on, by upgrading/refreshing the proper +# certificates in the OS, or by updating the certificates for your mail servers. +# +# If you do not set this option explicitly, it will use the Airflow "email.ssl_context" configuration; +# if that configuration is not present either, it will use the "default" value. +# +# Example: ssl_context = default +# +# Variable: AIRFLOW__SMTP_PROVIDER__SSL_CONTEXT +# +# ssl_context = + +# Allows overriding of the standard templated email subject line when the SmtpNotifier is used. +# Must provide a path to the template. +# +# Example: templated_email_subject_path = path/to/override/email_subject.html +# +# Variable: AIRFLOW__SMTP_PROVIDER__TEMPLATED_EMAIL_SUBJECT_PATH +# +# templated_email_subject_path = + +# Allows overriding of the standard templated email path when the SmtpNotifier is used. Must provide +# a path to the template. +# +# Example: templated_html_content_path = path/to/override/email.html +# +# Variable: AIRFLOW__SMTP_PROVIDER__TEMPLATED_HTML_CONTENT_PATH +# +# templated_html_content_path = + +[docker] +docker_url = unix://var/run/docker.sock diff --git a/airflow/configs/.env.master b/airflow/configs/.env.master new file mode 100644 index 0000000..9288246 --- /dev/null +++ b/airflow/configs/.env.master @@ -0,0 +1,4 @@ +# This file should be generated from ansible/templates/.env.ytdlp.j2 +# Do not edit manually - your changes will be overwritten. +# +# To generate this file, run the Ansible playbook that processes the templates. diff --git a/airflow/configs/.env.worker b/airflow/configs/.env.worker new file mode 100644 index 0000000..9288246 --- /dev/null +++ b/airflow/configs/.env.worker @@ -0,0 +1,4 @@ +# This file should be generated from ansible/templates/.env.ytdlp.j2 +# Do not edit manually - your changes will be overwritten. +# +# To generate this file, run the Ansible playbook that processes the templates.
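The compose templates below read MASTER_HOST_IP, POSTGRES_PASSWORD and REDIS_PASSWORD from these generated .env files. A quick pre-flight sketch to catch an unrendered placeholder file before `docker compose up`, assuming python-dotenv (already pinned in the stack's pip requirements) and an illustrative key list:

    from dotenv import dotenv_values

    # Fail fast if the .env was deployed unrendered (key list is illustrative).
    env = dotenv_values("airflow/configs/.env.master")
    required = ("MASTER_HOST_IP", "POSTGRES_PASSWORD", "REDIS_PASSWORD")
    missing = [k for k in required if not env.get(k)]
    if missing:
        raise SystemExit(f"unrendered .env, missing: {missing}")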
diff --git a/airflow/configs/docker-compose-dl.yaml.j2 b/airflow/configs/docker-compose-dl.yaml.j2 index fd93f4d..41c3633 100644 --- a/airflow/configs/docker-compose-dl.yaml.j2 +++ b/airflow/configs/docker-compose-dl.yaml.j2 @@ -28,8 +28,8 @@ x-airflow-common: environment: &airflow-common-env - AIRFLOW__CORE__PARALLELISM: 64 - AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 32 + AIRFLOW__CORE__PARALLELISM: 128 + AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64 AIRFLOW__SCHEDULER__PARSING_PROCESSES: 4 AIRFLOW__WEBSERVER__WORKERS: 5 AIRFLOW__WEBSERVER__WORKER_CLASS: "gevent" @@ -49,8 +49,8 @@ x-airflow-common: # Backend connections - These should point to the master node # Set MASTER_HOST_IP, POSTGRES_PASSWORD, and REDIS_PASSWORD in your .env file AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow - IRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow - AIRFLOW__CELERY__BROKER_URL: redis://:${REDIS_PASSWORD}@${MASTER_HOST_IP}:52909/0 + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow + AIRFLOW__CELERY__BROKER_URL: redis://:${REDIS_PASSWORD}@${MASTER_HOST_IP}:{{ redis_port }}/0 # Remote Logging - connection is configured directly via environment variables #_PIP_ADDITIONAL_REQUIREMENTS: ${{ '{' }}_PIP_ADDITIONAL_REQUIREMENTS:- apache-airflow-providers-docker apache-airflow-providers-http thrift>=0.16.0,<=0.20.0 backoff>=2.2.1 python-dotenv==1.0.1 psutil>=5.9.0 apache-airflow-providers-amazon{{ '}' }} @@ -75,8 +75,8 @@ x-airflow-common: - ${AIRFLOW_PROJ_DIR:-.}/downloadfiles:/opt/airflow/downloadfiles - ${AIRFLOW_PROJ_DIR:-.}/addfiles:/opt/airflow/addfiles - ${AIRFLOW_PROJ_DIR:-.}/inputfiles:/opt/airflow/inputfiles - # Use AIRFLOW_UID from .env file to fix permission issues. - user: "${AIRFLOW_UID:-50000}" + # Use AIRFLOW_UID from .env file to fix permission issues. GID is set to 0 for compatibility with the Airflow image. + user: "${{ '{' }}AIRFLOW_UID:-50000{{ '}' }}:0" services: airflow-worker: @@ -108,7 +108,9 @@ services: AIRFLOW__CELERY__WORKER_QUEUES: "queue-dl,queue-dl-${HOSTNAME:-dl001}" AIRFLOW__CELERY__WORKER_TAGS: "dl" AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1" - AIRFLOW__CELERY__WORKER_CONCURRENCY: ${AIRFLOW_WORKER_DOWNLOAD_CONCURRENCY:-16} + # Use autoscaling to adjust number of workers based on load. + # Format is max_concurrency,min_concurrency. + AIRFLOW__CELERY__WORKER_AUTOSCALE: "16,4" # Use prefork pool for better compatibility with blocking libraries. 
AIRFLOW__CELERY__POOL: "prefork" AIRFLOW__CELERY__TASK_ACKS_LATE: "False" diff --git a/airflow/configs/docker-compose-master.yaml.j2 b/airflow/configs/docker-compose-master.yaml.j2 index 1b7cb5d..35419cb 100644 --- a/airflow/configs/docker-compose-master.yaml.j2 +++ b/airflow/configs/docker-compose-master.yaml.j2 @@ -66,10 +66,10 @@ x-airflow-common: - proxynet environment: &airflow-common-env - AIRFLOW__CORE__PARALLELISM: 64 - AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 32 + AIRFLOW__CORE__PARALLELISM: 128 + AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64 AIRFLOW__SCHEDULER__PARSING_PROCESSES: 4 - AIRFLOW__WEBSERVER__WORKER_CLASS: gevent + AIRFLOW__WEBSERVER__WORKER_CLASS: sync AIRFLOW__WEBSERVER__WORKERS: 8 AIRFLOW__LOGGING__SECRET_MASK_EXCEPTION_ARGS: 'false' @@ -101,7 +101,7 @@ x-airflow-common: AIRFLOW__LOGGING__REMOTE_LOGGING: 'true' AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER: "s3://airflow-logs" AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID: minio_default - AIRFLOW__LOGGING__ENCRYPT_S3_LOGS: 'false' + AIRFLOW__LOGGING__ENCRYPT_S3_LOGS: 'false' AIRFLOW__CORE__LOCAL_SETTINGS_PATH: "/opt/airflow/config/custom_task_hooks.py" volumes: - ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/dags:/opt/airflow/dags @@ -310,17 +310,17 @@ services: ports: - "8080:8080" depends_on: - - airflow-webserver + airflow-webserver: + condition: service_started restart: always airflow-webserver: <<: *airflow-common command: webserver + expose: + - "8080" environment: <<: *airflow-common-env - # Trigger gevent monkeypatching for webserver. - # See: https://github.com/apache/airflow/pull/28283 - _AIRFLOW_PATCH_GEVENT: "1" healthcheck: test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] interval: 30s @@ -511,14 +511,14 @@ services: - -c - airflow - # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up - # or by explicitly targeted on the command line e.g. docker-compose up flower. - # See: https://docs.docker.com/compose/profiles/ flower: <<: *airflow-common command: celery flower ports: - "5555:5555" + environment: + <<: *airflow-common-env + FLOWER_BASIC_AUTH: "flower:${{ '{' }}FLOWER_PASSWORD{{ '}' }}" healthcheck: test: ["CMD", "curl", "--fail", "http://localhost:5555/"] interval: 30s @@ -530,8 +530,6 @@ services: <<: *airflow-common-depends-on airflow-init: condition: service_completed_successfully - profiles: - - flower docker-socket-proxy: profiles: diff --git a/airflow/configs/docker-compose-ytdlp-ops.yaml.j2 b/airflow/configs/docker-compose-ytdlp-ops.yaml.j2 index bec7764..98007cd 100644 --- a/airflow/configs/docker-compose-ytdlp-ops.yaml.j2 +++ b/airflow/configs/docker-compose-ytdlp-ops.yaml.j2 @@ -4,17 +4,21 @@ include: # This automatically includes the generated camoufox service definitions and dependencies. # It simplifies the docker-compose command, as you no longer need to specify both files with -f. # The file is generated by the config-generator service and will be created even if empty.
- - docker-compose.camoufox.yaml + - ./configs/docker-compose.camoufox.yaml {% endif %} services: envoy: image: envoyproxy/envoy:v1.29-latest + {% if service_role != 'management' %} + container_name: envoy-thrift-lb-${HOSTNAME} + {% else %} container_name: envoy-thrift-lb + {% endif %} restart: unless-stopped volumes: # Mount the generated config file from the host - - ./envoy.yaml:/etc/envoy/envoy.yaml:ro + - ./configs/envoy.yaml:/etc/envoy/envoy.yaml:ro ports: # This is the single public port for all Thrift traffic - "${ENVOY_PORT:-9080}:${ENVOY_PORT:-9080}" @@ -37,12 +41,12 @@ services: {% endif %} # Ports are no longer exposed directly. Envoy will connect to them on the internal network. env_file: - - ./.env # Path is relative to the compose file location (configs directory) + - ./.env # Path is relative to the project directory volumes: - context-data:/app/context-data {% if service_role != 'management' %} # Mount the generated endpoints file to make it available to the server - - ../camoufox/camoufox_endpoints.json:/app/config/camoufox_endpoints.json:ro + - ./configs/camoufox_endpoints.json:/app/config/camoufox_endpoints.json:ro {% endif %} # Mount the plugin source code for live updates without rebuilding the image. # Assumes the plugin source is in a 'bgutil-ytdlp-pot-provider' directory @@ -95,8 +99,6 @@ services: volumes: context-data: - name: context-data - external: true {% if service_role == 'management' or not camoufox_proxies %} networks: diff --git a/airflow/configs/docker-compose.camoufox.yaml.j2 b/airflow/configs/docker-compose.camoufox.yaml.j2 index 387eb74..7cd4cc4 100644 --- a/airflow/configs/docker-compose.camoufox.yaml.j2 +++ b/airflow/configs/docker-compose.camoufox.yaml.j2 @@ -5,16 +5,17 @@ # and adds the necessary dependencies to the main services. 
services: {% for proxy in camoufox_proxies %} +{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %} {% set container_base_port = camoufox_port + loop.index0 * worker_count %} {% set host_base_port = container_base_port %} - camoufox-{{ loop.index }}: + camoufox-{{ proxy_port }}-{{ loop.index }}: build: context: ../camoufox dockerfile: Dockerfile args: VNC_PASSWORD: "{{ vnc_password }}" image: camoufox:latest - container_name: ytdlp-ops-camoufox-{{ loop.index }}-1 + container_name: ytdlp-ops-camoufox-{{ proxy_port }}-{{ loop.index }}-1 restart: unless-stopped shm_size: '2gb' # Mitigates browser crashes due to shared memory limitations ports: @@ -27,7 +28,7 @@ services: - CAMOUFOX_RESTART_THRESHOLD_MB=1500 volumes: - /tmp/.X11-unix:/tmp/.X11-unix:rw - - camoufox-data-{{ loop.index }}:/app/context-data + - camoufox-data-{{ proxy_port }}-{{ loop.index }}:/app/context-data - camoufox-browser-cache:/root/.cache/ms-playwright # Persist browser binaries command: [ "--ws-host", "0.0.0.0", @@ -62,7 +63,8 @@ services: restart: "no" depends_on: {% for proxy in camoufox_proxies %} - - camoufox-{{ loop.index }} +{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %} + - camoufox-{{ proxy_port }}-{{ loop.index }} {% endfor %} networks: - proxynet @@ -70,7 +72,8 @@ services: volumes: {% for proxy in camoufox_proxies %} - camoufox-data-{{ loop.index }}: +{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %} + camoufox-data-{{ proxy_port }}-{{ loop.index }}: {% endfor %} {% if camoufox_proxies %} camoufox-browser-cache: diff --git a/airflow/configs/docker-compose.config-generate.yaml b/airflow/configs/docker-compose.config-generate.yaml index aaeefca..42ea4ea 100644 --- a/airflow/configs/docker-compose.config-generate.yaml +++ b/airflow/configs/docker-compose.config-generate.yaml @@ -1,6 +1,5 @@ -# This file is used to generate the necessary configuration files for the main application stack. -# It should be run as a one-off command before starting the main services. 
-# Example: docker-compose -f airflow/docker-compose.config-generate.yaml run --rm config-generator
+version: '3.8'
+
 services:
   config-generator:
     image: python:3.12-slim
@@ -9,6 +8,6 @@ services:
       - ./.env
     volumes:
       # Mount the entire project directory to access scripts and write output files
-      - ./:/app
+      - ../:/app
     command: >
       sh -c "pip install jinja2 && python3 /app/generate_envoy_config.py"
diff --git a/airflow/dags/.DS_Store b/airflow/dags/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..5008ddfcf53c02e82d7eee2e57c38e5672ef89f6
GIT binary patch
[binary payload omitted]
diff --git a/airflow/dags/__pycache__/ytdlp_service_test.cpython-312.pyc b/airflow/dags/__pycache__/ytdlp_service_test.cpython-312.pyc
deleted file mode 100644
index 248433d50c66d5486584b2a2f936f0042e56d065..0000000000000000000000000000000000000000
GIT binary patch
[binary payload omitted]
diff --git a/airflow/dags/__pycache__/ytdlp_token_dag.cpython-312.pyc b/airflow/dags/__pycache__/ytdlp_token_dag.cpython-312.pyc
deleted file mode 100644
index d59e82008e9c9ccdf48e9a6eaffb965dfed915e3..0000000000000000000000000000000000000000
GIT binary patch
[binary payload omitted]
diff --git a/airflow/dags/ytdlp_mgmt_queues.py b/airflow/dags/ytdlp_mgmt_queues.py
index d54acea..be3b573 100644
--- a/airflow/dags/ytdlp_mgmt_queues.py
+++ b/airflow/dags/ytdlp_mgmt_queues.py
@@ -634,7 +634,7 @@ with DAG(
         ),
         # --- Params for 'list_contents' ---
         "queue_to_list": Param(
-            'video_queue_inbox,video_queue_fail',
+            'video_queue_inbox,video_queue_result,video_queue_fail',
             type="string",
             title="[list_contents] Queues to List",
             description="Comma-separated list of exact Redis key names to list.",
diff --git a/airflow/dags/ytdlp_ops_worker_per_url.py b/airflow/dags/ytdlp_ops_worker_per_url.py
index b9383a3..d931522 100644
--- a/airflow/dags/ytdlp_ops_worker_per_url.py
+++ b/airflow/dags/ytdlp_ops_worker_per_url.py
@@ -238,9 +238,15 @@ def handle_bannable_error_branch(task_id_to_check: str, **context):
         logger.error(f"Task {task_id_to_check} failed without error details. Marking as fatal.")
         return 'handle_fatal_error'

+    error_message = error_details.get('error_message', '').strip()
     error_code = error_details.get('error_code', '').strip()
     policy = params.get('on_bannable_failure', 'retry_with_new_account')

+    # Check if this is an age confirmation error - should not stop the loop
+    if "Sign in to confirm your age" in error_message or "confirm your age" in error_message.lower():
+        logger.info(f"Age confirmation error detected for '{task_id_to_check}'. This is a content restriction, not a bot detection issue.")
+        return 'handle_age_restriction_error'
+
     # Fatal Thrift connection errors that should stop all processing.
     if error_code == 'TRANSPORT_ERROR':
         logger.error(f"Fatal Thrift connection error from '{task_id_to_check}'. Stopping processing.")
@@ -574,8 +580,15 @@ def report_failure_and_continue(**context):

     try:
         client = _get_redis_client(params['redis_conn_id'])
-        client.hset(f"{params['queue_name']}_result", url, json.dumps(result_data))
-        logger.info(f"Stored failure result for URL '{url}'.")
+        result_queue = f"{params['queue_name']}_result"
+        fail_queue = f"{params['queue_name']}_fail"
+
+        with client.pipeline() as pipe:
+            pipe.hset(result_queue, url, json.dumps(result_data))
+            pipe.hset(fail_queue, url, json.dumps(result_data))
+            pipe.execute()
+
+        logger.info(f"Stored failure result for URL '{url}' in '{result_queue}' and '{fail_queue}'.")
     except Exception as e:
         logger.error(f"Could not report failure to Redis: {e}", exc_info=True)
@@ -610,8 +623,8 @@ def handle_fatal_error(**context):
     # Report failure to Redis so the URL can be reprocessed later
     try:
         result_data = {
-            'status': 'failed',
-            'end_time': time.time(),
+            'status': 'failed',
+            'end_time': time.time(),
             'url': url,
             'dag_run_id': context['dag_run'].run_id,
             'error': 'fatal_error',
@@ -619,8 +632,15 @@
             'error_details': error_details
         }
         client = _get_redis_client(params['redis_conn_id'])
-        client.hset(f"{params['queue_name']}_result", url, json.dumps(result_data))
-        logger.info(f"Stored fatal error result for URL '{url}' in Redis for later reprocessing.")
+        result_queue = f"{params['queue_name']}_result"
+        fail_queue = f"{params['queue_name']}_fail"
+
+        with client.pipeline() as pipe:
+            pipe.hset(result_queue, url, json.dumps(result_data))
+            pipe.hset(fail_queue, url, json.dumps(result_data))
+            pipe.execute()
+
+        logger.info(f"Stored fatal error result for URL '{url}' in '{result_queue}' and '{fail_queue}' for later reprocessing.")
     except Exception as e:
         logger.error(f"Could not report fatal error to Redis: {e}", exc_info=True)
@@ -669,8 +689,14 @@ def handle_retry_failure_branch(task_id_to_check: str, **context):
     if not error_details:
         return 'handle_fatal_error'

+    error_message = error_details.get('error_message', '').strip()
     error_code = error_details.get('error_code', '').strip()

+    # Check if this is an age confirmation error - should not stop the loop
+    if "Sign in to confirm your age" in error_message or "confirm your age" in error_message.lower():
+        logger.info(f"Age confirmation error detected on retry from '{task_id_to_check}'. Reporting failure and continuing loop.")
+        return 'report_failure_and_continue'
+
     if error_code == 'TRANSPORT_ERROR':
         logger.error(f"Fatal Thrift connection error on retry from '{task_id_to_check}'.")
         return 'handle_fatal_error'
@@ -715,6 +741,61 @@ def coalesce_token_data(get_token_result=None, retry_get_token_result=None):
     # This should not be reached if trigger_rule='one_success' is working correctly.
     raise AirflowException("Could not find a successful token result from any attempt.")

+
+@task(trigger_rule='one_failed')
+def handle_age_restriction_error(**context):
+    """
+    Handles age restriction errors specifically. These are content restrictions
+    that cannot be bypassed by using different accounts, so we report the failure
+    and continue the processing loop rather than stopping it.
+    """
+    params = context['params']
+    ti = context['task_instance']
+    url = params.get('url_to_process', 'unknown')
+
+    # Collect error details
+    error_details = {}
+    first_token_task_id = 'get_token'
+    retry_token_task_id = 'retry_get_token'
+
+    first_token_error = ti.xcom_pull(task_ids=first_token_task_id, key='error_details')
+    retry_token_error = ti.xcom_pull(task_ids=retry_token_task_id, key='error_details')
+
+    # Use the most recent error details
+    if retry_token_error:
+        error_details = retry_token_error
+    elif first_token_error:
+        error_details = first_token_error
+
+    logger.error(f"Age restriction error for URL '{url}'. This content requires age confirmation and cannot be bypassed.")
+
+    # Report failure to Redis so the URL can be marked as failed
+    try:
+        result_data = {
+            'status': 'failed',
+            'end_time': time.time(),
+            'url': url,
+            'dag_run_id': context['dag_run'].run_id,
+            'error': 'age_restriction',
+            'error_message': 'Content requires age confirmation',
+            'error_details': error_details
+        }
+        client = _get_redis_client(params['redis_conn_id'])
+        result_queue = f"{params['queue_name']}_result"
+        fail_queue = f"{params['queue_name']}_fail"
+
+        with client.pipeline() as pipe:
+            pipe.hset(result_queue, url, json.dumps(result_data))
+            pipe.hset(fail_queue, url, json.dumps(result_data))
+            pipe.execute()
+
+        logger.info(f"Stored age restriction error for URL '{url}' in '{result_queue}' and '{fail_queue}'.")
+    except Exception as e:
+        logger.error(f"Could not report age restriction error to Redis: {e}", exc_info=True)
+
+    # This is NOT a fatal error for the processing loop - we just continue with the next URL
+
+
 # =============================================================================
 # DAG Definition with TaskGroups
 # =============================================================================
@@ -755,6 +836,7 @@ with DAG(
     fatal_error_task = handle_fatal_error()
     report_failure_task = report_failure_and_continue()
     continue_loop_task = continue_processing_loop()
+    age_restriction_task = handle_age_restriction_error()

     # --- Task Group 1: Initial Attempt ---
     with TaskGroup("initial_attempt", tooltip="Initial token acquisition attempt") as initial_attempt_group:
@@ -770,7 +852,7 @@
         )

         first_token_attempt >> initial_branch_task
-        initial_branch_task >> [fatal_error_task, ban_and_report_immediately_task]
+        initial_branch_task >> [fatal_error_task, ban_and_report_immediately_task, age_restriction_task]

     # --- Task Group 2: Retry Logic ---
     with TaskGroup("retry_logic", tooltip="Retry logic with account management") as retry_logic_group:
@@ -820,7 +902,7 @@
         direct_retry_account_task >> coalesced_retry_data
         coalesced_retry_data >> retry_token_task
         retry_token_task >> retry_branch_task
-        retry_branch_task >> [fatal_error_task, report_failure_task, ban_after_retry_report_task]
+        retry_branch_task >> [fatal_error_task, report_failure_task, ban_after_retry_report_task, age_restriction_task]
         ban_after_retry_report_task >> report_failure_task

     # --- Task Group 3: Download and Processing ---
@@ -849,10 +931,13 @@

     # --- DAG Dependencies between TaskGroups ---
     # Initial attempt can lead to retry logic or direct failure
-    initial_branch_task >> [retry_logic_group, fatal_error_task, ban_and_report_immediately_task]
+    initial_branch_task >> [retry_logic_group, fatal_error_task, ban_and_report_immediately_task, age_restriction_task]

     # Retry logic leads to download processing on success or failure reporting on failure
     retry_branch_task >> [download_processing_group, report_failure_task]

     # Ban and report immediately leads to failure reporting
     ban_and_report_immediately_task >> report_failure_task
+
+    # Age restriction error leads to failure reporting and continues the loop
+    age_restriction_task >> continue_loop_task
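With the changes above, every failure path (generic retry failure, fatal error, and the new age-restriction handler) writes the same payload to both the `<queue_name>_result` and `<queue_name>_fail` Redis hashes inside one pipeline, so the two hashes cannot drift apart. For illustration only, a downstream re-queue job might consume the fail hash like this; the function name, the inbox list type, and a client created with `decode_responses=True` are assumptions, while the hash names and payload fields come from the DAG above:

    import json
    import redis

    def requeue_failed_urls(client: redis.Redis, queue_name: str = "video_queue") -> int:
        """Push retryable failures back onto the inbox list and clear them from
        the fail hash; age restrictions are permanent, so they stay put."""
        requeued = 0
        fail_queue = f"{queue_name}_fail"
        for url, raw in client.hgetall(fail_queue).items():
            payload = json.loads(raw)
            if payload.get("error") == "age_restriction":
                continue  # content restriction; retrying with another account will not help
            with client.pipeline() as pipe:
                pipe.lpush(f"{queue_name}_inbox", url)
                pipe.hdel(fail_queue, url)
                pipe.execute()
            requeued += 1
        return requeued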
diff --git a/airflow/generate_envoy_config.py b/airflow/generate_envoy_config.py
index e5251b7..ff446c3 100644
--- a/airflow/generate_envoy_config.py
+++ b/airflow/generate_envoy_config.py
@@ -113,6 +113,8 @@ def generate_configs():
     # The templates are in the 'configs' directory.
     env = Environment(loader=FileSystemLoader(configs_dir), trim_blocks=True, lstrip_blocks=True)

+    # Make the helper function available to Jinja2 templates
+    env.globals['_get_port_from_proxy_url'] = _get_port_from_proxy_url

     # Get service role from environment to determine what to generate
     service_role = os.getenv('SERVICE_ROLE', 'all-in-one')
@@ -165,11 +167,14 @@
     for i, proxy in enumerate(camoufox_proxies):
         proxy_port = _get_port_from_proxy_url(proxy['url'])
         if proxy_port:
+            # Use the correct container name pattern that matches the docker-compose template
+            # The container name in the template is: ytdlp-ops-camoufox-{{ proxy_port }}-{{ loop.index }}-1
+            container_name = f"ytdlp-ops-camoufox-{proxy_port}-{i+1}-1"
             container_base_port = camoufox_port + i * worker_count
             endpoints = []
             for j in range(worker_count):
                 port = container_base_port + j
-                endpoints.append(f"ws://{camoufox_backend_prefix}{i+1}:{port}/mypath")
+                endpoints.append(f"ws://{container_name}:{port}/mypath")

             endpoints_map[proxy_port] = {
                 "ws_endpoints": endpoints
diff --git a/airflow/requirements.txt b/airflow/requirements.txt
deleted file mode 100644
index 0d0b044..0000000
--- a/airflow/requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-thrift>=0.16.0,<=0.20.0
-backoff>=2.2.1
-python-dotenv==1.0.1
-psutil>=5.9.0
-docker>=6.0.0
-apache-airflow-providers-docker
-redis
-ffprobe3
-ffmpeg-python
\ No newline at end of file
diff --git a/airflow/roles/airflow-master/tasks/main.yml b/airflow/roles/airflow-master/tasks/main.yml
index 12d63b0..479c8d7 100644
--- a/airflow/roles/airflow-master/tasks/main.yml
+++ b/airflow/roles/airflow-master/tasks/main.yml
@@ -9,85 +9,222 @@
     path: "{{ airflow_master_dir }}"
     state: directory
     owner: "{{ ssh_user }}"
-    group: ytdl
+    group: "{{ deploy_group }}"
     mode: '0755'
   become: yes
   when: not master_dir_stat.stat.exists

+- name: Ensure Airflow master configs directory exists
+  file:
+    path: "{{ airflow_master_dir }}/configs"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  become: yes
+
+- name: Ensure Airflow master config directory exists
+  file:
+    path: "{{ airflow_master_dir }}/config"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  become: yes
group: "{{ deploy_group }}" + mode: '0755' + become: yes + +- name: Ensure Airflow operational directories exist with correct permissions + file: + path: "{{ airflow_master_dir }}/{{ item }}" + state: directory + owner: "{{ airflow_uid }}" + group: "{{ deploy_group }}" + mode: '0775' + become: yes + loop: + - "dags" + - "logs" + - "plugins" + - "downloadfiles" + - "addfiles" + - "inputfiles" + - name: Check if source directories exist stat: - path: "{{ playbook_dir }}/../{{ item }}" + path: "../{{ item }}" register: source_dirs loop: - - "airflow/inputfiles" - "airflow/plugins" - "airflow/addfiles" - "airflow/bgutil-ytdlp-pot-provider" +- name: "Log: Syncing Airflow core files" + debug: + msg: "Syncing DAGs, configs, and Python source code to the master node." + - name: Sync Airflow master files synchronize: - src: "{{ playbook_dir }}/../{{ item }}" + src: "../{{ item }}" dest: "{{ airflow_master_dir }}/" archive: yes recursive: yes delete: yes + rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" loop: - "airflow/Dockerfile" - - "airflow/docker-compose-master.yaml" - - "airflow/dags/" - - "airflow/config/" + - "airflow/Dockerfile.caddy" + - "airflow/.dockerignore" + - "airflow/dags" + - "airflow/inputfiles" - "setup.py" - - "yt_ops_services/" - - "thrift_model/" + - "yt_ops_services" + - "thrift_model" - "VERSION" - - "airflow/init-airflow.sh" - - "airflow/nginx.conf" + - "airflow/update-yt-dlp.sh" - "get_info_json_client.py" - "proxy_manager_client.py" + - "utils" + +- name: Copy custom Python config files to master + copy: + src: "../airflow/config/{{ item }}" + dest: "{{ airflow_master_dir }}/config/{{ item }}" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0644' + become: yes + loop: + - "custom_task_hooks.py" + - "airflow_local_settings.py" + +- name: Ensure any existing airflow.cfg directory is removed + file: + path: "{{ airflow_master_dir }}/config/airflow.cfg" + state: absent + become: yes + ignore_errors: yes + +- name: Copy airflow.cfg to master + copy: + src: "../airflow/airflow.cfg" + dest: "{{ airflow_master_dir }}/config/airflow.cfg" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0644' + become: yes + +- name: Sync Airflow master config files + synchronize: + src: "../airflow/configs/{{ item }}" + dest: "{{ airflow_master_dir }}/configs/" + archive: yes + recursive: yes + rsync_path: "sudo rsync" + rsync_opts: "{{ rsync_default_opts }}" + loop: + - "nginx.conf" + - "Caddyfile" - name: Sync optional directories if they exist synchronize: - src: "{{ playbook_dir }}/../{{ item }}/" - dest: "{{ airflow_master_dir }}/{{ item | basename }}/" + src: "../{{ item.item }}/" + dest: "{{ airflow_master_dir }}/{{ item.item | basename }}/" archive: yes recursive: yes delete: yes + rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" - loop: - - "airflow/inputfiles" - - "airflow/plugins" - - "airflow/addfiles" - - "airflow/bgutil-ytdlp-pot-provider" - when: source_dirs.results | selectattr('item', 'equalto', item) | map(attribute='stat.exists') | first + loop: "{{ source_dirs.results }}" + when: item.stat.exists - name: Sync pangramia thrift files synchronize: - src: "{{ playbook_dir }}/../thrift_model/gen_py/pangramia/" + src: "../thrift_model/gen_py/pangramia/" dest: "{{ airflow_master_dir }}/pangramia/" archive: yes recursive: yes delete: yes + rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" -- name: Create .env file for Airflow master service +- name: Template docker-compose file for master template: - src: 
"../../templates/.env.master.j2" - dest: "{{ airflow_master_dir }}/.env" + src: "{{ playbook_dir }}/../airflow/configs/docker-compose-master.yaml.j2" + dest: "{{ airflow_master_dir }}/configs/docker-compose-master.yaml" mode: "{{ file_permissions }}" owner: "{{ ssh_user }}" - group: ytdl - vars: - service_role: "master" + group: "{{ deploy_group }}" + become: yes + +- name: Template Redis connection file + template: + src: "../airflow/config/redis_default_conn.json.j2" + dest: "{{ airflow_master_dir }}/config/redis_default_conn.json" + mode: "{{ file_permissions }}" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + +- name: Template Minio connection file for master + template: + src: "../airflow/config/minio_default_conn.json.j2" + dest: "{{ airflow_master_dir }}/config/minio_default_conn.json" + mode: "{{ file_permissions }}" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + +- name: Ensure config directory is group-writable for Airflow initialization + file: + path: "{{ airflow_master_dir }}/config" + state: directory + mode: '0775' + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + +- name: Ensure airflow.cfg is group-writable for Airflow initialization + file: + path: "{{ airflow_master_dir }}/config/airflow.cfg" + state: file + mode: '0664' + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes - name: Create symlink for docker-compose.yaml file: - src: "{{ airflow_master_dir }}/docker-compose-master.yaml" + src: "{{ airflow_master_dir }}/configs/docker-compose-master.yaml" dest: "{{ airflow_master_dir }}/docker-compose.yaml" state: link owner: "{{ ssh_user }}" - group: ytdl + group: "{{ deploy_group }}" force: yes + follow: no + +- name: Ensure correct permissions for build context + file: + path: "{{ airflow_master_dir }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + recurse: yes + become: yes + +- name: Ensure postgres-data directory exists on master and has correct permissions + file: + path: "{{ airflow_master_dir }}/postgres-data" + state: directory + owner: "999" # UID for the 'postgres' user in the official postgres image + group: "999" # GID for the 'postgres' group in the official postgres image + mode: '0700' + become: yes + +- name: Set group-writable and setgid permissions on master logs directory contents + shell: | + find {{ airflow_master_dir }}/logs -type d -exec chmod g+rws {} + + find {{ airflow_master_dir }}/logs -type f -exec chmod g+rw {} + + become: yes - name: Verify Dockerfile exists in build directory stat: @@ -99,26 +236,96 @@ msg: "Dockerfile not found in {{ airflow_master_dir }}. Cannot build image." when: not dockerfile_stat.stat.exists +- name: "Log: Building Airflow Docker image" + debug: + msg: "Building the main Airflow Docker image ({{ airflow_image_name }}) locally on the master node. This may take a few minutes." + - name: Build Airflow master image community.docker.docker_image: name: "{{ airflow_image_name }}" build: path: "{{ airflow_master_dir }}" - dockerfile: "Dockerfile" + dockerfile: "Dockerfile" # Explicitly specify the Dockerfile name source: build force_source: true + when: not fast_deploy | default(false) -- name: Run Airflow init script - shell: - cmd: "chmod +x init-airflow.sh && ./init-airflow.sh" - chdir: "{{ airflow_master_dir }}" +- name: "Log: Preparing assets for Caddy image" + debug: + msg: "Extracting static assets from the Airflow image to build the Caddy reverse proxy." 
+  when: not fast_deploy | default(false)
+
+- name: Prepare Caddy asset extraction directory
+  file:
+    path: "{{ airflow_master_dir }}/caddy_build_assets"
+    state: "{{ item }}"
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  loop:
+    - absent
+    - directory
   become: yes
-  become_user: "{{ ssh_user }}"
+  when: not fast_deploy | default(false)
+
+- name: Ensure subdirectories exist with correct permissions
+  file:
+    path: "{{ airflow_master_dir }}/caddy_build_assets/{{ item }}"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  loop:
+    - "appbuilder"
+    - "dist"
+  become: yes
+  when: not fast_deploy | default(false)
+
+- name: Extract static assets from Airflow image for Caddy build
+  shell: |
+    set -e
+    CONTAINER_ID=$(docker create {{ airflow_image_name }})
+    # Dynamically find paths inside the container
+    APPBUILDER_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static", "appbuilder"))')
+    AIRFLOW_DIST_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))')
+    # Copy assets from container to host
+    docker cp "${CONTAINER_ID}:${APPBUILDER_PATH}/." "./caddy_build_assets/appbuilder"
+    docker cp "${CONTAINER_ID}:${AIRFLOW_DIST_PATH}/." "./caddy_build_assets/dist"
+    docker rm -f $CONTAINER_ID
+    # Pre-compress assets
+    find ./caddy_build_assets/appbuilder -type f -print0 | xargs -0 gzip -k -9
+    find ./caddy_build_assets/dist -type f -print0 | xargs -0 gzip -k -9
+  args:
+    chdir: "{{ airflow_master_dir }}"
+    executable: /bin/bash
+  become: yes
+  register: asset_extraction
+  changed_when: asset_extraction.rc == 0
+  when: not fast_deploy | default(false)
+
+- name: "Log: Building Caddy reverse proxy image"
+  debug:
+    msg: "Building the Caddy image (pangramia/ytdlp-ops-caddy:latest) to serve static assets."
+
+- name: Build Caddy image
+  community.docker.docker_image:
+    name: "pangramia/ytdlp-ops-caddy:latest"
+    build:
+      path: "{{ airflow_master_dir }}"
+      dockerfile: "Dockerfile.caddy"
+    source: build
+    force_source: true
+  when: not fast_deploy | default(false)
+
+- name: "Log: Starting Airflow services"
+  debug:
+    msg: "Starting Airflow core services (webserver, scheduler, etc.) on the master node using docker-compose."

 - name: Start Airflow master service
   community.docker.docker_compose_v2:
     project_src: "{{ airflow_master_dir }}"
     files:
-      - "docker-compose-master.yaml"
+      - "configs/docker-compose-master.yaml"
     state: present
     remove_orphans: true
+    pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}"
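The asset-extraction task above drives the docker CLI from a shell block: create a throwaway container, locate the flask_appbuilder and airflow static directories, docker cp them out, then pre-compress. For reference only, the same copy-out step could be expressed with the Python docker SDK that the deleted requirements.txt already pinned (docker>=6.0.0); this is a rough, hypothetical sketch, and `extract_assets` with its arguments are invented names:

    import io
    import tarfile
    import docker

    def extract_assets(image: str, container_path: str, dest_dir: str) -> None:
        """Copy a directory out of an image without starting it, mirroring the
        'docker create' + 'docker cp' sequence in the task above."""
        client = docker.from_env()
        container = client.containers.create(image)  # created, never started
        try:
            stream, _stat = container.get_archive(container_path)
            archive = io.BytesIO(b"".join(stream))
            with tarfile.open(fileobj=archive) as tar:
                tar.extractall(dest_dir)
        finally:
            container.remove(force=True)

The gzip pre-compression would still happen afterwards (as in the task's `gzip -k -9` step), so Caddy can serve the `.gz` variants directly.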
diff --git a/airflow/roles/airflow-worker/tasks/main.yml b/airflow/roles/airflow-worker/tasks/main.yml
deleted file mode 100644
index 4c63e07..0000000
--- a/airflow/roles/airflow-worker/tasks/main.yml
+++ /dev/null
@@ -1,103 +0,0 @@
----
-- name: Check if Airflow worker deployment directory exists
-  stat:
-    path: "{{ airflow_worker_dir }}"
-  register: worker_dir_stat
-
-- name: Ensure Airflow worker deployment directory exists
-  file:
-    path: "{{ airflow_worker_dir }}"
-    state: directory
-    owner: "{{ ssh_user }}"
-    group: ytdl
-    mode: '0755'
-  become: yes
-  when: not worker_dir_stat.stat.exists
-
-- name: Sync Airflow worker files
-  synchronize:
-    src: "{{ playbook_dir }}/../{{ item }}"
-    dest: "{{ airflow_worker_dir }}/"
-    archive: yes
-    recursive: yes
-    delete: yes
-    rsync_opts: "{{ rsync_default_opts }}"
-  loop:
-    - "airflow/Dockerfile"
-    - "airflow/docker-compose-dl.yaml"
-    - "airflow/dags/"
-    - "airflow/config/"
-    - "setup.py"
-    - "yt_ops_services/"
-    - "thrift_model/"
-    - "VERSION"
-    - "airflow/init-airflow.sh"
-    - "get_info_json_client.py"
-    - "proxy_manager_client.py"
-    - "token_generator/"
-    - "utils/"
-
-- name: Check if inputfiles directory exists
-  stat:
-    path: "{{ playbook_dir }}/../airflow/inputfiles"
-  register: inputfiles_stat
-
-- name: Sync inputfiles directory if it exists
-  synchronize:
-    src: "{{ playbook_dir }}/../airflow/inputfiles/"
-    dest: "{{ airflow_worker_dir }}/inputfiles/"
-    archive: yes
-    recursive: yes
-    delete: yes
-    rsync_opts: "{{ rsync_default_opts }}"
-  when: inputfiles_stat.stat.exists
-
-- name: Sync pangramia thrift files
-  synchronize:
-    src: "{{ playbook_dir }}/../thrift_model/gen_py/pangramia/"
-    dest: "{{ airflow_worker_dir }}/pangramia/"
-    archive: yes
-    recursive: yes
-    delete: yes
-    rsync_opts: "{{ rsync_default_opts }}"
-
-- name: Create .env file for Airflow worker service
-  template:
-    src: "../../templates/.env.worker.j2"
-    dest: "{{ airflow_worker_dir }}/.env"
-    mode: "{{ file_permissions }}"
-    owner: "{{ ssh_user }}"
-    group: ytdl
-  vars:
-    service_role: "worker"
-
-- name: Create symlink for docker-compose.yaml
-  file:
-    src: "{{ airflow_worker_dir }}/docker-compose-dl.yaml"
-    dest: "{{ airflow_worker_dir }}/docker-compose.yaml"
-    state: link
-    owner: "{{ ssh_user }}"
-    group: ytdl
-
-- name: Build Airflow worker image
-  community.docker.docker_image:
-    name: "{{ airflow_image_name }}"
-    build:
-      path: "{{ airflow_worker_dir }}"
-    source: build
-    force_source: true
-
-- name: Run Airflow init script
-  shell:
-    cmd: "chmod +x init-airflow.sh && ./init-airflow.sh"
-    chdir: "{{ airflow_worker_dir }}"
-  become: yes
-  become_user: "{{ ssh_user }}"
-
-- name: Start Airflow worker service
-  community.docker.docker_compose_v2:
-    project_src: "{{ airflow_worker_dir }}"
-    files:
-      - "docker-compose-dl.yaml"
-    state: present
-    remove_orphans: true
diff --git a/airflow/scripts/minio-init.sh b/airflow/scripts/minio-init.sh
deleted file mode 100644
index 3d62c30..0000000
--- a/airflow/scripts/minio-init.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-set -e
-
-# Wait for MinIO to be ready
-until (mc alias set local http://minio:9000 admin 0153093693-0009) do
-  echo 'Waiting for MinIO...'
-  sleep 1
-done
-
-# Create bucket if it doesn't exist
-if ! mc ls local/airflow-logs >/dev/null 2>&1; then
-  mc mb local/airflow-logs
-  mc anonymous set download local/airflow-logs
-  echo 'MinIO bucket initialized'
-else
-  echo 'MinIO bucket already exists'
-fi
diff --git a/airflow/ytdlp-ops-auth/__pycache__/thrift_exceptions_patch.cpython-311.pyc b/airflow/ytdlp-ops-auth/__pycache__/thrift_exceptions_patch.cpython-311.pyc
deleted file mode 100644
index 00632a2e3ddae6e2d1b320abec606fefb46a959c..0000000000000000000000000000000000000000
GIT binary patch
[binary payload omitted]
diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py
deleted file mode 100644
index b6cf1f4..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py
+++ /dev/null
@@ -1,564 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-import logging
-from .ttypes import *
-from thrift.Thrift import TProcessor
-from thrift.transport import TTransport
-all_structs = []
-
-
-class Iface(object):
-    def ping(self):
-        pass
-
-    def reportError(self, message, details):
-        """
-        Parameters:
-         - message
-         - details
-
-        """
-        pass
-
-    def shutdown(self):
-        pass
-
-
-class Client(Iface):
-    def __init__(self, iprot, oprot=None):
-        self._iprot = self._oprot = iprot
-        if oprot is not None:
-            self._oprot = oprot
-        self._seqid = 0
-
-    def ping(self):
-        self.send_ping()
-        return self.recv_ping()
-
-    def send_ping(self):
-        self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
-        args = ping_args()
-        args.write(self._oprot)
-        self._oprot.writeMessageEnd()
-        self._oprot.trans.flush()
-
-    def recv_ping(self):
-        iprot = self._iprot
-        (fname, mtype, rseqid) = iprot.readMessageBegin()
-        if mtype == TMessageType.EXCEPTION:
-            x = TApplicationException()
-            x.read(iprot)
-            iprot.readMessageEnd()
-            raise x
-        result =
ping_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result") - - def reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - self.send_reportError(message, details) - return self.recv_reportError() - - def send_reportError(self, message, details): - self._oprot.writeMessageBegin('reportError', TMessageType.CALL, self._seqid) - args = reportError_args() - args.message = message - args.details = details - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportError(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportError_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportError failed: unknown result") - - def shutdown(self): - self.send_shutdown() - - def send_shutdown(self): - self._oprot.writeMessageBegin('shutdown', TMessageType.ONEWAY, self._seqid) - args = shutdown_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - -class Processor(Iface, TProcessor): - def __init__(self, handler): - self._handler = handler - self._processMap = {} - self._processMap["ping"] = Processor.process_ping - self._processMap["reportError"] = Processor.process_reportError - self._processMap["shutdown"] = Processor.process_shutdown - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_ping(self, seqid, iprot, oprot): - args = ping_args() - args.read(iprot) - iprot.readMessageEnd() - result = ping_result() - try: - result.success = self._handler.ping() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("ping", msg_type, seqid) - 
result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportError(self, seqid, iprot, oprot): - args = reportError_args() - args.read(iprot) - iprot.readMessageEnd() - result = reportError_result() - try: - result.success = self._handler.reportError(args.message, args.details) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportError", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_shutdown(self, seqid, iprot, oprot): - args = shutdown_args() - args.read(iprot) - iprot.readMessageEnd() - try: - self._handler.shutdown() - except TTransport.TTransportException: - raise - except Exception: - logging.exception('Exception in oneway handler') - -# HELPER FUNCTIONS AND STRUCTURES - - -class ping_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_args) -ping_args.thrift_spec = ( -) - - -class ping_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = 
pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_result) -ping_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportError_args(object): - """ - Attributes: - - message - - details - - """ - - - def __init__(self, message=None, details=None,): - self.message = message - self.details = details - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.MAP: - self.details = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.details[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_args') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.MAP, 2) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.details)) - for kiter7, viter8 in self.details.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - 
oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_args) -reportError_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.MAP, 'details', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2 -) - - -class reportError_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_result) -reportError_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class shutdown_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - 
while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('shutdown_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(shutdown_args) -shutdown_args.thrift_spec = ( -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py deleted file mode 100644 index f8be3f5..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'BaseService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py deleted file mode 100644 index 3bfb47f..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py deleted file mode 100644 index 609fd61..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py +++ /dev/null @@ -1,3491 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from 
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-import pangramia.base_service.BaseService
-import logging
-from .ttypes import *
-from thrift.Thrift import TProcessor
-from thrift.transport import TTransport
-all_structs = []
-
-
-class Iface(pangramia.base_service.BaseService.Iface):
-    [... per-method "Parameters" docstrings elided; every Iface method body is a bare pass ...]
-    def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData):
-        pass
-    def getPair(self, machineId):
-        pass
-    def pair(self, accountId, proxyId, machineId):
-        pass
-    def unpair(self, accountId, proxyId, machineId):
-        pass
-    def listAccountPairs(self, filter):
-        pass
-    def addAccount(self, accountId, accountData):
-        pass
-    def suspendAccount(self, accountId):
-        pass
-    def resumeAccount(self, accountId):
-        pass
-    def removeAccount(self, accountId):
-        pass
-    def listActiveAccounts(self):
-        pass
-    def addProxy(self, proxyId, proxyData):
-        pass
-    def suspendProxy(self, proxyId):
-        pass
-    def resumeProxy(self, proxyId):
-        pass
-    def removeProxy(self, proxyId):
-        pass
-    def listActiveProxies(self):
-        pass
-
-
-class Client(pangramia.base_service.BaseService.Client, Iface):
-    def __init__(self, iprot, oprot=None):
-        pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot)
-
-    def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData):
-        self.send_addAccountPair(accountId, proxyId, machineId, proxyData, accountData)
-        return self.recv_addAccountPair()
-
-    def send_addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData):
-        self._oprot.writeMessageBegin('addAccountPair', TMessageType.CALL, self._seqid)
-        args = addAccountPair_args()
-        args.accountId = accountId
-        args.proxyId = proxyId
-        args.machineId = machineId
-        args.proxyData = proxyData
-        args.accountData = accountData
-        args.write(self._oprot)
-        self._oprot.writeMessageEnd()
-        self._oprot.trans.flush()
-
-    def recv_addAccountPair(self):
-        iprot = self._iprot
-        (fname, mtype, rseqid) = iprot.readMessageBegin()
-        if mtype == TMessageType.EXCEPTION:
-            x = TApplicationException()
-            x.read(iprot)
-            iprot.readMessageEnd()
-            raise x
-        result = addAccountPair_result()
-        result.read(iprot)
-        iprot.readMessageEnd()
-        if result.success is not None:
-            return result.success
-        if result.serviceExp is not None:
-            raise result.serviceExp
-        if result.userExp is not None:
-            raise result.userExp
-        raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccountPair failed: unknown result")
-
-    [... the remaining 14 client methods (getPair through listActiveProxies) repeat this generated
-     send_<name>()/recv_<name>() pattern verbatim for their respective argument lists ...]
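For review context only: callers drive the generated Client through the standard Thrift transport/protocol stack, and the serviceExp/userExp result fields surface as raised exceptions rather than return values. A minimal usage sketch; the host, port, and ID values are assumptions, not taken from this patch:

    # Sketch: calling the generated Client (endpoint and IDs are hypothetical).
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol

    sock = TSocket.TSocket('localhost', 9090)          # assumed endpoint
    transport = TTransport.TBufferedTransport(sock)
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Client(protocol)                          # Client generated above

    transport.open()
    try:
        # send_pair() writes pair_args and flushes; recv_pair() blocks on the REPLY
        # and raises PBServiceException/PBUserException if the result carries one.
        ok = client.pair('acc-001', 'proxy-001', 'machine-01')
    finally:
        transport.close()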
-class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor):
-    def __init__(self, handler):
-        pangramia.base_service.BaseService.Processor.__init__(self, handler)
-        self._processMap["addAccountPair"] = Processor.process_addAccountPair
-        self._processMap["getPair"] = Processor.process_getPair
-        self._processMap["pair"] = Processor.process_pair
-        self._processMap["unpair"] = Processor.process_unpair
-        self._processMap["listAccountPairs"] = Processor.process_listAccountPairs
-        self._processMap["addAccount"] = Processor.process_addAccount
-        self._processMap["suspendAccount"] = Processor.process_suspendAccount
-        self._processMap["resumeAccount"] = Processor.process_resumeAccount
-        self._processMap["removeAccount"] = Processor.process_removeAccount
-        self._processMap["listActiveAccounts"] = Processor.process_listActiveAccounts
-        self._processMap["addProxy"] = Processor.process_addProxy
-        self._processMap["suspendProxy"] = Processor.process_suspendProxy
-        self._processMap["resumeProxy"] = Processor.process_resumeProxy
-        self._processMap["removeProxy"] = Processor.process_removeProxy
-        self._processMap["listActiveProxies"] = Processor.process_listActiveProxies
-        self._on_message_begin = None
-
-    def on_message_begin(self, func):
-        self._on_message_begin = func
-
-    def process(self, iprot, oprot):
-        (name, type, seqid) = iprot.readMessageBegin()
-        if self._on_message_begin:
-            self._on_message_begin(name, type, seqid)
-        if name not in self._processMap:
-            iprot.skip(TType.STRUCT)
-            iprot.readMessageEnd()
-            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
-            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
-            x.write(oprot)
-            oprot.writeMessageEnd()
-            oprot.trans.flush()
-            return
-        else:
-            self._processMap[name](self, seqid, iprot, oprot)
-        return True
-
-    def process_addAccountPair(self, seqid, iprot, oprot):
-        args = addAccountPair_args()
-        args.read(iprot)
-        iprot.readMessageEnd()
-        result = addAccountPair_result()
-        try:
-            result.success = self._handler.addAccountPair(args.accountId, args.proxyId, args.machineId, args.proxyData, args.accountData)
-            msg_type = TMessageType.REPLY
-        except TTransport.TTransportException:
-            raise
-        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
-            msg_type = TMessageType.REPLY
-            result.serviceExp = serviceExp
-        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
-            msg_type = TMessageType.REPLY
-            result.userExp = userExp
-        except TApplicationException as ex:
-            logging.exception('TApplication exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = ex
-        except Exception:
-            logging.exception('Unexpected exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
-        oprot.writeMessageBegin("addAccountPair", msg_type, seqid)
-        result.write(oprot)
-        oprot.writeMessageEnd()
-        oprot.trans.flush()
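Reviewer aside, not part of the patch: the dispatch in process() above can be exercised without any sockets by handing it in-memory protocols, which is a handy way to test a handler. A sketch; `processor` and `request_bytes` are hypothetical stand-ins (a Processor instance and a serialized CALL frame):

    # Sketch: driving Processor.process() over in-memory transports.
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol

    ibuf = TTransport.TMemoryBuffer(request_bytes)     # hypothetical CALL bytes
    obuf = TTransport.TMemoryBuffer()
    processor.process(TBinaryProtocol.TBinaryProtocol(ibuf),
                      TBinaryProtocol.TBinaryProtocol(obuf))
    reply_bytes = obuf.getvalue()                      # REPLY or EXCEPTION frame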
-    [... process_getPair through process_listActiveProxies are generated one-for-one from the
-     process map above, each repeating the same read-args / call-handler / map-PBServiceException-
-     and-PBUserException-into-the-result / write-reply sequence shown in process_addAccountPair ...]
-
-# HELPER FUNCTIONS AND STRUCTURES
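For context: the Processor is what a Thrift server loop feeds with incoming frames. A minimal hosting sketch; the handler class and port are assumptions that do not appear in this patch:

    # Sketch: hosting the generated Processor (handler and port are hypothetical).
    from thrift.server import TServer
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol

    class AdminOpsHandler(Iface):
        # A real handler implements every Iface method; one shown for brevity.
        def pair(self, accountId, proxyId, machineId):
            return True

    server = TServer.TSimpleServer(
        Processor(AdminOpsHandler()),
        TSocket.TServerSocket(port=9090),              # assumed port
        TTransport.TBufferedTransportFactory(),
        TBinaryProtocol.TBinaryProtocolFactory(),
    )
    server.serve()  # blocks; each frame is routed through Processor.process()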
-
-
-class addAccountPair_args(object):
-    """
-    Attributes:
-     - accountId
-     - proxyId
-     - machineId
-     - proxyData
-     - accountData
-
-    """
-
-    def __init__(self, accountId=None, proxyId=None, machineId=None, proxyData=None, accountData=None,):
-        self.accountId = accountId
-        self.proxyId = proxyId
-        self.machineId = machineId
-        self.proxyData = proxyData
-        self.accountData = accountData
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 1:
-                if ftype == TType.STRING:
-                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRING:
-                    self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 3:
-                if ftype == TType.STRING:
-                    self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 4:
-                if ftype == TType.STRUCT:
-                    self.proxyData = pangramia.yt.common.ttypes.ProxyData()
-                    self.proxyData.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 5:
-                if ftype == TType.STRUCT:
-                    self.accountData = pangramia.yt.common.ttypes.AccountData()
-                    self.accountData.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('addAccountPair_args')
-        if self.accountId is not None:
-            oprot.writeFieldBegin('accountId', TType.STRING, 1)
-            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
-            oprot.writeFieldEnd()
-        [... proxyId (2), machineId (3), proxyData (4, STRUCT) and accountData (5, STRUCT) are
-         written the same way ...]
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(addAccountPair_args)
-addAccountPair_args.thrift_spec = (
-    None,  # 0
-    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
-    (2, TType.STRING, 'proxyId', 'UTF8', None, ),  # 2
-    (3, TType.STRING, 'machineId', 'UTF8', None, ),  # 3
-    (4, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ),  # 4
-    (5, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ),  # 5
-)
-
-    [... addAccountPair_result, getPair_args/getPair_result, pair_args/pair_result and
-     unpair_args/unpair_result follow the identical generated shape: each *_result carries
-     success at field 0 (BOOL, or an AccountPairWithState struct for getPair) plus
-     serviceExp/userExp exception structs at fields 1 and 2; listAccountPairs_args carries a
-     single I32 filter at field 1 ...]
-
-class listAccountPairs_result(object):
-    """
-    Attributes:
-     - success
-     - serviceExp
-     - userExp
-
-    """
-
-    [... generated read()/write() deserialize success as a list of
-     pangramia.yt.common.ttypes.AccountPairWithState elements (TType.LIST of TType.STRUCT),
-     alongside the usual validate/__repr__/__eq__ ...]
-
-    def __ne__(self, other):
-        return not (self == other)
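Context note: each generated args/result class serializes independently of any RPC, so the thrift_spec tables that follow can be sanity-checked with an in-memory round-trip. A sketch using addAccountPair_args from above; the ID values are made up:

    # Sketch: in-memory round-trip of a generated struct (IDs are hypothetical).
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol

    wbuf = TTransport.TMemoryBuffer()
    addAccountPair_args(accountId='acc-001', proxyId='proxy-001',
                        machineId='m-01').write(TBinaryProtocol.TBinaryProtocol(wbuf))

    decoded = addAccountPair_args()
    decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(wbuf.getvalue())))
    assert decoded.accountId == 'acc-001'  # generated __eq__ compares __dict__, so full equality holds too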
-all_structs.append(listAccountPairs_result)
-listAccountPairs_result.thrift_spec = (
-    (0, TType.LIST, 'success', (TType.STRUCT, [pangramia.yt.common.ttypes.AccountPairWithState, None], False), None, ),  # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
-)
-
-    [... addAccount_args/addAccount_result, suspendAccount_args/suspendAccount_result and
-     resumeAccount_args/resumeAccount_result repeat the generated pattern: addAccount_args carries
-     accountId plus an AccountData struct, the suspend/resume args carry only accountId, and every
-     *_result is BOOL success plus serviceExp/userExp ...]
-
-class removeAccount_args(object):
-    """
-    Attributes:
-     - accountId
-
-    """
-
-    def __init__(self, accountId=None,):
-        self.accountId = accountId
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
- if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_args) -removeAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class removeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_result) -removeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 
- (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listActiveAccounts_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_args) -listActiveAccounts_args.thrift_spec = ( -) - - -class listActiveAccounts_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype10, _size7) = iprot.readListBegin() - for _i11 in range(_size7): - _elem12 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem12) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter13 in self.success: - oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - 
oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_result) -listActiveAccounts_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addProxy_args(object): - """ - Attributes: - - proxyId - - proxyData - - """ - - - def __init__(self, proxyId=None, proxyData=None,): - self.proxyId = proxyId - self.proxyData = proxyData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - self.proxyData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 2) - self.proxyData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addProxy_args) -addProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 - (2, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ), # 2 -) - - -class addProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = 
iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addProxy_result) -addProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class suspendProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) 
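
Editor's aside (illustrative only, not part of the patch): the hunks above delete Thrift-compiler output (build/lib artifacts) for the YTAccountsOpService admin API. Each RPC is a matched pair of generated structs: a <method>_args struct the client serializes, and a <method>_result struct whose 'success' field carries the return value and whose serviceExp/userExp fields surface on the caller's side as raised exceptions. A minimal sketch of driving these structs through the standard Thrift Python runtime follows; the host, port, and proxy id are placeholder assumptions:

    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from pangramia.yt.admin_ops import YTAccountsOpService

    # Plain blocking client over a buffered socket transport.
    transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = YTAccountsOpService.Client(protocol)

    transport.open()
    try:
        # Under the hood this serializes suspendProxy_args and decodes
        # suspendProxy_result; PBServiceException / PBUserException are
        # raised rather than returned.
        ok = client.suspendProxy('proxy-001')
        active = client.listActiveProxies()  # -> list of proxy id strings
    finally:
        transport.close()
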
-all_structs.append(suspendProxy_args) -suspendProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class suspendProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendProxy_result) -suspendProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class resumeProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, 
[self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeProxy_args) -resumeProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class resumeProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeProxy_result) -resumeProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class removeProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and 
isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeProxy_args) -removeProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class removeProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, 
other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeProxy_result) -removeProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listActiveProxies_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveProxies_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveProxies_args) -listActiveProxies_args.thrift_spec = ( -) - - -class listActiveProxies_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype17, _size14) = iprot.readListBegin() - for _i18 in range(_size14): - _elem19 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem19) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveProxies_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter20 in self.success: - oprot.writeString(iter20.encode('utf-8') if sys.version_info[0] == 2 else iter20) - oprot.writeListEnd() - 
oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveProxies_result) -listActiveProxies_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py deleted file mode 100644 index 00b4776..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'YTAccountsOpService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py deleted file mode 100644 index de828aa..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes -import pangramia.base_service.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py deleted file mode 100644 index 09a78b3..0000000 --- 
a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py deleted file mode 100644 index a23d813..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py +++ /dev/null @@ -1,905 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class JobState(object): - SUCCESS = 0 - FAIL = 1 - BOT_FORBIDDEN_ON_URL_ACCESS = 2 - BOT_FORBIDDEN_ON_FILE_DOWNLOAD = 3 - BOT_CAPTCHA = 4 - BOT_AUTH_RELOGIN_REQUIRED = 5 - BOT_AUTH_SMS_REQUIRED = 6 - BOT_AUTH_DEVICE_QR_REQUIRED = 7 - BOT_ACCOUNT_BANNED = 8 - BOT_IP_BANNED = 9 - - _VALUES_TO_NAMES = { - 0: "SUCCESS", - 1: "FAIL", - 2: "BOT_FORBIDDEN_ON_URL_ACCESS", - 3: "BOT_FORBIDDEN_ON_FILE_DOWNLOAD", - 4: "BOT_CAPTCHA", - 5: "BOT_AUTH_RELOGIN_REQUIRED", - 6: "BOT_AUTH_SMS_REQUIRED", - 7: "BOT_AUTH_DEVICE_QR_REQUIRED", - 8: "BOT_ACCOUNT_BANNED", - 9: "BOT_IP_BANNED", - } - - _NAMES_TO_VALUES = { - "SUCCESS": 0, - "FAIL": 1, - "BOT_FORBIDDEN_ON_URL_ACCESS": 2, - "BOT_FORBIDDEN_ON_FILE_DOWNLOAD": 3, - "BOT_CAPTCHA": 4, - "BOT_AUTH_RELOGIN_REQUIRED": 5, - "BOT_AUTH_SMS_REQUIRED": 6, - "BOT_AUTH_DEVICE_QR_REQUIRED": 7, - "BOT_ACCOUNT_BANNED": 8, - "BOT_IP_BANNED": 9, - } - - -class TokenUpdateMode(object): - AUTOREFRESH_AND_REMAIN_ANONYMOUS = 0 - AUTOREFRESH_AND_ALLOW_AUTH = 1 - AUTOREFRESH_AND_ONLY_AUTH = 2 - CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH = 3 - CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS = 4 - CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH = 5 - AUTO = 6 - - _VALUES_TO_NAMES = { - 0: "AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 1: "AUTOREFRESH_AND_ALLOW_AUTH", - 2: "AUTOREFRESH_AND_ONLY_AUTH", - 3: "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH", - 4: "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 5: "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH", - 6: "AUTO", - } - - _NAMES_TO_VALUES = { - "AUTOREFRESH_AND_REMAIN_ANONYMOUS": 0, - "AUTOREFRESH_AND_ALLOW_AUTH": 1, - "AUTOREFRESH_AND_ONLY_AUTH": 2, - "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH": 3, - "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS": 4, - "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH": 5, - "AUTO": 6, - } - - -class AccountPairState(object): - ACTIVE = 0 - PAUSED = 1 - REMOVED = 2 - IN_PROGRESS = 3 - ALL = 4 - - _VALUES_TO_NAMES = { - 0: "ACTIVE", - 1: "PAUSED", - 2: "REMOVED", - 3: "IN_PROGRESS", - 4: "ALL", - } - - _NAMES_TO_VALUES = { - "ACTIVE": 0, - "PAUSED": 1, - "REMOVED": 2, - "IN_PROGRESS": 3, - "ALL": 4, - } - - -class JobTokenData(object): - """ - Attributes: - - infoJson - - ytdlpCommand - - socks - - jobId - - url - - cookiesBlob - - """ - - - def __init__(self, infoJson=None, ytdlpCommand=None, socks=None, jobId=None, url=None, cookiesBlob=None,): - self.infoJson = 
infoJson - self.ytdlpCommand = ytdlpCommand - self.socks = socks - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.infoJson = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.socks = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobTokenData') - if self.infoJson is not None: - oprot.writeFieldBegin('infoJson', TType.STRING, 1) - oprot.writeString(self.infoJson.encode('utf-8') if sys.version_info[0] == 2 else self.infoJson) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 2) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.socks is not None: - oprot.writeFieldBegin('socks', TType.STRING, 3) - oprot.writeString(self.socks.encode('utf-8') if sys.version_info[0] == 2 else self.socks) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 5) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 6) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def 
__ne__(self, other): - return not (self == other) - - -class AccountData(object): - """ - Attributes: - - username - - password - - countryCode - - """ - - - def __init__(self, username=None, password=None, countryCode=None,): - self.username = username - self.password = password - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.username = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.password = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountData') - if self.username is not None: - oprot.writeFieldBegin('username', TType.STRING, 1) - oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username) - oprot.writeFieldEnd() - if self.password is not None: - oprot.writeFieldBegin('password', TType.STRING, 2) - oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 3) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.username is None: - raise TProtocolException(message='Required field username is unset!') - if self.password is None: - raise TProtocolException(message='Required field password is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class ProxyData(object): - """ - Attributes: - - proxyUrl - - countryCode - - """ - - - def __init__(self, proxyUrl=None, countryCode=None,): - self.proxyUrl = proxyUrl - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - 
iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ProxyData') - if self.proxyUrl is not None: - oprot.writeFieldBegin('proxyUrl', TType.STRING, 1) - oprot.writeString(self.proxyUrl.encode('utf-8') if sys.version_info[0] == 2 else self.proxyUrl) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 2) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.proxyUrl is None: - raise TProtocolException(message='Required field proxyUrl is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountPairWithState(object): - """ - Attributes: - - accountId - - proxyId - - accountPairState - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, accountPairState=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.accountPairState = accountPairState - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.accountPairState = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountPairWithState') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else 
self.proxyId) - oprot.writeFieldEnd() - if self.accountPairState is not None: - oprot.writeFieldBegin('accountPairState', TType.I32, 3) - oprot.writeI32(self.accountPairState) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 4) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.accountId is None: - raise TProtocolException(message='Required field accountId is unset!') - if self.proxyId is None: - raise TProtocolException(message='Required field proxyId is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class JobData(object): - """ - Attributes: - - jobId - - url - - cookiesBlob - - potoken - - visitorId - - ytdlpCommand - - createdTime - - telemetry - - state - - errorMessage - - socks5Id - - """ - - - def __init__(self, jobId=None, url=None, cookiesBlob=None, potoken=None, visitorId=None, ytdlpCommand=None, createdTime=None, telemetry=None, state=None, errorMessage=None, socks5Id=None,): - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - self.potoken = potoken - self.visitorId = visitorId - self.ytdlpCommand = ytdlpCommand - self.createdTime = createdTime - self.telemetry = telemetry - self.state = state - self.errorMessage = errorMessage - self.socks5Id = socks5Id - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.potoken = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.visitorId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.createdTime = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 8: - if ftype == TType.MAP: - self.telemetry = {} - (_ktype1, _vtype2, _size0) = 
iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.telemetry[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - elif fid == 9: - if ftype == TType.I32: - self.state = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 10: - if ftype == TType.STRING: - self.errorMessage = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 11: - if ftype == TType.STRING: - self.socks5Id = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobData') - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 1) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 2) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 3) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - if self.potoken is not None: - oprot.writeFieldBegin('potoken', TType.STRING, 4) - oprot.writeString(self.potoken.encode('utf-8') if sys.version_info[0] == 2 else self.potoken) - oprot.writeFieldEnd() - if self.visitorId is not None: - oprot.writeFieldBegin('visitorId', TType.STRING, 5) - oprot.writeString(self.visitorId.encode('utf-8') if sys.version_info[0] == 2 else self.visitorId) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 6) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.createdTime is not None: - oprot.writeFieldBegin('createdTime', TType.STRING, 7) - oprot.writeString(self.createdTime.encode('utf-8') if sys.version_info[0] == 2 else self.createdTime) - oprot.writeFieldEnd() - if self.telemetry is not None: - oprot.writeFieldBegin('telemetry', TType.MAP, 8) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.telemetry)) - for kiter7, viter8 in self.telemetry.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - if self.state is not None: - oprot.writeFieldBegin('state', TType.I32, 9) - oprot.writeI32(self.state) - oprot.writeFieldEnd() - if self.errorMessage is not None: - oprot.writeFieldBegin('errorMessage', TType.STRING, 10) - oprot.writeString(self.errorMessage.encode('utf-8') if sys.version_info[0] == 2 else self.errorMessage) - oprot.writeFieldEnd() - if self.socks5Id is not None: - oprot.writeFieldBegin('socks5Id', TType.STRING, 11) - oprot.writeString(self.socks5Id.encode('utf-8') if sys.version_info[0] == 2 else 
self.socks5Id) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.jobId is None: - raise TProtocolException(message='Required field jobId is unset!') - if self.url is None: - raise TProtocolException(message='Required field url is unset!') - if self.cookiesBlob is None: - raise TProtocolException(message='Required field cookiesBlob is unset!') - if self.potoken is None: - raise TProtocolException(message='Required field potoken is unset!') - if self.visitorId is None: - raise TProtocolException(message='Required field visitorId is unset!') - if self.ytdlpCommand is None: - raise TProtocolException(message='Required field ytdlpCommand is unset!') - if self.createdTime is None: - raise TProtocolException(message='Required field createdTime is unset!') - if self.telemetry is None: - raise TProtocolException(message='Required field telemetry is unset!') - if self.state is None: - raise TProtocolException(message='Required field state is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionPagination(object): - """ - Attributes: - - hasNext - - totalCount - - page - - pageSize - - """ - - - def __init__(self, hasNext=None, totalCount=None, page=None, pageSize=None,): - self.hasNext = hasNext - self.totalCount = totalCount - self.page = page - self.pageSize = pageSize - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.BOOL: - self.hasNext = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.totalCount = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.page = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.I32: - self.pageSize = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionPagination') - if self.hasNext is not None: - oprot.writeFieldBegin('hasNext', TType.BOOL, 1) - oprot.writeBool(self.hasNext) - oprot.writeFieldEnd() - if self.totalCount is not None: - oprot.writeFieldBegin('totalCount', TType.I32, 2) - oprot.writeI32(self.totalCount) - oprot.writeFieldEnd() - if self.page is not None: - oprot.writeFieldBegin('page', TType.I32, 3) - oprot.writeI32(self.page) - oprot.writeFieldEnd() - if self.pageSize is not None: - oprot.writeFieldBegin('pageSize', TType.I32, 4) - oprot.writeI32(self.pageSize) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.hasNext is None: - raise TProtocolException(message='Required field hasNext is unset!') - if self.totalCount is None: - raise TProtocolException(message='Required field totalCount is 
unset!') - if self.page is None: - raise TProtocolException(message='Required field page is unset!') - if self.pageSize is None: - raise TProtocolException(message='Required field pageSize is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionJobData(object): - """ - Attributes: - - items - - pagination - - """ - - - def __init__(self, items=None, pagination=None,): - self.items = items - self.pagination = pagination - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.LIST: - self.items = [] - (_etype12, _size9) = iprot.readListBegin() - for _i13 in range(_size9): - _elem14 = JobData() - _elem14.read(iprot) - self.items.append(_elem14) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.pagination = RichCollectionPagination() - self.pagination.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionJobData') - if self.items is not None: - oprot.writeFieldBegin('items', TType.LIST, 1) - oprot.writeListBegin(TType.STRUCT, len(self.items)) - for iter15 in self.items: - iter15.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.pagination is not None: - oprot.writeFieldBegin('pagination', TType.STRUCT, 2) - self.pagination.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.items is None: - raise TProtocolException(message='Required field items is unset!') - if self.pagination is None: - raise TProtocolException(message='Required field pagination is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(JobTokenData) -JobTokenData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'infoJson', 'UTF8', None, ), # 1 - (2, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 2 - (3, TType.STRING, 'socks', 'UTF8', None, ), # 3 - (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4 - (5, TType.STRING, 'url', 'UTF8', None, ), # 5 - (6, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 6 -) -all_structs.append(AccountData) -AccountData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'username', 'UTF8', None, ), # 1 - (2, TType.STRING, 'password', 'UTF8', None, ), # 2 - (3, TType.STRING, 'countryCode', 'UTF8', None, ), # 3 -) -all_structs.append(ProxyData) -ProxyData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyUrl', 'UTF8', None, ), # 1 - (2, 
TType.STRING, 'countryCode', 'UTF8', None, ), # 2 -) -all_structs.append(AccountPairWithState) -AccountPairWithState.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.I32, 'accountPairState', None, None, ), # 3 - (4, TType.STRING, 'machineId', 'UTF8', None, ), # 4 -) -all_structs.append(JobData) -JobData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'jobId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'url', 'UTF8', None, ), # 2 - (3, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 3 - (4, TType.STRING, 'potoken', 'UTF8', None, ), # 4 - (5, TType.STRING, 'visitorId', 'UTF8', None, ), # 5 - (6, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 6 - (7, TType.STRING, 'createdTime', 'UTF8', None, ), # 7 - (8, TType.MAP, 'telemetry', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 8 - (9, TType.I32, 'state', None, None, ), # 9 - (10, TType.STRING, 'errorMessage', 'UTF8', None, ), # 10 - (11, TType.STRING, 'socks5Id', 'UTF8', None, ), # 11 -) -all_structs.append(RichCollectionPagination) -RichCollectionPagination.thrift_spec = ( - None, # 0 - (1, TType.BOOL, 'hasNext', None, None, ), # 1 - (2, TType.I32, 'totalCount', None, None, ), # 2 - (3, TType.I32, 'page', None, None, ), # 3 - (4, TType.I32, 'pageSize', None, None, ), # 4 -) -all_structs.append(RichCollectionJobData) -RichCollectionJobData.thrift_spec = ( - None, # 0 - (1, TType.LIST, 'items', (TType.STRUCT, [JobData, None], False), None, ), # 1 - (2, TType.STRUCT, 'pagination', [RichCollectionPagination, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py deleted file mode 100644 index e930913..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py +++ /dev/null @@ -1,254 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class PBServiceException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, 
errorCode=None, context=None,): - super(PBServiceException, self).__setattr__('message', message) - super(PBServiceException, self).__setattr__('errorCode', errorCode) - super(PBServiceException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBServiceException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter7, viter8 in self.context.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class PBUserException(TException): - """ - 
Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBUserException, self).__setattr__('message', message) - super(PBUserException, self).__setattr__('errorCode', errorCode) - super(PBUserException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype10, _vtype11, _size9) = iprot.readMapBegin() - for _i13 in range(_size9): - _key14 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val15 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key14] = _val15 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBUserException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter16, viter17 in self.context.items(): - oprot.writeString(kiter16.encode('utf-8') if sys.version_info[0] == 2 else kiter16) - oprot.writeString(viter17.encode('utf-8') if sys.version_info[0] == 2 else viter17) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def 
__ne__(self, other): - return not (self == other) -all_structs.append(PBServiceException) -PBServiceException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -all_structs.append(PBUserException) -PBUserException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py deleted file mode 100644 index 8589aee..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py +++ /dev/null @@ -1,1360 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - pass - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - self.send_getOrRefreshTokenWithReport(accountId, oldUrl, status, details, jobId, updateType, url) - return self.recv_getOrRefreshTokenWithReport() - - def send_getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshTokenWithReport', TMessageType.CALL, self._seqid) - args = getOrRefreshTokenWithReport_args() - args.accountId = accountId - args.oldUrl = oldUrl - args.status = status - args.details = details - args.jobId = jobId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshTokenWithReport(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - 
iprot.readMessageEnd() - raise x - result = getOrRefreshTokenWithReport_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshTokenWithReport failed: unknown result") - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_getOrRefreshToken(accountId, updateType, url) - return self.recv_getOrRefreshToken() - - def send_getOrRefreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshToken', TMessageType.CALL, self._seqid) - args = getOrRefreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshToken failed: unknown result") - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_getLatestToken(accountId) - return self.recv_getLatestToken() - - def send_getLatestToken(self, accountId): - self._oprot.writeMessageBegin('getLatestToken', TMessageType.CALL, self._seqid) - args = getLatestToken_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getLatestToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getLatestToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getLatestToken failed: unknown result") - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_refreshToken(accountId, updateType, url) - return self.recv_refreshToken() - - def send_refreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('refreshToken', TMessageType.CALL, self._seqid) - args = refreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_refreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = refreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not 
None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "refreshToken failed: unknown result") - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - self.send_reportState(url, status, details, jobId) - return self.recv_reportState() - - def send_reportState(self, url, status, details, jobId): - self._oprot.writeMessageBegin('reportState', TMessageType.CALL, self._seqid) - args = reportState_args() - args.url = url - args.status = status - args.details = details - args.jobId = jobId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportState(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportState_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportState failed: unknown result") - - -class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor): - def __init__(self, handler): - pangramia.base_service.BaseService.Processor.__init__(self, handler) - self._processMap["getOrRefreshTokenWithReport"] = Processor.process_getOrRefreshTokenWithReport - self._processMap["getOrRefreshToken"] = Processor.process_getOrRefreshToken - self._processMap["getLatestToken"] = Processor.process_getLatestToken - self._processMap["refreshToken"] = Processor.process_refreshToken - self._processMap["reportState"] = Processor.process_reportState - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_getOrRefreshTokenWithReport(self, seqid, iprot, oprot): - args = getOrRefreshTokenWithReport_args() - args.read(iprot) - iprot.readMessageEnd() - result = getOrRefreshTokenWithReport_result() - try: - result.success = self._handler.getOrRefreshTokenWithReport(args.accountId, args.oldUrl, args.status, args.details, args.jobId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = 
TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getOrRefreshTokenWithReport", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getOrRefreshToken(self, seqid, iprot, oprot): - args = getOrRefreshToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = getOrRefreshToken_result() - try: - result.success = self._handler.getOrRefreshToken(args.accountId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getOrRefreshToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getLatestToken(self, seqid, iprot, oprot): - args = getLatestToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = getLatestToken_result() - try: - result.success = self._handler.getLatestToken(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getLatestToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_refreshToken(self, seqid, iprot, oprot): - args = refreshToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = refreshToken_result() - try: - result.success = self._handler.refreshToken(args.accountId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("refreshToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportState(self, seqid, iprot, oprot): - args = reportState_args() 
- args.read(iprot) - iprot.readMessageEnd() - result = reportState_result() - try: - result.success = self._handler.reportState(args.url, args.status, args.details, args.jobId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportState", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - -# HELPER FUNCTIONS AND STRUCTURES - - -class getOrRefreshTokenWithReport_args(object): - """ - Attributes: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - - - def __init__(self, accountId=None, oldUrl=None, status=None, details=None, jobId=None, updateType= 6, url=None,): - self.accountId = accountId - self.oldUrl = oldUrl - self.status = status - self.details = details - self.jobId = jobId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.oldUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.status = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshTokenWithReport_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - 
if self.oldUrl is not None: - oprot.writeFieldBegin('oldUrl', TType.STRING, 2) - oprot.writeString(self.oldUrl.encode('utf-8') if sys.version_info[0] == 2 else self.oldUrl) - oprot.writeFieldEnd() - if self.status is not None: - oprot.writeFieldBegin('status', TType.I32, 3) - oprot.writeI32(self.status) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.STRING, 4) - oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 5) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 6) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 7) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshTokenWithReport_args) -getOrRefreshTokenWithReport_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'oldUrl', 'UTF8', None, ), # 2 - (3, TType.I32, 'status', None, None, ), # 3 - (4, TType.STRING, 'details', 'UTF8', None, ), # 4 - (5, TType.STRING, 'jobId', 'UTF8', None, ), # 5 - (6, TType.I32, 'updateType', None, 6, ), # 6 - (7, TType.STRING, 'url', 'UTF8', None, ), # 7 -) - - -class getOrRefreshTokenWithReport_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshTokenWithReport_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - 
oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshTokenWithReport_result) -getOrRefreshTokenWithReport_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getOrRefreshToken_args(object): - """ - Attributes: - - accountId - - updateType - - url - - """ - - - def __init__(self, accountId=None, updateType= 6, url=None,): - self.accountId = accountId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 2) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 3) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshToken_args) -getOrRefreshToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 
'UTF8', None, ), # 1 - (2, TType.I32, 'updateType', None, 6, ), # 2 - (3, TType.STRING, 'url', 'UTF8', None, ), # 3 -) - - -class getOrRefreshToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshToken_result) -getOrRefreshToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getLatestToken_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if 
oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getLatestToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getLatestToken_args) -getLatestToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class getLatestToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getLatestToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getLatestToken_result) -getLatestToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, 
), # 2 -) - - -class refreshToken_args(object): - """ - Attributes: - - accountId - - updateType - - url - - """ - - - def __init__(self, accountId=None, updateType= 6, url=None,): - self.accountId = accountId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('refreshToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 2) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 3) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(refreshToken_args) -refreshToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.I32, 'updateType', None, 6, ), # 2 - (3, TType.STRING, 'url', 'UTF8', None, ), # 3 -) - - -class refreshToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if 
ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('refreshToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(refreshToken_result) -refreshToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportState_args(object): - """ - Attributes: - - url - - status - - details - - jobId - - """ - - - def __init__(self, url=None, status=None, details=None, jobId=None,): - self.url = url - self.status = status - self.details = details - self.jobId = jobId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.status = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportState_args') - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 1) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.status is not None: - oprot.writeFieldBegin('status', TType.I32, 2) - oprot.writeI32(self.status) - 
oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.STRING, 3) - oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportState_args) -reportState_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'url', 'UTF8', None, ), # 1 - (2, TType.I32, 'status', None, None, ), # 2 - (3, TType.STRING, 'details', 'UTF8', None, ), # 3 - (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4 -) - - -class reportState_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportState_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportState_result) -reportState_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', 
[pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py deleted file mode 100644 index e97f47d..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'YTTokenOpService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py deleted file mode 100644 index de828aa..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes -import pangramia.base_service.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py deleted file mode 100644 index b6cf1f4..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py +++ /dev/null @@ -1,564 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(object): - def ping(self): - pass - - def 
reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - pass - - def shutdown(self): - pass - - -class Client(Iface): - def __init__(self, iprot, oprot=None): - self._iprot = self._oprot = iprot - if oprot is not None: - self._oprot = oprot - self._seqid = 0 - - def ping(self): - self.send_ping() - return self.recv_ping() - - def send_ping(self): - self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid) - args = ping_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_ping(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = ping_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result") - - def reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - self.send_reportError(message, details) - return self.recv_reportError() - - def send_reportError(self, message, details): - self._oprot.writeMessageBegin('reportError', TMessageType.CALL, self._seqid) - args = reportError_args() - args.message = message - args.details = details - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportError(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportError_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportError failed: unknown result") - - def shutdown(self): - self.send_shutdown() - - def send_shutdown(self): - self._oprot.writeMessageBegin('shutdown', TMessageType.ONEWAY, self._seqid) - args = shutdown_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - -class Processor(Iface, TProcessor): - def __init__(self, handler): - self._handler = handler - self._processMap = {} - self._processMap["ping"] = Processor.process_ping - self._processMap["reportError"] = Processor.process_reportError - self._processMap["shutdown"] = Processor.process_shutdown - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_ping(self, seqid, iprot, oprot): - args = ping_args() - args.read(iprot) - iprot.readMessageEnd() - result = ping_result() - try: - 
result.success = self._handler.ping() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("ping", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportError(self, seqid, iprot, oprot): - args = reportError_args() - args.read(iprot) - iprot.readMessageEnd() - result = reportError_result() - try: - result.success = self._handler.reportError(args.message, args.details) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportError", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_shutdown(self, seqid, iprot, oprot): - args = shutdown_args() - args.read(iprot) - iprot.readMessageEnd() - try: - self._handler.shutdown() - except TTransport.TTransportException: - raise - except Exception: - logging.exception('Exception in oneway handler') - -# HELPER FUNCTIONS AND STRUCTURES - - -class ping_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_args) -ping_args.thrift_spec = ( -) - - -class ping_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - 
self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_result) -ping_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportError_args(object): - """ - Attributes: - - message - - details - - """ - - - def __init__(self, message=None, details=None,): - self.message = message - self.details = details - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.MAP: - self.details = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.details[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - 
iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_args') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.MAP, 2) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.details)) - for kiter7, viter8 in self.details.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_args) -reportError_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.MAP, 'details', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2 -) - - -class reportError_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, 
self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_result) -reportError_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class shutdown_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('shutdown_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(shutdown_args) -shutdown_args.thrift_spec = ( -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py deleted file mode 100644 index f8be3f5..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'BaseService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py deleted file mode 100644 index 3bfb47f..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import 
pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py deleted file mode 100644 index 609fd61..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py +++ /dev/null @@ -1,3491 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - """ - Parameters: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - pass - - def getPair(self, machineId): - """ - Parameters: - - machineId - - """ - pass - - def pair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - pass - - def unpair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - pass - - def listAccountPairs(self, filter): - """ - Parameters: - - filter - - """ - pass - - def addAccount(self, accountId, accountData): - """ - Parameters: - - accountId - - accountData - - """ - pass - - def suspendAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def resumeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def removeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def listActiveAccounts(self): - pass - - def addProxy(self, proxyId, proxyData): - """ - Parameters: - - proxyId - - proxyData - - """ - pass - - def suspendProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def resumeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def removeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def listActiveProxies(self): - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - """ - Parameters: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - self.send_addAccountPair(accountId, proxyId, machineId, proxyData, accountData) - return self.recv_addAccountPair() - - def send_addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - self._oprot.writeMessageBegin('addAccountPair', TMessageType.CALL, self._seqid) - args = addAccountPair_args() - args.accountId = accountId - args.proxyId = proxyId - 
args.machineId = machineId - args.proxyData = proxyData - args.accountData = accountData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addAccountPair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addAccountPair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccountPair failed: unknown result") - - def getPair(self, machineId): - """ - Parameters: - - machineId - - """ - self.send_getPair(machineId) - return self.recv_getPair() - - def send_getPair(self, machineId): - self._oprot.writeMessageBegin('getPair', TMessageType.CALL, self._seqid) - args = getPair_args() - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getPair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getPair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getPair failed: unknown result") - - def pair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - self.send_pair(accountId, proxyId, machineId) - return self.recv_pair() - - def send_pair(self, accountId, proxyId, machineId): - self._oprot.writeMessageBegin('pair', TMessageType.CALL, self._seqid) - args = pair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_pair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = pair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "pair failed: unknown result") - - def unpair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - self.send_unpair(accountId, proxyId, machineId) - return self.recv_unpair() - - def send_unpair(self, accountId, proxyId, machineId): - self._oprot.writeMessageBegin('unpair', TMessageType.CALL, self._seqid) - args = unpair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_unpair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result 
= unpair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "unpair failed: unknown result") - - def listAccountPairs(self, filter): - """ - Parameters: - - filter - - """ - self.send_listAccountPairs(filter) - return self.recv_listAccountPairs() - - def send_listAccountPairs(self, filter): - self._oprot.writeMessageBegin('listAccountPairs', TMessageType.CALL, self._seqid) - args = listAccountPairs_args() - args.filter = filter - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listAccountPairs(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listAccountPairs_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listAccountPairs failed: unknown result") - - def addAccount(self, accountId, accountData): - """ - Parameters: - - accountId - - accountData - - """ - self.send_addAccount(accountId, accountData) - return self.recv_addAccount() - - def send_addAccount(self, accountId, accountData): - self._oprot.writeMessageBegin('addAccount', TMessageType.CALL, self._seqid) - args = addAccount_args() - args.accountId = accountId - args.accountData = accountData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccount failed: unknown result") - - def suspendAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_suspendAccount(accountId) - return self.recv_suspendAccount() - - def send_suspendAccount(self, accountId): - self._oprot.writeMessageBegin('suspendAccount', TMessageType.CALL, self._seqid) - args = suspendAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_suspendAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = suspendAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "suspendAccount failed: unknown result") - - def resumeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - 
self.send_resumeAccount(accountId) - return self.recv_resumeAccount() - - def send_resumeAccount(self, accountId): - self._oprot.writeMessageBegin('resumeAccount', TMessageType.CALL, self._seqid) - args = resumeAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_resumeAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = resumeAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "resumeAccount failed: unknown result") - - def removeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_removeAccount(accountId) - return self.recv_removeAccount() - - def send_removeAccount(self, accountId): - self._oprot.writeMessageBegin('removeAccount', TMessageType.CALL, self._seqid) - args = removeAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_removeAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = removeAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "removeAccount failed: unknown result") - - def listActiveAccounts(self): - self.send_listActiveAccounts() - return self.recv_listActiveAccounts() - - def send_listActiveAccounts(self): - self._oprot.writeMessageBegin('listActiveAccounts', TMessageType.CALL, self._seqid) - args = listActiveAccounts_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listActiveAccounts(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listActiveAccounts_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listActiveAccounts failed: unknown result") - - def addProxy(self, proxyId, proxyData): - """ - Parameters: - - proxyId - - proxyData - - """ - self.send_addProxy(proxyId, proxyData) - return self.recv_addProxy() - - def send_addProxy(self, proxyId, proxyData): - self._oprot.writeMessageBegin('addProxy', TMessageType.CALL, self._seqid) - args = addProxy_args() - args.proxyId = proxyId - args.proxyData = proxyData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - 
iprot.readMessageEnd() - raise x - result = addProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addProxy failed: unknown result") - - def suspendProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_suspendProxy(proxyId) - return self.recv_suspendProxy() - - def send_suspendProxy(self, proxyId): - self._oprot.writeMessageBegin('suspendProxy', TMessageType.CALL, self._seqid) - args = suspendProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_suspendProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = suspendProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "suspendProxy failed: unknown result") - - def resumeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_resumeProxy(proxyId) - return self.recv_resumeProxy() - - def send_resumeProxy(self, proxyId): - self._oprot.writeMessageBegin('resumeProxy', TMessageType.CALL, self._seqid) - args = resumeProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_resumeProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = resumeProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "resumeProxy failed: unknown result") - - def removeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_removeProxy(proxyId) - return self.recv_removeProxy() - - def send_removeProxy(self, proxyId): - self._oprot.writeMessageBegin('removeProxy', TMessageType.CALL, self._seqid) - args = removeProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_removeProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = removeProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "removeProxy failed: unknown result") - - def listActiveProxies(self): - self.send_listActiveProxies() - return self.recv_listActiveProxies() - - def send_listActiveProxies(self): - self._oprot.writeMessageBegin('listActiveProxies', 
TMessageType.CALL, self._seqid) - args = listActiveProxies_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listActiveProxies(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listActiveProxies_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listActiveProxies failed: unknown result") - - -class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor): - def __init__(self, handler): - pangramia.base_service.BaseService.Processor.__init__(self, handler) - self._processMap["addAccountPair"] = Processor.process_addAccountPair - self._processMap["getPair"] = Processor.process_getPair - self._processMap["pair"] = Processor.process_pair - self._processMap["unpair"] = Processor.process_unpair - self._processMap["listAccountPairs"] = Processor.process_listAccountPairs - self._processMap["addAccount"] = Processor.process_addAccount - self._processMap["suspendAccount"] = Processor.process_suspendAccount - self._processMap["resumeAccount"] = Processor.process_resumeAccount - self._processMap["removeAccount"] = Processor.process_removeAccount - self._processMap["listActiveAccounts"] = Processor.process_listActiveAccounts - self._processMap["addProxy"] = Processor.process_addProxy - self._processMap["suspendProxy"] = Processor.process_suspendProxy - self._processMap["resumeProxy"] = Processor.process_resumeProxy - self._processMap["removeProxy"] = Processor.process_removeProxy - self._processMap["listActiveProxies"] = Processor.process_listActiveProxies - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_addAccountPair(self, seqid, iprot, oprot): - args = addAccountPair_args() - args.read(iprot) - iprot.readMessageEnd() - result = addAccountPair_result() - try: - result.success = self._handler.addAccountPair(args.accountId, args.proxyId, args.machineId, args.proxyData, args.accountData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = 
TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addAccountPair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getPair(self, seqid, iprot, oprot): - args = getPair_args() - args.read(iprot) - iprot.readMessageEnd() - result = getPair_result() - try: - result.success = self._handler.getPair(args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getPair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_pair(self, seqid, iprot, oprot): - args = pair_args() - args.read(iprot) - iprot.readMessageEnd() - result = pair_result() - try: - result.success = self._handler.pair(args.accountId, args.proxyId, args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("pair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_unpair(self, seqid, iprot, oprot): - args = unpair_args() - args.read(iprot) - iprot.readMessageEnd() - result = unpair_result() - try: - result.success = self._handler.unpair(args.accountId, args.proxyId, args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("unpair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listAccountPairs(self, seqid, iprot, oprot): - args = listAccountPairs_args() - args.read(iprot) - iprot.readMessageEnd() - result = listAccountPairs_result() - try: - result.success = 
self._handler.listAccountPairs(args.filter) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listAccountPairs", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_addAccount(self, seqid, iprot, oprot): - args = addAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = addAccount_result() - try: - result.success = self._handler.addAccount(args.accountId, args.accountData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_suspendAccount(self, seqid, iprot, oprot): - args = suspendAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = suspendAccount_result() - try: - result.success = self._handler.suspendAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("suspendAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_resumeAccount(self, seqid, iprot, oprot): - args = resumeAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = resumeAccount_result() - try: - result.success = self._handler.resumeAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - 
result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("resumeAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_removeAccount(self, seqid, iprot, oprot): - args = removeAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = removeAccount_result() - try: - result.success = self._handler.removeAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("removeAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listActiveAccounts(self, seqid, iprot, oprot): - args = listActiveAccounts_args() - args.read(iprot) - iprot.readMessageEnd() - result = listActiveAccounts_result() - try: - result.success = self._handler.listActiveAccounts() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listActiveAccounts", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_addProxy(self, seqid, iprot, oprot): - args = addProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = addProxy_result() - try: - result.success = self._handler.addProxy(args.proxyId, args.proxyData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - 
oprot.writeMessageBegin("addProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_suspendProxy(self, seqid, iprot, oprot): - args = suspendProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = suspendProxy_result() - try: - result.success = self._handler.suspendProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("suspendProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_resumeProxy(self, seqid, iprot, oprot): - args = resumeProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = resumeProxy_result() - try: - result.success = self._handler.resumeProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("resumeProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_removeProxy(self, seqid, iprot, oprot): - args = removeProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = removeProxy_result() - try: - result.success = self._handler.removeProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("removeProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listActiveProxies(self, seqid, iprot, oprot): - args = listActiveProxies_args() - args.read(iprot) - iprot.readMessageEnd() - result = listActiveProxies_result() - try: - result.success = self._handler.listActiveProxies() - msg_type = TMessageType.REPLY - except 
TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listActiveProxies", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - -# HELPER FUNCTIONS AND STRUCTURES - - -class addAccountPair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None, proxyData=None, accountData=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - self.proxyData = proxyData - self.accountData = accountData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - self.proxyData.read(iprot) - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRUCT: - self.accountData = pangramia.yt.common.ttypes.AccountData() - self.accountData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccountPair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 4) - self.proxyData.write(oprot) - 
oprot.writeFieldEnd() - if self.accountData is not None: - oprot.writeFieldBegin('accountData', TType.STRUCT, 5) - self.accountData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccountPair_args) -addAccountPair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 - (4, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ), # 4 - (5, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ), # 5 -) - - -class addAccountPair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccountPair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccountPair_result) -addAccountPair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getPair_args(object): - """ 
- Attributes: - - machineId - - """ - - - def __init__(self, machineId=None,): - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getPair_args') - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 1) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getPair_args) -getPair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'machineId', 'UTF8', None, ), # 1 -) - - -class getPair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.AccountPairWithState() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getPair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - 
oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getPair_result) -getPair_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.AccountPairWithState, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class pair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('pair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(pair_args) -pair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 -) - - -class pair_result(object): - 
""" - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('pair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(pair_result) -pair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class unpair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if 
sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('unpair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(unpair_args) -unpair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 -) - - -class unpair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('unpair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in 
self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(unpair_result) -unpair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listAccountPairs_args(object): - """ - Attributes: - - filter - - """ - - - def __init__(self, filter=None,): - self.filter = filter - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.I32: - self.filter = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listAccountPairs_args') - if self.filter is not None: - oprot.writeFieldBegin('filter', TType.I32, 1) - oprot.writeI32(self.filter) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listAccountPairs_args) -listAccountPairs_args.thrift_spec = ( - None, # 0 - (1, TType.I32, 'filter', None, None, ), # 1 -) - - -class listAccountPairs_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype3, _size0) = iprot.readListBegin() - for _i4 in range(_size0): - _elem5 = pangramia.yt.common.ttypes.AccountPairWithState() - _elem5.read(iprot) - self.success.append(_elem5) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not 
None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listAccountPairs_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRUCT, len(self.success)) - for iter6 in self.success: - iter6.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listAccountPairs_result) -listAccountPairs_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRUCT, [pangramia.yt.common.ttypes.AccountPairWithState, None], False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addAccount_args(object): - """ - Attributes: - - accountId - - accountData - - """ - - - def __init__(self, accountId=None, accountData=None,): - self.accountId = accountId - self.accountData = accountData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.accountData = pangramia.yt.common.ttypes.AccountData() - self.accountData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.accountData is not None: - oprot.writeFieldBegin('accountData', TType.STRUCT, 2) - self.accountData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccount_args) 
-addAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ), # 2 -) - - -class addAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccount_result) -addAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class suspendAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not 
None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendAccount_args) -suspendAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class suspendAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendAccount_result) -suspendAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class resumeAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = 
accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeAccount_args) -resumeAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class resumeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in 
self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeAccount_result) -resumeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class removeAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_args) -removeAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class removeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not 
None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_result) -removeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listActiveAccounts_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_args) -listActiveAccounts_args.thrift_spec = ( -) - - -class listActiveAccounts_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype10, _size7) = iprot.readListBegin() - for _i11 in range(_size7): - _elem12 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem12) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = 
pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter13 in self.success: - oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_result) -listActiveAccounts_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addProxy_args(object): - """ - Attributes: - - proxyId - - proxyData - - """ - - - def __init__(self, proxyId=None, proxyData=None,): - self.proxyId = proxyId - self.proxyData = proxyData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - self.proxyData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 2) - self.proxyData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - 
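The hunks above delete the build-tree copy of the Thrift-generated YTAccountsOpService module (the same autogenerated file is also removed at build/lib/pangramia/... earlier in this patch). Every RPC in it follows one generated pattern: the Processor's process_<method> reads a <method>_args struct, invokes the user-supplied handler, and writes back a <method>_result struct whose fields carry either the return value (success) or one of the declared PBServiceException/PBUserException errors. A minimal sketch of serving such a generated Processor follows; the handler class name, host, and port are illustrative assumptions, and the thrift package plus the generated pangramia package are presumed importable:

from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer

from pangramia.yt.admin_ops import YTAccountsOpService

class AccountsOpHandler(object):
    # Illustrative stub only; a real handler would back these with storage.
    def addProxy(self, proxyId, proxyData):
        return True   # maps to addProxy_result.success (bool)

    def listActiveProxies(self):
        return []     # maps to listActiveProxies_result.success (list<string>)

# The generated Processor dispatches each incoming message to the matching
# process_<method> and handles all marshalling; only the handler is hand-written.
server = TServer.TSimpleServer(
    YTAccountsOpService.Processor(AccountsOpHandler()),
    TSocket.TServerSocket(host='127.0.0.1', port=9090),   # hypothetical endpoint
    TTransport.TBufferedTransportFactory(),
    TBinaryProtocol.TBinaryProtocolFactory(),
)
server.serve()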
iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                        self.success.append(_elem19)
-                    iprot.readListEnd()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 1:
-                if ftype == TType.STRUCT:
-                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRUCT:
-                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('listActiveProxies_result')
-        if self.success is not None:
-            oprot.writeFieldBegin('success', TType.LIST, 0)
-            oprot.writeListBegin(TType.STRING, len(self.success))
-            for iter20 in self.success:
-                oprot.writeString(iter20.encode('utf-8') if sys.version_info[0] == 2 else iter20)
-            oprot.writeListEnd()
-            oprot.writeFieldEnd()
-        if self.serviceExp is not None:
-            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
-            self.serviceExp.write(oprot)
-            oprot.writeFieldEnd()
-        if self.userExp is not None:
-            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
-            self.userExp.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(listActiveProxies_result)
-listActiveProxies_result.thrift_spec = (
-    (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2
-)
-fix_spec(all_structs)
-del all_structs
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py
deleted file mode 100644
index 00b4776..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ['ttypes', 'constants', 'YTAccountsOpService']
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py
deleted file mode 100644
index 09a78b3..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-from .ttypes import *
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py
deleted file mode 100644
index de828aa..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-import pangramia.yt.common.ttypes
-import pangramia.yt.exceptions.ttypes
-import pangramia.base_service.ttypes
-
-from thrift.transport import TTransport
-all_structs = []
-fix_spec(all_structs)
-del all_structs
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py
deleted file mode 100644
index adefd8e..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ['ttypes', 'constants']
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py
deleted file mode 100644
index 09a78b3..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-from .ttypes import *
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py
deleted file mode 100644
index a23d813..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py
+++ /dev/null
@@ -1,905 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-
-from thrift.transport import TTransport
-all_structs = []
-
-
-class JobState(object):
-    SUCCESS = 0
-    FAIL = 1
-    BOT_FORBIDDEN_ON_URL_ACCESS = 2
-    BOT_FORBIDDEN_ON_FILE_DOWNLOAD = 3
-    BOT_CAPTCHA = 4
-    BOT_AUTH_RELOGIN_REQUIRED = 5
-    BOT_AUTH_SMS_REQUIRED = 6
-    BOT_AUTH_DEVICE_QR_REQUIRED = 7
-    BOT_ACCOUNT_BANNED = 8
-    BOT_IP_BANNED = 9
-
-    _VALUES_TO_NAMES = {
-        0: "SUCCESS",
-        1: "FAIL",
-        2: "BOT_FORBIDDEN_ON_URL_ACCESS",
-        3: "BOT_FORBIDDEN_ON_FILE_DOWNLOAD",
-        4: "BOT_CAPTCHA",
-        5: "BOT_AUTH_RELOGIN_REQUIRED",
-        6: "BOT_AUTH_SMS_REQUIRED",
-        7: "BOT_AUTH_DEVICE_QR_REQUIRED",
-        8: "BOT_ACCOUNT_BANNED",
-        9: "BOT_IP_BANNED",
-    }
-
-    _NAMES_TO_VALUES = {
-        "SUCCESS": 0,
-        "FAIL": 1,
-        "BOT_FORBIDDEN_ON_URL_ACCESS": 2,
-        "BOT_FORBIDDEN_ON_FILE_DOWNLOAD": 3,
-        "BOT_CAPTCHA": 4,
-        "BOT_AUTH_RELOGIN_REQUIRED": 5,
-        "BOT_AUTH_SMS_REQUIRED": 6,
-        "BOT_AUTH_DEVICE_QR_REQUIRED": 7,
-        "BOT_ACCOUNT_BANNED": 8,
-        "BOT_IP_BANNED": 9,
-    }
-
-
-class TokenUpdateMode(object):
-
AUTOREFRESH_AND_REMAIN_ANONYMOUS = 0 - AUTOREFRESH_AND_ALLOW_AUTH = 1 - AUTOREFRESH_AND_ONLY_AUTH = 2 - CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH = 3 - CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS = 4 - CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH = 5 - AUTO = 6 - - _VALUES_TO_NAMES = { - 0: "AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 1: "AUTOREFRESH_AND_ALLOW_AUTH", - 2: "AUTOREFRESH_AND_ONLY_AUTH", - 3: "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH", - 4: "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 5: "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH", - 6: "AUTO", - } - - _NAMES_TO_VALUES = { - "AUTOREFRESH_AND_REMAIN_ANONYMOUS": 0, - "AUTOREFRESH_AND_ALLOW_AUTH": 1, - "AUTOREFRESH_AND_ONLY_AUTH": 2, - "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH": 3, - "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS": 4, - "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH": 5, - "AUTO": 6, - } - - -class AccountPairState(object): - ACTIVE = 0 - PAUSED = 1 - REMOVED = 2 - IN_PROGRESS = 3 - ALL = 4 - - _VALUES_TO_NAMES = { - 0: "ACTIVE", - 1: "PAUSED", - 2: "REMOVED", - 3: "IN_PROGRESS", - 4: "ALL", - } - - _NAMES_TO_VALUES = { - "ACTIVE": 0, - "PAUSED": 1, - "REMOVED": 2, - "IN_PROGRESS": 3, - "ALL": 4, - } - - -class JobTokenData(object): - """ - Attributes: - - infoJson - - ytdlpCommand - - socks - - jobId - - url - - cookiesBlob - - """ - - - def __init__(self, infoJson=None, ytdlpCommand=None, socks=None, jobId=None, url=None, cookiesBlob=None,): - self.infoJson = infoJson - self.ytdlpCommand = ytdlpCommand - self.socks = socks - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.infoJson = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.socks = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobTokenData') - if self.infoJson is not None: - oprot.writeFieldBegin('infoJson', TType.STRING, 1) - oprot.writeString(self.infoJson.encode('utf-8') if 
sys.version_info[0] == 2 else self.infoJson) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 2) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.socks is not None: - oprot.writeFieldBegin('socks', TType.STRING, 3) - oprot.writeString(self.socks.encode('utf-8') if sys.version_info[0] == 2 else self.socks) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 5) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 6) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountData(object): - """ - Attributes: - - username - - password - - countryCode - - """ - - - def __init__(self, username=None, password=None, countryCode=None,): - self.username = username - self.password = password - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.username = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.password = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountData') - if self.username is not None: - oprot.writeFieldBegin('username', TType.STRING, 1) - oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username) - oprot.writeFieldEnd() - if self.password is not None: - oprot.writeFieldBegin('password', TType.STRING, 2) - oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 3) - 
oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.username is None: - raise TProtocolException(message='Required field username is unset!') - if self.password is None: - raise TProtocolException(message='Required field password is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class ProxyData(object): - """ - Attributes: - - proxyUrl - - countryCode - - """ - - - def __init__(self, proxyUrl=None, countryCode=None,): - self.proxyUrl = proxyUrl - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ProxyData') - if self.proxyUrl is not None: - oprot.writeFieldBegin('proxyUrl', TType.STRING, 1) - oprot.writeString(self.proxyUrl.encode('utf-8') if sys.version_info[0] == 2 else self.proxyUrl) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 2) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.proxyUrl is None: - raise TProtocolException(message='Required field proxyUrl is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountPairWithState(object): - """ - Attributes: - - accountId - - proxyId - - accountPairState - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, accountPairState=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.accountPairState = accountPairState - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - 
(fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.accountPairState = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountPairWithState') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.accountPairState is not None: - oprot.writeFieldBegin('accountPairState', TType.I32, 3) - oprot.writeI32(self.accountPairState) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 4) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.accountId is None: - raise TProtocolException(message='Required field accountId is unset!') - if self.proxyId is None: - raise TProtocolException(message='Required field proxyId is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class JobData(object): - """ - Attributes: - - jobId - - url - - cookiesBlob - - potoken - - visitorId - - ytdlpCommand - - createdTime - - telemetry - - state - - errorMessage - - socks5Id - - """ - - - def __init__(self, jobId=None, url=None, cookiesBlob=None, potoken=None, visitorId=None, ytdlpCommand=None, createdTime=None, telemetry=None, state=None, errorMessage=None, socks5Id=None,): - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - self.potoken = potoken - self.visitorId = visitorId - self.ytdlpCommand = ytdlpCommand - self.createdTime = createdTime - self.telemetry = telemetry - self.state = state - self.errorMessage = errorMessage - self.socks5Id = socks5Id - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid 
== 1: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.potoken = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.visitorId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.createdTime = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 8: - if ftype == TType.MAP: - self.telemetry = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.telemetry[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - elif fid == 9: - if ftype == TType.I32: - self.state = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 10: - if ftype == TType.STRING: - self.errorMessage = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 11: - if ftype == TType.STRING: - self.socks5Id = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobData') - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 1) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 2) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 3) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - if self.potoken is not None: - oprot.writeFieldBegin('potoken', TType.STRING, 4) - oprot.writeString(self.potoken.encode('utf-8') if sys.version_info[0] == 2 else self.potoken) - oprot.writeFieldEnd() - if self.visitorId is not None: - oprot.writeFieldBegin('visitorId', TType.STRING, 5) - 
oprot.writeString(self.visitorId.encode('utf-8') if sys.version_info[0] == 2 else self.visitorId) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 6) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.createdTime is not None: - oprot.writeFieldBegin('createdTime', TType.STRING, 7) - oprot.writeString(self.createdTime.encode('utf-8') if sys.version_info[0] == 2 else self.createdTime) - oprot.writeFieldEnd() - if self.telemetry is not None: - oprot.writeFieldBegin('telemetry', TType.MAP, 8) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.telemetry)) - for kiter7, viter8 in self.telemetry.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - if self.state is not None: - oprot.writeFieldBegin('state', TType.I32, 9) - oprot.writeI32(self.state) - oprot.writeFieldEnd() - if self.errorMessage is not None: - oprot.writeFieldBegin('errorMessage', TType.STRING, 10) - oprot.writeString(self.errorMessage.encode('utf-8') if sys.version_info[0] == 2 else self.errorMessage) - oprot.writeFieldEnd() - if self.socks5Id is not None: - oprot.writeFieldBegin('socks5Id', TType.STRING, 11) - oprot.writeString(self.socks5Id.encode('utf-8') if sys.version_info[0] == 2 else self.socks5Id) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.jobId is None: - raise TProtocolException(message='Required field jobId is unset!') - if self.url is None: - raise TProtocolException(message='Required field url is unset!') - if self.cookiesBlob is None: - raise TProtocolException(message='Required field cookiesBlob is unset!') - if self.potoken is None: - raise TProtocolException(message='Required field potoken is unset!') - if self.visitorId is None: - raise TProtocolException(message='Required field visitorId is unset!') - if self.ytdlpCommand is None: - raise TProtocolException(message='Required field ytdlpCommand is unset!') - if self.createdTime is None: - raise TProtocolException(message='Required field createdTime is unset!') - if self.telemetry is None: - raise TProtocolException(message='Required field telemetry is unset!') - if self.state is None: - raise TProtocolException(message='Required field state is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionPagination(object): - """ - Attributes: - - hasNext - - totalCount - - page - - pageSize - - """ - - - def __init__(self, hasNext=None, totalCount=None, page=None, pageSize=None,): - self.hasNext = hasNext - self.totalCount = totalCount - self.page = page - self.pageSize = pageSize - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.BOOL: - 
self.hasNext = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.totalCount = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.page = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.I32: - self.pageSize = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionPagination') - if self.hasNext is not None: - oprot.writeFieldBegin('hasNext', TType.BOOL, 1) - oprot.writeBool(self.hasNext) - oprot.writeFieldEnd() - if self.totalCount is not None: - oprot.writeFieldBegin('totalCount', TType.I32, 2) - oprot.writeI32(self.totalCount) - oprot.writeFieldEnd() - if self.page is not None: - oprot.writeFieldBegin('page', TType.I32, 3) - oprot.writeI32(self.page) - oprot.writeFieldEnd() - if self.pageSize is not None: - oprot.writeFieldBegin('pageSize', TType.I32, 4) - oprot.writeI32(self.pageSize) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.hasNext is None: - raise TProtocolException(message='Required field hasNext is unset!') - if self.totalCount is None: - raise TProtocolException(message='Required field totalCount is unset!') - if self.page is None: - raise TProtocolException(message='Required field page is unset!') - if self.pageSize is None: - raise TProtocolException(message='Required field pageSize is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionJobData(object): - """ - Attributes: - - items - - pagination - - """ - - - def __init__(self, items=None, pagination=None,): - self.items = items - self.pagination = pagination - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.LIST: - self.items = [] - (_etype12, _size9) = iprot.readListBegin() - for _i13 in range(_size9): - _elem14 = JobData() - _elem14.read(iprot) - self.items.append(_elem14) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.pagination = RichCollectionPagination() - self.pagination.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionJobData') - if self.items is not None: - oprot.writeFieldBegin('items', TType.LIST, 1) - oprot.writeListBegin(TType.STRUCT, len(self.items)) - for iter15 in self.items: - iter15.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if 
self.pagination is not None:
-            oprot.writeFieldBegin('pagination', TType.STRUCT, 2)
-            self.pagination.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        if self.items is None:
-            raise TProtocolException(message='Required field items is unset!')
-        if self.pagination is None:
-            raise TProtocolException(message='Required field pagination is unset!')
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(JobTokenData)
-JobTokenData.thrift_spec = (
-    None, # 0
-    (1, TType.STRING, 'infoJson', 'UTF8', None, ), # 1
-    (2, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 2
-    (3, TType.STRING, 'socks', 'UTF8', None, ), # 3
-    (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4
-    (5, TType.STRING, 'url', 'UTF8', None, ), # 5
-    (6, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 6
-)
-all_structs.append(AccountData)
-AccountData.thrift_spec = (
-    None, # 0
-    (1, TType.STRING, 'username', 'UTF8', None, ), # 1
-    (2, TType.STRING, 'password', 'UTF8', None, ), # 2
-    (3, TType.STRING, 'countryCode', 'UTF8', None, ), # 3
-)
-all_structs.append(ProxyData)
-ProxyData.thrift_spec = (
-    None, # 0
-    (1, TType.STRING, 'proxyUrl', 'UTF8', None, ), # 1
-    (2, TType.STRING, 'countryCode', 'UTF8', None, ), # 2
-)
-all_structs.append(AccountPairWithState)
-AccountPairWithState.thrift_spec = (
-    None, # 0
-    (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1
-    (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2
-    (3, TType.I32, 'accountPairState', None, None, ), # 3
-    (4, TType.STRING, 'machineId', 'UTF8', None, ), # 4
-)
-all_structs.append(JobData)
-JobData.thrift_spec = (
-    None, # 0
-    (1, TType.STRING, 'jobId', 'UTF8', None, ), # 1
-    (2, TType.STRING, 'url', 'UTF8', None, ), # 2
-    (3, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 3
-    (4, TType.STRING, 'potoken', 'UTF8', None, ), # 4
-    (5, TType.STRING, 'visitorId', 'UTF8', None, ), # 5
-    (6, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 6
-    (7, TType.STRING, 'createdTime', 'UTF8', None, ), # 7
-    (8, TType.MAP, 'telemetry', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 8
-    (9, TType.I32, 'state', None, None, ), # 9
-    (10, TType.STRING, 'errorMessage', 'UTF8', None, ), # 10
-    (11, TType.STRING, 'socks5Id', 'UTF8', None, ), # 11
-)
-all_structs.append(RichCollectionPagination)
-RichCollectionPagination.thrift_spec = (
-    None, # 0
-    (1, TType.BOOL, 'hasNext', None, None, ), # 1
-    (2, TType.I32, 'totalCount', None, None, ), # 2
-    (3, TType.I32, 'page', None, None, ), # 3
-    (4, TType.I32, 'pageSize', None, None, ), # 4
-)
-all_structs.append(RichCollectionJobData)
-RichCollectionJobData.thrift_spec = (
-    None, # 0
-    (1, TType.LIST, 'items', (TType.STRUCT, [JobData, None], False), None, ), # 1
-    (2, TType.STRUCT, 'pagination', [RichCollectionPagination, None], None, ), # 2
-)
-fix_spec(all_structs)
-del all_structs
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py
deleted file mode 100644
index adefd8e..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ['ttypes', 'constants']
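For orientation, the exception and service types deleted in the hunks below were the wire surface of the generated YTTokenOpService client, and were consumed roughly as follows. A minimal call-site sketch only, not part of the patch: it assumes the thrift runtime (pip install thrift) and the generated pangramia packages are importable, and the host, port, and account id are hypothetical.

# Illustrative sketch; names below marked hypothetical are not from this repo.
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol

from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException

# Hypothetical endpoint; the deployed host/port are not stated in this patch.
transport = TTransport.TBufferedTransport(TSocket.TSocket('127.0.0.1', 9090))
client = YTTokenOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))
transport.open()
try:
    # getLatestToken is one of the five Iface methods removed further below.
    token = client.getLatestToken('account-001')  # hypothetical account id
except PBUserException as exp:
    # Caller-side failure; both exception types carry message/errorCode/context.
    print('user error:', exp.message, exp.errorCode)
except PBServiceException as exp:
    # Server-side failure; context is a string-to-string map of details.
    print('service error:', exp.message, exp.context)
finally:
    transport.close()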
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py deleted file mode 100644 index e930913..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py +++ /dev/null @@ -1,254 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class PBServiceException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBServiceException, self).__setattr__('message', message) - super(PBServiceException, self).__setattr__('errorCode', errorCode) - super(PBServiceException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - 
context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBServiceException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter7, viter8 in self.context.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class PBUserException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBUserException, self).__setattr__('message', message) - super(PBUserException, self).__setattr__('errorCode', errorCode) - super(PBUserException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype10, _vtype11, _size9) = iprot.readMapBegin() - for _i13 in range(_size9): - _key14 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val15 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key14] = _val15 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - 
iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBUserException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter16, viter17 in self.context.items(): - oprot.writeString(kiter16.encode('utf-8') if sys.version_info[0] == 2 else kiter16) - oprot.writeString(viter17.encode('utf-8') if sys.version_info[0] == 2 else viter17) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(PBServiceException) -PBServiceException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -all_structs.append(PBUserException) -PBUserException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py deleted file mode 100644 index 8589aee..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py +++ /dev/null @@ -1,1360 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - pass 
- - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - self.send_getOrRefreshTokenWithReport(accountId, oldUrl, status, details, jobId, updateType, url) - return self.recv_getOrRefreshTokenWithReport() - - def send_getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshTokenWithReport', TMessageType.CALL, self._seqid) - args = getOrRefreshTokenWithReport_args() - args.accountId = accountId - args.oldUrl = oldUrl - args.status = status - args.details = details - args.jobId = jobId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshTokenWithReport(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshTokenWithReport_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshTokenWithReport failed: unknown result") - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_getOrRefreshToken(accountId, updateType, url) - return self.recv_getOrRefreshToken() - - def send_getOrRefreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshToken', TMessageType.CALL, self._seqid) - args = getOrRefreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshToken failed: unknown result") - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_getLatestToken(accountId) - return self.recv_getLatestToken() - - def send_getLatestToken(self, accountId): - 
self._oprot.writeMessageBegin('getLatestToken', TMessageType.CALL, self._seqid)
-        args = getLatestToken_args()
-        args.accountId = accountId
-        args.write(self._oprot)
-        self._oprot.writeMessageEnd()
-        self._oprot.trans.flush()
-
-    def recv_getLatestToken(self):
-        iprot = self._iprot
-        (fname, mtype, rseqid) = iprot.readMessageBegin()
-        if mtype == TMessageType.EXCEPTION:
-            x = TApplicationException()
-            x.read(iprot)
-            iprot.readMessageEnd()
-            raise x
-        result = getLatestToken_result()
-        result.read(iprot)
-        iprot.readMessageEnd()
-        if result.success is not None:
-            return result.success
-        if result.serviceExp is not None:
-            raise result.serviceExp
-        if result.userExp is not None:
-            raise result.userExp
-        raise TApplicationException(TApplicationException.MISSING_RESULT, "getLatestToken failed: unknown result")
-
-    def refreshToken(self, accountId, updateType, url):
-        """
-        Parameters:
-         - accountId
-         - updateType
-         - url
-
-        """
-        self.send_refreshToken(accountId, updateType, url)
-        return self.recv_refreshToken()
-
-    def send_refreshToken(self, accountId, updateType, url):
-        self._oprot.writeMessageBegin('refreshToken', TMessageType.CALL, self._seqid)
-        args = refreshToken_args()
-        args.accountId = accountId
-        args.updateType = updateType
-        args.url = url
-        args.write(self._oprot)
-        self._oprot.writeMessageEnd()
-        self._oprot.trans.flush()
-
-    def recv_refreshToken(self):
-        iprot = self._iprot
-        (fname, mtype, rseqid) = iprot.readMessageBegin()
-        if mtype == TMessageType.EXCEPTION:
-            x = TApplicationException()
-            x.read(iprot)
-            iprot.readMessageEnd()
-            raise x
-        result = refreshToken_result()
-        result.read(iprot)
-        iprot.readMessageEnd()
-        if result.success is not None:
-            return result.success
-        if result.serviceExp is not None:
-            raise result.serviceExp
-        if result.userExp is not None:
-            raise result.userExp
-        raise TApplicationException(TApplicationException.MISSING_RESULT, "refreshToken failed: unknown result")
-
-    def reportState(self, url, status, details, jobId):
-        """
-        Parameters:
-         - url
-         - status
-         - details
-         - jobId
-
-        """
-        self.send_reportState(url, status, details, jobId)
-        return self.recv_reportState()
-
-    def send_reportState(self, url, status, details, jobId):
-        self._oprot.writeMessageBegin('reportState', TMessageType.CALL, self._seqid)
-        args = reportState_args()
-        args.url = url
-        args.status = status
-        args.details = details
-        args.jobId = jobId
-        args.write(self._oprot)
-        self._oprot.writeMessageEnd()
-        self._oprot.trans.flush()
-
-    def recv_reportState(self):
-        iprot = self._iprot
-        (fname, mtype, rseqid) = iprot.readMessageBegin()
-        if mtype == TMessageType.EXCEPTION:
-            x = TApplicationException()
-            x.read(iprot)
-            iprot.readMessageEnd()
-            raise x
-        result = reportState_result()
-        result.read(iprot)
-        iprot.readMessageEnd()
-        if result.success is not None:
-            return result.success
-        if result.serviceExp is not None:
-            raise result.serviceExp
-        if result.userExp is not None:
-            raise result.userExp
-        raise TApplicationException(TApplicationException.MISSING_RESULT, "reportState failed: unknown result")
-
-
-class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor):
-    def __init__(self, handler):
-        pangramia.base_service.BaseService.Processor.__init__(self, handler)
-        self._processMap["getOrRefreshTokenWithReport"] = Processor.process_getOrRefreshTokenWithReport
-        self._processMap["getOrRefreshToken"] = Processor.process_getOrRefreshToken
-        self._processMap["getLatestToken"] = Processor.process_getLatestToken
-        self._processMap["refreshToken"] = Processor.process_refreshToken
-        self._processMap["reportState"] = Processor.process_reportState
-        self._on_message_begin = None
-
-    def on_message_begin(self, func):
-        self._on_message_begin = func
-
-    def process(self, iprot, oprot):
-        (name, type, seqid) = iprot.readMessageBegin()
-        if self._on_message_begin:
-            self._on_message_begin(name, type, seqid)
-        if name not in self._processMap:
-            iprot.skip(TType.STRUCT)
-            iprot.readMessageEnd()
-            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
-            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
-            x.write(oprot)
-            oprot.writeMessageEnd()
-            oprot.trans.flush()
-            return
-        else:
-            self._processMap[name](self, seqid, iprot, oprot)
-        return True
-
-    def process_getOrRefreshTokenWithReport(self, seqid, iprot, oprot):
-        args = getOrRefreshTokenWithReport_args()
-        args.read(iprot)
-        iprot.readMessageEnd()
-        result = getOrRefreshTokenWithReport_result()
-        try:
-            result.success = self._handler.getOrRefreshTokenWithReport(args.accountId, args.oldUrl, args.status, args.details, args.jobId, args.updateType, args.url)
-            msg_type = TMessageType.REPLY
-        except TTransport.TTransportException:
-            raise
-        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
-            msg_type = TMessageType.REPLY
-            result.serviceExp = serviceExp
-        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
-            msg_type = TMessageType.REPLY
-            result.userExp = userExp
-        except TApplicationException as ex:
-            logging.exception('TApplication exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = ex
-        except Exception:
-            logging.exception('Unexpected exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
-        oprot.writeMessageBegin("getOrRefreshTokenWithReport", msg_type, seqid)
-        result.write(oprot)
-        oprot.writeMessageEnd()
-        oprot.trans.flush()
-
-    def process_getOrRefreshToken(self, seqid, iprot, oprot):
-        args = getOrRefreshToken_args()
-        args.read(iprot)
-        iprot.readMessageEnd()
-        result = getOrRefreshToken_result()
-        try:
-            result.success = self._handler.getOrRefreshToken(args.accountId, args.updateType, args.url)
-            msg_type = TMessageType.REPLY
-        except TTransport.TTransportException:
-            raise
-        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
-            msg_type = TMessageType.REPLY
-            result.serviceExp = serviceExp
-        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
-            msg_type = TMessageType.REPLY
-            result.userExp = userExp
-        except TApplicationException as ex:
-            logging.exception('TApplication exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = ex
-        except Exception:
-            logging.exception('Unexpected exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
-        oprot.writeMessageBegin("getOrRefreshToken", msg_type, seqid)
-        result.write(oprot)
-        oprot.writeMessageEnd()
-        oprot.trans.flush()
-
-    def process_getLatestToken(self, seqid, iprot, oprot):
-        args = getLatestToken_args()
-        args.read(iprot)
-        iprot.readMessageEnd()
-        result = getLatestToken_result()
-        try:
-            result.success = self._handler.getLatestToken(args.accountId)
-            msg_type = TMessageType.REPLY
-        except TTransport.TTransportException:
-            raise
-        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
-            msg_type = TMessageType.REPLY
-            result.serviceExp = serviceExp
-        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
-            msg_type = TMessageType.REPLY
-            result.userExp = userExp
-        except TApplicationException as ex:
-            logging.exception('TApplication exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = ex
-        except Exception:
-            logging.exception('Unexpected exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
-        oprot.writeMessageBegin("getLatestToken", msg_type, seqid)
-        result.write(oprot)
-        oprot.writeMessageEnd()
-        oprot.trans.flush()
-
-    def process_refreshToken(self, seqid, iprot, oprot):
-        args = refreshToken_args()
-        args.read(iprot)
-        iprot.readMessageEnd()
-        result = refreshToken_result()
-        try:
-            result.success = self._handler.refreshToken(args.accountId, args.updateType, args.url)
-            msg_type = TMessageType.REPLY
-        except TTransport.TTransportException:
-            raise
-        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
-            msg_type = TMessageType.REPLY
-            result.serviceExp = serviceExp
-        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
-            msg_type = TMessageType.REPLY
-            result.userExp = userExp
-        except TApplicationException as ex:
-            logging.exception('TApplication exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = ex
-        except Exception:
-            logging.exception('Unexpected exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
-        oprot.writeMessageBegin("refreshToken", msg_type, seqid)
-        result.write(oprot)
-        oprot.writeMessageEnd()
-        oprot.trans.flush()
-
-    def process_reportState(self, seqid, iprot, oprot):
-        args = reportState_args()
-        args.read(iprot)
-        iprot.readMessageEnd()
-        result = reportState_result()
-        try:
-            result.success = self._handler.reportState(args.url, args.status, args.details, args.jobId)
-            msg_type = TMessageType.REPLY
-        except TTransport.TTransportException:
-            raise
-        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
-            msg_type = TMessageType.REPLY
-            result.serviceExp = serviceExp
-        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
-            msg_type = TMessageType.REPLY
-            result.userExp = userExp
-        except TApplicationException as ex:
-            logging.exception('TApplication exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = ex
-        except Exception:
-            logging.exception('Unexpected exception in handler')
-            msg_type = TMessageType.EXCEPTION
-            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
-        oprot.writeMessageBegin("reportState", msg_type, seqid)
-        result.write(oprot)
-        oprot.writeMessageEnd()
-        oprot.trans.flush()
-
-# HELPER FUNCTIONS AND STRUCTURES
-
-
-class getOrRefreshTokenWithReport_args(object):
-    """
-    Attributes:
-     - accountId
-     - oldUrl
-     - status
-     - details
-     - jobId
-     - updateType
-     - url
-
-    """
-
-
-    def __init__(self, accountId=None, oldUrl=None, status=None, details=None, jobId=None, updateType= 6, url=None,):
-        self.accountId = accountId
-        self.oldUrl = oldUrl
-        self.status = status
-        self.details = details
-        self.jobId = jobId
-        self.updateType = updateType
-        self.url = url
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 1:
-                if ftype == TType.STRING:
-                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRING:
-                    self.oldUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 3:
-                if ftype == TType.I32:
-                    self.status = iprot.readI32()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 4:
-                if ftype == TType.STRING:
-                    self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 5:
-                if ftype == TType.STRING:
-                    self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 6:
-                if ftype == TType.I32:
-                    self.updateType = iprot.readI32()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 7:
-                if ftype == TType.STRING:
-                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('getOrRefreshTokenWithReport_args')
-        if self.accountId is not None:
-            oprot.writeFieldBegin('accountId', TType.STRING, 1)
-            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
-            oprot.writeFieldEnd()
-        if self.oldUrl is not None:
-            oprot.writeFieldBegin('oldUrl', TType.STRING, 2)
-            oprot.writeString(self.oldUrl.encode('utf-8') if sys.version_info[0] == 2 else self.oldUrl)
-            oprot.writeFieldEnd()
-        if self.status is not None:
-            oprot.writeFieldBegin('status', TType.I32, 3)
-            oprot.writeI32(self.status)
-            oprot.writeFieldEnd()
-        if self.details is not None:
-            oprot.writeFieldBegin('details', TType.STRING, 4)
-            oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details)
-            oprot.writeFieldEnd()
-        if self.jobId is not None:
-            oprot.writeFieldBegin('jobId', TType.STRING, 5)
-            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
-            oprot.writeFieldEnd()
-        if self.updateType is not None:
-            oprot.writeFieldBegin('updateType', TType.I32, 6)
-            oprot.writeI32(self.updateType)
-            oprot.writeFieldEnd()
-        if self.url is not None:
-            oprot.writeFieldBegin('url', TType.STRING, 7)
-            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(getOrRefreshTokenWithReport_args)
-getOrRefreshTokenWithReport_args.thrift_spec = (
-    None,  # 0
-    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
-    (2, TType.STRING, 'oldUrl', 'UTF8', None, ),  # 2
-    (3, TType.I32, 'status', None, None, ),  # 3
-    (4, TType.STRING, 'details', 'UTF8', None, ),  # 4
-    (5, TType.STRING, 'jobId', 'UTF8', None, ),  # 5
-    (6, TType.I32, 'updateType', None, 6, ),  # 6
-    (7, TType.STRING, 'url', 'UTF8', None, ),  # 7
-)
-
-
-class getOrRefreshTokenWithReport_result(object):
-    """
-    Attributes:
-     - success
-     - serviceExp
-     - userExp
-
-    """
-
-
-    def __init__(self, success=None, serviceExp=None, userExp=None,):
-        self.success = success
-        self.serviceExp = serviceExp
-        self.userExp = userExp
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 0:
-                if ftype == TType.STRUCT:
-                    self.success = pangramia.yt.common.ttypes.JobTokenData()
-                    self.success.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 1:
-                if ftype == TType.STRUCT:
-                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRUCT:
-                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('getOrRefreshTokenWithReport_result')
-        if self.success is not None:
-            oprot.writeFieldBegin('success', TType.STRUCT, 0)
-            self.success.write(oprot)
-            oprot.writeFieldEnd()
-        if self.serviceExp is not None:
-            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
-            self.serviceExp.write(oprot)
-            oprot.writeFieldEnd()
-        if self.userExp is not None:
-            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
-            self.userExp.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(getOrRefreshTokenWithReport_result)
-getOrRefreshTokenWithReport_result.thrift_spec = (
-    (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ),  # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
-)
-
-
-class getOrRefreshToken_args(object):
-    """
-    Attributes:
-     - accountId
-     - updateType
-     - url
-
-    """
-
-
-    def __init__(self, accountId=None, updateType= 6, url=None,):
-        self.accountId = accountId
-        self.updateType = updateType
-        self.url = url
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 1:
-                if ftype == TType.STRING:
-                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.I32:
-                    self.updateType = iprot.readI32()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 3:
-                if ftype == TType.STRING:
-                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('getOrRefreshToken_args')
-        if self.accountId is not None:
-            oprot.writeFieldBegin('accountId', TType.STRING, 1)
-            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
-            oprot.writeFieldEnd()
-        if self.updateType is not None:
-            oprot.writeFieldBegin('updateType', TType.I32, 2)
-            oprot.writeI32(self.updateType)
-            oprot.writeFieldEnd()
-        if self.url is not None:
-            oprot.writeFieldBegin('url', TType.STRING, 3)
-            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(getOrRefreshToken_args)
-getOrRefreshToken_args.thrift_spec = (
-    None,  # 0
-    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
-    (2, TType.I32, 'updateType', None, 6, ),  # 2
-    (3, TType.STRING, 'url', 'UTF8', None, ),  # 3
-)
-
-
-class getOrRefreshToken_result(object):
-    """
-    Attributes:
-     - success
-     - serviceExp
-     - userExp
-
-    """
-
-
-    def __init__(self, success=None, serviceExp=None, userExp=None,):
-        self.success = success
-        self.serviceExp = serviceExp
-        self.userExp = userExp
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 0:
-                if ftype == TType.STRUCT:
-                    self.success = pangramia.yt.common.ttypes.JobTokenData()
-                    self.success.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 1:
-                if ftype == TType.STRUCT:
-                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRUCT:
-                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('getOrRefreshToken_result')
-        if self.success is not None:
-            oprot.writeFieldBegin('success', TType.STRUCT, 0)
-            self.success.write(oprot)
-            oprot.writeFieldEnd()
-        if self.serviceExp is not None:
-            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
-            self.serviceExp.write(oprot)
-            oprot.writeFieldEnd()
-        if self.userExp is not None:
-            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
-            self.userExp.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(getOrRefreshToken_result)
-getOrRefreshToken_result.thrift_spec = (
-    (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ),  # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
-)
-
-
-class getLatestToken_args(object):
-    """
-    Attributes:
-     - accountId
-
-    """
-
-
-    def __init__(self, accountId=None,):
-        self.accountId = accountId
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 1:
-                if ftype == TType.STRING:
-                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('getLatestToken_args')
-        if self.accountId is not None:
-            oprot.writeFieldBegin('accountId', TType.STRING, 1)
-            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(getLatestToken_args)
-getLatestToken_args.thrift_spec = (
-    None,  # 0
-    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
-)
-
-
-class getLatestToken_result(object):
-    """
-    Attributes:
-     - success
-     - serviceExp
-     - userExp
-
-    """
-
-
-    def __init__(self, success=None, serviceExp=None, userExp=None,):
-        self.success = success
-        self.serviceExp = serviceExp
-        self.userExp = userExp
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 0:
-                if ftype == TType.STRUCT:
-                    self.success = pangramia.yt.common.ttypes.JobTokenData()
-                    self.success.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 1:
-                if ftype == TType.STRUCT:
-                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRUCT:
-                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('getLatestToken_result')
-        if self.success is not None:
-            oprot.writeFieldBegin('success', TType.STRUCT, 0)
-            self.success.write(oprot)
-            oprot.writeFieldEnd()
-        if self.serviceExp is not None:
-            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
-            self.serviceExp.write(oprot)
-            oprot.writeFieldEnd()
-        if self.userExp is not None:
-            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
-            self.userExp.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(getLatestToken_result)
-getLatestToken_result.thrift_spec = (
-    (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ),  # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
-)
-
-
-class refreshToken_args(object):
-    """
-    Attributes:
-     - accountId
-     - updateType
-     - url
-
-    """
-
-
-    def __init__(self, accountId=None, updateType= 6, url=None,):
-        self.accountId = accountId
-        self.updateType = updateType
-        self.url = url
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 1:
-                if ftype == TType.STRING:
-                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.I32:
-                    self.updateType = iprot.readI32()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 3:
-                if ftype == TType.STRING:
-                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('refreshToken_args')
-        if self.accountId is not None:
-            oprot.writeFieldBegin('accountId', TType.STRING, 1)
-            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
-            oprot.writeFieldEnd()
-        if self.updateType is not None:
-            oprot.writeFieldBegin('updateType', TType.I32, 2)
-            oprot.writeI32(self.updateType)
-            oprot.writeFieldEnd()
-        if self.url is not None:
-            oprot.writeFieldBegin('url', TType.STRING, 3)
-            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(refreshToken_args)
-refreshToken_args.thrift_spec = (
-    None,  # 0
-    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
-    (2, TType.I32, 'updateType', None, 6, ),  # 2
-    (3, TType.STRING, 'url', 'UTF8', None, ),  # 3
-)
-
-
-class refreshToken_result(object):
-    """
-    Attributes:
-     - success
-     - serviceExp
-     - userExp
-
-    """
-
-
-    def __init__(self, success=None, serviceExp=None, userExp=None,):
-        self.success = success
-        self.serviceExp = serviceExp
-        self.userExp = userExp
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 0:
-                if ftype == TType.STRUCT:
-                    self.success = pangramia.yt.common.ttypes.JobTokenData()
-                    self.success.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 1:
-                if ftype == TType.STRUCT:
-                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRUCT:
-                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('refreshToken_result')
-        if self.success is not None:
-            oprot.writeFieldBegin('success', TType.STRUCT, 0)
-            self.success.write(oprot)
-            oprot.writeFieldEnd()
-        if self.serviceExp is not None:
-            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
-            self.serviceExp.write(oprot)
-            oprot.writeFieldEnd()
-        if self.userExp is not None:
-            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
-            self.userExp.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(refreshToken_result)
-refreshToken_result.thrift_spec = (
-    (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ),  # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
-)
-
-
-class reportState_args(object):
-    """
-    Attributes:
-     - url
-     - status
-     - details
-     - jobId
-
-    """
-
-
-    def __init__(self, url=None, status=None, details=None, jobId=None,):
-        self.url = url
-        self.status = status
-        self.details = details
-        self.jobId = jobId
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 1:
-                if ftype == TType.STRING:
-                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.I32:
-                    self.status = iprot.readI32()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 3:
-                if ftype == TType.STRING:
-                    self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 4:
-                if ftype == TType.STRING:
-                    self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('reportState_args')
-        if self.url is not None:
-            oprot.writeFieldBegin('url', TType.STRING, 1)
-            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
-            oprot.writeFieldEnd()
-        if self.status is not None:
-            oprot.writeFieldBegin('status', TType.I32, 2)
-            oprot.writeI32(self.status)
-            oprot.writeFieldEnd()
-        if self.details is not None:
-            oprot.writeFieldBegin('details', TType.STRING, 3)
-            oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details)
-            oprot.writeFieldEnd()
-        if self.jobId is not None:
-            oprot.writeFieldBegin('jobId', TType.STRING, 4)
-            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(reportState_args)
-reportState_args.thrift_spec = (
-    None,  # 0
-    (1, TType.STRING, 'url', 'UTF8', None, ),  # 1
-    (2, TType.I32, 'status', None, None, ),  # 2
-    (3, TType.STRING, 'details', 'UTF8', None, ),  # 3
-    (4, TType.STRING, 'jobId', 'UTF8', None, ),  # 4
-)
-
-
-class reportState_result(object):
-    """
-    Attributes:
-     - success
-     - serviceExp
-     - userExp
-
-    """
-
-
-    def __init__(self, success=None, serviceExp=None, userExp=None,):
-        self.success = success
-        self.serviceExp = serviceExp
-        self.userExp = userExp
-
-    def read(self, iprot):
-        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
-            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
-            return
-        iprot.readStructBegin()
-        while True:
-            (fname, ftype, fid) = iprot.readFieldBegin()
-            if ftype == TType.STOP:
-                break
-            if fid == 0:
-                if ftype == TType.BOOL:
-                    self.success = iprot.readBool()
-                else:
-                    iprot.skip(ftype)
-            elif fid == 1:
-                if ftype == TType.STRUCT:
-                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            elif fid == 2:
-                if ftype == TType.STRUCT:
-                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
-                else:
-                    iprot.skip(ftype)
-            else:
-                iprot.skip(ftype)
-            iprot.readFieldEnd()
-        iprot.readStructEnd()
-
-    def write(self, oprot):
-        if oprot._fast_encode is not None and self.thrift_spec is not None:
-            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
-            return
-        oprot.writeStructBegin('reportState_result')
-        if self.success is not None:
-            oprot.writeFieldBegin('success', TType.BOOL, 0)
-            oprot.writeBool(self.success)
-            oprot.writeFieldEnd()
-        if self.serviceExp is not None:
-            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
-            self.serviceExp.write(oprot)
-            oprot.writeFieldEnd()
-        if self.userExp is not None:
-            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
-            self.userExp.write(oprot)
-            oprot.writeFieldEnd()
-        oprot.writeFieldStop()
-        oprot.writeStructEnd()
-
-    def validate(self):
-        return
-
-    def __repr__(self):
-        L = ['%s=%r' % (key, value)
-             for key, value in self.__dict__.items()]
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not (self == other)
-all_structs.append(reportState_result)
-reportState_result.thrift_spec = (
-    (0, TType.BOOL, 'success', None, None, ),  # 0
-    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
-    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
-)
-fix_spec(all_structs)
-del all_structs
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py
deleted file mode 100644
index e97f47d..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ['ttypes', 'constants', 'YTTokenOpService']
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py
deleted file mode 100644
index 09a78b3..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-from .ttypes import *
diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py
deleted file mode 100644
index de828aa..0000000
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.20.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-# options string: py
-#
-
-from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
-from thrift.protocol.TProtocol import TProtocolException
-from thrift.TRecursive import fix_spec
-
-import sys
-import pangramia.yt.common.ttypes
-import pangramia.yt.exceptions.ttypes
-import pangramia.base_service.ttypes
-
-from thrift.transport import TTransport
-all_structs = []
-fix_spec(all_structs)
-del all_structs
diff --git a/airflow/ytdlp-ops-auth/dist/yt_ops_services-1.6.2.dev0-py3.11.egg b/airflow/ytdlp-ops-auth/dist/yt_ops_services-1.6.2.dev0-py3.11.egg
deleted file mode 100644
index c3958abe4d01a3678d9553c047156c316c47563a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 187877
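For reference, the client half removed above follows the standard generated-Thrift RPC shape: each call serializes an *_args struct, flushes the transport, then blocks in the matching recv_* method until a *_result struct or a declared exception comes back. A minimal sketch of how a caller would typically have wired this client up is below; the host, port, and choice of buffered transport are illustrative assumptions, not details taken from this patch.

    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol

    from pangramia.yt.tokens_ops import YTTokenOpService
    from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException

    # Hypothetical endpoint; the patch does not specify host/port or transport.
    transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
    client = YTTokenOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))

    transport.open()
    try:
        # updateType=6 mirrors the generated default in getOrRefreshToken_args.
        token = client.getOrRefreshToken(accountId='acct-001', updateType=6,
                                         url='https://example.com/watch')
    except (PBServiceException, PBUserException):
        raise  # declared exceptions are re-raised by recv_getOrRefreshToken
    finally:
        transport.close()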
zHJuRudLPbL=uqOiZOEB@8>*EI#4jyo9&Sd;q&$F4gi6{-16#5SvW2n)Gm~rrbT2~Y zG{lsQ!+w2|+Z1^i_cOT4p(1u)34IOM)!JX4WpL-0%zP`{HQj=ZImu<9=a)n>HQlT( znjZb2(=9rwoN>~rp%4EP8^T>#@Vc3w$jC^cuIQr@jnUpTtdrZoA`CaqKTf<|_>bi% z#I-^|IroT6)yQV&zq4p#FnVJjU(U-^e;t9qt+N{fwuP6UXr_q2wOGpAr|M$Ve(j)} z?IqSWWQrtNp^Ra(DQ=^TnM>CkidzmKk7xh%KVym~8+u^EI5*%-cs<4e#HgfOl`b`e zZx2~b#g#yAIi*~lsJ`xVf~>(nP`=MZivhV{q-K7<_oK$i0@)Er-%AJ7UaK6nk|UyL1@e)$)5hHeuv+M(JaDzfdPt zrU9_({G-AezRFrsjjF6#A);EL*<#jNt+%2Ii}UmUK%#=@arX({a0N0vN6psB3eC4y zQ=~=!<}+Ti{1bU-pi1-l04QJhw5FSUP! zacPFxbX78vJ8p_VY&|P}if07kTcsGR_#%Mfh!aUL?TdA##x1XFQP8`1vDm!Aa)LD(k>; zFEi6pP5+1X&3fyhxE{JZd)xX@-A=N)ZOoXwj@ql2&}IY2f`N~~W7hOg-Xblcd9uG7 zt@X0s1-q5dG{HSwhj(pxcA|aFQFh?<;m4&in}#)xmRbCU1I3gfPP-|ymjCZU&L@JP zTyT?vV;ZktQdAIe5XmOsXeGHNX+deq0Hf4oipA$XNpCgU73Yq8Xahe=-M5j-geQXI&nfj?UJOrxuq0hYDAB~D> z2UXN&W8`OJinZ4yAn!rhY{)I!(2H%WeB5rF|1un_UcvbT8r= zo9<;-V5$Gx0c91ru@OfzAc^c)))3Joh{|uJ$#S{jPiU=Cu+{NW%E^*=TL4HNu4$Ix zvuYwgMy_qvV;!Q;it`rc!AQT((AKgJ)n%?}Z*A@q#BILe&C{(*|4%tpce zW}>g2Dqc-$Z1ljU#~=5FgLu2mYJ01}f$8{ngzuZ900C?My9RF=KhOy9fCznTC6+Id z?u*9BOJ=wiIXRAS^qALpyCuuMbp$5$-UBo35a=_8S*r~e;9^jcZP*e z76v=uE?GX0`iW}eeB6+e*c}H&NGg|pz1x|NdpOBW&l1#wMBm1bLzx%_aJr`zt2aJX zlRyBO*Td}p9nxaFO@>FqoC>HU9LHI<0yba$WbS|0eVM|sDk!S1Zq zTz3X-@2WU{99*<(ASW7FEYK~N94FU)BDlH(4@5NIMj-O2So=sYLrA9Ow;^bb1c{0E)eUrQ>E z?lCz`ViDyVE2#+Z3Eics<)t3cRUc%$OwvAB_BEX;H9`IwjR~U2bf|`V%5tcl?XM$x zYgdDvrh0F)&Ue;E#jr=SS7MgN6XkHaw-&26KLs9*Cc>BPR)#VPkDW_#y73VZ)ax@; zoY{CuE}$*lvJD&cr4F6SD$ePY)iGFlkEN8JTBSmFZAo|5m7kOUB?~egL~PXX{|B7Y zVOrcKIWQ*JE`e6BX*qdB_E_!CxcSi3@{@+Grr>^CiRr+)1nr6Xwo?6E&}mR2A>-0N ze!`Em?VrTSFMTh7drixz0ucIT24!^1Z>7aHXWVu3)R_@$G5;&(aLQ@Q{0`0WBdgm_ zz__lT{NfSzy7pY5`wOAxw{*T+OaXgBsSTL|!5-^XmC+m6@q01nZM3{Ez@3U2>WoWB z7+|aB1K8Eb_<vdor1IlxhlmDOOYz-+M0Lh!$7mEdQmf%5CY)qR-H{b)ciH!;(`k^)9? 
ze#bEPIAJtjL9OgUg{(sOzqP_1YXxuzQ_dbZxiZB^4`kH)+&00x3;7XY`RjEJmaAHb zfYksYaD#?XwHmqOjMI1TlU#%8^4c5KXg**ph~MBI7+u(rEpr}K*`2BWEZX3-+qU?t zS#_OMuN*sD6yL?bhX<2%emoGr7i!){(|cy@=VB3*sPH(0OO{AoeB$py65$isjaEr5CDAzB)`MjaChO%&(Lq$zM^4%`6DLxLCDjYT5CHQJK4b0zIH8_in%e^ zon|tftd&;SVOXkT-`6&pp6Yy+7^4p~ouj4aGS4nSY(t1F8em@$9DP8eCX=p%fDx+L zmGOkcwd_K5RnD#pJ>gF%rW0l%6H4&}x>878vFgFzy52ccAe-kGe%)t_qW*x2omm-f z^!j{s6(>RKKVQu0aj>^bYztVb?TJ#60iIPqaIz=f`~Ui03na} z1uzHV#$2cur(az0!X}_77TOq)@Q^;3Kb!S<&qX8e!}DUczE~-_oL{$xKn(erWd+FC zvRGgIm-2=`FYx1Iby1Bqg&Oq9&|2SsnnFmH4S6Xr6+s2@7w4n3lfhZSo?H#CIo|$K zn`zt_QvvQ?{EaJ;XNUYu2(AJ0$j|JzIL#X!rpjdg;;44@7?Ayt796!LtjpbXS`pti zCbYWGJb~5z-Z!G?^r0Z*G^>pcSbs0oecD8hwDsA5JccX9T zIkVBOula@+;Fs?^7L_H>#VaoNP+|^-r-(6s>KTAmjUf9Bl(FXn&)Ru)G0=y2XkpI1 z$K>d3Ml^$7jrOIW3+?3a3lA}nn%j&a58sYL)eujw;7edo1xd7@Vg4#dM-N&U(fbOP z6Gn#B>uwsr@ow+Nc3$`jDvdRBP$Zs68Xv!jM&&t}f8*We43u_@agJn>4ryQ;!?38} zN;j69OI6!der$CbV;?*}nAXIbFIY#T&nYV`yfxe03bN zR1>=}%I%h#k^H2mEHtE;i9XXGkm7YgBSpYXR3s=6wpbjOivG&B#d=!_Qa} zv(rE?U}%iNt^Jh3@4oz4GQgJ%KR2LV?zbm=m}C9&Vy9n}sWigsjaW>_la^hjkz+bS zpk%M~mHP9qlq~iEirkm}6|t)qSA(}7c`5HIC$8ouext`z?N%TX_(xC>5HG|KiXKe> z_0#~on>-!3PaI6x&bZE)O@|sk_vFdd9z9c=ee-HKG?lfkw%DpO*s9a9Zdc)scU(yr zV7#~1ZPqt&XE587&n>HUrwx=hA)XRruE+IdWePYw$_C8xEU%)^s~UP;fnG-ODb!E$$OJM z&j4$-zPmAE%0c2~=3hwMGXB1Xv551qe?`}i|DVvcOF?&p|F7VJlRsS$?ntfG^B=p5 zw55-)MC4Au==-k&IHIV6aed+jTl!vkEY?>bYT`O@Fw&k5(^I9Ll=KeMRd#WYyy$eM z^d{;UZI^7}8>1Q)anioF;TIpIT{i1)BkxB>SDe}!C_#HmY0H(>dOzxn{o(aBrVhS` zoFWY0#%U`l^LnyV9OXnZ$S8Z#1g0#ldZzNlNe{1K!7wlCKx;ed(OoTkQCVafs?iC@#=QRx_0Bh(XMrK_0%4H!LO}M zv)Z0Km$?&@IkU7#7HI(7{ebD^Q=()s~7_nHiO-t>HeYKr*RdWVTu-{SNd&t<5-jyVQ#o%C=}g`>h( zk)|508#pMRWgDnXRY9zA!j9cw%f(<7_HHtqqVg}xn4(dI+lcL6Jnx*Ko6(=DrXXje zkn^rX(~6cA9u@tKcZ&B|;AP2DZHaS9LRT#5*p`>pClC=K_`LXhY`|-zF_yh-C z8U}im)pP#p04Ksf47eMZxhn!Cuj4-Xn^a?2Mp9Tp+1awB^U|4uXg zop>s!2Lww1@c;_H52n8k)C+R_I7klU1-|1I_x%;``GMm3!Hn1eO+DlT}an*ow zuIaLQqo%nF^;8OGI*-W#uc)2j;WkcHbXq`wTq!@`?~EdQ&2?VU_X4U<^9nxo=UK(H zns;7NwdUI^IrHVa)8<(VhV)o1nH>Gi7+s8UV5II0Q6d_VOiHNjvE>vicJRZeE`QYb z9%>VeNllNb8_Il31VQ3|Ojev_PJ1N1f2`~S?qrRK2OPoQjuE&q8)_RoqH_D97 z>xM_l-g7LfC;jaN+La%ZwH3FGylFL0#6Hj1HUT11%*<=hXXg^CtpsWp{reVvl4~YZ zT5qRovFP<_rOqg7Jm+hGvkG}C_W+^4{?xq-W@#;^6p}#V@09*>GV*xayeQd!OfMcp z7+$FW`hs!3wd3t*FY(QQTywg)hRxoZ58av~_LXy`5ed~s0yTW&1bmArKm*lW!y{n2 zY}K!$*ejB@U`7|3T`}dx?Q6P0GO>VWFoqn{;jThZGNV^3n_q1HrjzvGg;jiOp$9Dm z-g^3cgEYR4RH~sc)@9^NC#gnU65XAbN^HtOL; zPOLU_CidxlH1DQh`&lBMRzj6U7K4fz*+PW?a`{*VC5N<{X!kNOyb^l=Whi3sGV}kxc!)EH{8b5=c@NUwSs|pXsXWvDxu{ zgst5QUb`Nn&OhNlIL86CdiP;_3c`zSdND_@NTjweK`)u=!Qld3na@Em7Pl&lOVZh- zb4%aul!|pcsJz8RCQ}eszA9Tg*Nviv9bgEU9)Mikkcqz~nH*E@Baf?e-Y{6poIgRT zs_F=#Wi;FqrWnjRH;QPEGIQb`98*$@#F}Q~Ee1O-2&qWs<}u#XB~Y4CqC;s-E>)+j zB%^f_=f}>PiAUxg{K^oTVVFQgv8Q{#cnZ?2HUp_aF|V{w%PF}zOkN2vnGUO@w(htB zN{qchH6*t)KL@U%w6^`d#%~3Ty&r0&1LQ8Bo9pzFo2wE6!(>P}ZiEN58U0}|_^y-t zr`F)F6jp7q@On<0TqldT(D~|a*g4!Jz*qshi_-kiX3R0OW zcgfV<_hw`@Hc3hPoN6+1TUu@cl3VU(ve5Wn`#;9^Z|8C2tHf0qWP!#4+F`6Z;p|<{ zzOH*&ozIp8l0dSm{L-_O8A**j+~a*{?i>j%<%05hOlM*XU``o)@oZ}CfY`ueN=M|bG|pzrNUPdsw!lAsw zN4qdE&2P*pUn@^fUm>HsPQ{}-v!Y(pC|;vzIv3SYx!J>5(!{@%QS&fyo7dFZz^qE8 zoEv@4`cD8bFPuJXJdobsJ0YC!XcqQx-Cgfa+BLdw)SdAl?h)TLLv4z*b%TB! 
zZFD~R?US)#tlIuy(ySyOQ)>@nB>xPHnIi{M3LU4Q!^?Xbd7sF^$Se%I(RCupWBH}% z-lZ3ynPx%g&$Iy?-Xtfv%>gmI6!$HQkt&V`$7N5KLV|J5)I^+$UW!(<`@0nCilPb zi&p-he$lSYYxdpU4SUd-5H@R~Xfev($ifx+(O*8`k&J7mxZqiGnOO|o`Ivye;GaRH z`R`OHe4)8;`NOhB?fKKgGwZ$F|7sZhik-bbKZI3Ihs3 z4HeB?jJQ#VyIc|PW^D*wsY#~)Lw=*?QQLL_(_YPozpX*d3a?&cUF$@mRH8?^yCa4e zV#5Z+Drt)f;u-^JN9+lNzt3RSeJh_%L;<;r= zh%dcR9|V3l>LnFf$}2UO?5&eB>ESyt!8@sp_&jhkMGEec@{shDzGr7f?RlS3kUUu) ztg%|X#VV(-DyNf07q`Y@=+Ie}5DG0-a(<4CN_?in$y|g~?0g`3s^>FSqo3uC^}k4Fg&Nwn>am=&QL|Pd!27l2K&C z?%K2SW*MiUQz26_`3^LyIU$j^Wq1~mMJ)V;92BAIKcTttO{>Ru722Xz8jgS&L$x&$ z-+pVw;oDoVD;QDfi1*FswYxAMA4TA88(o7#;UGBsx%jgQ?rBWceMz0kfzQ0{Vx-X3j!U7-yBby{lNw?q#Qdqo!p$nNb&nMm-xJQ zr6b7b!N!YGm}3j%oi-eQ`U84mN~pz^($n^eAP>{&=O%RRr=Yz=~Qb3tPiY{O5PZT!A zO?*e{AgHzsCGe4UT@RsWImc0sWOt59DbzY`~fa`coMGht$WAGF4W z+CkS1s8yw13?qQpN%3KYQ%X?X%-P;$tI1OZdBu}ox3j%fX^_8U(%vNOS|j1K2x)X< z!OnM^o8$Gf`PEK2ln>m4={C=dv9CADL+ZQt30dX_`;BIJu?+0^Xbf9_6xXr&{?`(b zw|8J}Q{eS;McQL^KvN#W*p)lEfUp8E$6L^S5&v+m7|tDQxPCEEBG@*o{UPAxN>1oj zU%b2zXVoUq?wv9_j*C>|f8G?r{*lA?GvGJX4gu?0cbDa+aP6w2@*<yo zBddc8REE68QVf1z?j>bxvIT*gY*N6ldjRmGbOF$}b+qPEfY2gDx#lYbex@P9*S!dy z66;em#}ZJnuZyw4)oIw+O`ncjPu2ry#?7oF_(bFoqhsS$9KROr4;S(gGx*BoGo;sk zy>FCBDZfB1I5t#nE`WPjt*tdM*sr`E}i5nnBkObBD#MU9a@KN4~-k2j| zyJyhU8C-tw%U7%kB>Py3E#io?X-)BtsFS|ZD>L?S=^YVjn)RvxUB(TD#<{|tbWNlx zW!zF7sCB?veWuWqt>(r!Tr-#9pphAiAngK5*Qd_1iW&{ zZn=)aOG+g0#qRu=Vy-8G&covG7o7y_)R-6Pipg5!-NF<^^L;D2rs(>}4tS%lxX@9j`FiKa@A zl8hiHN!HmyrrN06Aex`os&RduT2_B6yz2IM>^c;6FaqqVBQ}d^>F(o+W#xCbFn69` z)N2Onfnb5MDaOHu>`s#^AM9-1REe=*U@?-~{7E-Pw9Bq*6@Qdzgn5#*v5WptmK*rE z(#|Q?il#a7Ze9-Oq8>p$gX<25V%aU-x17?k;7);9&`^F=6~ha7RT$5(=Onc6Son^V zG3rR>Grc1*FBNhdmtyhdv__=74=t0)oCF@pz^$-VHIZFqbA`r(Jg7|HID*oCHpx{R zlBbQzgU1864aEwEmx~i#9GpKs#m0CP4#`o!{@q#bVNF&>o(>cO6Mz7&YsiBIT51SF zu#4#Gz5T+NwYnjVSX(5;w@`&)5uc=&H} zq1l$f-ic?5WKs;cV=#;m2708BS_83;aQ1ahEpga;fHR#GuBlDtcxo>O`AkJ?n*{f7 zp{h4c(Gc6)4U1`)(g$GT5nO*VuY!3bn4xlf z%~R$~V|RYAw1k7&ti4QA25*a9PoVT@BFWu=6i9Z4XoYJs`yZc(e|`Cz9lU0%^sa+w zei)=GVMvqo8zQ})ydhP_wV!J@uSkxQVzhi>s@(_0?eOuOZ+nmqIKMAXyv? zy&YXnznV82#10R2WU2N`EJS({bbJ5?RMB;T8m140L|!YNm|P?=`!#1(gXFUy*6$Fd zGrUp%H0Qm57{;R=sj5fuOJAQz%XU5kV%{bGU~?!a>tRSIIqs&JO&sTaAK@1cgi6C3Z-CD zZLv$n`sc47(&YVQT6F{a#4szEoMs~VP(7g_DicGiuSw+$+Na$2%ZhzU@ku!+rkJvc ztC68s#mZ&e&C=niW`{YRASZ`Kjicck*L^yY*+ zVRZA&LE0Zi>4=|N)G@YwrdY7Tij+RrxDZM{f3mS*l8V8~VkwsS&-!3L=}d?TKc8Vm zubBwx{@7;hNw=U|)ap$raNl&LPS+N3b5JqNy{UzS$~1o8ewlTzXj;DVwcr)=V*Tp; zQuf2PUNCtd>0zh*R3&cU@;jLS6ib?+cSm+9Wv6Kv1!-NIK!NoaE`aq%uFpM@B-<8n zctR49Cf${9-0=;@;p{N~=1ZVP&pSp5BR!LZRnb3$EK?*XHFtANr4AdfP!+B8fbXcO z3bvYJ}sQnTLxY^s)&IM2VE(jf+VLs~#m3p(! 
z8&(Ax>a)lS3TM^g7F18# zd(_S|VP)7@x8#Z5Y6&W{`#Kqxq-Ip?)Sntf8RK7z#1xL`Ega$^^(9i4CvsQom&Tyw z9s6~*EviY|8xy+ht`^XK&-@4NMR@Bm9Cd>G4Ed^kA@!2823q>l=B{30al~{2s&4S5 zvT{qh3PUG_Mn9w`i6ldv5Thyipl?qetFfx-cWc&$q*~?8qD1~OsHf*5LiMKlomp;( zixhNT?orSQEzC)#R$e>cC#PO7N0t-C_FU(X0%WH)M((70xQ#tW$-+wx7mzufpc6R( zrkPOF>*1StdpN<(8MnTdt@TXA)w!B*CkZk&5sa5m;+s*9V&sHCJ+;kWX;K zzD%SP5~>vPTMZiIBa!vgQyE9XusES&4BP-^C>NZlWD(}WH`R7C3YlD0S|))OnZ2UEVk zh^ln5>Qh`6Z~41{AE_9aVw9t#N>ET=%y`ZVp5m^}M4Q6r#5z$^DM1X(%<;8Eye&Ea zqvu(3`OHNaMP*@rq_T=qJU~5Yq{}lva}IB|@vZ1yL^px~WsSuFqrl@)OD={USO}#} zWWRQ8`KF3zIgu%F*F^nrW%14US1R%joc5Qzzv;8Hv31lAWtIPK#kxLyO(243iRk&%eNBQ{N!EL>lF|oq3}7+RM@FmM#V~#t)~R zGkt`NPmmPYsK4HhZi3;%vO4lo_Z~gfGuWsM808W5r8&Nv)P0yKrC1Z@Ij6F8=roHEDT1!X`KHo@(p*1?PF`D1^DIjaPV>G`7i4cm zK}A<3ySxy0wsB@qP3{<ISj2F4U@x%l{ISG1buSa-FeMCh52O%&z+PKas_*m!Oe7U&S- z2rCnC1dh6hql~SLy_i*ut(+N*!0I=WlGN!v)ZUHZgZDi+JyCFeQIkzL_-0R5w2!5G*N@=gp8MkAjcKuuNz{*n_l=Er zzK`eU>7ME2XV2c?NdtV2&KH_e?n-xYd?8kxpbkxJDPKc#Q`4RS20oHkunc2xUKq?q z(2;-VT>1|sJY4sy$NP&C=Kf+1|8Xzo}iEu2YoTYBI>Yo)35~Uh}F}GdX^R# z+9nST`pe(!8q8Pe&uJe-n4D5@^@Pg0K{j7=+%CG-#VwOfHbtm3C#grw+5_Vu`p4El zqtYIK?Q)HFlJYH1Sqje4M3MdenAEG_J&|)9F;V!eiX*`>n8F_`clcEof?MaI(^YE^ zfkR7_Ce39|UX`vZ%P#9C>TUMORG{_NFDo`QixuEI>%@neykRuo(nUt4ypsC8eB9#N zis^#~I~GPhSXiXwRtw#Qd+WsRwYN6TDd5=Kg7>Et?txNzbYv2BW(Dz4?9FtETotv- z!W@-BoZe^(%7~BgMCN+VL2VHdw8-;3=rtVX&aGf(us=%zjV9gE4Omaat^9y93(`D` zPXB*OsLC5ruNy50$m`dl$Ul~}|EGlh*BtPLAH1TDR`wPUjYN+Sa}dfT)bmGMT%28l zya@u-pHW1xkZbVY3{z5VVZwtNgGf*w-Bhe9v%?d=dR45xpH~*~=3DXZ&w88tnCnf* zElZnQ=S|xKJ0~{_JJae;O*y-P(rUrwyv;g#(rU8-C2xtYkfw~B;7Z1y@_^|p zM^JdNe+3FE!7Q0Q)SvXcgLJ?~?CMPFL0f#s{3-+0#EjgAbEA%avJR-z)R_Hw@&^6z zOzb9H@&;T~4rDh5WoIpw{dwc>aPoG<;aZq(4605HFPEceny`YEcW{iyO4Mo1Cha+i zdTZAohWj)(wzrmZr+lSOZOfT}-8=0{6kTdt$^Sz22*~YhcbCyeP@$kb<-bfTlR%hu;*IX}k!oP@F8$GtGrgxzD z{h_3j+m5%eQ$YqZ3){~JbHya{nIC*|`ANMuTn$OQ7JxCAWpI<~Q z-iC)W9flDp=hy4@W@h~O$?Mi1J;UHYo)c0xUdgAgUkQ=HyH&_F4_o*Pc%{^^<; zY(6QASk1T8u>J#g*>h<~JHZ?qE5oQ)k~8;k9tn?6YiNxS+&;M}LY{D+`GUpE5)e*Y~Dy2Kh+(^f&LPx%XisT!_wU=z{C< z`$cpSJo}qg_!L&BFrnu``9M>gqt16VpIdrJW}J@)9#~Sw6A<``ek(s0>K+QZaQ<1> zEok0?H*wC&E#w?@02;If-ux{RM+y`fK2e04OTwI=&jCak0s@W)+<+P!GBJoLBnU1j zs6I9*_@d$X1}bCfitwpRw`cwk7s-=yY$waTlTp5Fte7!Na{9PCzU>LY4Y4cNk5SUc zVTHJj#G@gSXY+?Z*!M@Tw>9bQOpD)I2~5~kL>>tc!5Zxxw)GDG4|(qxqf7L@Yqo9M zwtKg2+qTWOZSS_R+qP}nw!3%Rp8Y$?%>SIpoRgVkGGFFHr7BfPt;(vZg=an2eXq4Y zUF1?|EY{fVJe;=5wSAppzqHMEUuj!zN*em`?(Jg0>?4wD3OL2)6YCKXCGG*zw~&5F z+Pydt#AF|1XC%eQ7gEJ4Y~ou;j2YVQtcTR&nu-(TiY0aMTPpl0yjbJu z1zSMMAsgVw-0y|v;hI2Yx#D4nWFU=c0#$*_A%tPX>5)1GGxK$Ou?=c$F5_EZFj=kGgnH?#@M z%R`MjO)N|uTue_Hbh8gX$D*tG>entpjR8cl#~8wkNMlZ8G7diaCr3NKu*i#ANK%sKT?;|)XHb~Y zMP)IRL5~^JALB{iEo(B$L13Tp`q4DFyzXl5KQ|K64XlZKwUZtu&*pvPLLX#5*kMjH z5mfl#=$vNEjI09!$!3aeWn&50XJ}Cs4%M@kXmYOO0a45wx3zp|#y%F5B0Gkx%$qG6 zKHn~ZgZ%*pOa`!CBNW_@X4lUEuuen6BV$FT7>F77XojSz^xI!3P`Zy^!H^>$7O_J! 
z%!w(t&T|H@uIXJ{P1c#tk#URGwkQCDMI2k@XbO6cNRv@3d=4wIB!Vp0pm7m+hXZSp$O@iNlK|!?o|5=@q$(McHyTw>pz#UD#W5i8vKGl5*gL>hyd2u!>4s z_tirsM5Co5E8*4OWJcV#YAGqjMaet#;4DZI)JJ166s7%@5Ci@O>ikiB9JI|S5f^jn z(c-JlEKK%WB>ND5O4NdJ)5jNB(@3!GXEq}r5ndSm`DYK)yS}$r*EFm0>-+NJGV^l7 zN^Xn}!1~X+Eujm&Fn9j-x2z4{Mf%$`*iQ-A&tN`DhE3e<(Yz!n+Ha1N)i|$dz0B2i zUUGps=(ap`Mt^@HZ{`DG#sKxDn2jG=JnJ0gGAp@*I*5;NBpH#;i6Y`2P`}=pn4)@g znZde=A|)c;*|JZ8SwS3{AH+Jg28|3hx&cI>~VcmNHjOgV7{z7TmA8qhxq-$ayw_R@1Ongn*s;NZp4Cl zSe6oAE~5dfS5A5O4^i=G4lsKER?LLKY7QFpY7&?|n9iKv5YAsgdw|5ef{_` z=@8z#ObCq4|48)NK<4;Q)g@ei2_*xi?uu`_$7eA6?u*%i){Gg4m!9R1`r_A|K?lOz z98>z`>|z|PiaQb095eMN_jy?*UQ!G3HdfR#l))V60$Pnhi<$yfjhl;;%jb~G=Q_7( zUA+e%JwRK{ml8eBugHsy^$r!?ZE95y7@0eGwbJ*Sy%orv!UYlfw%iGM=wyFo+R2`E`uFB1_!^kk2`(UovBnrr(+Q) z1C0`9S`D>YAX-NNNb6u9%vPNtLa*CvsOixflgD4)j=jUUSQ39CAG&}zaC6Gj$(GQ` zmU=UIU9CZ%^<2mG+;Ch1%)$mf+jSxhj)+@!u2Agu7@2p6!d6O006114uK7_)hDIW` z5H+?_&ao~q9qi2TpGl2@8rm^8GkfPAcaJuAPMyxQ%55HlO^g~lKoc-Nws^Ch?Uhu` zGg$>+Wc0*k1p*aKZ6#hG{8nyXHDOyIE?NqWX}R*z>EY*qM~nlm1@8K%Gea-laa!~) zgUsfyjWEm2BzyF48cc-4eCg}1d@@@qBlzO%d^Bi{dq;Q(e$zI08n3pMz;Jovq&xFy zpFR#s+_CY@j3p74^*VV1T7vjt_5>*xn)&^VIOSI_RjJra+qIVy-10Ct0yioHmRR%X z=ggsg;<$cE>QCXz;!`DUxY`sq64!e9A#pAR7p~-i*}~uch`ss*;`#gXg|I@V#O|^f z31aFnT4(B#7$geVW5RF`Xbnia&d6F=%3~yeP*dWP)Iho+CIw>H`slU@x)^FxBW+@> z2wpQHSmr5}F=0G9Iucm!P&()T;RfL+(PJc>p>+5E$Js_(t+Eq3t9M%2Q9^92W1jTx zTXnFWSVCfEJypCSnJ_U^IH3`7yJRN8pm|Z(faQDr;@~XYvfN6lp6P(STM3fQwx72 z@Npl(4Che6goeU6r?g~Crko0KQJy8%_}B6yad)tcmXj&Lpz&~Z@6JTGEmUQWS z#Ft08R!p1MUUwLP-@WtN0Y|7%{n^?f)dM7DhOoW;kaK)BMb8+P=m~<-0wnAlDmMUNQ zDS98I|E)^r8&HTy^d69Y!RX0qdqo7-UEu6h%c9wZhQ|O`1n_t z@p(61H|9K`n2ommWbij`+ljubDbW^hKqjPPCT0)jtNh-z*UH9_Ru)oET;4%ko{<%& zL?`l?1z(H|^j(sdUwjzXT6~{DK2TiH6h4_2pEb%{a~u7fC8h(9}Kn za!OC^mO-MYTaS9#QsE;@ z5^jJaa|tTo@|(jROG?9iW8&OEj`W3nVVg3Fhen2*%9O6wJPS&TM2+4#zSkcRrj|s4 z(frn?66Y-YeII2}c}nL|WxM0f;J`EC!tdzk`JV8C`R!PUik&@POEsfgTcTOh@8EM zZwQWhQuJj0DcA#ZL=o>##zKD1@fUpz?Dox0x@NQP&&&!o1{IN*x84;9y28D|f)*xK z0-lDJn;C4fjsA+c^R&dCA@;V#O;65-1 zq(n$~aAu>vAeT{An*LS{00Sp6Dp3A5dN$>UI+0zo|77{U%qL z6a9%akTgHD>dh1V&J@bcmA3DSKH)Kx%J7x4C?ptlL8#?cM&PU!I{rz>`tdnz2{@t9 z2q0Xd!-gOq-X*xXQnO$qKc>#ffe4Cds6^c<{6t@ZW0F5m=(yK1q(0_QV@lFxJ-?Oi zxwC=MMNhzx=Hg}26UpR$RC~JQ!=cB2;Q4FI@-{a&$7`C$8N51dlJt~27_Ut=qRe&s^SJQ+i;Ot9)3K#3DO0p9%h$>1U4aiv zLRQk7=1k*dH;$2+2@uKIW{UJ6?EJM3eGtapUfURf?tlZe>99o%C>?%7hsc{iL;R>W zqaeMzt<Vd{3C#!O86U8D2@Iz88%(?v5ocLQ#otP)4C*39*!nL};+E40 zP}V`)gT2 z-mr|roN2=io`8Bmm~DVuV<|^a1og!hY$5j4$lSR5^u!WBmiB0w+Jp9N+yzkEykiAx z0V+bZsC#CT|CIfqt(VD|Bw5(sed!^ZrS~c_IibemSMXPz!K8iDP!yNiZsG#U%o9A2 zh3x`i>_N%H6&Hu)e^SihdE`KO?2?U2E5S)oJJ+!bLn|>k^|+eq1jkE3w9lk5waEDB z2bC@JyKgfLrfq+1lbX?ga%61FOq@s47`d3S*L4J0WK`hNjzgoR7r71 zp#-PFsp$V$Hc=@yLX1-NGqc|t8Cf%p^y)JGLr#BQ(~zUa(;lD&;Tn|%Wp$;224zS3 znO>Ht71M8^O-rSLL+62EK*1O%r`?LN~5gn zqynJjZ0L`2a2;yFC(2()zmofGgU;UXtF<;PQ?^~8mc0*wGzDJ)RmfF(1E~gYqPB7U zXD55witoV1q6EVGhU0Nhf`fZ1|Bj3bmj_pn)ZaexHj%SGkz;E{su7u|Gmt`$_?wSE zkVa>1!bJC&Ii%L6);!{j?Tkp+rLM|@-Q#J{x) zS7{=%v-0@I)PVv{nrB5xyqEmw2EA^OIyM>`Q+T=Td}!=Upg~))5^HGUozMQGuk&yT zO>Y5-8;M6hXyO$|yRWk`pjD2DIDEubd(q#m%jVwzElwZSyd5A+=x8#L&Fj7YvccBbgGt^o11sMR9f;z zE9Lq(YJ}3kC0J@$F`wCpxpljM^SA(!`S`UFZfO8Rw-qLlD6hQ+Xg>U?G3$pBw@|d5C@p z#ux)WQId!xS1CTmOT_L?hNZHh14Z(KM#msad{P6Mm{GDQkmCf9bWK+3{{-2n_C8a5 z>zFE4Y5=)m(@mxqb&@OPOp|@Pi3m{w%^3cm%l8qww*d9^rbJ!aU=+Qo;~yj9pbS-2w=neME#85D>LfK0+x-+6;!KuH zfak*4oYM?xhp)jqt@-638DO=f35jL*cOX{B@RiF#k==_e4F5?m;lli^k_P3P< zzZWb|FRjN^0qE`wKJ%P z`o>-V@c!z+ttNWMCic~Roo@vVET1g)CsG)OKISK|ez$+k-(baMQRrNeuNh_HW-y=D 
zff}zPF*5<(;O__xz59nSo6C;!+?3SxSemrK`J{W)0;ZnzjfEsQg+<(SZeHi7lGgIpyubU`^AotV%9`j{)k)zYW&0s!yhAdNA2K+_z$oS1 zT_o3bmmOfOC8zmn&z9%J4lvl#jc7x-nPA6g-eHqRIT7pFJ?LN zN@*D~N0nkM(S5Q|EYW|SB7*9Nq=8DBIQjrRF?37?q5)};H?*OLUe5pc{FOA~K^*eI zMH`B9a{{f8-nje%}YPErH(0@$^w3wnc>&X*qt;&H^7_(s}llX|V6rV^_+S!fCV? z!(C_<>|0P=)@MOObBY|=U32&`-S)sdeS|&oppSt^c-U2e-_D)nN3?EBF`_>o-S+If zsgtnSbdL91)eK1S3|D2;LaVdIW8=G|-@cEZ*RNjP{llzEFaJw`&Rw{~0ErKLp3%d$ zZV&EpXBs}z0L(uJ6_z*#5m&8_Qu}M)J@a!uyl%*Ozz{oEYnE0=SfLG)UXswb(j?|f z)tFGRbf-7Ix}wiPR>9p^RPBelF3A7ooEytm!z&lGqn>d;LY7aMGd;9VHujmK+2}R z!1{@WFmkd()EAJEcL64FZw4P-PgZ`xnE0n`kntgjim+gdQAf}`#{Fgtq=CbFya)F@ zp1Ot^b@?UK%xv|K!xJ_+9qk~y7PS`dO7s-M%HO>y1W_L2e`aR~_cSaobteQfTft`p z`TON$^1)+&O{e8&{)#raBs5z_R~wL~7@|8llRO~V5FAX2L=b3`6fa%wD|anyc>gMX za#UNmk6V)2>j_2GTtR9399sK<9(pv>c}k^qf`;Ax2J+O647G1?HMH9l2z4zw1eZlP zFKrvTIt9sqq|f#!dnLDTuN#7w43*O85gbm6-KNppp(vDCmaFTNBSWnK(cGsE>arTfv@_TNHJ!xyrkq1q}=k9e~UhKm+?Un;1x~ z8xvb>e3nyZYOIQ>ZMY?&tW)>$%JltrdmjiW;uJR$jE%CvtR;XQn)`D%U^1 zUjc_V5Z7RiYdsk~vI83xyR%-5r`ugTUcYQ#WIZU1D*xWR^sIi^@8(9H9P%&n32o1MC(FG-Uhvyv-$wYU}fD~qC2GBtc*^cz|B z+?e&sUA?A#Hkl$d$o0xqyBJC)lL=~RUFpsosPb!Nw22mI;>{!2;EU)`pCt3a%!A?)*$E5)RhyQMbd_?^HOMeE~>Sj_gSo_{O#-GXY#Kj0J$zxY;oz zeOq0yAjw!xel3QbHmn0=Is5%dD8W?v0S=P-3bU$WwOgWoK!Fd4Wv}Mn6$DwObrtnSriY%kzkf7W{MvR4>W64 z-%%+2&4$xAe) z>?Idm?PNT<_I}=7qg({h1)BrlUR^21)I;Wsub=X#%av1ZidpCv8|5`fz@TK)P6r;p zN)|5Efj|@#ES!6S)DgfKKES~BHy8QNV$`L$fTo}JDDuj1Sb7UWs2R$ZJi7tL1D7wi z6#bZI_ynX3!83vR2o_D#f$uUcc>Fw1&j0vnnbbkp$-S3*?X+4W%Dm&C^S3ZPAwk7t zj6Mg|8rZ;E+cLLES2ww>gV9xWc6Uni(iC1^_h&-2_w`mM zmxr@q`(Jrs^JYS|wcY03`dM?asEHm4|L|N~<7f^Su`H$e6Uq6K&5<_}K--vKYbyOl zKl+v65lOUa^&6R;0M~S`ODc9@;i;6#{+RX5oe>g7Ul3*O%B+>{c1vTTRg%=?z~Nm)F?Ycb4+CD0Y&a zv+?9E15RY1IP9c{S&8Jr*=R%9ie=FaO9nH4rJoa|fS79|js2)GIt|&d9WEEo;xg%#cBRmm`B(lQoD3m!*dokQTq(lZfv4H?H_ZP15ZNf*Pkqb^I5+Kcbb zo`wEzOUO_1&e1KKiayw{uGnYk%yXaz(I(#%>4H>DC|H#q&!f4LeB-sK2iUi9vMfTo zW{jX;(hd5tv%k5*uv!IvAsY&tN{(l3nV8fVMr7bYgM0bz{iR~yhckqZ!5>2)0t8M0 z5la9iB>{~I-g6que5le7Wn8qQlbS8KSEee;)CD}ADmPxl&W&uNrD!m6^g zd%Zz#@h92hSD<*xQm~%p_Fb`#w-h7gCXU0!H6i&#wQP>6(dZ7PoXakvI{(OCJBu$W zz)iT8oItnxQ+RNdUK2r>aTodvU+0bMc`s2$3T4a~kH{wyv7}kKWkJAAJW^nwYzZWF z;w^PbdYW-Z7ax<9kLhx=`wsn)wLs)2ewjUDuvkit$}OETn&4d=Q09nhAf9-xUqPBb zcet>hXDxJL_o$3-5n}o{o!NdOvMV&?7XWxMgfu;sPwlK1y2ux8lcbNH#_bHB=)ro0 z$z9;GNuUe0V`zG26i@v6k2p$nUA!em;nYe>nM3+7>p>8DvG%lLX|4)1rQ&E&YVm|( zdiCA0ld=KuLM(8aGP82-xC#-~9J;t@7da0P;wyyyzgar|t3gehg(tSEihP(P2^0{T zOy`wL5eK$} z;T9n_R}d=&=<7}kp3}O$zsPzk(*rtL=K6V^K9~=yWa*_L9&|J7e|i#zmB3-MOoO(j zly!bt!xpqe+*oFx=faQgPycT?x05JNF|Nn)NTM+!fH9c>ukN9h|5OqFq~Dn zl7s%@hGmh{VZqj$iDr2mF5}iFa)11Q{?D0n(*JWJtC^>XnWL+fgT2fDd))bd0QdhP zTlT*zRZ-tJp*c@7BW1quhe|#)B|xB|C3HoP)$WO1q>-l$?F?uX*K5hs4WwO zrYKw*nys2vw%^oQZxBe9Ur;#FrA+*z?HyuLFIkym?ITk-aAL>fqC?PXKqAvNf3>YZ zO&fjxsL*>c5l2|y@Cd4eRdx>D7R&jOoq}B!UjliIy&tT`Hr6y$85Dt868a18Bd z%LE&7wKPnxvc%z7E8GcDoKmDE9smr(YmO)up(6237qq;7Ac3?P!+)$QZ0!;U87S@| z+)0*5E`3Kfu2Qs3JG~b#PJMepnPp zJ_MV7Y0+y=%Fq5*)AoV$7B6MTN54JE&JUq-e53XX`go7yZ4z8&*)Yh0p0=w?wnTO_ zJ1RM&aLS^}rT7_{uAxQ#LIjF{31|E-ZX9Jep`GJ*HgobM0_DUngC)HkNWjp9ut)I~ z-ehc*si$9APZ&?(TVvXE93?5{V27mg*}E>5%*a0aKe>GxE$zw5q<(5lKCn7NowDc+ zrQq624SAL%8CAdlSr#USivV2jDxx_Xua$x=!$Bk>51-RWp4Z=>H;o$*#-1ZZC7=V^ zZx&NVFyCNMo-+Y5H-j-w1$`r=Li$vGtrGg#{8Fa(lmi7H=-RxWv|Vvp=M=Qf-$KYw zE)19o6mu{t?4_z>NFXM65@{-4ghT5yHMSHdt_En;?)` zYiZNqV0)qZ(0_gs)3B0=O_!+eylLcCpHR-f8Pa^V@itS4+`4+f#yzrVJGH)_e{bjF zFIHUVaV$2>#8o~1p;Llhn2EWPop(WuPpg*K>9c9*0DuY1B86e|95HPN1ai=>i`_xC z>Je9E74@9DF`87dnQ)moJ6fk5^EnW<;hHuD>eT`Bs(XOizg9I@nxb7u;RpU*iBxvK zbP&F6s)ieU=7?bIyK0)*c_j}%^=Qf2y8Rt>mGsh6%=TI3(1_k0&}LKRREkOqGS?G! 
z!LocdVIp6(jHY4=eSyzdi3B*+L-F8SKGXATbZjg3CCszUUR1 zTdU1jxiUG}q+jK-RdLgB(aD?9Bhyrk!#E#(bKNDnDbu1~a1{z*oJYCBA8xzosp?>K zDbAC98GhxGOA|82 z6ukEd|6kqse*mNLpy;o@e|^UKzm4~Q(RciRk6pd|KgO<>qK=k=5@rA7|Iehj0#5!T z#l`f$OL6IV3P1jHD}``JqB91K0{!nqb^cQ``ai@pp8wD*zJKTy-~Y{;`LA)v{{?tj?J{CXHoSViX2#iId7iuboekj)mv6w#Zo(ROfE;I+Q zjW-fc8G|qI|M6od{AcsmhVm~}LKp~$`~T)E{ht6>JT|tt65frSK6K^BgDA%mXi3R* z>nr4`Vlt^_YVHIrbOPw31$bHR1BLCh85{;XdR#4W7u1luIoB69=X)DWe2|8S?L)%Z z!t@}j0w_cR3}yb0qjdH3Ua(k9+D(G*Ku0cf;r?hj>*N|f<4OeA+s92fzMZTO(>}XC z57See5AeIq>s3H}_OA}8j~GDT(8+x8!!N=9L_nUPbphlBMB-o(SlC1+ssT~hAYtju zhGdQe*zChoz>V*6V(01PhDyEgN;Z8_J0q4NG0;q4=|I4^(EsEPIv_MzEj+_P_V_A9 z*$g!vER!wD$6Xv-s!*4_etqynjJ`xD<0)~}smHa|l_U>v)WO+caW=F>sREdn1C?=~ zR4u>}RIp7}iF{PexBAwD<+vdyolah39Zxh5V!*OP8`WH#Mn-VNg)9zBe{EQ)#`yg# zla(@kn`9G(Q2b(F*%RoaENOfFmef?O`p%~{VxSgBaVlQ_(PhDo4H5ZOLg|4bUHJ1W z4+(W&w23!`;I}R`j1&WJ5Q%zk9^!DQX%Ob%aUzY|LogYLfGEspAW(l0&^QWkRP$_* zQ&4e@76`f>!sLmpt0rSi4jLD#TKoM;2#^R5{wQCx8Eq zOr5vzFJ#Pi+5)xM-y!g3S8QmT?|c5puPO)dL?|*DB<+tmAUA*30N&CqbPnf6k!VVh z5Feh-NQSDUH!v(NP%$UQgQqgGP_pfp6B0LF98;Lr ztYpL=+--iXriAfVpz#EId9Tt}dwD~2djxxYZMYLygByd+O0j`vSW6E)j_}o3OSdL8 z_m4#Sg9lID;I=SSxPD{9~JuaqS4-9S^5rAz112)eSD zy0Q~As?i)GTKWj3kOcN+TAg;!y>qKf(I2#TPEAMZhto9f=rdW|Qn|&GWGT8v2nJ1Q z1I%TPJJZYp|sBc(E<{p~|ZVuj!4;{W|d#*G*mO zFVDpZ>uice(N72DnnD8m301p5qQ72bpo5&!5NOuRbC`)jn2EsEcR3WVBQ(@!yThqp z0I+k?3V-;rhoyOO?+q3^be;*%I`#WH!miJ6*TI_*+l*ak20Ln80( z!=hh3)dtKDx-2*_E|!pn%Jh8uD!O$^75c*~G_sDoWD%C^2qq?TILOiz5yohDD}kdf z!3%9SV3@}Bkp(xD$q;wEgxHwN-C+lNO5bH;3Sq;i?U9nY^@ky#$P5z|P6Ue@aE0HP zakM2$KanD&gD~W}=x8S_qp+5Nq7d7j!u1%HkwcVdDhZUqj?9UwWhS%u8d%xBbjeGS zSjecXUmGjo#E2e`rHHYiMG`m6HSzB*CD>ey zHMEAEywJ1xo(1xS7nKY;FB*;EG0e5_b~8&jBwPNL1rS|Pv(@CVW9nX7&|j>QtQ%X= zH`$H1J{@no7q$4cR~s1fHkzF0%PREmdrx{UX1X*9No>|;PlHRCa6cZ>HfkU(w}#mh zOtagC%IVzX^3vzwg`=JxCm~}+pURKyk9&qqc6K^nGmLN3cXmEp>v6r*fIr~Fy51yi zQJTI$M+A93^#Y}fR=+ksclR+A;GUB(ilr6l z>f!J1NsLvoI8r$^Ituc+Adw|=DNVdpWwDt8bh_-=k9UeZUylJiPP;lWTei+acD55& z83A8Np_98pwp~$F-`arf(OQvo_)lrV4yQ9E+byaFCnz@%+Y73;?(_YiTE?5rw1=xC zACU$JNoTiWg9%wv#@(kT3xysoUl(s=#pzA8_ORG`Q(M2m_YU?(73Y?T8TB(;dM2Cu7@pBK@)25A{V~s*8{zl1 zHAK2+-tELf9Ww7iS%;8E@ghK@aR0Kh=B1G1yse}54Ja7@pdm6A-HP@gR#sev(Pyys zpr^rG$$oRc^WrZZbCxu@`$??c@+f#wuWD4a&bLfZ>S;QTAZ~>xjOaf0ONd9CUA-A5 z^YM2GCK)vb>&2$vqJKNs(*rcW$H*OleSqc;W#U<%apins@v(-_CpQ$VE8i+#M>Tq z+hx-c!q|1dPHFwvX&@TqA!dpfG{eF* zGf(lpadbbL^2kDBU#8S~HaBQCS6KAW%R^yin^l0W9n$QDNOV8N<(^h4GICrNj-~!x z6tegtELWe1%v{hMf{67`+FR(I-HGqD`(vRmA+F!JWbo)wzX=K}2`gzlHg%Jo-^aO+ zjzdkghV^CA<~s(k6_14FZ5W^3L^{mONmnELsqL3#9kbY$0U@i)C7XL>kTye zB>`S_`da-z+WLm$_nA(qgi;Mdb`9@LT2G3Kq$)e8TgBs0v-LYpyg|}Egbt#&@+08# zwpV0JM*I@ijnJ{c{3_MIBE@(bpm0&zJ1FamWqnFMf%13`_X{uDPVG&2ww^(3c^TLV z(r|x^oOlMd%w`hIt237IrPtVQQtjO(%;5#L21)gVX}K^_m5m-LED4N`dVx@wLB8kT z{Vu6d5b_qGHyA-NsIBy=A8L;B8%sUBPUx z=nm?vfm`GKv%tt(=e1N>&k>yJnJSCsVU{a_QiUrT%S&)j;vBo=@S*sLUtsQ0_b;u74taYkDD`=du{G2j)P*K%OU> zSV(0^4pYDl5*l_SMvhFw;~koZp@h*hcEt`4O7+5a@ew{ur>j@Gwe8@_dHFl$yj1DJQ6^kn*}&d#aD!X`1m=QGa%H4`dXexjGHMGIP*kUm|ecEarM7Y zQY?HC4V5V|#!Plp{bfAyte1(dibIOl+ek+G-6@EFJUpz-J9~#dzziO|MNXCp?#d*w_}1AH=E|JcJ3fJ4+OL zC^Q74Tv&lvvrrO578EIZA4c&7#1W^_Tx2wRnfy8yWv)WqCl%EOvJV(2@A=&-(uq?0WRm1SYiJjUjBqZPC@SS6jBM;ttR?6WZCt~!Y+vw%R)3zu=s>0QnkzP5cPpq zNJ%l7K2=mckv#?c%mAxtlJ@0@=MSi@QpLv&Z2EqMDt2#-K!gtJLQoZI5YcmJT`A;r zs>25wRBlly6B=xMl6y}<0g$qsqKSjn3;xi<*9Q}j(aVaFL;B$hwFt8s;akr~H)URR z3w}^(QYeR_R0bByir=G{Eq$~_l?DQBaWZ>O#d-p-QY56(uAQ>2r{~vr+kCx7Mw$gL zlaGFqH??WI4M+)R!7RQ=3y=b}T^Iy;v-+K~bIO~lUljfDbybDmHYUc4EV5^<;C>yv zsJ;E0ARAG2*h@Ht7s#I?7Y9FI?spHb1z5Ym zf~7eE5xt0knU$8GMsRV40Ul1HrJr-1}tYlAclYE1Tl{~&QF2^eyojY-VUk%oF 
zT8z?~6~M6vw-+sr7m+>p3w*d?lG2<@8T%L08m8aW4QiGto1e$3`QBCnbuaq{YIr)0 zE(4ahDO%%%(DdbqM{1EEnGavx2zwDsM($`$ynWa)dzN#UE#w@yS>n`spDK271SfE2B$CBv`HyQa2C9m0f z9%8Rqr!&IIE9_vvNQ2JK)w|c~CwIAB>sAYHf#XT`5!~bqlWSP7tB~$<>vrd)f?=!x z;ssv9KI+m2X+vV(GlO{lQV#+r9bxq-5I@t9hpSB-|6SyvfN_kd-utet2kmTdVwMdG zY}MBIguyTgphMksvQzyB95ah%JYn>ji$AoV4PuRU#2--=AJRlYt$R=W`o+&7#m z_X`X8ADEwnnQr1`muIMdk%Q}U5G4)36Mu%$l>Z+CormRPeZBL$O#eTN#cNHwg064N zY3|E#l?1=Taelub@Nzh&z>9U6-a&5ay(_dXVEX zr}}y4e^C??qs1OzTeHx?WJY7S)T9fKlVMsc<2qbSvtPYumsJLNO9T-t@lFEX5P@1e z6j%L2_DIB#hKxyx^ofZ)y~y){-i^$t0;cO&wm?^~pKb`&Kxjq!ud}T**~Fkh+=P77 zy$*WbzpHpp(Jz$57BDZ+pcMlQ@Bc7E;uxufgK=VTLp?EU@U$a@E0QG#yGd)u~c+xFSEjk9grwr$(CZQHiJ z_ndd~W#;DIWd4CKsif0sRCOgQ>9y)t&ok=n>LT4)D1%`2;bG=Bh8YT@$pASC!@Ei3{}+=*cut#v<8RCqjzZ__f7cB!_!cT%g8f$u z8wLQN{_piLCtFJs8%I4`J4bpAW##{z$k{2GIJlS_nb6s}U+esnu;xPYd95W-L}@{< zh$HK!fjT0T*;>qUzV1VJf)5aox1yRaCwiAx%)F}Aizp&dEU4*WkxdI-Ng3O>Z+U5?@$Zf0z$nkii)&h)phI=6%`eD%f2$jl*A2{y@26l?h z%zzIIzDW6I$pc=R2$(i=&)-;E%It8=UwH3&$NPh}&5y4a^+O=!*9v7d`|OWTu-W|$ zq8j_=F)Fmf3n6jIr8Ll^=S~68xPAy_q5lH=(c=!i-bz3AiSIOfE$Y_`gNGI;F`O%> zMm?&VHs5;GK;RwR)u-ha!`Eq?n~-t<{SeUzsU0-d>O0UDV8V~_14YM$>(cN0bQnv$ z*e!a}ZGFLB0uPH{koy93k$ay?`Hhq6)U~N)1-X%e2htYkvo+8=0TZ~aqhIJIL4iWB z`25Fz$nQQm=9~kp#j_2<%MPffhC6#^fYBRYTm1y+rNVjTPybITT%WI$nZFiB)I5kL z#D^>MEx1$fX8$&bSoO2lnK~8&IyAjgU}P~Ek13~kOi1VsjF8?S5YVE_U1)E2=9y2M zrlEKdF%D5EEAlrk4h~5!y-XD`48g(q&7PtAz1_9lGMMay!H*q~W%RZv)_T)nVhiKY70_H+`g7Pocyj#W)A zC!13sS7LU`l7PzscF6UHzd@buCSuXSI_|eC(}VWdbF&MzNBEr2xVEBO;BP_BC7)BO zSi1L0o%8}yfMA&R20=h%ZZWLGp=$XS74SjfAn9Qd>%O;C+gh{&%Tg|F1&hI@3L4~it7^R(now{EZHPR$Zv zr$8XQMAV4$M_LZGrtNNgmV0~zJ%ML^2LnYIFBXoJ9MTnK*7K5kMjn1-K6g~%DUm;M=AQ^WcFarg*?9x3=J~q(N>UZ#!Ei08 zYA=JLNwGki;B+lMD3Bta40i~TlOXeB3~uVlV?^6CDsd9f_uC%@>gO?TN_-Iul^?B< z=P0!b0LUF2!#$wRri;W@AclFl^dhAAb8 zhQRju)+}ps#;R`Gz_F-M=n)&BP^*JGI@N)o5Xaaunbg5jJrN+(m(GP)=$xF@0`Zl* zXo#-i78;F|5`{mhLRkEHP_q#t*5I=oo2vkgh21VC52|VZD&@3XNNKzAnG+#xl-yi!Cr3N|5gQ)~4~2`Z7Z;{mVrSD5}tV zc2b*F+#(wG6ewegwz2|j&rrejq6IAh_2K(c1SyEg=N$#dz3G*e4c&WzmRITJR;raz&t?m%E18%NE#H*(Vkq%(U}kncF$(2MRpmqeIXlA+oF6y$HAhT$C_cx+PY#3Wb_;zQV4@CjxWC?f+oMA-52qJS|3hq>ABNb!mjpSam3SJD7akM zn7d$&wTF(dPNy&`1%P3aeycv{T_q#ek=CdtcuQmtW+{zG3K^ho^!U{kK%SanFqdE6 zSj-7hbNBgHR_*18&wCl}*z@e3H9{z`e<^}EZL0-0Cyh%hnu{j1K=xd=O#Sd0`!ygA z>}!Vhbm)1F2&=A6m67cfPZHC7mDp@y}Q-0S?t%XjwL_}c@y;^yU1nDHTNzP*xbet-~ zLnerZR7R_>&`wvNPYl!#3IG-01`gmIwqZBi2PvTw?M3Bb?vg-{vvnQiHEsd|m4HgX zYr-!qN0KS6w8XN&fYY-J4o8R#moSbo}MK3vk-1mYhc};*z!=oP#+qJl?wV@Wg?vRSgAXj3JseY4GYVp z1Ir9N*1bCxZDB5yy`x+)TX_`#xRY`E}TUIEdJw;d2tT9*f6}RaXe-H_1CdQ8AK1TI0w~=Mpri$1&3^!bg#6 zyGf1ZQUbb#9Ne}{2&Mp-rSfh`X9x5?4X1-UmuLTA&A}oea%V@Ydt7(FNheKwVsH4(XBx4j9 zf4saXp<;h*>XPu=GhUb~UfJ}t$y^*5KZy01>>+ouL7DZFZl0b0UfE!%+x~1XQviLC z&l~WLYWLaXW{JB!J?A34WANNBHFZ@EUP=TK3+YG|oivRrZ5@ooIf50L8tR-Cop5Y~ zaVC^^uC{R07)3@Jg^wc%lQ1M#L{VG)*>%On>_p*t^|;%y9m23LTeDR1Qs0dYZfk?q z`)P~chkiv3GJc-8G#)THDYGVHTI~Gul7QDVQ+r*H%R^`FSHt36o*Q@v$(~vdeUMJY zx)8tM6*T$%9#nKiE+erpLo0J&6kzUEdxl{iEv4S^XZnnV@3a}qP0Q^hnlovwVa96E zt0g2f;u%^i@ja4s=)T0eWZ40iuVUFiv)#nj{FqJ0B)-F3&Ahp&5(9zNPJ7KKC{^3i z`YCGAQAt#zdn>Q2l9i&$Wm3VQdDAHsY_BAnwAn$&Td%UR$YwG|vIXMOIWe+U<}*VT zwfj!E8NEbB>wF6cQwWmAtBRGzz*2f_Uj2A`awi*XN76?NOUgGRraOs8V#pA=^*g{3 zcBn-w5>fTlbf%5U{Vo zv-X=0kq%7#i&8U8JRS{R5#5s+ufd>qmKyf=k4se7o~#>M(Dq*>io>GjkD>!D&3g(pM#!QDP4T9cdWOqIKat1>)EM(%*Y>{)YXp|3xYaLw zYP4b+8pW?o3*ZWzfR$<60P7S*&3tp3_>MBc!K!a-=c^mhCPfO{-|G9AjqIE~hcW|9 z%C}Wa3iqlOM3g%$#Q{f~ML2=U?q|`wC7Lti@?VpT#1@+lx2?4BmNl`bri~5P)+?wapO)lU?s-!nW(C75Ef6Z;nn^f+%2d&r=_w+f1NM0t2*JecE+79 z*b7Zsmw1~i?=xLk&F&JfES34(b(*exb% 
zvBdVGExM0mMU0=G-l-h=(OOG4+!RC?Kh}1T652^-a#Cp(RJltVD~7P^^V zB05JExPSZ%QLJi?;-Va9bXTI0TceRjn4xxbgZ`AJS$i9P$ua6wyx}(!Y@7s?g*~s6 zt}4Z1j63kMhQ%K)sDu^>qu>$|B~tjzrr<}wX|!HOz$rM5J?h$1Ac@2iP@VeTbNUiK z=R?HOBI32@hFZ+HDAQ9j1&Q%42$}qN=Dt%O2%02l&5|@il(5yPj66j#msE8o8dG}O zTYeS>w+C)CDto37$%Y-g(l{!3rd+z{Tx(SDPMT(!SHMa&8$Pm{c0ATl6c`?Tv}@G}l+l8doB(Thuoe4$H#%eq?VI zpE|*5y-1>qno@8ba?-G(WY^oQqGsoxMVWSX$Qy?cF)RE0COm$oO~h#{i;RSXM@>bp zUDPlRh}q&)=$Zw=?h+*FSQTjp*||H?|0-O@bnCfVg&eGHe&0tYU~_xxO#cmwHht*G znC|K{jnz{tR5jGn2@i+#H^ywCyvf6BZe+gEhpv4s6FlU}UiDlmbjTULX8SM$v;=_GbyTQ5m|~y$|oh7Na0ci)s9MtL`P|qHFW7eUXc;m|Mrj9 zD^Z2+n!)8JD6LzfZlPpvY(>qU?$0LMG+cojCgYiI`j|rE_UW8`;s0M);y<2GZ^l}# zEdP?i|ESdd55V`oB!~aUuFuZ>|7|P<=tv|i^w6mE_yrxH6gk0^sI+X<{$HS*|86w( zKY-u=(&_&NZT|lV{CaDNeUSV^lSu!eN&f@*{V%ihe}G@-I6d(Qq@2J4tO$7OF zC_FSb6{6e8Y@#AXMdwbiS7e0gDK))2PU}f%7o=044}VV>iP-|I#9PDyb&+iJxBcJl zJ*4jRwMwy{7;+vqDRX7lC^{aYCL{&oXF+HtR}Jg^W-TDsyZnZk_zpOcdEF^1;ol@K z-g4C%AIkB8R^nU81PE$nyuoWCv#h(V{Xed~Ao8_LdHoTx2eA=sF#@XWrOt9&8LNA&q{V$^pV=PVscZ@97E(WPMl%rq?oCuR; zGd3nwrFIb2TCA)0MHs)F!Ap|&^;R{(v_YRds~OVifD%&4o@7cFNe7Gp?;)$nP9h10wN{3yQ0Pmk<&okRkv{h(g>JA~2J8CZ42uSaT$UJXBe6 zI|xt5oDE5L=2^hxR`_d*#FoDP{FYd_?VM1-XNy+i>b&xvjeP1VfA!TgKHl?L@u{NX zs!Ftlcb!}y#gHpP2_C>1*la2qwa5}%ZV;$V&SS6tCkalF}`qnbQA%bFL5v-Gvd zlLqh7P7J(DuHSsa^NNIqJBu@6InhKPEG@^1*fH(C$S3%|$h|#33qn!2{fUa+;khK- zpvN|1>-i`?jSVMpF}$73;dt6}G3qeGe2bODhASpBAKA4UXVuboF~CU6?5dfh1$T7j z=|tS1c_x#$^PhmF`)ap&A0ceKr!-{a=m9GXsz{Do!#@e&N6<)e@n;tHFBW+IZGqgH z{`&oMO_jATyEW&E(#t_PXk|*PK1!6m(%_0ZILa%~mCl-0hKHo2R*2MAj*>38?FTNy ziOIdJe{$%u((eaj+i{7yCUey2u$FiX2}tV7lx_bStKu3)qEHMK=xYLHl1Qmt|B~%8@A1daiD-Y_cgCi&=tVWW`Bl4w-=}f59%6FCjy(ap;BD~)+ zkxn-gZxd@1MKql@jY-RhqERE2DKoW-G4yY=Xq#3JnX6Xi@HU-#S7u?|c1V47bbpH^ z`X<%1M^y3 z%fQ~U?r&Oa@3oNtAU~tBVH}wMr@c=hPy6NGr`J!t0^eM1kr&9h92+x1fJZuBVCXrc z2aKM9hcq5>$lq8<;mFab{AHtlhGC=OU@1rqO}eLN&(HO;qr4ve`h+7MFq}CzTefPB zFYcYu*Jw9j-JM;5TvRpG*XP&Rm`@;DyyL-n137a3Dz5|hC(x1a335E!JX<^UXEU>> zF0Sl*MmasdIPGT?a+%vr8z%Pkxb<7y9t6qav7jm>D8kAldkWGloiQOM$l~!oq0$_J z{BcAHC*3cPhk=7R8IL)FzIh%+wZ2Qf%@&s_7uC%wYOX7p+S z<;b#%n9KG7e%jZg5nzu%jcKz*C35Nf{#2LjqqV)6Bc7#d%D&-q*|Q_>TP%+R-;a4V zcMN=Y3`1|z28Is|9Kj8Sl5F*=rDJm@{E+A@i6}{~0)o*}Y)czM&{#TPh1y`f@%nW8 z+5YpE!%lbLBt?#t3lXKP5f%F=81xFPJTN*Wo;#>gj4^6R)LIj2?^fUA~e8^D-216^YdEULXLb zS9hfzBEM*{IHf*_g!{qJYAD8<2T(!N6>OFEsn8_cg<~r2c@{?aF^!ZD-P!OaVckU7 z;}N%|#hjoq=c|oHo7_J{^XHQCx+z1CVk<}3>u`tNqWkKkOK>Im+kSU=Sage z@@QcEO(aV|ZotFAd-?#f{w4!eOp&nUl8S?((1lf~;S}jSMTq9EyaIS)AD6}=i?&z? 
zA_3!3#oS;O5}3Mvx#7geR8og-f4}v&NkEn z883WcC0UY=ML5eG#FIExrCR;O9IhS+c0Y<*z&ZaM^E(G2cJ5>#NJe0?D5Vdp#$9-e0Q6q9XaL!SL+|ilo653}`s3U9(2viM1E4mB0LK?FV zfq=H^5PW;M7*F>C2XXIvKP##@6Sv(wXOO6)=8WCmUw(QC|8#Jo4`3Uf5E|mDgj`Hw zrupQ?kN`M2jN2J_ZhF<@E8=(#N=-weAc-keTc3F`q?3X-KNNK?Fmcv?FJV~TKvOzx zGEJodtit6z?gAUj-jnP;Xt=iN8}NoaKvo<_b%{4xi9S~-9|yQrlioc>1~OpZj3cvF zxbDKW#?C z-9i;z6x&ZTnWw8vP(QJohnc^HXjH0>rJdNkHxTNtdiqXtnS1YXYKET4-x)?c+>mrh zsdN{*>ap#0vMhJ9yk{r;1sty95D7(!-ItNxy~RG}Xe*Spid-~{q}NZMzBH%6d)0Y} z=DQrU>gVUfDciG)ODy=MRlnL?mR%7=jAT|lkvl+p-R z{5t&3n+#bP=<0gEP^U1#+3gb@sZU8H*V=%4%`xL!U*E827QI-8jVnC{l5ilLlmKXA zp{9pr45@reAh`ygQpWY5FFc!*vKPRIyZY6GIiE{!uiYs5r+B;Iej1bK7<#r9oYg6^ z!kEl%o!--+Kl$ZBJy8v_K?SOHoIQoa*a%~iB6F>`x1h)Ow2*&(t;g%~y|z>jN@tlv z4d*aSbSji3wWkfe&Z!i7YrGtxv8k}~LU{?4dEanNy6Mz7RwI7p;jh!%XSmW%IA((& z2w)FUWz?VI>|Lu*tudzYdZFv@c;IQgNw7A7-tefxeTP{MLzi57~MHH zLOd{kIvhoxA4Ly&m1?+*Frh2QECo$HZ^f)YcwFKSu7!)HxMCJtme0C@T9^-xc}9}19l7_X4V7S4n%lU3cLN4HI7nL#}MX>_mJl~n#5>sPQYjLKGTSUVFkci0NoR(UuQ<4UX1X@Eo{T4xyt5L%uA0%O<+V z7l=Ad!}zQAiwt*d)vohO_*Yve>>6pUhTA38Y&Yx*+vIPmvF(0^OauEH?eLD|lJ#wm zo0hIJxdxk2XCK8scX+>#&MDzTi(*4n&Iu}C2<_{JwEjXEUH$i23G5?-4S>gH8YVPK z(>fuIBBVD`7J)rYWX`6Iqdwn05o+HUg3C1b8?obRpLlBltH3@sgdSC-sIY4wtrX=S zL!Zk0FTv(vrJsg3)H81{eB{b~SY{|>L1}6&KaAcnk^4@K-PjO?@BPhoe6%ntazk#= zmdBWG^`_3a?c}9vO`0Cf4AxlfEkIe3~R zJZA`#`DKFqV_=?9Y!#nR!2j=Z9oZz4n2?2FxRU^bb*x=k?hx-l_^&?HJ-+e31J#L+ zdA{;RP)!5yBB5L4zMwlu1vm%|6GccZVI+=27~d-f7Nc^o)I5x76y&Hy2=j1|)G~}| z3}k8v<_yq2&s+KxgqR4BWoC^Y72fW}e#YC&4G2SjP^az~FWIuFb}qk`Eyq%QPyHO% zHby>cgxZ9A$kv!+ayFT`4^?EHMt5P)4&pds(JN0|w6cWmQhrjTH{Wbss@k*+!tbQj zMot-Y39{G{J+lH`n)BmT4ne2s%;StfytSxRWnasVIY$^4Sjkk_H2TVN{;%~zg~G4-ugg>a_kSRiKTSjujBm1uHk@uyHct(IL%A8b_KTUhp; zpXI__zILs@Sl`n^s(ku_y7^u{K>R@_ebncNitbVRd{TxwJu=aU4k-djB7z7sZ?P$z zig83Vl1F3Iik$FUe*rw2K1z48US;Wuglr|_3jbnJL8u<%9sfXkX465X2*5_78dV*| zaG~D-qTFamM~5F zzs3EGc#?PxIaeI>?sb6>stMTYVy2sk*4H}L-J==YF-`20%2-5^=I8&`Xy|;{$Nk)# z)5DcggWsI)x1&NB&?F&ad5|?6LLvDp0(=aK90mdgd2h?0xh_P+c^&rh^Yy7*sTj7~ z^&5#ZktdKI-g=2K&QyuX(J z#BYYRMFX`mstODaY|To#7A_PDS?2EHqG*K zJt3ZESO{%}q{E5K&32l!2Se9^+k1l8z^gu#0@agQUnKvmk_{A~>Tr*|S*@JEX-;pH zp)!>umYj8W{{5q(0p;|B**GP3?;LTUX#gugX#V0#EGKFf_WQ4Z0v0I`4I}`F+nH#n%G?=>-C{F90W`u4|A|(cMN3an4&CRM} zj@^*AA6*Zu!oUiL zA}KoinDSK`lH8?JCGXK9=9yB3Eu+)?mPuXLn zB|}@|X7l^JS`M0wZR(3B_u3fuPhj0x7r!Vpy}rrMkC4c29{vd_OF+ptGO==t$5G4% z7;>CH36`lYqp3~y2gX*M^Il!enV~Zn6Y*;83#bhucyO4x&_N3n89#BX09Awd6MQ~< z#zTc1NPtOSao=zLvIn|2?vhPD1HFnN;^1{@tp8GONTeB5$>i0$zfDcfEV*WzyBnKc z2O^)4;iKm5ge8BnV5j~vc@mGX_k_m%9payZZkuQcjhpveh95-L2}ekYBWFx2i`aL( zKl<+O1J)5E;-OB#uG^X{r!SUG4YT448?f@J>KE9L>oS{?y=JFD;nL%0*K?y#cRS1O z`<#W=F_m9+?{`FW7Ym*|JbP#s4Ous}V)B71EmY6{s~J7IU$g86b~A(dxp$K_Z`n1; z5Cd%&xWANtY3;hwF~1Q}POK#Ua~|W3+n@$0K9d4?;^lhvzDkamR$dwBy-Q>yy-gA* z@fCb9Q7#-tjj}Y#zn@F>gBPYk1dP>dN1Vqj`dgHSjgF~g(F1d1#~BxH)H{YNum$MP zEr3)u0C`lM%|-GX%p4-|4MrMObQd&sExg6&zlr{5Usm^cmhDB4%nXFelc6UN6^Z@4=v6Dbg_4!Cj;(f z*;&v97nT&)N1og_qk;dm5A0+cR^^{%-EHv+j#I{tBiH@&=SWt`bII#!>7a9ZCFkIi z;K&b(*W;%pEQZBw%I_N^D(#YRr^AAvmJ&oH)O&=ha$8n>Sp<|tJctPB zwU?nNXto!S%7dc`B}%6W4Jj2x8SSr=Xs}TATt-2FFm+K#nLsd>b-t*$@?PmZ?(aWQ z14e^P<={l$5CwbpI);Z{BzOGl62WxNUn&5zkFQlg1SPaKsK;8sG{8GpGQ-ecGGxXP zIJw^)EDH~L>o3^S&MQ^qtq@;-@KfX^?fz#%@9-k+c~@@Fa4`q>eE zgG24n*xa7&dxP4$C-VJuA#J@?F>f-)JGxm9-msK((y2N1FjyQ-5ZMv_HMn{b^@c1JxJh?4eh(=Sx$82 z!@MjXcnUP=$qARVP4fqciE9@3d_z4vSRa%V7cO@s8*$DQuvRG^aaLawXGdeBx5|vo z=Qouj;$I2 z>L#+*#iIhOt(b?|p|?V~R$`lX(JxW4;p@I!fddy~hY-6?@t9F>$iQ$z8NAZ8*>)(H zn?i=bMDKTGL%StWjp02PP-xd_>o5Ce^|b&ftpuxUzO{`Mtz*Y&B2R#NaFCZiZwpr> 
zt<45s^2fz&m1*Y^S1AX$E5`25(eT<`$0M97_HMP@nA%Ac4EADS)U|@`p!w|plw9Gk;hFu8wa_(hpw8Q$ zH~in+v@%Hf3vByOMib)+Ncm?ke!Q`BCo{hba7pkJ1|=nl`-T#c^bAA0h}nsw@G%CZ z#qmT2RxSxkhUOv1qA04Vr6&iF60u!#hGq*+-L!IRl_HW$;d*JAbmHPdI;BdrV_<0+ z?p*bflEl~qN-C-RGzy)Fa`OgCosyFA3YK)ucvT755>=e`#T*WqxN^i+PITX31;*w| z>7a7NS|wWeck!6U?yuI+CCjLqF3%*?BxTm+-SpCPIdReYMNUeve!Wmkt? z`nKzLJ9Umv7_Lkf%^h|}sa|_$I=LPHGk@uAO!y+k+Y%MrhK!xWi#`Cw2Zyq(J!t?KuZ_2lS`^dmxHmR&Ka<=M-5`N(U^mo$Nrot8cA=Ha##XQaA zL&E2g?C@JrB*K6^Hn=OqK80V? zc-Ug!sYCFX@BLd4@Xe_2{bdRH=4jUcuqIy>Wr*RNaU6UzF#d7r+{TBv3p^bK-WkK| zVEm-0eC<=cEyZq5t+Cj>Tr9B^$o&BNTK zR5tJ;pF~jgBLCDW-Q!+wOy}w;Avw6pZ=9sChp17vXq31hhf$?5Ja+TfcyWnPdZS-y zPn8UVp`Mq~m3TTx1PrF9_7OfT|LQpgwWX4sw9Qd=>*!H0I8Bvu8BlSHqBepxyNQcb zTG+ZL$xtg18Ut}*SD+1=ND&^taoP`-A2Cn7eJWZvg4ylGK;1z@?GfM9KTvq9@R-`K zL&aDHX<=D@i4CSF?M^Nz>7^&HN3qW=Q9NmyM<1$hXfhm@(DocuWWS|Ac54^2qOkD<&pz5S|7+15gTM9icg)Yo*vSZqNvq+9hG;XCN zqv;ElrfuHLtMkV5%I{5P6Mqc7_FXi2y0bRcl0AU(j3C5*OAy5?fey?$+lCd;BF7dP zMh74St~>rPSSLjLu}U9PrC6id!ONKZwC@KOK9lcNzZP7^`;x}M5e%HA9bg&~7Hb$9 zKs>@>_FtdD#~}mIXun^dc~7|RPq;Q5hf{7a>M^#hGrrwwY|TZMe2F04E-TWaIM)TS zckI2#-(Rx$;y!mmm$lwgfG#~RQQOot|DH{viGkmIUpjtfiRx;iNtV%1*9oTLz5EeXOI zp6D5Eq!95r%-p`uK+~@Q%-^dQcUS8<9qNu&V)h;zm=5gI4jWhK8?pKA;4K{>z8p_H zbi)BV6ez&bPCJWWp*~3Bg%8O~2ZVFJ$PVz}e5o=|b&O7&s~#FBy`1SxR~uNa)`%I+ zqpECIHjr#sM{b*2q%7EiMr2NIP%j?taKv^C4xq5UbGodU|20aB=fISR4sw&eI?|yoHAN`(zqMGnlYQ% zCKOoHzowxs1O-b9LS;3nv3gWpIJ{L&aA4u^%)_h8Lz2SLI`NF~)K{|O!_XednUlyu zW1aGu2}->icGA%<^;jtvJQP{X5oON5Q9icAYnPMYmhLP#T}-z~K8x9FignL(yb3~B z2oB|gmI_BcsT6wt&oXw*N`Y;%$5<+8svo`MHxxH zn}qq%cO=hHN96b6BP`;PTiW<0T6%bzu?`#fy(~_sm`FXSmldU0{hmKWtwjQ+iua!1 zfAkj>Ppep3=-ffN3Yb#^Jcv7O#Mw)wQK;NEXX9d;+iFo1G!j= zN{*!YnWLb0v}hs6MnBrTR^D5={=#d7`*sb<$*Mh>+Px!>xHK4dyPy!9(?jG2=3oY5 z3sb`+pj$P#;JX31aGK7V0Yzh-uk-x+zLo4W2dSML;Wem_S8zu_7C&vS=WsxYe6A-^ zX!U%rXK|oskvV78`!QxCukdBXV17OfCmM)PA9A>EESkVZWKT>2NK~*!;wUT3F~K|L zYSk5*Ej7#heRJBjd4^N82GKqmXjOvwd;Cw;_x6pjXACrMc0X_SK#pwIX|Y}UYDA3A zSiMx=Xu7V`Nx&E^fsMc(7}9j(@rp_OW^kJhP%Cr`qM2v8b;-^1Hj!FL8U)y|FA<<*v|yEXrCH~zJCr#I%cx8xZQ>XxVEnkUpDAJ6R?mXlSt3KOAc(nH07zr%gakE)@g%?4f}x$^p*|hY+zaa^NUhhy$;7ZqSX}w z7PnoF@MBCyUg3#q#MxRn&c9H7;)(X)AvzV!Qrqj=aOWdjlRV~O{~dc6RCfNCns^8u zu@TxN_E|5;nwIKQCoEFBbKh3OT6BckyB3BNwzd|87_}vXV#`9nk~7?(`&vI`7P5mC z$q<2f^?`ThAhz=jJws~eipA7_S`G6k+)Nid5d~Nlr1}wjW$>7ic@r!ObTs1w1-o|W z*FM3*4>aikIU2_2dD@TufT$x|t*0`YL=OjaJo?BC4BRDyw74n9{?q2ilMeYa-JRWI za)-f7mW`WJz=Dgj@@36@e;Ou;9#kWz4YCawb;Ws8g~${<{cF$icj1lpuvMlOqqcM~ zG~@>JMFNC1IBHo-s6=Y#R0vHv$G+A=>C4kRdNA-u;P-57Z6}-;a*Fd|hq0%%wZq=6 zsn>R`w^cL72f&POT9}S0Feb%XFjD%y7jzjd8zsCS8_r8y7WVX`Wa3o%itM zz%wKw6YVXc*tCv+ZWFsx<5k5Us4BXoQ4}qzvf(_M>Se=X|oC6o7K=+~QDEw^q zq4zHO7M!QS$T~AsV*E>K7K=ofV+En#uNK*b)*yydF1sKNp+2;}Ik`IB(a9`}?{%8K zD`Ztds2&38SWP_#8Ya{XwsuN^Xh;AtSl}=m9I=5us|K8(S`5r0 zOp0v7D}JzQAgG5rOdLW9)8DbJb$hc?52pqk8>%|MJ+fkM-ZNN60ZOLd1JuELpaa->x;lc z`&TRP_U>pI58X)aQIo^Hl+oC0Xx=qfE}fm;Feb}xYZ-VQSO!OC$bVo`7g5r;^$N0_ zxsH=sl@n4|`Gs%4W=}IXxutOiI&n&5$Ph$_3Il)s|3l-};7MlhZn+7|c4lnp9Z}fhJGeNF{9qq(!$xV04 zfo_)u)if4-;yC02CzZ0Ez#(T}R}3_1SLya^tM!~SW?n9|f|K*D7H5?%P}Yx}C7!+7 zowZ?ut2quhDooT~@VIH0lXAk9L7;6393=^S^9u5MH z>rcG{WO_sTtAF~-jg0xW3}*P_JGb_m7=8yh0Q3__CbobG5C#wsmEz9!vYAs)%)vPK z1B7AL9iqZGhljFGHpB*Lr9|{HGId#P(8ECIKpIAk*p?td>#?Pcufq&T*(_e=M5Gqe z=HO4Qmk!G!n58f7j-Oc00vKRR`_s~~1{H-_r>{j_lkKhm) z@;vNgrOg)DoIfHi<<|(n7idoo@atd5=ofsP=#gpq#z*OeO8-Qx><%{C-}?@lVMK(P zll9EUq5)KC>;gXBS`_puYN0a1kcI)JRqOzu@I~j@ErO(!_m-AVwiF*C+HGLmAWLp0 zpjt@*G|1>PDF!yd>Z=yyF=Fe=%m3ujN->4zf9a#H;_?O}x34DMbriZ?hK5;R_G5<^ zw7~k^3r>8VAL9Z2C!@rvRI4t;Z=}BGvt?4sR5V&hm67Qv8Uak 
z3`~C|ZzT2Ai@cET|6=>O0*vXt@9_=vPL_eKfWU}@av9NJMEU?7TBhIUV@5vUIg5Xo z!-{{|M9h4-__i;DSBxwS^ei1vtjfWcpqA_#VFf-esx`b z4*yMX5q;1UjSRNE4rO#@#7Lu|;b`4m4nM>(NG643NQdbi1WNe9L;DgUZ z1qY$$m`Y`3BN@l{M3o?x`HaI`9CFb;Edf-Dxj(VS##W|AhQ zZJ@oJ6!CQ|0JF#*5fFrXQ^W`URX&$OOH-++TYWUywnMr)uEGxUG6AOWWI?(3Qy=Wb z?Q2;!@ywtEJWK?axb%K6$H$)5ND*IK5TDuI_leOpOufhAO-6^3lednZgp&sm4K*#} zxo&)C#N##ORQ0Llvz&Zl*BtvSf0qhI3kdlNI$nMFv&cyi`s%>~?Mbb^YzK!+-=`OK zf!-6RdEm=k86q9vp_W_RAM?bXw^H_2jreB*a^}>GHtkZ@0)*ikDi}(A23^qM8d>93 zVwcW7n_sKHwD>hW#grlzb*#M<}}w z_TK|?_lWj}!%AMD8`+6v{`ge~U%K66Hv(61{@d|cyPkd@B{6a6^Blf8OB~NEGu9pl z^*uB0ro~*PH57Em&%St~)y$f5s?+D+_ZH{b9qj=p@1~HlXRxAuQ656@c%)UQw7%yo z?;GwRfH-<;2FJ{~kjH*2wZ&J}r_ShHzsLxLQc5S1HGgIQcl(Riulc@J^Y_G2zi2$V zlAtCWw@QHXOAVv!;ea}Hyf`m*y-CY~(roHBvI`|?OdgUB^mz56vfa`s6yqEi^<-Hq zw_V2A5>bn~u6g`9J@(;48dig|fL<*#@uMN8N4vV%;Ouf@bBcN?nUxijGPn9fTwZQw zbpYMz$m5afd@O@Az^7}nY9SW-`RQ;T;!P{A4M&+jM7)v%-UIW&?d>rS`;EgAchk>! zTthxPZd_yTh{kN-joegl`KMEJ5biyQ6^3)1b55e`2j_Wm3&6zNSL(qk>?WUlWp=Mp`1Zo#MroJ)J-e0eM;sj=|j#k3eX z%OH<+mo9I6R&8%7X;UlPJCo^f2YAn3>RoynY)j#qED*K-qag-D8e$Lp6nM%uf00Sw z<*ldhwaQk?2Wqr+x#)IxvyRDb<%i9(tHn-PzkROP@wmu=7?k~qxzkAk15d!r8^{hm z&msEEn`!H8r)A)c=6pA9`@OT#+&0ImbElRad&CXh(GAkw{nld}0&+;9JOsyfuo5*2 zkgR>rZAqZStHu&91&of+_Ta$UvHq(`{^x_$PS-zCy>=C+6(ZB?u13N$tF%pf&(u9D zgzW6Gb;`ZN^*>Q#D$;GeiL$QyPACZe|H=(#PxHyP58b$}iNhbLG-W@H>eQd7n@7n{ zR}vmBBHw)IIk-~@sAz|CL^7dhE&n%VADkp5yhX*KqmQ=}A=%#u8=-*&*K7!T@fQn^m+U=l`++}4ncsZN~sTG(yfN?+J52bo{GlV%xWGx8e{z+R5~!X+53@)!k;Svw-neTh+;<%4EVd zPjr=+9Vo}l9dVdikv?{VyKhvn?qMM)t9k%}RVzxrX|yCi>X2GYc)0pXlWsoGV15lK@tNmOyASV${Lcm9(|@pHfBdjsyf{!Om% zwS2=Levq5$wr0}yo2Ak6DJR+wH62X&=>PJvf&eYn1zcD-n1BylkCY(iy9)fK0`UK> zj#0R70`YM|A+x|lnl;V0CjzTeerRvJ*yGD z+4x`NopW?;OPlUv+qQOW+qP|+J9c)sW81cE+qRt@?)c{1?(v<|r|);WzaIC$n=xu; ztTi%g&3b07npKte_q>PL6OvY27@~!q@)pRHmuYl7*wj2!8C-NpkNI;ShrUMBozGFC z2e$lI?<4OE9k0qWUwa0eRBh9QPp#}Nl~+G*<7|z>)@PDx99A3lDrQinE?LRs?#x@G zDhE)h6tqukQft;zBFmH3F=Vd&_uH`28R~4}5-^r+tVeM%C9GG69I=vV6C_~>z7`Qe zX;*3%ZBm%Zw=7`hafTd`Gwxkf)rW*74AVUb>`shGvh%dHee<<#*6;s-cof_f=tjWtN~ zgItsJlh~~dkQ{~Du>d~}msWl&JO?)u(UVd13Vrlp6KE(POTud(fYX7Lag9iP{kwn4 z-Tai>*%vRx#9W7c1A`9>;}RdY>WUq&YfoSw)fsse zcUNq0*V&!7k6rclxE|)klPZ1>8+oCX#Qi54a_ox$8iYv*GVF~vFW&;pER$eU*Y2*a zuOI$3{O@O61P^im{d6xHKS&4@TpqrIOZXd^gCaZYiSs-0JD`DExt0wFvcAgDIRigS z7A&GO@LBHAu3sA653YI_o(&i0APEQy`@hl9y9*wu>$5_g z8To%hUKkb>J%Vl$vC1U$jtv7+fu2xt{; zlgr%Vm}iLCRukQVpQ>t&rAlcQz5+L`ng9cFav|~Bx4w$1WaeeN%;aEMCrn|ZGcB}b zfRbh~WTiX8C&sYs$F{nPyDJ`JOCcRST%l!r2rWfVd5NY_M76UPt-2gp4ArJ-J_k8= z*d6%RyMfzl_`T^hs=lNEF%d_Yv^Z$qEI8^oXv?S9%RnG=fc9YRhc-!5z0?UWc}lj1 zG=p7#N;`s5XUeRk{agrB@xOf%kK_cEa1jziB9Kz47O(?FisiS#?-c@(Rt!vdpk`In zBE2$1^_XfwJ=Q+zDAdC#$Hb=lhN3(|3OCl%{(UVeHX~{;4p&3FY zn`;$HDab1*!G{GjTrssl$gbIAJ%!T-wC+u&AUQRUSPKWaP*7mx&NvSDi=;PV>?ek$ zz(%Y(Sh-FrvS<-87K$7GAc=1{O-l6xDt#r(fhJCi!WcHhG7|Up&!j}i6+x=9Z0f!_ zfR;mg{QLy`lT-YI)M`L6zM1(Vw64C&e{wMY3k>&HQtKbg;@=1@t*P!d(blu`~ zz|&zEaX&~&-y>k8dCWOXMGQ0sQHBC~FeY`V+MLn^aM9GFI3O2#FoKz7>~3GF~Hv!+yY9Q4)~6LHvX?q%#PLnP`gjTZwE$ zbCR(KhEEH=v)S@5c8})Hl=mo(kOxUh1%t4^D)WcaQ`z=l@5+~<{i2!=y>&e_(h27Enx%wB0|}L{}1lw z|K#@mpWV%)6lEgj`7wNoC7LYSHf^9$`RoPk6Y*Fb6q0yC30ic~M3{>aX)GAA1DR6k znRGPe2ID__d~-s{KP5om35!AH_Dl@yJQ>4eHn%dkxu-kuCtkalo;Oa_7dg7LT~+CG@K-DX=Sg`XQ9z8o^M9q6FhB4tY;=ZK*mBE;tZJD)6{&Vdkr87g{Fv{n z(N_CjE8QaB@3W;~C+u8c-HfEL6+I)xD5*(Rsv+-V5}Qo@vpG39NMp$Jfnz$150R0R zEKlWyg(LoXaUeoyw|1o7Z0wQ4h`T9~QZ&Kc3Uj>khpYUKeVekhL zVC=SzlVoyIypLC8jF7r_M2^3UpmK(Vy@!c%p`xDHo0Ps+nxB7=Vhm@Tm)vu@Q&M!g z-`{7tW0cz4e}JO&byxuylyp+O3|G%xLcIeHgP(|ujKse-`~hxyAm`Uj-`)r@~= zf&ce5nVHz?*}MPQ9DDb_OM+nnuo(GuTFqBs|EH(_x0U(hVfBAkCL{h|Rc2o4ZV#IB 
z;co&J{<}a;X#avh-~LGp|4ilekB9MpmP)At$}H)tg0`>nm9ybL_PT#p`mYuJKNQXD ze<~U@(%oDHk`oj1-{nJ1JStd2trfhaI|4_-Pr<~*h1%|WsR0x5OI5|h8 zP|$0bhRUTVfIEPYpq0>QnSZ~I-ZtWrU8@`J+91=M9e9i!-cQXov$lGw^rqKFAEk*j zI6}1twax0?qlFFx${>Na!03f>ew|X-K3!kpwe;o$jXwp7l$}P`=fI3Ua=LM71L;-ohI)_A|sH9pbvA3LD#_8#tXd1OwbdyY>+ET2cNaBOU z5h3+R=CKEA+BXREa!w78Pe=~YN6sdfYo}2`;5+qfm9Vju_e%jzR)L__N%kqbXqSl4 zi+O=W2@5q9{kXti9-6MUgTgVEb=fC25U{1KiF!P`gB$$V|G3}Q@s8^r`gH~9M57m= zK)lKpF5fS4mt5qMW|k;07C8q()XCw#XridF$5194+VUSoYgjMEi1w?<9S=owo^g!mh_>jXl;&ihryD%U*DYLlmj-Nf;^Ws#9+J$6b*^3*9Zy?Te6;hEh#HkZ63lHV_ZUEOC zZ;b!UN%?zXhHlWy-oLgVHZvHId}^6jw9JD}pS6sYjUiyw=%jyEX{{$BvRY7or%AMN zz22~rrg4s2XGrmQ&x=>AJe>=BGI?JF{=y(C5Fi&@8#mC!3!nJHaPx9?8*U_NhDb^= zHW$yqm=X88E7k^>aNPGxIBBF>#OxIHlENxJ#mOL%MeStC2XrAeoot#Bg?M8E@@dBi z7ncfMz;lv8J3x1rSZzp=ykIzrsDg);-cjeB6j@RH5d8GqzncBZrfr+6k2gF&7hnWYNVsem~hpxjFg#)RHJ% zKLHzAdi2{3L{(}dqB~3ee1D5bQwXXRO*{otyb&&u)qEE^brUvY5|QVUnzo4vUJ3uP z`7@&r-mO;h5c8Sxv}p2vbYWdsp#(Oy>@#J>OwHICj3Ql^h`+}P$8<0-hr!fiK*qS6 zQ&(L*<<4P__GPpl!IxD6;}iyf5KL`lj1&@+I8q(K9@F5y|2V*j0Hj*?=##_ejl^Y| zO+#Xz8y6!D!Xd_Nii2}gJv1O-2x3mtjRbnz5H}1FtLFkP`!Rg!Kd>AGjnI zGO~8TG;UrF_segsmJ#kWM~x{@!Sr&n7F}fb-%fuewJfJ>cLCf}+85;|C#+eK-LKZv zcf$TO?iX6!CcoCGlI6W3q3`$j~QF58ORKnVB%(W zZR{uF2C^m;UnGc{MXBOM}~C_M(La9&Bs zkzJ#kO*v6$QnmWzt<|`vYQ4Os%T`)b(keB+UxFv%-5Fgp=76wF-D{dLCky37;+Fu8yric;>JGSvB~^0r`vuJ zRt9=jtdYlh>i!==d{CZbV`#`*?E6S@qe@$dU528{dEM@@1*kLe>K0gD_j&e*45X54 zMo1FvMi9A7O=K9}t z&!1WLHhD&D`m|N3Jo>?YKCBCTW^5xD<&UG0Noxt=oHuQ zF&Xp`SPMsL9mlXeLt00-AjDX`-2@(%QVKPdm}s&IQyWwU&#_vijH#PZZ!PPYE(;Nf z4Q4wBy4|l5%CWA-R~76OLez&8y%kS7c=3NcEZsAFTV!X-WULURg>QTik$ty!N?!I~xSReoX+bH%Xw~AQ%nmb@& zr*?T}XKp603MW;jv6I}Gpy|btc=#z5*V#JQ;}|aW<%gyb#x#V+!qmxxg6De_ouW?X zuSKwfZEyZA8)r}6=SIRCDjG2bAP#falrpY!B6hC`uW)O^V_+{&hFhV$SJ!8$|z)R;cn4Fm6QEac)40ZFTRU7!L7&uv6!z0F0T-(g(` z$cjCjK>2K{Ze@-rU&^e+)!73svE-OIIkH6MHNrkrMt|!NZI$;o^uYK+GbNYm#qrvw zthuyA%EF-g8<4uiL|?LpmyRiP6N$qg4={9&{R}z|ReRJ!$=%;>CK5IS;TEj+kXN}( zn7@K5c252cc{U(%qR+Hy@p zCfj>QA~Uz0(nyh-<~DwKbS+i%gqq6=C^^Zm`<#ladPFJU0kx15_^?S8@x++J3u&nR z4UZdf#}sR+;mKTgYA)RAyuO92!lP~SsWZ@SbMmV!_s1UH<@+qYexZWtMpJ@ZKab)^ zw!I6fYExqXVhmKp-_!Ey>Q3L#ea#vFSZn>^-1Q&UY5!@){2%6yf0>8>S4LhLND)eb z{~Zr6wtwvs{@iW)|IyZ~ubhe+^J{mu zy9O0YE&5;l$Z^GQi2)|GLG?dkX5cx(p9BTvTI%qOI~XkDHN ze7e^Vc-F_x5qL$I-IL#0jpA2y7UUO!#tdooRXUT#Nxc@RM$k#A)fhsLwP6kl%a&_o z&(!J8+#i^!0nf#?kO^Xzi&}w~#im@hT=@gJ@S|rk>$CVnw{~Ge*lF+vWEfUG0HM2f0M#m1V*Ns;Z0K&GZ|I;n3Dj+)qZG#ALS zq)+ycu9_UjX}MY*vBuN8w3ff;Ypruwwf!u&zIotTo!+WG1S+jOEZ{ z6;zt0Is&Po`9rED)46FeG{#(FKg@C36rn2*&xTb?<&0q0aLYGYEG=yE(G>d_g&@bT zwFH%XAdf4wb#$%H?XA>M8kV#g8n*(L*2Krt8rxJu&RA@<8fH{Nm>-6U<$-0ZZ;mJT zs}b*(m3y+QDSW%^7S?YL8`Fji z4lD?`9)2`)U%)UX)$E}(sxKE;FWCgE_RIWJCx6U3W5>75Bx?gw*HSQ*W{0;iQJ@oNYAJTFoa-Nv*d7Z)&mG=jUVZ9Y5c+ ze%JRR8W3TPbe!!uIE0PwJ)ywIfd^;HfE{Y#kKCsV5o>{cz3KOJ+-7mtXp#xZ&!0C5 z>ZEP?vHTni98X0tSySIc=eLY_juffX5}f1-KGv~1JxfrP6yfA$U5s{gdH<8@S{@%| zZ?D&$M71dR`I%Dt2$o7Okgkq6UBuNGc_>FSBeFW!9DTai^Yri;(x*+)!k-pGRCPiE zE3TA(>Q)0=4_u~onrnyZ7mYz+bqLakmOQ1g;c@X^ZWwyrJgTJ48Iv>__Xulg@3uj5 zDnCIu7a>B-IM9t|5t02g7|?bqlxPA5o=4h`*>TTCN<6XNq}(aWs`Acr26E;KP)@Icw56YvMU+BTM*I2A3e56h=ZYC2|Ud z3XgGm7ese`lh+UNl!&2su-V$O_s(eL#2i`$NR*kpU!9MxNJ(lqkP*BjAQm)h$UwDs zcL*~EiI@I2BzOnEIb%DATCRhHa5WV264ld$u?8W!%j}tP;6aKup~iJAdn`|$S;pke zFA-9HYOH5i@!lT=H;favs1~ZRPU59_vUXZ?1WoUC3j)@c!15L`W01zK9^<}U>trhL zIWN4=n*=xZ6LA@7Go$$JQgON49uT!oChHN|&sSL!6N-?NmoS(vT*cwvfLGY7tC!bSKl`) z0ZD8~U(7!y(d)-(aZWj;^+IjMF^WuD){xXy5zXEI$`gn{+IkDhi*sBDWrNR#_49Vd zW3|cGP7i+@uvzvyn?C^`R=h==rt3M=7Yf3N3*JaFe@vuQd&fo{9=>)qCi|!-5z~^SQ`T=l~943bEUUOLZ6f(_RJa 
zK>mg0>e~`qHcBd47+~zk(L*6d_DF+-bb3!iEPa?p?Fe&W3R$d0Qi`d-JK7jRL&nHV zF({+(mK|i^4)YYBQEKhf3X-bRa~uB=jE~rcMnxnhrvSM{eH)UpKL~1U8&GR*x76UK zh*bmm1$?Kdr!4QUjZ#l6KlWlmSB57T@j0gIoQu z>?&}MyR3f6bm+@_SrIzn+O+#yZEH%TYkl_=4HNijP@!Pp-qqK)kFET`A76MTjQlFt z=)qSOCJX>tM$rTy6ESN`5NXy{YOBM0-LwQ;rQd+Ils|E>FxU@#IqfUl^h@k+LINYv zd}vT)vw=;h=l31xX)d`Huozt+x8w4RdIXo)6+#xk3a}N|wA*oi43P+jBorUXEK_IN z+Ui;R5G}q1t#k-kZV@s@)i>VVzll_Eb#0R`R>-t2K~*)naogmXH6Cd2w6jenb-Ui2 z^*-BFbn5u+p1fR#Hi27Sdw^JUwLRU38KZLWnL0dEVBx#p}ET=iEWGWvbt7j+W zm0eBJ`af)y-gqyy30ZoJvU5|aW+YQiK~K1wY@9u`u-ZoF7cOW9NUBH9DYRLh=X}30 zsByfD36nBg^*`A0Sq{E=nECW*l!YljrM%>^kjM^BIFB_&qh*BsR%Bqn{2VZ19g_kJ zR1bLGCE{i^wn^ExPU_a)(}4cySM=WY>Cljk`c%o1GYP$-SO>=Jom4~SV#M|O%E`SAxy1+`Ix0$_6J3YHZAiRN^NBQ^zqB+>ZsmwvLhk|m0jIBl>adYpCHd47 zBH!>rL9b)MlOwj_4*d^=6lIx4jHa(v?>qSq#-5- zkbYvP%oK9jVg<|yj2;@>Dlh~9*-b#dSGHwXmRpq-~JJIn;>q*55CxP>)lM^JuL#XpXy@2$|P-}wz0Gbp( z0tAf>P(AEdw4t>45z-kLS(6Y|HBLZmK-0h$$lN#MECps!kQeurjkqBH5~h}(YLc0j-$h%kP zshlduqF-^%GsP@2Aw50R2CC|g8!0Zn9vj00C<7q`tjEp)y>xU(S)>(^S^-mPQ0ypC z7CJ7mdwc_pT9dcfFhnvG_zPb1*b%>=bVfC42ruwt_P1%wS)Cf3ogFK#H7NJr@TBKZT1(EH-K8?>9?Veqim_{1lqK@&?cZu!oy`F7!P zPI0}8KK-WLiIYlG?TY$!e!CO^1$3@J+zTQf1p9)erWDl?bN!H3I|7qtHAiDq|Hdeb zpC!^nI@mLd3*~w*;1(G?hawOz4 zg!kSEKUnVUkmj}&{vA5FD`Ky0(Q*^2g;m-K*kPh_e4T!uLe;F#pX07%n*7&HhC|0e z=tr=lxBm+iiz=3-arn~64pyUT99C*_$_dpN>hem~_zx*fzWqqI6Uj1_YeCSh`drZ{ zJMDa@5V8k$Jx9@K3N>xHI}G z=r;-36O#dLawADK;_yf}a)2CR%JBN| zikSp2>esVf@!Swa+L`N*G3$D}3$7?5N#hQ!(ndsN>-+R!!v|&guVBg`yx0;w;H%!w z+X(*qi}C@Y2r_%2(vd&o5~V8n^Ms7es?NfNK6`sMhoTlX;MXhF8^ggiXzQAz@c8x& z)Ww9QujJGcOdaRB;{lu&Y$Ed&>Z&(VyPW=0ED)yyVB@$M{Lx1ps!$Q4q}(D>7SbvY zQoES`Qwixtsm|+a8_!&u&u(jux~FOxPdQ4W13#~}Ziv7X)F7bjS~U}!!?s;=$|X=e z?hNrGB^O`NB|z9$Se(Z#E5OCF`{Ln_O0_rxRfF;v8`#`E)9_kn`s_5T+oP)npKaOJ zQ_#acdD^dca+k$dHch@&b-1T?O}GY4yXGsGogdk5iYaMUJL%i}uo*Q`t1eHP7DF5u zR>U$~saD$+rmTxWX{Jk)gQl9_rRQtiJ?0cDWb;y^JmyMLrK)S!zA}b3h|Wx;VH@P7 zE+5^Rus9W0k=VFpgqbcamss@YrMcJV;>}DPJ)l?~5j_knAND;|&RjBeWPeqhBf@e= zS-((})=I(?!BXEO$GEC2`Ln|Gi1>FHJ~)zyhH*jv_-m{{DV|7~BI$^P?Xv~E;wDiK zK$znpxj(?n_#M9?KLFW5aZLE4o-n1p9Ki>=pSEC`lMChuvm&81Q_!u5v~=Q{g>(@% zRAL%j-7|d5lU;oqP4kdSoa=m#XQ0oyEQ|AYq;Je`^P5MD+TD}|`-|EvPkSCUpZB>+ z%~*Fy!z`qfV!ZII^R0I5cWXDZYuWR!oRCjimD%d)Uy4>PzIs+<-zV(9u1_TaV^=n_ z8S*sGzue)X6oK&+SIT;nRO?M6>J&}lOK5#_*Y`5YR8EqLop(u-oqPVqUI8~9o!EAd z6{F0qmrZh5UXQ#U%}1~#V6 zb~LS@H;i?MxY`}Uy&S{1;}U`JgnfkxeYufn%Ht%4A}<0gm2jXd9HG54hsfvmp(F8g1c|O|vy-U=W~i;%3htZw1V#BjHxpQfyHmtoL{7 zRp&<76D1SO&_??ioL~E$p0n@USl0J*mDgy8UilmAp!p8h)kGd70B{^*GXi=f$UQ{- zn9Z7KTY@RMsDaTeE)%28UV3!MZ=N?0TUOW@7kpzsc9ItN4AynrX}Nql1kMRIb%h`2 z5t)}gXmQ4}gY_NLt7e*-Tm>XPfq!`a!X!GW94BtY0#O@&HEOo>Rs6hTINb?dL5*t@ zWS54Bmf9W1d@CynL<34=Em|20{_c44vg5hk#zJw)NmTI^Vck9O2yM*=C=Ihm~^Eq)x1)tW4TIOCsQwU*9omPca_$BVrBBpEn>%3!qbxPY2hR1BC0OfCa+lI zmKi$Bv{E)S$PfrzJ>9S))YG;t)>`aq3fcgx**ld z>Mx~QTYQ6^eP>I<(EH`z z7e(BLT9Ejup;4-ewkDtVpwAlz5`dt~7E!PV)bwisAX z;2yAlRo2LWI6cz(W}?*7r(ZO=__5)9ZU>i`kAeoD9?!H%h529^DAfnHCI-@&w|nWx zc#BwNtC_}ir`id}nSr$aj3k=mvN88_rl$g57E`eu8gg^gg|glk#^DgjScS#@OF{Ja`;^1P7W5%EM9>7T za7l*Yo`&|leqShOD9KT|dej#~Hi%tH`(wrNXyjyQ-DAJ$Uk=&{2YZS{9G0>-&vpvh~LcVs6*A;SS{9gQ={lfYrio% zH*(wMy$UeN3b&P95lp(c%j&qV>bR$pa>A@}Hop?QzqZeUs^<=dlelM^h+xrLw#TEx zYRin;#(Fw4WN|i<9*^(u8vb%V*NBkjeJ;bit0w%I-F}67lQa0RiT3k_eWFeEg$0bR z`g&_QDGRNbS*La?dDY$W3PskcP1nX$Y&+*Y@W7lJzVAh+#O~DDzeZ{;; zeOXwm&j;&6g@C^8cR7S~xcB?JHzP3IdRJIjC?lf3)5upqIz@U(4mbDflivj3GkOU^ zDG_g;7+-`y;RAi(JLE&7dsri>M_BjGLeZwmU2l1^DP3Xt`PNydE4#`jt#Wg*x0O%B z>yLJkZM4y5laHQs?>q18<%-|EyKHcD(qSw%iZbc^U8iGdpl4BS&zz)gdSxC_@|{Pr zuOZFEim$Y#?|utDhPCg`W$z94M-?l)M2*oqb1%oQ-I5kv4|Y%DU72xq)d!xovu@mn 
[GIT binary patch data elided]
z-N}#L&GJB&<1=;qCH3bfrn>W_gF6;k9ODaKqi&PX1*Fz2CoMf*&2sJ3>3ztZmZ8$Dgfi5oXYmTx}w?X0nOx^dl8kV4{t zD%;b>H>N&k-Xi!uDX>vo=c|#2RqHu8{(kl?jI4ToVQ@5ypl&`ypZ?OjM#wwX#G0`c z6_UjfA*qU_qHoMs{budlc?#pIjZJw%)19MjKkA#r70x4X?3#JJ{PQz4^YSHY@>;!~ z6plC4zp}H&UKr{U5Yq3U2LYTEf{H$dm6NC9!GFUb(}KL35OCBd4J|EE9eqs$aYNmI zKdim|k>s0Cnjm-?kU}6R(A9*1SA&n}+l>rP9#_*a67xFmwH6_Pq*DrL9|Axi?5nqf zD*`SD+yaeYXYXzgwX=uX`dPU;LtQ-o21Ux?FDeCYMeWjOz=86`hVrmK=k4ra|5u2_ z4*hTza4QqYj5rY8*brVYcPm$WAA8ro6$86QxF!_@6-C#`-`QQ%)7rrv+aR@|MNq7O z>z{;M3o~%LyEW9&!`jW+8vAW$*bt~j<*R}|2?YQ-AdCHr^v_=e{Ms4_cRyQeTPJ%f zD-7J4-UAyORlIYl5w`}(Adm??u#X-G+lmOgcsACa_Ew(%o{uN43jV>ImA4ks(z4}; zIDnD_nHvZ4G8JA(^!UNuZ!Ml$j!oGBz<~ly@!;Ui;m5;tpX&i@3WP;iPyz+*!ifXO zN`tq6=mP_=i-&5R-7{m)4uX2}1VJE5IA|U~E|>Ke#K227!G$*S21JQ%sw4&IsB))x5xJC}DDdyqXa3h|)DQwlFt0 z7!;q6u2m-IDiXLN;3IgzFy>e<%fQ#oY2?%p$aLh|@eD=T-;0Jk z;lLFEzdVf}5wAoVL!$mvf!jZUjN9$jF5rjTj!Z09xP;CFnH&N#IRw~N69V4zdvi<} zvuq$04#GvR3OWeHK@}X&yvhTv2zc?*=BPF&7wjXTg>iVQs0nZ-i8bq9cM8$^M#erW z3|EIe?D&Dv4!a%@zYdS@#Se%v_&&`=NFLJL{dfnMCP2njewgv*7}56LfT^LK)cew3m4Dx4b4-|q*nld!I`1H= z`;-OUC$92iPH&ECW91*y!<}Q|4sOTH2q0cg9?lM4R&FpmdsloNCu(pzFX)mG2F$;H zaLZ%94h7R?Y{u?uN8q@f{QO#YM|-H1yWfA6mDwKh{wgSIG03=msf@*%vZBHMe?=Aq zOsfe2XSV!13#Q{kn+4ZdQHD0UNA8#mu%3VnVj?<$ZfFy*TdxN893OW`U}CjVHE?FF z*fUk!YM~k!d)|*bBuH@H5)B^HtsUH-JlZ!{={`l4A>zTyLC1QOp~Hdzkdd-p#Oy!pxIGfvOD*EsNhgq zC2yiKm_>%a&9vA1B=-6RZg6S2w56f_a%jv=`abU!_o&~Z9-Vy`0bN{WCHt@9CtHV@Cz)l3b4L8UXkP~hYWPsc; z3c)T1y7@LBub2#VQCwpsS3MMK21vmmV?)7Ukznk(UY6KvJz#E(P6)W_ zv8}P-m1gCDlQCFOgN%cWJ2Z+n*&4-m=1.7.0 -python-dotenv==1.0.1 -redis>=4.0.0 -requests>=2.31.0 -tabulate>=0.9.0 -thrift>=0.16.0,<=0.20.0 -waitress -yt_dlp>=2025.3.27 -yt-dlp-get-pot==0.3.0 diff --git a/airflow/ytdlp-ops-auth/setup.py b/airflow/ytdlp-ops-auth/setup.py deleted file mode 100644 index 71b338a..0000000 --- a/airflow/ytdlp-ops-auth/setup.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file is no longer needed and will be removed. -# The packaging logic has been consolidated into the root setup.py file. 
diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO
deleted file mode 100644
index 6adf735..0000000
--- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO
+++ /dev/null
@@ -1,9 +0,0 @@
-Metadata-Version: 2.2
-Name: yt_ops_services
-Version: 1.6.2.dev0
-Requires-Python: >=3.9
-Requires-Dist: thrift<=0.20.0,>=0.16.0
-Requires-Dist: python-dotenv>=1.0.0
-Requires-Dist: psutil
-Dynamic: requires-dist
-Dynamic: requires-python
diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt
deleted file mode 100644
index d7a7433..0000000
--- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-setup.py
-./pangramia/__init__.py
-./pangramia/base_service/BaseService.py
-./pangramia/base_service/__init__.py
-./pangramia/base_service/constants.py
-./pangramia/base_service/ttypes.py
-./pangramia/yt/__init__.py
-./pangramia/yt/admin_ops/YTAccountsOpService.py
-./pangramia/yt/admin_ops/__init__.py
-./pangramia/yt/admin_ops/constants.py
-./pangramia/yt/admin_ops/ttypes.py
-./pangramia/yt/common/__init__.py
-./pangramia/yt/common/constants.py
-./pangramia/yt/common/ttypes.py
-./pangramia/yt/exceptions/__init__.py
-./pangramia/yt/exceptions/constants.py
-./pangramia/yt/exceptions/ttypes.py
-./pangramia/yt/tokens_ops/YTTokenOpService.py
-./pangramia/yt/tokens_ops/__init__.py
-./pangramia/yt/tokens_ops/constants.py
-./pangramia/yt/tokens_ops/ttypes.py
-./thrift_model/__init__.py
-./thrift_model/gen_py/__init__.py
-./thrift_model/gen_py/pangramia/__init__.py
-./thrift_model/gen_py/pangramia/base_service/BaseService.py
-./thrift_model/gen_py/pangramia/base_service/__init__.py
-./thrift_model/gen_py/pangramia/base_service/constants.py
-./thrift_model/gen_py/pangramia/base_service/ttypes.py
-./thrift_model/gen_py/pangramia/yt/__init__.py
-./thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py
-./thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py
-./thrift_model/gen_py/pangramia/yt/admin_ops/constants.py
-./thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py
-./thrift_model/gen_py/pangramia/yt/common/__init__.py
-./thrift_model/gen_py/pangramia/yt/common/constants.py
-./thrift_model/gen_py/pangramia/yt/common/ttypes.py
-./thrift_model/gen_py/pangramia/yt/exceptions/__init__.py
-./thrift_model/gen_py/pangramia/yt/exceptions/constants.py
-./thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py
-./thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py
-./thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py
-./thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py
-./thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py
-yt_ops_services.egg-info/PKG-INFO
-yt_ops_services.egg-info/SOURCES.txt
-yt_ops_services.egg-info/dependency_links.txt
-yt_ops_services.egg-info/requires.txt
-yt_ops_services.egg-info/top_level.txt
\ No newline at end of file
diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt
deleted file mode 100644
index 08ca11a..0000000
--- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt
+++ /dev/null @@ -1,3 +0,0 @@ -thrift<=0.20.0,>=0.16.0 -python-dotenv>=1.0.0 -psutil diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt deleted file mode 100644 index c6a2990..0000000 --- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -pangramia -thrift_model diff --git a/airflow/ytdlp-ops-auth/ytdlp_ops_client.log b/airflow/ytdlp-ops-auth/ytdlp_ops_client.log deleted file mode 100644 index 17030cd..0000000 --- a/airflow/ytdlp-ops-auth/ytdlp_ops_client.log +++ /dev/null @@ -1,261 +0,0 @@ -2025-04-01 14:23:28,586 - INFO - Attempting to connect to server at 85.192.30.55:9090... -2025-04-01 14:23:28,700 - INFO - Successfully connected to server -2025-04-01 14:23:28,815 - INFO - Server connection test successful -2025-04-01 14:23:28,815 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=vKTVLpmvznI -2025-04-01 14:23:32,869 - INFO - Successfully received token data from server -2025-04-01 14:23:32,870 - WARNING - infoJson attribute *MISSING* in received token_data object. -2025-04-01 14:23:32,870 - WARNING - Valid info.json was NOT received from the server. -2025-04-01 14:23:32,870 - ERROR - Failed to obtain valid info.json from the server. -2025-04-01 14:40:18,685 - INFO - Attempting to connect to server at 85.192.30.55:9090... -2025-04-01 14:40:18,800 - INFO - Successfully connected to server -2025-04-01 14:40:18,914 - INFO - Server connection test successful -2025-04-01 14:40:18,915 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=vKTVLpmvznI -2025-04-01 14:40:31,927 - INFO - Successfully received token data from server -2025-04-01 14:40:31,929 - INFO - Valid JSON with video data: Best of Dmitri Shostakovich - Essential Classical Music -2025-04-01 14:40:31,932 - INFO - Successfully saved info.json to info_json_vKTVLpmvznI_1743507631.json and latest.json to latest.json -2025-04-13 16:32:14,014 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
-2025-04-13 16:32:14,129 - INFO - Successfully connected to server
-2025-04-13 16:32:14,241 - INFO - Server connection test successful
-2025-04-13 16:32:14,241 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 16:32:23,236 - ERROR - Unexpected error: TSocket read 0 bytes
-2025-04-13 16:32:23,243 - ERROR - Traceback (most recent call last):
-  File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 533, in main
-    token_data = client.getOrRefreshToken(
-                 ^^^^^^^^^^^^^^^^^^^^^^^^^
-  File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken
-    return self.recv_getOrRefreshToken()
-           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-  File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken
-    (fname, mtype, rseqid) = iprot.readMessageBegin()
-                             ^^^^^^^^^^^^^^^^^^^^^^^^
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin
-    sz = self.readI32()
-         ^^^^^^^^^^^^^^
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32
-    buff = self.trans.readAll(4)
-           ^^^^^^^^^^^^^^^^^^^^^
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll
-    chunk = self.read(sz - have)
-            ^^^^^^^^^^^^^^^^^^^^
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read
-    self.readFrame()
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame
-    buff = self.__trans.readAll(4)
-           ^^^^^^^^^^^^^^^^^^^^^^^
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll
-    chunk = self.read(sz - have)
-            ^^^^^^^^^^^^^^^^^^^^
-  File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read
-    raise TTransportException(type=TTransportException.END_OF_FILE,
-thrift.transport.TTransport.TTransportException: TSocket read 0 bytes
-
-2025-04-13 16:33:43,822 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-13 16:33:43,933 - INFO - Successfully connected to server
-2025-04-13 16:33:44,046 - INFO - Server connection test successful
-2025-04-13 16:33:44,047 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 16:33:50,906 - ERROR - Unexpected error: TSocket read 0 bytes
-2025-04-13 17:32:58,458 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-13 17:32:58,563 - INFO - Successfully connected to server
-2025-04-13 17:32:58,668 - INFO - Server connection test successful
-2025-04-13 17:32:58,668 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 17:33:07,768 - ERROR - Unexpected error: TSocket read 0 bytes
-2025-04-13 17:36:10,276 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-13 17:36:10,388 - INFO - Successfully connected to server
-2025-04-13 17:36:10,501 - INFO - Server connection test successful
-2025-04-13 17:36:10,501 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 17:36:17,597 - ERROR - Unexpected error: TSocket read 0 bytes
-2025-04-13 18:02:37,249 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-13 18:02:37,361 - INFO - Successfully connected to server
-2025-04-13 18:02:37,478 - INFO - Server connection test successful
-2025-04-13 18:02:37,478 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 18:02:42,457 - ERROR - Unexpected error: TSocket read 0 bytes
-2025-04-13 18:03:16,782 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-13 18:03:16,890 - INFO - Successfully connected to server
-2025-04-13 18:03:16,999 - INFO - Server connection test successful
-2025-04-13 18:03:17,000 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 18:03:26,040 - ERROR - Unexpected error: TSocket read 0 bytes
-2025-04-13 18:09:56,759 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-13 18:09:56,875 - INFO - Successfully connected to server
-2025-04-13 18:09:56,988 - INFO - Server connection test successful
-2025-04-13 18:09:56,988 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s
-2025-04-13 18:10:05,434 - ERROR - Service exception: Bot detection triggered: ERROR: [youtube+GetPOT] i7SQ6ENOv5s: Sign in to confirm you’re not a bot. Use --cookies-from-browser or --cookies for the authentication. See https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp for how to manually pass cookies. Also see https://github.com/yt-dlp/yt-dlp/wiki/Extractors#exporting-youtube-cookies for tips on effectively exporting YouTube cookies
-2025-04-14 13:45:44,486 - INFO - Attempting to connect to server at 89.253.221.173:9090...
-2025-04-14 13:45:44,593 - INFO - Successfully connected to server -2025-04-14 13:45:44,702 - INFO - Server connection test successful -2025-04-14 13:45:44,702 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-14 13:45:45,560 - ERROR - Service exception: Script execution failed: file:///app/utils/tokenUtils.js:1 -import { BG, BgConfig, DescrambledChallenge } from '../node_modules/bgutils-js/dist/index.js'; // Add BgConfig, DescrambledChallenge - ^^^^^^^^ -SyntaxError: The requested module '../node_modules/bgutils-js/dist/index.js' does not provide an export named 'BgConfig' - at ModuleJob._instantiate (node:internal/modules/esm/module_job:123:21) - at async ModuleJob.run (node:internal/modules/esm/module_job:191:5) - at async ModuleLoader.import (node:internal/modules/esm/loader:337:24) - at async loadESM (node:internal/process/esm_loader:34:7) - at async handleMainPromise (node:internal/modules/run_main:106:12) - -Node.js v18.20.8 -2025-04-14 14:32:59,820 - INFO - Attempting to connect to server at 89.253.221.173:9090... -2025-04-14 14:32:59,925 - INFO - Successfully connected to server -2025-04-14 14:33:00,031 - INFO - Server connection test successful -2025-04-14 14:33:00,031 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-14 14:33:12,563 - ERROR - Service exception: Bot detection triggered: ERROR: [youtube+GetPOT] i7SQ6ENOv5s: Sign in to confirm you’re not a bot. Use --cookies-from-browser or --cookies for the authentication. See https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp for how to manually pass cookies. Also see https://github.com/yt-dlp/yt-dlp/wiki/Extractors#exporting-youtube-cookies for tips on effectively exporting YouTube cookies -2025-04-14 14:58:31,413 - INFO - Attempting to connect to server at 89.253.221.173:9090... -2025-04-14 14:58:31,518 - INFO - Successfully connected to server -2025-04-14 14:58:31,623 - INFO - Server connection test successful -2025-04-14 14:58:31,624 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-14 14:58:43,453 - ERROR - Service exception: Bot detection triggered: ERROR: [youtube+GetPOT] i7SQ6ENOv5s: Sign in to confirm you’re not a bot. Use --cookies-from-browser or --cookies for the authentication. See https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp for how to manually pass cookies. 
Also see https://github.com/yt-dlp/yt-dlp/wiki/Extractors#exporting-youtube-cookies for tips on effectively exporting YouTube cookies diff --git a/airflow/ytdlp-ops-auth/ytdlp_ops_client.py b/airflow/ytdlp-ops-auth/ytdlp_ops_client.py deleted file mode 100644 index f42335b..0000000 --- a/airflow/ytdlp-ops-auth/ytdlp_ops_client.py +++ /dev/null @@ -1,721 +0,0 @@ -#!/usr/bin/env python3 - -from typing import Dict, List, Optional, Any -import argparse -import csv -import datetime -import json -import os -import re -import subprocess -import sys -import time -import uuid -import traceback -import logging -import signal -from pathlib import Path -from tabulate import tabulate -import yt_dlp - -def signal_handler(sig: int, frame) -> None: - """Handle shutdown signals gracefully.""" - logger.info(f"Received signal {sig}, shutting down...") - # Clean up any resources here - sys.exit(0) - -# Register signal handlers -signal.signal(signal.SIGINT, signal_handler) -signal.signal(signal.SIGTERM, signal_handler) - -# Import the patch for Thrift exceptions -try: - import os - from thrift_exceptions_patch import patch_thrift_exceptions - # Explicitly call the patch function to ensure it's applied - patch_thrift_exceptions() - print("Applied Thrift exceptions patch for compatibility") - if 'AIRFLOW_HOME' in os.environ: - print("Running in Airflow environment - patch is essential") - else: - print("Not running in Airflow environment, but patch applied anyway for consistency") -except ImportError: - print("Could not import thrift_exceptions_patch, compatibility may be affected") - print("If running in Airflow, this may cause 'immutable instance' errors") -except Exception as e: - print(f"Error applying Thrift exceptions patch: {e}") - -# --- Python Path Setup --- -# Ensure the script can find necessary modules, especially Thrift-generated code. -# Assumes the script is run from the project root or the path is adjusted accordingly. 
-project_root = Path(__file__).parent.absolute() -gen_py_dir = project_root / "thrift_model" / "gen_py" - -# Add project root to sys.path (needed for the 'pangramia' symlink) -if str(project_root) not in sys.path: - sys.path.insert(0, str(project_root)) - -# Verify paths for debugging -# print("Project Root:", project_root) -# print("Project Root:", project_root) -# print("Gen Py Dir:", gen_py_dir) -# print("Sys Path:", sys.path) -# --- End Python Path Setup --- - -from thrift.transport import TSocket, TTransport -from thrift.protocol import TBinaryProtocol - -try: - from pangramia.yt.tokens_ops import YTTokenOpService - from pangramia.yt.common.ttypes import JobTokenData, TokenUpdateMode, JobState - from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException -except ImportError as e: - print(f"Error importing Thrift-generated modules: {e}") - print("Please ensure you have run './generate-thrift.py' successfully from the project root.") - print(f"Current sys.path includes: {gen_py_dir}") - sys.exit(1) - -# Configure logging -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(levelname)s - %(message)s', - handlers=[ - logging.StreamHandler(), - logging.FileHandler('ytdlp_ops_client.log') - ] -) -logger = logging.getLogger(__name__) - -def get_info_json(token_data): - """Get infoJson from token_data""" - if not hasattr(token_data, 'infoJson'): - logger.error("infoJson attribute missing in token_data") - raise ValueError("Server response missing infoJson") - - if not token_data.infoJson or token_data.infoJson == "{}": - logger.error("Empty infoJson received from server") - raise ValueError("Empty infoJson received from server") - - logger.info(f"Using infoJson from server response ({len(token_data.infoJson)} bytes)") - return token_data.infoJson - -def is_valid_json(json_str): - """Check if a string is valid JSON and not empty""" - if not json_str or json_str == "{}" or json_str == "": - logger.warning("Empty JSON string received") - return False - - try: - data = json.loads(json_str) - - # Check if it's an empty object - if isinstance(data, dict) and not data: - logger.warning("Empty JSON object received") - return False - - # Check if it has an error field - if isinstance(data, dict) and ('error' in data or 'errorCode' in data): - # It's valid JSON but contains an error - logger.warning(f"JSON contains error: {data.get('error', 'Unknown error')} (code: {data.get('errorCode', 'none')})") - return True - - # Check if it has at least some basic fields - if isinstance(data, dict) and ('id' in data or 'title' in data): - logger.info(f"Valid JSON with video data: {data.get('title', 'Unknown title')}") - return True - - # Check if it has token_data which is important - if isinstance(data, dict) and 'token_data' in data and data['token_data']: - logger.info("Valid JSON with token_data") - return True - - logger.warning("JSON is valid but missing expected fields") - return True - except json.JSONDecodeError as e: - logger.warning(f"Invalid JSON: {e}") - return False - except Exception as e: - logger.warning(f"Unexpected error validating JSON: {e}") - return False - -def extract_video_id(url: str) -> Optional[str]: - """Extract video ID from a YouTube URL.""" - # If it's already a video ID - if re.match(r'^[a-zA-Z0-9_-]{11}$', url): - return url - - # Handle youtu.be URLs - youtu_be_match = re.search(r'youtu\.be/([a-zA-Z0-9_-]{11})', url) - if youtu_be_match: - return youtu_be_match.group(1) - - # Handle youtube.com URLs - youtube_match = 
re.search(r'(?:youtube\.com/(?:watch\?v=|embed/|v/)|youtube\.com/.*[?&]v=)([a-zA-Z0-9_-]{11})', url) - if youtube_match: - return youtube_match.group(1) - - # Handle shorts URLs - shorts_match = re.search(r'youtube\.com/shorts/([a-zA-Z0-9_-]{11})', url) - if shorts_match: - return shorts_match.group(1) - - return None - -def list_available_formats(url: str, args: argparse.Namespace) -> Optional[List[Dict[str, Any]]]: - """List available formats for a YouTube video.""" - ydl_opts = { - 'quiet': not args.no_quiet if hasattr(args, 'no_quiet') else True, - 'no_warnings': True, - 'skip_download': True, - 'extract_flat': True, - } - - try: - with yt_dlp.YoutubeDL(ydl_opts) as ydl: - info = ydl.extract_info(url, download=False) - - if not info: - logger.error("Could not retrieve video information") - return None - - formats = info.get('formats', []) - - if not formats: - logger.warning("No formats available for this video") - return None - - # Create a table of available formats - format_table = [] - for f in formats: - format_table.append({ - 'format_id': f.get('format_id', 'unknown'), - 'ext': f.get('ext', 'unknown'), - 'resolution': f.get('resolution', 'unknown'), - 'fps': f.get('fps', 'unknown'), - 'vcodec': f.get('vcodec', 'unknown'), - 'acodec': f.get('acodec', 'unknown'), - 'filesize': f.get('filesize', 'unknown'), - 'format_note': f.get('format_note', '') - }) - - return format_table - - except Exception as e: - logger.error(f"Error listing formats: {e}") - return None -def suggest_best_formats(formats: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """Suggest best formats based on resolution and codec.""" - - best = [] - seen_resolutions = set() - - # Prioritize higher resolutions and certain codecs - preferred_codecs = ["vp9", "avc1", "av01"] # In order of preference - - for f in sorted(formats, key=lambda x: ( - -int(x.get('height', 0) or 0), # Higher resolution first - preferred_codecs.index(x.get('vcodec', '').split('.')[0]) if x.get('vcodec', '').split('.')[0] in preferred_codecs else float('inf'), # Preferred codecs - x.get('filesize', 0) or 0 # Smaller filesize - )): - resolution = f.get('resolution') - if resolution and resolution not in seen_resolutions: - best.append(f) - seen_resolutions.add(resolution) - if len(best) >= 3: # Suggest up to 3 formats - break - return best - -def load_info_json(path: str) -> Optional[Dict[str, Any]]: - """Load and validate info.json file.""" - try: - path = Path(path).resolve() - if not path.exists(): - logger.error(f"Info.json file not found: {path}") - return None - - with open(path, 'r') as f: - data = json.load(f) - - # Basic validation - if not isinstance(data, dict): - logger.error("Invalid info.json format: not a JSON object") - return None - - if 'id' not in data: - logger.warning("Info.json missing video ID") - - return data - - except Exception as e: - logger.error(f"Error loading info.json: {e}") - return None - -def save_info_json(info_json: str, video_id: str, context_dir: str) -> Optional[str]: - """Save info.json to disk and return the saved path.""" - try: - # Ensure context directory exists - Path(context_dir).mkdir(parents=True, exist_ok=True) - - # Create filename with video ID and timestamp - timestamp = int(time.time()) - output_path = Path(context_dir) / f"info_json_{video_id}_{timestamp}.json" - - # Write the file - with open(output_path, 'w') as f: - f.write(info_json) - - # Also create a symlink or copy to the standard name for compatibility - standard_path = Path(context_dir) / f"info_json_{video_id}.json" - try: - # 
Try to create a symlink first (more efficient) - if os.path.exists(standard_path): - os.remove(standard_path) - os.symlink(output_path, standard_path) - except (OSError, AttributeError): - # If symlink fails (e.g., on Windows), make a copy - with open(standard_path, 'w') as f: - f.write(info_json) - - # Save latest.json - latest_path = Path(context_dir) / "latest.json" - with open(latest_path, 'w') as f: - f.write(info_json) - - logger.info(f"Successfully saved info.json to {output_path} and latest.json to {latest_path}") - return str(output_path) - except Exception as e: - logger.error(f"Failed to save info.json: {e}") - logger.error(traceback.format_exc()) - return False - -def main(): - # Create main parser - parser = argparse.ArgumentParser(description='''YtdlpOpsService Client - -This client connects to the YTDLP Operations Server to generate tokens for YouTube videos. -The server performs SOCKS5 proxy connection testing with a 9-second timeout for early detection -of proxy issues. If a proxy connection fails, the server will immediately stop token generation -and return an error instead of trying other clients.''') - - # Add global options - parser.add_argument('--host', default=os.getenv('YTDLP_HOST', 'localhost'), - help='Server host (default: localhost or YTDLP_HOST env)') - parser.add_argument('--port', type=int, default=int(os.getenv('YTDLP_PORT', '9090')), - help='Server port (default: 9090 or YTDLP_PORT env)') - parser.add_argument('--timeout', type=int, default=30000, - help='Timeout in milliseconds (default: 30000)') - parser.add_argument('--timeout-sec', type=int, default=30, - help='Timeout in seconds (default: 30, overrides --timeout if provided)') - parser.add_argument('--context-dir', default='.', help='Context directory to save info.json (default: .)') - parser.add_argument('--load-info-json', help='Path to existing info.json file to load') - parser.add_argument('--framed-transport', action='store_true', - help='Use TFramedTransport instead of TBufferedTransport for handling very large messages') - parser.add_argument('--force-framed-transport', action='store_true', - help='Force the use of TFramedTransport (recommended for large messages)') - - # Create subparsers for commands - subparsers = parser.add_subparsers(dest='command', required=True, help='Commands') - - # getToken command - get_token_parser = subparsers.add_parser('getToken', help='Get token for a YouTube URL', - description='''Get token for a YouTube URL - -This command connects to the server to generate tokens for a YouTube video. -The server will test any configured SOCKS5 proxy with a 9-second timeout. 
-If the proxy connection fails, token generation will stop immediately with an error.''') - get_token_parser.add_argument('--url', required=True, - help='YouTube URL to process') - # --format removed, format/quality is determined by the server or embedded in the command - get_token_parser.add_argument('--account_id', default='default', - help='Account ID (default: default)') - get_token_parser.add_argument('--list-formats', action='store_true', - help='List available formats for the video') - - args = parser.parse_args() - - # Handle info.json loading - if args.load_info_json: - info_json = load_info_json(args.load_info_json) - if info_json: - print("Loaded info.json:") - print(json.dumps(info_json, indent=2)) - return - - transport = None - try: - # Ensure context directory exists and is writable - try: - Path(args.context_dir).mkdir(parents=True, exist_ok=True) - test_file = Path(args.context_dir) / "test.txt" - test_file.touch() - test_file.unlink() - except Exception as e: - logger.error(f"Could not access context directory {args.context_dir}: {e}") - print(f"Error: Could not access context directory {args.context_dir}") - sys.exit(1) - - try: - # Check if we should use framed transport for very large messages - use_framed_transport = args.framed_transport or args.force_framed_transport or os.environ.get('USE_FRAMED_TRANSPORT', '').lower() in ('1', 'true', 'yes') - logger.debug(f"Using framed transport: {use_framed_transport}") # Changed to DEBUG - - # Create socket with configurable timeout, force IPv4 - socket = TSocket.TSocket(args.host, args.port, socket_family=2) # AF_INET = 2 for IPv4 - - # Use timeout-sec if provided, otherwise use timeout in milliseconds - if args.timeout_sec is not None: - socket.setTimeout(args.timeout_sec * 1000) # Convert seconds to milliseconds - logger.debug(f"Using timeout of {args.timeout_sec} seconds") # Changed to DEBUG - else: - socket.setTimeout(args.timeout) # Use timeout from CLI in milliseconds - logger.debug(f"Using timeout of {args.timeout} milliseconds") # Changed to DEBUG - - # Always use TFramedTransport to match the server - transport = TTransport.TFramedTransport(socket) - logger.debug("Using TFramedTransport for large messages") # Changed to DEBUG - - protocol = TBinaryProtocol.TBinaryProtocol(transport) - client = YTTokenOpService.Client(protocol) - - logger.info(f"Attempting to connect to server at {args.host}:{args.port}...") - try: - transport.open() - logger.info("Successfully connected to server") - except TTransport.TTransportException as e: - logger.error(f"Connection failed: {str(e)}") - print(f"Error: Could not connect to server at {args.host}:{args.port}") - print(f"Reason: {str(e)}") - sys.exit(1) - - # Add connection test - try: - client.ping() - logger.info("Server connection test successful") - except Exception as e: - logger.error(f"Server connection test failed: {e}") - raise - except TTransport.TTransportException as e: - logger.error(f"Connection failed: {str(e)}") - logger.error(f"Could not connect to {args.host}:{args.port}") - sys.exit(1) - except Exception as e: - logger.error(f"Connection failed: {str(e)}") - logger.error(traceback.format_exc()) - sys.exit(1) - - - if args.command == 'getToken': - url = args.url - # format_codes removed - - # Handle format listing - if args.list_formats: - formats = list_available_formats(url, args) - if formats: - print("\nAvailable formats:") - print(tabulate(formats, headers="keys", showindex=True)) # Show index for format selection - - # Suggest best formats based on resolution 
- best_formats = suggest_best_formats(formats) - if best_formats: - print("\nSuggested formats:") - print(tabulate(best_formats, headers="keys")) - else: - print("No formats available or could not retrieve format information") - return - elif args.youtube_url: - url = args.youtube_url - format_code = args.format - print("Warning: --youtube-url is deprecated, use 'getToken --url' instead") - else: - print("Please provide a YouTube URL using 'getToken --url' command") - return - - # Get token for URL - try: - # Get token for URL - logger.info(f"Requesting token for URL: {url}") - token_data = client.getOrRefreshToken( - accountId=args.account_id, - updateType=TokenUpdateMode.AUTO, - url=url - ) - - if not token_data: - logger.error("Received empty token data from server") - print("Error: Received empty token data from server") - sys.exit(1) - - # Validate token data - if not hasattr(token_data, 'ytdlpCommand') or not token_data.ytdlpCommand: - logger.error("Token data missing required ytdlpCommand") - print("Error: Token data missing required ytdlpCommand") - sys.exit(1) - - logger.info("Successfully received token data from server") - - # Log all attributes of token_data for debugging - token_attrs = [attr for attr in dir(token_data) if not attr.startswith('__') and not callable(getattr(token_data, attr))] - logger.debug(f"Received token_data attributes: {token_attrs}") - - # Handle case where token_data is a dict-like object - if hasattr(token_data, 'items'): - # Convert to dict if needed - token_dict = dict(token_data.items()) - logger.debug(f"Token data as dict: {token_dict}") - - # If we have JSON data directly in the response - if isinstance(token_dict.get('infoJson', None), str): - received_info_json = token_dict['infoJson'] - elif isinstance(token_dict.get('data', None), (dict, str)): - # Try to use the data field if it exists - data = token_dict['data'] - if isinstance(data, str): - received_info_json = data - else: - received_info_json = json.dumps(data) - else: - # Create info.json from available fields - info_data = { - "id": token_dict.get('id', extract_video_id(url)), - "title": token_dict.get('title', ''), - "formats": token_dict.get('formats', []), - "timestamp": int(time.time()), - "ytdlp_command": token_dict.get('ytdlpCommand', '') - } - received_info_json = json.dumps(info_data) - else: - # Handle case where token_data is a regular object - received_info_json = getattr(token_data, 'infoJson', None) - - if received_info_json: - logger.debug(f"Received info.json data ({len(received_info_json)} bytes)") - if len(received_info_json) > 100: - logger.debug(f"Preview: {received_info_json[:100]}...") - else: - logger.warning("No valid info.json data found in response") - - except PBServiceException as e: - logger.error(f"Service exception: {e.message}") - if hasattr(e, 'errorCode'): - if e.errorCode == "BOT_DETECTED": - print(f"Error: {e.message}") - print("\nYouTube has detected bot activity. Authentication is required.") - - # Print suggestions if available - if hasattr(e, 'context') and e.context and 'suggestions' in e.context: - print("\nSuggestions:") - for i, suggestion in enumerate(e.context['suggestions'], 1): - print(f" {i}. {suggestion}") - else: - print("\nTry:") - print(" 1. Use --cookies-from-browser to pass authentication cookies") - print(" 2. Export cookies from a logged-in browser session") - print(" 3. Try a different client type (ios, android, mweb)") - print(" 4. Use a different proxy or IP address") - print(" 5. 
Try again later") - - sys.exit(1) - elif e.errorCode in ["SOCKS5_CONNECTION_FAILED", "SOCKS5_TIMEOUT", "SOCKS5_CONNECTION_REFUSED", - "SOCKS5_CONNECTION_TIMEOUT", "SOCKS5_HOST_NOT_FOUND", "SOCKS5_NETWORK_UNREACHABLE"]: - print(f"Error: {e.message}") - print("\nSOCKS5 proxy connection failed. Please check your proxy settings.") - - # Provide more specific guidance based on error code - if e.errorCode == "SOCKS5_TIMEOUT" or e.errorCode == "SOCKS5_CONNECTION_TIMEOUT": - print("The proxy server did not respond within the timeout period (9 seconds).") - print("This could indicate network congestion or a proxy server that's overloaded.") - elif e.errorCode == "SOCKS5_CONNECTION_REFUSED": - print("The proxy server actively refused the connection.") - print("This usually means the proxy server is not running or is not accepting connections on the specified port.") - elif e.errorCode == "SOCKS5_HOST_NOT_FOUND": - print("The proxy host could not be resolved.") - print("Please check that the hostname is correct and your DNS is working properly.") - elif e.errorCode == "SOCKS5_NETWORK_UNREACHABLE": - print("The network containing the proxy server is unreachable.") - print("This could indicate network routing issues or firewall restrictions.") - - print("\nPossible solutions:") - print("1. Try using a different proxy server") - print("2. Check if the proxy server is running and accessible") - print("3. Verify your network connection and firewall settings") - print("4. If using a remote proxy, check if it's accessible from your location") - - # Exit with a specific error code for proxy failures - sys.exit(2) - elif e.errorCode == "GLOBAL_TIMEOUT": - print(f"Error: {e.message}") - print("\nThe server timed out while processing your request.") - print("This could be due to:") - print("1. Slow network connection") - print("2. Server overload") - print("3. Complex video that takes too long to process") - print("\nTry again later or with a different video.") - sys.exit(3) - elif e.errorCode == "CLIENT_TIMEOUT": - print(f"Error: {e.message}") - print("\nA client-specific timeout occurred while processing your request.") - print("The server has stopped processing to avoid wasting resources.") - print("\nPossible solutions:") - print("1. Try again later when network conditions improve") - print("2. Try a different video") - print("3. 
Check your internet connection") - sys.exit(3) - else: - print(f"Error: {e.message}") - else: - print(f"Error: {e.message}") - return - except PBUserException as e: - logger.error(f"User exception: {e.message}") - print(f"Error: {e.message}") - return - except Exception as e: - logger.error(f"Unexpected error: {str(e)}") - logger.error(traceback.format_exc()) - print(f"Unexpected error: {str(e)}") - sys.exit(1) - - # Log the entire token_data object for debugging AFTER potential exceptions - logger.debug(f"Processing received token_data: {token_data}") - - # Check if valid infoJson was received from the server - info_json = None - if hasattr(token_data, 'infoJson') and token_data.infoJson and token_data.infoJson != "{}": - if is_valid_json(token_data.infoJson): - logger.debug("Valid info.json received from server.") # Changed to DEBUG - info_json = token_data.infoJson - else: - logger.warning("Received infoJson from server, but it is not valid JSON or is empty.") - else: - logger.warning("Valid info.json was NOT received from the server.") - - # Proceed only if we have valid info_json - if info_json: - # Save info.json if present in the server response - video_id = extract_video_id(url) - if not video_id: - logger.warning(f"Could not extract video ID from URL: {url}") # Keep as WARNING - video_id = f"unknown_{int(time.time())}" - - try: - info_data = json.loads(info_json) - # Check if it contains an error - if isinstance(info_data, dict) and ('error' in info_data or 'errorCode' in info_data): - error_msg = info_data.get('error', 'Unknown error') - error_code = info_data.get('errorCode', 'UNKNOWN_ERROR') - logger.warning(f"infoJson contains error: {error_msg} (code: {error_code})") - - # If it's a bot detection error, raise appropriate exception - if error_code == 'BOT_DETECTED' or 'bot' in error_msg.lower() or 'sign in' in error_msg.lower(): - raise PBUserException( - message=f"Bot detection triggered: {error_msg}", - errorCode="BOT_DETECTION", - context={ - "video_id": extract_video_id(url), - "url": url, - "suggestions": info_data.get('suggestions', ["Try different client", "Use proxy", "Wait and retry later"]) - } - ) - except json.JSONDecodeError as e: - # This case should ideally not happen due to is_valid_json check, but handle defensively - logger.error(f"Invalid JSON received despite initial check: {e}") - print(f"Error: Received invalid JSON data from server.") - info_json = None # Ensure we don't proceed - - # If info_json is still None after checks, handle the failure case - if not info_json: - logger.error("Failed to obtain valid info.json from the server.") - print("Error: No valid video information (info.json) was received from the server.") - # Optionally, print the raw ytdlp command if available - if hasattr(token_data, 'ytdlpCommand') and token_data.ytdlpCommand: - print("\nRaw command from server (may be incomplete or require info.json):") - print(token_data.ytdlpCommand) - sys.exit(1) # Exit with error - - # --- We have valid info_json, proceed with saving and command generation --- - try: - info_data = json.loads(info_json) # We know this is valid now - - # Check if it's an error response embedded in the JSON - if isinstance(info_data, dict) and "error" in info_data: - logger.error(f"Received error report from server: {info_json}") - - # Check if this is a bot detection error - if (info_data.get('errorCode') == "BOT_DETECTED" or - "bot" in info_data.get('message', '').lower() or - "sign in to confirm" in info_data.get('message', '').lower() or - "sign in to confirm" in 
info_data.get('error', '').lower() or - "unusual traffic" in info_data.get('message', '').lower() or - "captcha" in info_data.get('message', '').lower() or - info_data.get('requires_auth') == True): - - logger.error("Bot detection error detected in info.json") - # Raise PBServiceException for bot detection - raise PBServiceException( - message=f"Bot detection triggered: {info_data.get('message', 'Authentication required')}", - errorCode="BOT_DETECTED", - context={ - "video_id": video_id, - "url": url, - "requires_auth": True, - "info_data": info_data, - "suggestions": info_data.get('suggestions', [ - "Use --cookies-from-browser to pass authentication cookies", - "Export cookies from a logged-in browser session", - "Try a different client type (ios, android, mweb)", - "Use a different proxy or IP address" - ]) - } - ) - else: - # Raise PBServiceException for other errors - raise PBServiceException( - message=f"Error extracting video info: {info_data.get('error', 'Unknown error')}", - errorCode=info_data.get('errorCode', "EXTRACTION_FAILED"), - context={"video_id": video_id, "url": url, "info_data": info_data} - ) - - # If it's a valid response, process it - if 'title' in info_data or 'id' in info_data: - print(f"Video info retrieved: {info_data.get('title', 'Unknown title')}") - saved_path = save_info_json(info_json, video_id, args.context_dir) - if saved_path: - print(f"info.json saved to: {saved_path}") - - # Create simpler base command using only the saved info.json and proxy - base_cmd = f"yt-dlp --load-info-json \"{saved_path}\"" # Quote the path - if hasattr(token_data, 'socks') and token_data.socks: - if token_data.socks.startswith(('socks5://', 'ss://')): - # Quote the proxy URL as well - base_cmd += f" --proxy \"{token_data.socks}\"" - - # Show format listing command - print("\nTo list available formats:") - format_cmd = f"{base_cmd} -F" - print(format_cmd) - - # Show download command (format is usually embedded in info.json or determined by yt-dlp) - simplified_cmd = f"{base_cmd} --simulate" # Removed format codes - - print("\nTo download (with --simulate to preview):") - print(simplified_cmd) - print("\nRemove --simulate to actually download") - else: - logger.error("Failed to save info.json file") - print("Failed to save info.json file") - else: - logger.warning("info.json appears to be valid JSON but missing expected video fields") - print("Error: Received incomplete or invalid video data") - print("This usually indicates an authentication or access issue") - sys.exit(1) - except Exception as e: # Catch errors during saving or command generation - logger.error(f"Error processing valid info.json: {str(e)}") - # Re-raise the exception to be handled by the main error handler - raise - finally: - if transport: - transport.close() - -if __name__ == "__main__": - main() diff --git a/airflow/ytdlp-ops-auth/ytdlp_utils.py b/airflow/ytdlp-ops-auth/ytdlp_utils.py deleted file mode 100644 index 292c6b7..0000000 --- a/airflow/ytdlp-ops-auth/ytdlp_utils.py +++ /dev/null @@ -1,60 +0,0 @@ -import json -import logging -import re - -logger = logging.getLogger(__name__) - -def get_info_json(token_data): - """Get infoJson from token_data""" - if hasattr(token_data, 'infoJson') and token_data.infoJson: - return token_data.infoJson - - # Log the issue for debugging - logger.warning("infoJson attribute missing or empty in token_data") - logger.info(f"Available attributes: {[attr for attr in dir(token_data) if not attr.startswith('__') and not callable(getattr(token_data, attr))]}") - - return "{}" - 
-def is_valid_json(json_str):
-    """Check if a string is valid JSON and not empty"""
-    if not json_str or json_str == "{}" or json_str == "":
-        return False
-
-    try:
-        data = json.loads(json_str)
-        # Check if it's an empty object
-        if isinstance(data, dict) and not data:
-            return False
-        # Check if it has at least some basic fields
-        if isinstance(data, dict) and ('id' in data or 'title' in data):
-            return True
-        # Check if it has token_data which is important
-        if isinstance(data, dict) and 'token_data' in data and data['token_data']:
-            return True
-        return True
-    except Exception as e:
-        logger.warning(f"Invalid JSON: {e}")
-        return False
-
-def extract_video_id(url):
-    """Extract video ID from a YouTube URL"""
-    # If it's already a video ID
-    if re.match(r'^[a-zA-Z0-9_-]{11}$', url):
-        return url
-
-    # Handle youtu.be URLs
-    youtu_be_match = re.search(r'youtu\.be/([a-zA-Z0-9_-]{11})', url)
-    if youtu_be_match:
-        return youtu_be_match.group(1)
-
-    # Handle youtube.com URLs
-    youtube_match = re.search(r'(?:youtube\.com/(?:watch\?v=|embed/|v/)|youtube\.com/.*[?&]v=)([a-zA-Z0-9_-]{11})', url)
-    if youtube_match:
-        return youtube_match.group(1)
-
-    # Handle shorts URLs
-    shorts_match = re.search(r'youtube\.com/shorts/([a-zA-Z0-9_-]{11})', url)
-    if shorts_match:
-        return shorts_match.group(1)
-
-    return None
diff --git a/ansible/.gitignore b/ansible/.gitignore
new file mode 100644
index 0000000..b0ac3ed
--- /dev/null
+++ b/ansible/.gitignore
@@ -0,0 +1 @@
+.aider*
diff --git a/ansible/.vault_pass b/ansible/.vault_pass
new file mode 100644
index 0000000..89a0cd2
--- /dev/null
+++ b/ansible/.vault_pass
@@ -0,0 +1 @@
+ytdlp-ops
diff --git a/ansible/MIGRATION.md b/ansible/MIGRATION.md
new file mode 100644
index 0000000..78f995b
--- /dev/null
+++ b/ansible/MIGRATION.md
@@ -0,0 +1,9 @@
+# Migration Notes
+
+This document tracks the process of migrating the Ansible deployment.
+
+## Guiding Principles
+
+- No changes to business logic or core functionality are permitted during this phase.
+- The focus is solely on resolving file path issues, dependency errors, and structural inconsistencies resulting from the migration of a subset of files.
+- All changes should be aimed at making the existing playbooks runnable in the new environment.
diff --git a/ansible/README-yt.md b/ansible/README-yt.md
index 1ed4ea0..0695db7 100644
--- a/ansible/README-yt.md
+++ b/ansible/README-yt.md
@@ -94,7 +94,7 @@ For faster development cycles, you can deploy changes to specific parts of the c
 
 #### Updating Only the Master Node (Fast Deploy)
 
-To sync configuration, code, and restart services on the master node *without* rebuilding the Airflow image or pulling the `ytdlp-ops-service` image, use the `fast_deploy` flag with the master playbook. This is ideal for pushing changes to DAGs, Python code, or config files.
+To sync configuration, code, and restart services on the master node *without* rebuilding the Airflow image or pulling the `ytdlp-ops-server` image, use the `fast_deploy` flag with the master playbook. This is ideal for pushing changes to DAGs, Python code, or config files.
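The README's fenced example is cut short by the hunk context that follows. A plausible full invocation, assuming the master playbook file is named `playbook-master.yml` and that `fast_deploy` is consumed as an extra variable (both assumptions, not confirmed by this hunk):

```bash
# Assumed invocation; the playbook file name and variable wiring are hypothetical.
cd ansible/
ansible-playbook -i inventory.ini playbook-master.yml -e fast_deploy=true
```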
```bash # Run from inside the ansible/ directory diff --git a/ansible/ansible.cfg b/ansible/ansible.cfg index 120a5da..4521bb2 100644 --- a/ansible/ansible.cfg +++ b/ansible/ansible.cfg @@ -1,5 +1,6 @@ [defaults] inventory = inventory.ini +remote_user = alex_p roles_path = ./roles retry_files_enabled = False host_key_checking = False diff --git a/ansible/configs/etc/ssh/sshd_config b/ansible/configs/etc/ssh/sshd_config new file mode 100644 index 0000000..8fafa28 --- /dev/null +++ b/ansible/configs/etc/ssh/sshd_config @@ -0,0 +1,42 @@ +# This file is managed by Ansible. Do not edit manually. + +# SSH port configuration - listen on both standard and custom ports +Port 22 +Port 22822 + +# Protocol settings +Protocol 2 + +# Host keys +HostKey /etc/ssh/ssh_host_rsa_key +HostKey /etc/ssh/ssh_host_ecdsa_key +HostKey /etc/ssh/ssh_host_ed25519_key + +# Authentication settings +PermitRootLogin yes +PasswordAuthentication no +PubkeyAuthentication yes +AuthorizedKeysFile .ssh/authorized_keys + +# Security settings +PermitEmptyPasswords no +ChallengeResponseAuthentication no +UsePAM yes + +# Connection settings +X11Forwarding yes +PrintMotd no +AcceptEnv LANG LC_* + +# Performance settings +TCPKeepAlive yes +ClientAliveInterval 60 +ClientAliveCountMax 3 + +# Login settings +LoginGraceTime 1m +MaxStartups 10:30:60 + +# Logging +SyslogFacility AUTH +LogLevel INFO diff --git a/ansible/configs/etc/sysctl.d/99-system-limits.conf b/ansible/configs/etc/sysctl.d/99-system-limits.conf new file mode 100644 index 0000000..759dfe5 --- /dev/null +++ b/ansible/configs/etc/sysctl.d/99-system-limits.conf @@ -0,0 +1,18 @@ +# System limits configuration for better performance + +# Enable memory overcommit for Redis to prevent background save failures +vm.overcommit_memory = 1 + +# Increase file handle limits +fs.file-max = 1000000 + +# Network tuning +net.core.somaxconn = 65535 +net.core.netdev_max_backlog = 5000 +net.core.rmem_max = 16777216 +net.core.wmem_max = 16777216 +net.ipv4.tcp_wmem = 4096 65536 16777216 +net.ipv4.tcp_rmem = 4096 65536 16777216 +net.ipv4.tcp_max_syn_backlog = 8192 +net.ipv4.tcp_slow_start_after_idle = 0 +net.ipv4.tcp_tw_reuse = 1 diff --git a/ansible/group_vars/all.yml b/ansible/group_vars/all.yml deleted file mode 100644 index e6bcca2..0000000 --- a/ansible/group_vars/all.yml +++ /dev/null @@ -1,52 +0,0 @@ ---- -# Global variables shared across all hosts - -# Docker image versions -ytdlp_ops_image: "pangramia/ytdlp-ops-airflow:latest" -airflow_image_name: "pangramia/ytdlp-ops-airflow:latest" - -# Default ports -redis_port: 52909 -postgres_port: 5432 -ytdlp_base_port: 9090 -envoy_port: 9080 -envoy_admin_port: 9901 -management_service_port: 9091 -camoufox_base_vnc_port: 5901 - -# Default UID -airflow_uid: 1003 - -# Default directories -airflow_master_dir: "/srv/airflow_master" -airflow_worker_dir: "/srv/airflow_dl_worker" - -# Docker network name -docker_network_name: "airflow_proxynet" - -# Default usernames -ssh_user: "alex_p" -ansible_user: "alex_p" - -# Default group -deploy_group: "ytdl" - -# Default file permissions -dir_permissions: "0755" -file_permissions: "0644" - -# Default rsync options -rsync_default_opts: - - "--no-owner" - - "--no-group" - - "--no-times" - - "--copy-links" - - "--copy-unsafe-links" - - "--exclude=.git*" - - "--exclude=__pycache__" - - "--exclude=*.pyc" - - "--exclude=*.log" - - "--exclude=.DS_Store" - -# Docker-Hub credentials -dockerhub_user: "pangramia" diff --git a/ansible/group_vars/all/generated_vars.yml b/ansible/group_vars/all/generated_vars.yml index 
995825c..79a77f2 100644 --- a/ansible/group_vars/all/generated_vars.yml +++ b/ansible/group_vars/all/generated_vars.yml @@ -1,7 +1,42 @@ --- # This file is auto-generated by tools/generate-inventory.py # Do not edit – your changes will be overwritten. -master_host_ip: 89.253.221.173 +airflow_image_name: pangramia/ytdlp-ops-airflow:latest +airflow_master_dir: /srv/airflow_master +airflow_uid: 1003 +airflow_worker_dir: /srv/airflow_dl_worker +ansible_user: alex_p +camoufox_base_vnc_port: 5901 +deploy_group: ytdl +dir_permissions: '0755' +docker_network_name: airflow_proxynet +dockerhub_user: pangramia +envoy_admin_port: 9901 +envoy_port: 9080 +external_access_ips: [] +file_permissions: '0644' +host_timezone: Europe/Moscow +management_service_port: 9091 +master_host_ip: 89.253.223.97 +postgres_port: 5432 redis_port: 52909 -external_access_ips: - [] +rsync_default_opts: +- --no-owner +- --no-group +- --no-times +- --copy-links +- --copy-unsafe-links +- --exclude=.git* +- --exclude=__pycache__ +- --exclude=*.pyc +- --exclude=*.log +- --exclude=.DS_Store +shadowsocks_cipher_method: aes-256-gcm +shadowsocks_fast_open: true +shadowsocks_image: ghcr.io/shadowsocks/sslocal-rust:v1.22.0 +shadowsocks_local_address: 0.0.0.0 +shadowsocks_mode: tcp_and_udp +shadowsocks_timeout: 20 +ssh_user: alex_p +ytdlp_base_port: 9090 +ytdlp_ops_image: pangramia/ytdlp-ops-server:latest diff --git a/ansible/group_vars/all/vault.yml b/ansible/group_vars/all/vault.yml index f0343a9..3893d1b 100644 --- a/ansible/group_vars/all/vault.yml +++ b/ansible/group_vars/all/vault.yml @@ -1,4 +1,8 @@ vault_redis_password: "rOhTAIlTFFylXsjhqwxnYxDChFc" vault_postgres_password: "pgdb_pwd_A7bC2xY9zE1wV5uP" -vault_airflow_admin_password: "admin_pwd_X9yZ3aB1cE5dF7gH" +vault_airflow_admin_password: "2r234sdfrt3q454arq45q355" +vault_flower_password: "dO4eXm7UkF81OdMvT8E2tIKFtPYPCzyzwlcZ4RyOmCsmG4qzrNFqM5sNTOT9" vault_vnc_password: "vnc_pwd_Z5xW8cV2bN4mP7lK" +vault_ss_password_1: "UCUAR7vRO/u9Zo71nfA13c+/b1MCiJpfZJo+EmEBCfA=" +vault_ss_password_2: "tgtQcfjJp/A3F01g4woO0bEQoxij3CAOK/iR1OTPuF4=" +vault_dockerhub_password: "dckr_pat_DmFFqwFEdXFvZlgngGY9ooBaq6o" diff --git a/ansible/host_vars/af-green.yml b/ansible/host_vars/af-green.yml deleted file mode 100644 index 35be338..0000000 --- a/ansible/host_vars/af-green.yml +++ /dev/null @@ -1,4 +0,0 @@ ---- -# Variables for af-green -master_host_ip: 89.253.221.173 -redis_port: 52909 diff --git a/ansible/host_vars/af-test.yml b/ansible/host_vars/af-test.yml new file mode 100644 index 0000000..7f00d45 --- /dev/null +++ b/ansible/host_vars/af-test.yml @@ -0,0 +1,23 @@ +--- +# Variables for af-test +master_host_ip: 89.253.223.97 +redis_port: 52909 +shadowsocks_proxies: + sslocal-rust-1087: + server: "91.103.252.51" + server_port: 8388 + local_port: 1087 + vault_password_key: "vault_ss_password_1" + sslocal-rust-1086: + server: "62.60.178.45" + server_port: 8388 + local_port: 1086 + vault_password_key: "vault_ss_password_2" + sslocal-rust-1081: + server: "79.137.207.43" + server_port: 8388 + local_port: 1081 + vault_password_key: "vault_ss_password_2" +worker_proxies: + - "socks5://sslocal-rust-1086:1086" + - "socks5://sslocal-rust-1081:1081" diff --git a/ansible/host_vars/dl002.yml b/ansible/host_vars/dl002.yml new file mode 100644 index 0000000..9ee82b3 --- /dev/null +++ b/ansible/host_vars/dl002.yml @@ -0,0 +1,23 @@ +--- +# Variables for dl002 +master_host_ip: 89.253.223.97 +redis_port: 52909 +shadowsocks_proxies: + sslocal-rust-1087: + server: "91.103.252.51" + server_port: 8388 + local_port: 
1087 + vault_password_key: "vault_ss_password_1" + sslocal-rust-1086: + server: "62.60.178.45" + server_port: 8388 + local_port: 1086 + vault_password_key: "vault_ss_password_2" + sslocal-rust-1081: + server: "79.137.207.43" + server_port: 8388 + local_port: 1081 + vault_password_key: "vault_ss_password_2" +worker_proxies: + - "socks5://sslocal-rust-1081:1081" + - "socks5://sslocal-rust-1086:1086" diff --git a/ansible/host_vars/dl003.yml b/ansible/host_vars/dl003.yml deleted file mode 100644 index e8ab03f..0000000 --- a/ansible/host_vars/dl003.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -# Variables for dl003 -master_host_ip: 89.253.221.173 -redis_port: 52909 -worker_proxies: - - "socks5://sslocal-rust-1087:1087" diff --git a/ansible/inventory.ini b/ansible/inventory.ini index bbe9ca8..1be33c6 100644 --- a/ansible/inventory.ini +++ b/ansible/inventory.ini @@ -3,7 +3,7 @@ # Edit cluster.yml and re-run the generator instead. [airflow_master] -af-green ansible_host=89.253.221.173 +af-test ansible_host=89.253.223.97 ansible_port=22 [airflow_workers] -dl003 ansible_host=62.60.245.103 +dl002 ansible_host=62.60.178.54 diff --git a/ansible/playbook-dags.yml b/ansible/playbook-dags.yml index a703cbe..c9beb54 100644 --- a/ansible/playbook-dags.yml +++ b/ansible/playbook-dags.yml @@ -4,7 +4,6 @@ gather_facts: no vars_files: - group_vars/all.yml - remote_user: "{{ ansible_user }}" tasks: - name: Sync DAGs to MASTER server ansible.posix.synchronize: @@ -35,7 +34,6 @@ gather_facts: no vars_files: - group_vars/all.yml - remote_user: "{{ ansible_user }}" tasks: - name: Sync DAGs to WORKER server ansible.posix.synchronize: diff --git a/ansible/playbook-depricated.dl.yml b/ansible/playbook-depricated.dl.yml new file mode 100644 index 0000000..c2035a2 --- /dev/null +++ b/ansible/playbook-depricated.dl.yml @@ -0,0 +1,73 @@ +--- +- name: Deploy Airflow DL Worker Stack + hosts: airflow_workers + vars_files: + - group_vars/all.yml + - group_vars/all/vault.yml + pre_tasks: + - name: Announce fast deploy mode if enabled + debug: + msg: "🚀 FAST DEPLOY MODE ENABLED: Skipping Docker image builds and pulls. 
🚀" + when: fast_deploy | default(false) + run_once: true + tasks: + + - name: Ensure worker directory exists + file: + path: "{{ airflow_worker_dir }}" + state: directory + owner: "{{ ansible_user }}" + group: "{{ ansible_user }}" + + - name: Template .env.worker + template: + src: templates/.env.worker.j2 + dest: "{{ airflow_worker_dir }}/.env" + mode: '0600' + + - name: Template docker-compose file for Airflow worker + template: + src: ../airflow/configs/docker-compose-dl.yaml.j2 + dest: "{{ airflow_worker_dir }}/configs/docker-compose-dl.yaml" + mode: '0644' + + - name: Build Airflow worker image from local Dockerfile + community.docker.docker_image: + name: "{{ airflow_image_name }}" + build: + path: "{{ airflow_worker_dir }}" + dockerfile: "Dockerfile" + source: build + force_source: true + when: not fast_deploy | default(false) + + - name: Build Camoufox image from local Dockerfile + community.docker.docker_image: + name: "camoufox:latest" + build: + path: "{{ airflow_worker_dir }}/camoufox" + source: build + force_source: true + when: not fast_deploy | default(false) + + - name: Pull ytdlp-ops-server image only + community.docker.docker_image: + name: "{{ ytdlp_ops_image }}" + source: pull + when: not fast_deploy | default(false) + + - name: Generate dynamic configs (camoufox + envoy) + shell: + cmd: "docker compose -f configs/docker-compose.config-generate.yaml run --rm config-generator" + chdir: "{{ airflow_worker_dir }}" + + - name: Start worker services + community.docker.docker_compose_v2: + project_src: "{{ airflow_worker_dir }}" + files: + - configs/docker-compose-dl.yaml + - configs/docker-compose-ytdlp-ops.yaml + - configs/docker-compose.camoufox.yaml + state: present + remove_orphans: true + pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" diff --git a/ansible/playbook-dl.yml b/ansible/playbook-dl.yml index 2822bab..e69de29 100644 --- a/ansible/playbook-dl.yml +++ b/ansible/playbook-dl.yml @@ -1,73 +0,0 @@ ---- -- name: Deploy Airflow DL Worker Stack - hosts: airflow_workers - vars_files: - - group_vars/all.yml - - group_vars/all/vault.yml - pre_tasks: - - name: Announce fast deploy mode if enabled - debug: - msg: "🚀 FAST DEPLOY MODE ENABLED: Skipping Docker image builds and pulls. 
🚀" - when: fast_deploy | default(false) - run_once: true - tasks: - - - name: Ensure worker directory exists - file: - path: "{{ airflow_worker_dir }}" - state: directory - owner: "{{ ansible_user }}" - group: "{{ ansible_user }}" - - - name: Template .env.worker - template: - src: templates/.env.worker.j2 - dest: "{{ airflow_worker_dir }}/.env" - mode: '0600' - - - name: Template docker-compose file for Airflow worker - template: - src: ../airflow/configs/docker-compose-dl.yaml.j2 - dest: "{{ airflow_worker_dir }}/configs/docker-compose-dl.yaml" - mode: '0644' - - - name: Build Airflow worker image from local Dockerfile - community.docker.docker_image: - name: "{{ airflow_image_name }}" - build: - path: "{{ airflow_worker_dir }}" - dockerfile: "Dockerfile" - source: build - force_source: true - when: not fast_deploy | default(false) - - - name: Build Camoufox image from local Dockerfile - community.docker.docker_image: - name: "camoufox:latest" - build: - path: "{{ airflow_worker_dir }}/camoufox" - source: build - force_source: true - when: not fast_deploy | default(false) - - - name: Pull ytdlp-ops-service image only - community.docker.docker_image: - name: "{{ ytdlp_ops_image }}" - source: pull - when: not fast_deploy | default(false) - - - name: Generate dynamic configs (camoufox + envoy) - shell: - cmd: "docker compose -f configs/docker-compose.config-generate.yaml run --rm config-generator" - chdir: "{{ airflow_worker_dir }}" - - - name: Start worker services - community.docker.docker_compose_v2: - project_src: "{{ airflow_worker_dir }}" - files: - - configs/docker-compose-dl.yaml - - configs/docker-compose-ytdlp-ops.yaml - - configs/docker-compose.camoufox.yaml - state: present - remove_orphans: true - pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" diff --git a/ansible/playbook-full-with-proxies.yml b/ansible/playbook-full-with-proxies.yml new file mode 100644 index 0000000..de6555a --- /dev/null +++ b/ansible/playbook-full-with-proxies.yml @@ -0,0 +1,6 @@ +--- +- name: Deploy entire cluster with proxies + import_playbook: playbook-proxies.yml + +- name: Deploy application stack + import_playbook: playbook-full.yml diff --git a/ansible/playbook-full.yml b/ansible/playbook-full.yml index 639d46c..60b119c 100644 --- a/ansible/playbook-full.yml +++ b/ansible/playbook-full.yml @@ -11,13 +11,96 @@ msg: "🚀 FAST DEPLOY MODE ENABLED: Skipping Docker image builds and pulls. 
🚀" when: fast_deploy | default(false) run_once: true - tasks: - - name: Ensure python3-docker is installed - ansible.builtin.apt: - name: python3-docker - state: present - update_cache: yes + + - name: Check if Docker is already installed + ansible.builtin.stat: + path: /usr/bin/docker + register: docker_binary + + - name: Install Docker if not present + block: + - name: Add Docker's official GPG key + ansible.builtin.apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + state: present + + - name: Find and remove any existing Docker repository files to avoid conflicts + block: + - name: Find legacy docker repository files + ansible.builtin.find: + paths: /etc/apt/sources.list.d/ + patterns: '*.list' + contains: 'deb .*download.docker.com' + register: legacy_docker_repo_files + + - name: Remove legacy docker repository files + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ legacy_docker_repo_files.files }}" + + - name: Set up the Docker repository + ansible.builtin.apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename }} stable" + state: present + + - name: Install prerequisites for Docker + ansible.builtin.apt: + name: + - apt-transport-https + - ca-certificates + - curl + - software-properties-common + - vim + - python3-pip + - iputils-ping + state: present + update_cache: yes + + - name: Install Docker Engine and Docker Compose + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + - python3-docker + state: present + update_cache: yes + when: not docker_binary.stat.exists become: yes + tasks: + + - name: Install pipx + ansible.builtin.apt: + name: pipx + state: present + become: yes + + - name: Install Glances for system monitoring + ansible.builtin.command: pipx install glances[all] + args: + creates: "{{ ansible_env.HOME }}/.local/bin/glances" + become: yes + become_user: "{{ ansible_user }}" + + - name: Ensure Docker service is started and enabled + ansible.builtin.service: + name: docker + state: started + enabled: yes + become: yes + + - name: Add deploy user to the docker group + ansible.builtin.user: + name: "{{ ansible_user }}" + groups: docker + append: yes + become: yes + + - name: Reset SSH connection to apply group changes + ansible.builtin.meta: reset_connection + - name: Ensure shared Docker network exists community.docker.docker_network: diff --git a/ansible/playbook-master.yml b/ansible/playbook-master.yml index 5409633..cae26f9 100644 --- a/ansible/playbook-master.yml +++ b/ansible/playbook-master.yml @@ -10,6 +10,55 @@ debug: msg: "Starting deployment for Airflow Master: {{ inventory_hostname }} ({{ ansible_host }})" + - name: Configure Redis memory overcommit setting + copy: + src: "configs/etc/sysctl.d/99-redis-overcommit.conf" + dest: "/etc/sysctl.d/99-redis-overcommit.conf" + owner: root + group: root + mode: '0644' + become: yes + register: redis_sysctl_config_copy + + - name: Configure system limits + copy: + src: "configs/etc/sysctl.d/99-system-limits.conf" + dest: "/etc/sysctl.d/99-system-limits.conf" + owner: root + group: root + mode: '0644' + become: yes + register: limits_sysctl_config_copy + + - name: Apply sysctl settings for Redis + command: sysctl --system + become: yes + when: redis_sysctl_config_copy.changed + + - name: Apply sysctl settings for system limits + command: sysctl --system + become: yes + when: limits_sysctl_config_copy.changed + + - name: Configure system timezone + # Ensures all services and 
logs on this node use a consistent timezone. + community.general.timezone: + name: "{{ host_timezone }}" + become: yes + + - name: Install NTP for time synchronization + ansible.builtin.apt: + name: ntp + state: present + become: yes + + - name: Ensure NTP service is started and enabled + ansible.builtin.service: + name: ntp + state: started + enabled: yes + become: yes + - name: Set deploy_group to a valid single group name set_fact: deploy_group: "ytdl" @@ -22,7 +71,7 @@ - name: Ensure deploy user exists user: - name: "{{ ssh_user }}" + name: "{{ ansible_user }}" group: "{{ deploy_group }}" state: present become: yes @@ -95,6 +144,29 @@ deploy_group_gid: "0" when: deploy_group_gid is not defined or deploy_group_gid == "" + tasks: + - name: Install pipx + ansible.builtin.apt: + name: pipx + state: present + become: yes + + - name: Install Glances for system monitoring + ansible.builtin.command: pipx install glances[all] + args: + creates: "{{ ansible_env.HOME }}/.local/bin/glances" + become: yes + become_user: "{{ ansible_user }}" + + # Include Docker health check + - name: Include Docker health check tasks + include_tasks: tasks/docker_health_check.yml + roles: - ytdlp-master - airflow-master + + post_tasks: + - name: Include camoufox verification tasks + include_tasks: tasks/verify_camoufox.yml + when: not fast_deploy | default(false) diff --git a/ansible/playbook-proxies.yml b/ansible/playbook-proxies.yml new file mode 100644 index 0000000..794fcf0 --- /dev/null +++ b/ansible/playbook-proxies.yml @@ -0,0 +1,151 @@ +--- +- name: Deploy Shadowsocks-Rust Proxy Configurations + hosts: all + gather_facts: yes + pre_tasks: + - name: Check if Docker is already installed + ansible.builtin.stat: + path: /usr/bin/docker + register: docker_binary + + - name: Install Docker if not present + block: + - name: Add Docker's official GPG key + ansible.builtin.apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + state: present + + - name: Find and remove any existing Docker repository files to avoid conflicts + block: + - name: Find legacy docker repository files + ansible.builtin.find: + paths: /etc/apt/sources.list.d/ + patterns: '*.list' + contains: 'deb .*download.docker.com' + register: legacy_docker_repo_files + + - name: Remove legacy docker repository files + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ legacy_docker_repo_files.files }}" + + - name: Set up the Docker repository + ansible.builtin.apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename }} stable" + state: present + + - name: Install prerequisites for Docker + ansible.builtin.apt: + name: + - apt-transport-https + - ca-certificates + - curl + - software-properties-common + - vim + - python3-pip + state: present + update_cache: yes + + - name: Install Docker Engine and Docker Compose + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + - python3-docker + state: present + update_cache: yes + when: not docker_binary.stat.exists + become: yes + + - name: Ensure Docker service is started and enabled + ansible.builtin.service: + name: docker + state: started + enabled: yes + become: yes + + - name: Add deploy user to the docker group + ansible.builtin.user: + name: "{{ ansible_user }}" + groups: docker + append: yes + become: yes + + - name: Reset SSH connection to apply group changes + ansible.builtin.meta: reset_connection + + - name: Ensure shared Docker network exists + 
community.docker.docker_network: + name: "{{ docker_network_name }}" + driver: bridge + become: yes + tasks: + - name: Deploy Shadowsocks-Rust proxy services + block: + - name: Ensure base directory for shadowsocks exists + ansible.builtin.file: + path: "/srv/shadowsocks-rust" + state: directory + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0755' + + - name: Create individual proxy config directories + ansible.builtin.file: + path: "/srv/shadowsocks-rust/config_ssp_{{ item.value.local_port }}" + state: directory + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: "{{ shadowsocks_proxies | dict2items }}" + + - name: Create Shadowsocks-Rust proxy configuration files + ansible.builtin.copy: + content: | + { + "server": "{{ item.value.server }}", + "server_port": {{ item.value.server_port }}, + "password": "{{ lookup('vars', item.value.vault_password_key) }}", + "local_address": "{{ shadowsocks_local_address }}", + "local_port": {{ item.value.local_port }}, + "timeout": {{ shadowsocks_timeout }}, + "method": "{{ shadowsocks_cipher_method }}", + "fast_open": {{ shadowsocks_fast_open | to_json }}, + "mode": "{{ shadowsocks_mode }}" + } + dest: "/srv/shadowsocks-rust/config_ssp_{{ item.value.local_port }}/config.json" + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0644' + loop: "{{ shadowsocks_proxies | dict2items }}" + + - name: Create docker-compose.yml for Shadowsocks-Rust proxies + ansible.builtin.template: + src: templates/shadowsocks-compose.yml.j2 + dest: /srv/shadowsocks-rust/docker-compose.yml + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0644' + + - name: Ensure old docker-compose.yaml file is removed to avoid conflicts + ansible.builtin.file: + path: /srv/shadowsocks-rust/docker-compose.yaml + state: absent + + - name: Stop and remove any existing Shadowsocks-Rust proxy services + community.docker.docker_compose_v2: + project_src: "/srv/shadowsocks-rust" + state: absent + + - name: Start Shadowsocks-Rust proxy services + community.docker.docker_compose_v2: + project_src: "/srv/shadowsocks-rust" + state: present + remove_orphans: true + recreate: always + pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" + when: shadowsocks_proxies is defined and shadowsocks_proxies | length > 0 + become: yes diff --git a/ansible/playbook-worker.yml b/ansible/playbook-worker.yml index f940a16..0dc3092 100644 --- a/ansible/playbook-worker.yml +++ b/ansible/playbook-worker.yml @@ -10,6 +10,25 @@ debug: msg: "Starting deployment for Airflow Worker: {{ inventory_hostname }} ({{ ansible_host }})" + - name: Configure system timezone + # Ensures all services and logs on this node use a consistent timezone. 
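+    # Aside (not from the original playbook): community.general.timezone ships
+    # with the community.general collection; if the control node lacks it,
+    # install it once with:
+    #   ansible-galaxy collection install community.general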
+ community.general.timezone: + name: "{{ host_timezone }}" + become: yes + + - name: Install NTP for time synchronization + ansible.builtin.apt: + name: ntp + state: present + become: yes + + - name: Ensure NTP service is started and enabled + ansible.builtin.service: + name: ntp + state: started + enabled: yes + become: yes + - name: Set deploy_group to a valid single group name set_fact: deploy_group: "ytdl" @@ -22,7 +41,7 @@ - name: Ensure deploy user exists user: - name: "{{ ssh_user }}" + name: "{{ ansible_user }}" group: "{{ deploy_group }}" state: present become: yes @@ -95,6 +114,44 @@ deploy_group_gid: "0" when: deploy_group_gid is not defined or deploy_group_gid == "" + - name: Configure system limits + copy: + src: "configs/etc/sysctl.d/99-system-limits.conf" + dest: "/etc/sysctl.d/99-system-limits.conf" + owner: root + group: root + mode: '0644' + become: yes + register: limits_sysctl_config_copy + + - name: Apply sysctl settings for system limits + command: sysctl --system + become: yes + when: limits_sysctl_config_copy.changed + + tasks: + - name: Install pipx + ansible.builtin.apt: + name: pipx + state: present + become: yes + + - name: Install Glances for system monitoring + ansible.builtin.command: pipx install glances[all] + args: + creates: "{{ ansible_env.HOME }}/.local/bin/glances" + become: yes + become_user: "{{ ansible_user }}" + + # Include Docker health check + - name: Include Docker health check tasks + include_tasks: tasks/docker_health_check.yml + roles: - - airflow-worker - ytdlp-worker + - airflow-worker + + post_tasks: + - name: Include camoufox verification tasks + include_tasks: tasks/verify_camoufox.yml + when: not fast_deploy | default(false) diff --git a/ansible/roles/airflow-master/tasks/main.yml b/ansible/roles/airflow-master/tasks/main.yml index 71193b7..324456c 100644 --- a/ansible/roles/airflow-master/tasks/main.yml +++ b/ansible/roles/airflow-master/tasks/main.yml @@ -32,6 +32,22 @@ mode: '0755' become: yes +- name: Ensure Airflow operational directories exist with correct permissions + file: + path: "{{ airflow_master_dir }}/{{ item }}" + state: directory + owner: "{{ airflow_uid }}" + group: "{{ deploy_group }}" + mode: '0775' + become: yes + loop: + - "dags" + - "logs" + - "plugins" + - "downloadfiles" + - "addfiles" + - "inputfiles" + - name: Check if source directories exist stat: path: "../{{ item }}" @@ -51,6 +67,7 @@ dest: "{{ airflow_master_dir }}/" archive: yes recursive: yes + delete: yes rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" loop: @@ -66,7 +83,6 @@ - "airflow/update-yt-dlp.sh" - "get_info_json_client.py" - "proxy_manager_client.py" - - "token_generator" - "utils" - name: Copy custom Python config files to master @@ -81,6 +97,13 @@ - "custom_task_hooks.py" - "airflow_local_settings.py" +- name: Ensure any existing airflow.cfg directory is removed + file: + path: "{{ airflow_master_dir }}/config/airflow.cfg" + state: absent + become: yes + ignore_errors: yes + - name: Copy airflow.cfg to master copy: src: "../airflow/airflow.cfg" @@ -188,26 +211,18 @@ recurse: yes become: yes -- name: Ensure logs directory exists on master - file: - path: "{{ airflow_master_dir }}/logs" - state: directory - owner: "{{ airflow_uid }}" - group: "{{ deploy_group }}" - mode: '0775' - become: yes - - name: Ensure postgres-data directory exists on master and has correct permissions file: path: "{{ airflow_master_dir }}/postgres-data" state: directory - owner: "{{ airflow_uid }}" - group: "{{ deploy_group }}" - mode: '0775' + owner: 
"999" # UID for the 'postgres' user in the official postgres image + group: "999" # GID for the 'postgres' group in the official postgres image + mode: '0700' become: yes -- name: Set group-writable and setgid permissions on master logs directory contents +- name: Set proper ownership and permissions on master logs directory contents shell: | + chown -R {{ airflow_uid }}:{{ deploy_group }} {{ airflow_master_dir }}/logs find {{ airflow_master_dir }}/logs -type d -exec chmod g+rws {} + find {{ airflow_master_dir }}/logs -type f -exec chmod g+rw {} + become: yes @@ -236,6 +251,59 @@ force_source: true when: not fast_deploy | default(false) +- name: "Log: Preparing assets for Caddy image" + debug: + msg: "Extracting static assets from the Airflow image to build the Caddy reverse proxy." + when: not fast_deploy | default(false) + +- name: Prepare Caddy asset extraction directory + file: + path: "{{ airflow_master_dir }}/caddy_build_assets" + state: "{{ item }}" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: + - absent + - directory + become: yes + when: not fast_deploy | default(false) + +- name: Ensure subdirectories exist with correct permissions + file: + path: "{{ airflow_master_dir }}/caddy_build_assets/{{ item }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: + - "appbuilder" + - "dist" + become: yes + when: not fast_deploy | default(false) + +- name: Extract static assets from Airflow image for Caddy build + shell: | + set -e + CONTAINER_ID=$(docker create {{ airflow_image_name }}) + # Dynamically find paths inside the container + APPBUILDER_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static", "appbuilder"))') + AIRFLOW_DIST_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))') + # Copy assets from container to host + docker cp "${CONTAINER_ID}:${APPBUILDER_PATH}/." "./caddy_build_assets/appbuilder" + docker cp "${CONTAINER_ID}:${AIRFLOW_DIST_PATH}/." "./caddy_build_assets/dist" + docker rm -f $CONTAINER_ID + # Pre-compress assets + find ./caddy_build_assets/appbuilder -type f -print0 | xargs -0 gzip -k -9 + find ./caddy_build_assets/dist -type f -print0 | xargs -0 gzip -k -9 + args: + chdir: "{{ airflow_master_dir }}" + executable: /bin/bash + become: yes + register: asset_extraction + changed_when: asset_extraction.rc == 0 + when: not fast_deploy | default(false) + - name: "Log: Building Caddy reverse proxy image" debug: msg: "Building the Caddy image (pangramia/ytdlp-ops-caddy:latest) to serve static assets." diff --git a/ansible/roles/airflow-worker/tasks/main.yml b/ansible/roles/airflow-worker/tasks/main.yml index 2b8474a..020596a 100644 --- a/ansible/roles/airflow-worker/tasks/main.yml +++ b/ansible/roles/airflow-worker/tasks/main.yml @@ -32,6 +32,22 @@ mode: '0755' become: yes +- name: Ensure Airflow operational directories exist with correct permissions + file: + path: "{{ airflow_worker_dir }}/{{ item }}" + state: directory + owner: "{{ airflow_uid }}" + group: "{{ deploy_group }}" + mode: '0775' + become: yes + loop: + - "dags" + - "logs" + - "plugins" + - "downloadfiles" + - "addfiles" + - "inputfiles" + - name: "Log: Syncing Airflow core files" debug: msg: "Syncing DAGs, configs, and Python source code to the worker node." 
@@ -42,6 +58,7 @@ dest: "{{ airflow_worker_dir }}/" archive: yes recursive: yes + delete: yes rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" loop: @@ -56,7 +73,6 @@ - "airflow/update-yt-dlp.sh" - "get_info_json_client.py" - "proxy_manager_client.py" - - "token_generator" - "utils" - name: Copy custom Python config files to worker @@ -184,17 +200,9 @@ recurse: yes become: yes -- name: Ensure logs directory exists on worker - file: - path: "{{ airflow_worker_dir }}/logs" - state: directory - owner: "{{ airflow_uid }}" - group: "{{ deploy_group }}" - mode: '0775' - become: yes - -- name: Set group-writable and setgid permissions on worker logs directory contents +- name: Set proper ownership and permissions on worker logs directory contents shell: | + chown -R {{ airflow_uid }}:{{ deploy_group }} {{ airflow_worker_dir }}/logs find {{ airflow_worker_dir }}/logs -type d -exec chmod g+rws {} + find {{ airflow_worker_dir }}/logs -type f -exec chmod g+rw {} + become: yes diff --git a/ansible/roles/fail2ban/handlers/main.yml b/ansible/roles/fail2ban/handlers/main.yml new file mode 100644 index 0000000..6bd3b61 --- /dev/null +++ b/ansible/roles/fail2ban/handlers/main.yml @@ -0,0 +1,6 @@ +--- +- name: Restart fail2ban + ansible.builtin.service: + name: fail2ban + state: restarted + become: yes diff --git a/ansible/roles/fail2ban/tasks/main.yml b/ansible/roles/fail2ban/tasks/main.yml new file mode 100644 index 0000000..fd2b4fc --- /dev/null +++ b/ansible/roles/fail2ban/tasks/main.yml @@ -0,0 +1,24 @@ +--- +- name: Install fail2ban + ansible.builtin.apt: + name: fail2ban + state: present + update_cache: yes + become: yes + +- name: Template fail2ban jail.local configuration + ansible.builtin.template: + src: jail.local.j2 + dest: /etc/fail2ban/jail.local + owner: root + group: root + mode: '0644' + become: yes + notify: Restart fail2ban + +- name: Ensure fail2ban service is started and enabled + ansible.builtin.service: + name: fail2ban + state: started + enabled: yes + become: yes diff --git a/ansible/roles/fail2ban/templates/jail.local.j2 b/ansible/roles/fail2ban/templates/jail.local.j2 new file mode 100644 index 0000000..feee86e --- /dev/null +++ b/ansible/roles/fail2ban/templates/jail.local.j2 @@ -0,0 +1,16 @@ +# This file is managed by Ansible. Do not edit manually. 
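+#
+# With backend = systemd (set below), fail2ban reads sshd events from the
+# journal, so the logpath setting is effectively ignored on systemd hosts.
+# Example check after deployment (run on the target host):
+#   sudo fail2ban-client status sshd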
+ +[DEFAULT] +ignoreip = 127.0.0.1/8 ::1 +bantime = 86400 +findtime = 600 +maxretry = 3 +banaction = iptables-multiport +backend = systemd + +[sshd] +enabled = true +port = ssh +logpath = /var/log/auth.log +maxretry = 3 +bantime = 86400 diff --git a/ansible/roles/shadowsocks-deploy/tasks/main.yml b/ansible/roles/shadowsocks-deploy/tasks/main.yml new file mode 100644 index 0000000..180ebb2 --- /dev/null +++ b/ansible/roles/shadowsocks-deploy/tasks/main.yml @@ -0,0 +1,60 @@ +--- +- name: Set shadowsocks base directory fact + set_fact: + shadowsocks_dir: "/srv/shadowsocks-rust" + +- name: Ensure shadowsocks base directory exists + file: + path: "{{ shadowsocks_dir }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + become: yes + +- name: Ensure proxy configuration directories exist + file: + path: "{{ shadowsocks_dir }}/config_ssp_{{ item.value.local_port }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: "{{ shadowsocks_proxies | default({}) | dict2items }}" + loop_control: + label: "{{ item.key }}" + become: yes + when: shadowsocks_proxies is defined + +- name: Template proxy configuration files + template: + src: "config.json.j2" + dest: "{{ shadowsocks_dir }}/config_ssp_{{ item.value.local_port }}/config.json" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0644' + loop: "{{ shadowsocks_proxies | default({}) | dict2items }}" + loop_control: + label: "{{ item.key }}" + become: yes + when: shadowsocks_proxies is defined + +- name: Template docker-compose file for proxies + template: + src: "docker-compose.proxies.yaml.j2" + dest: "{{ shadowsocks_dir }}/docker-compose.proxies.yaml" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0644' + become: yes + when: shadowsocks_proxies is defined + +- name: Create symlink for docker-compose.yaml + file: + src: "{{ shadowsocks_dir }}/docker-compose.proxies.yaml" + dest: "{{ shadowsocks_dir }}/docker-compose.yaml" + state: link + force: yes + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + when: shadowsocks_proxies is defined diff --git a/ansible/roles/shadowsocks-deploy/templates/config.json.j2 b/ansible/roles/shadowsocks-deploy/templates/config.json.j2 new file mode 100644 index 0000000..846b84d --- /dev/null +++ b/ansible/roles/shadowsocks-deploy/templates/config.json.j2 @@ -0,0 +1,11 @@ +{ + "server": "{{ item.value.server }}", + "server_port": {{ item.value.server_port }}, + "password": "{{ lookup('vars', item.value.vault_password_key) }}", + "local_address": "0.0.0.0", + "local_port": {{ item.value.local_port }}, + "timeout": 20, + "method": "aes-256-gcm", + "fast_open": true, + "mode": "tcp_and_udp" +} diff --git a/ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2 b/ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2 new file mode 100644 index 0000000..ca51296 --- /dev/null +++ b/ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2 @@ -0,0 +1,22 @@ +# This file is managed by Ansible. 
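+# Each entry in shadowsocks_proxies below becomes one sslocal container.
+# The host publishes each local_port on 127.0.0.1 only; other containers
+# reach the proxy over the shared airflow_proxynet network by service name,
+# e.g. socks5://sslocal-rust-1087:1087.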
+name: "shadowsocks-proxies" +services: +{% for name, config in shadowsocks_proxies.items() %} + {{ name }}: + image: ghcr.io/shadowsocks/sslocal-rust:v1.22.0 + container_name: {{ name }} + restart: always + ports: + - "127.0.0.1:{{ config.local_port }}:{{ config.local_port }}/tcp" + - "127.0.0.1:{{ config.local_port }}:{{ config.local_port }}/udp" + volumes: + - /srv/shadowsocks-rust/config_ssp_{{ config.local_port }}/config.json:/etc/shadowsocks-rust/config.json:ro + networks: + - default + - airflow_proxynet +{% endfor %} + +networks: + airflow_proxynet: + name: airflow_proxynet + external: true diff --git a/ansible/roles/ytdlp-master/tasks/main.yml b/ansible/roles/ytdlp-master/tasks/main.yml index c93d07b..04d8d8b 100644 --- a/ansible/roles/ytdlp-master/tasks/main.yml +++ b/ansible/roles/ytdlp-master/tasks/main.yml @@ -60,6 +60,16 @@ service_role: "management" server_identity: "ytdlp-ops-service-mgmt" +- name: Create symlink for .env in configs directory for manual docker-compose commands + file: + src: "../.env" + dest: "{{ airflow_master_dir }}/configs/.env" + state: link + force: yes + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + - name: Template docker-compose file for YT-DLP master service template: src: "../airflow/configs/docker-compose-ytdlp-ops.yaml.j2" @@ -81,19 +91,22 @@ state: absent become: yes +- name: Create placeholder envoy.yaml to prevent Docker from creating a directory + file: + path: "{{ airflow_master_dir }}/envoy.yaml" + state: touch + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0664' + become: yes + - name: Generate YT-DLP service configurations shell: - cmd: "docker compose --project-directory . --env-file .env -f configs/docker-compose.config-generate.yaml run --rm config-generator" + cmd: "docker compose --project-directory {{ airflow_master_dir }} -f configs/docker-compose.config-generate.yaml run --rm config-generator" chdir: "{{ airflow_master_dir }}" become: yes become_user: "{{ ssh_user }}" -- name: Pull YT-DLP service image - community.docker.docker_image: - name: "{{ ytdlp_ops_image }}" - source: pull - when: not fast_deploy | default(false) - - name: Ensure correct permissions for build context after generation file: path: "{{ airflow_master_dir }}" @@ -117,10 +130,21 @@ group: "{{ deploy_group }}" become: yes +- name: Check for shadowsocks-rust proxy compose file + stat: + path: "/srv/shadowsocks-rust/docker-compose.proxies.yaml" + register: proxy_compose_file + - name: "Log: Starting YT-DLP management service" debug: msg: "Starting the YT-DLP management service on the master node. This service handles account and proxy management." 
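+# The login below runs only when a Docker Hub token is present in the vault
+# (vault_dockerhub_password). To set or rotate one, for example:
+#   ansible-vault edit group_vars/all/vault.yml
+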
+- name: Log in to Docker Hub to pull private images + community.docker.docker_login: + username: "{{ dockerhub_user }}" + password: "{{ vault_dockerhub_password }}" + when: vault_dockerhub_password is defined and vault_dockerhub_password | length > 0 + - name: Start YT-DLP master service community.docker.docker_compose_v2: project_src: "{{ airflow_master_dir }}" diff --git a/ansible/roles/ytdlp-worker/tasks/main.yml b/ansible/roles/ytdlp-worker/tasks/main.yml index fb64f63..9a0cfaa 100644 --- a/ansible/roles/ytdlp-worker/tasks/main.yml +++ b/ansible/roles/ytdlp-worker/tasks/main.yml @@ -72,24 +72,52 @@ service_role: "worker" server_identity: "ytdlp-ops-service-worker-{{ inventory_hostname }}" +- name: Create symlink for .env in configs directory for manual docker-compose commands + file: + src: "../.env" + dest: "{{ airflow_worker_dir }}/configs/.env" + state: link + force: yes + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + +- name: Log in to Docker Hub to pull private images + community.docker.docker_login: + username: "{{ dockerhub_user }}" + password: "{{ vault_dockerhub_password }}" + when: vault_dockerhub_password is defined and vault_dockerhub_password | length > 0 - name: "Log: Generating YT-DLP service configurations" debug: msg: "Running the configuration generator script inside a temporary Docker container. This creates docker-compose, envoy, and camoufox files based on .env variables." +- name: Ensure previously generated config files are removed before generation + file: + path: "{{ item }}" + state: absent + loop: + - "{{ airflow_worker_dir }}/envoy.yaml" + - "{{ airflow_worker_dir }}/configs/docker-compose.camoufox.yaml" + - "{{ airflow_worker_dir }}/configs/camoufox_endpoints.json" + become: yes + +- name: Create placeholder envoy.yaml to prevent Docker from creating a directory + file: + path: "{{ airflow_worker_dir }}/envoy.yaml" + state: touch + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0664' + become: yes + - name: Generate YT-DLP service configurations shell: - cmd: "docker compose --project-directory . --env-file .env -f configs/docker-compose.config-generate.yaml run --rm config-generator" + cmd: "docker compose --project-directory {{ airflow_worker_dir }} -f configs/docker-compose.config-generate.yaml run --rm config-generator" chdir: "{{ airflow_worker_dir }}" become: yes become_user: "{{ ssh_user }}" -- name: Pull YT-DLP service image - community.docker.docker_image: - name: "{{ ytdlp_ops_image }}" - source: pull - when: not fast_deploy | default(false) - - name: "Log: Building Camoufox (remote browser) image" debug: msg: "Building the Camoufox image locally. This image provides remote-controlled Firefox browsers for token generation." @@ -112,6 +140,11 @@ recurse: yes become: yes +- name: Check for shadowsocks-rust proxy compose file + stat: + path: "/srv/shadowsocks-rust/docker-compose.proxies.yaml" + register: proxy_compose_file + - name: "Log: Starting YT-DLP worker services" debug: msg: "Starting the core YT-DLP worker services: ytdlp-ops-service (Thrift API), envoy (load balancer), and camoufox (remote browsers)." 
@@ -121,7 +154,6 @@ project_src: "{{ airflow_worker_dir }}" files: - "configs/docker-compose-ytdlp-ops.yaml" - - "configs/docker-compose.camoufox.yaml" state: present remove_orphans: true pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" diff --git a/ansible/scripts/verify_camoufox_services.py b/ansible/scripts/verify_camoufox_services.py new file mode 100644 index 0000000..4eade42 --- /dev/null +++ b/ansible/scripts/verify_camoufox_services.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python3 +""" +Script to verify that all camoufox services are running and accessible. +This script should be run after deployment to ensure the cluster is healthy. +""" + +import subprocess +import sys +import json +import time +import logging +from typing import List, Dict, Tuple + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + +def run_docker_command(cmd: List[str]) -> Tuple[int, str, str]: + """Run a docker command and return (returncode, stdout, stderr)""" + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=30) + return result.returncode, result.stdout.strip(), result.stderr.strip() + except subprocess.TimeoutExpired: + logger.error(f"Command timed out: {' '.join(cmd)}") + return 1, "", "Command timed out" + except Exception as e: + logger.error(f"Error running command: {' '.join(cmd)} - {e}") + return 1, "", str(e) + +def get_docker_compose_services(project_path: str) -> List[Dict]: + """Get list of services from docker-compose""" + # Try different ways to get services since the project naming might vary + possible_commands = [ + ["docker", "compose", "-p", "ytdlp-ops", "ps", "--format", "json"], + ["docker", "compose", "-p", "ytdlp-ops-camoufox", "ps", "--format", "json"], + ["docker", "compose", "--project-directory", project_path, "ps", "--format", "json"], + ["docker", "compose", "ps", "--format", "json"] + ] + + for cmd in possible_commands: + returncode, stdout, stderr = run_docker_command(cmd) + if returncode == 0 and stdout: + try: + # Handle both single JSON object and JSON array + if stdout.startswith('['): + services = json.loads(stdout) + else: + # Multiple JSON objects, one per line + services = [] + for line in stdout.split('\n'): + if line.strip(): + services.append(json.loads(line)) + if services: + return services + except json.JSONDecodeError as e: + logger.debug(f"Failed to parse docker-compose output with command {' '.join(cmd)}: {e}") + continue + + # If all commands failed, try to get all containers and filter for camoufox + logger.info("Falling back to direct container inspection") + returncode, stdout, stderr = run_docker_command(["docker", "ps", "--format", "json"]) + if returncode == 0 and stdout: + try: + containers = [] + for line in stdout.split('\n'): + if line.strip(): + containers.append(json.loads(line)) + + # Filter for camoufox containers + camoufox_containers = [c for c in containers if 'camoufox' in c.get('Names', '')] + return camoufox_containers + except json.JSONDecodeError: + pass + + logger.error("Failed to get docker-compose services with all methods") + return [] + +def check_service_health(service_name: str, port: int = 12345) -> bool: + """Check if a service is responding on its expected port""" + # For camoufox services, we can check if they're running and have network connectivity + # Since they're WebSocket services, we'll just verify they're running for now + cmd = ["docker", "inspect", service_name] + 
returncode, stdout, stderr = run_docker_command(cmd) + + if returncode != 0: + logger.error(f"Failed to inspect service {service_name}: {stderr}") + return False + + try: + service_info = json.loads(stdout) + if service_info and len(service_info) > 0: + state = service_info[0].get('State', {}) + running = state.get('Running', False) + health = state.get('Health', {}).get('Status', 'unknown') + + if running: + logger.info(f"Service {service_name} is running (health: {health})") + return True + else: + logger.error(f"Service {service_name} is not running") + return False + except json.JSONDecodeError as e: + logger.error(f"Failed to parse docker inspect output for {service_name}: {e}") + return False + +def verify_camoufox_services(project_path: str = "/srv/airflow_dl_worker") -> bool: + """Main function to verify all camoufox services""" + logger.info("Starting camoufox service verification...") + + # Get all services + services = get_docker_compose_services(project_path) + + if not services: + logger.warning("No services found through docker-compose. Checking for running camoufox containers directly...") + # Try to find camoufox containers directly + cmd = ["docker", "ps", "--filter", "name=camoufox", "--format", "json"] + returncode, stdout, stderr = run_docker_command(cmd) + + if returncode == 0 and stdout: + try: + camoufox_containers = [] + for line in stdout.split('\n'): + if line.strip(): + camoufox_containers.append(json.loads(line)) + services = camoufox_containers + except json.JSONDecodeError: + services = [] + + if not services: + logger.error("No camoufox services or containers found.") + # Check if we're on a worker node by looking for camoufox config + import os + if os.path.exists(f"{project_path}/configs/docker-compose.camoufox.yaml"): + logger.info("Camoufox config exists but no services running. This might indicate a startup issue.") + return False + else: + logger.info("No camoufox config found. 
This might be a master node.") + return True + + logger.info(f"Found {len(services)} camoufox service(s) or container(s)") + + # Check each service + all_healthy = True + camoufox_services_found = 0 + + for service in services: + # Different docker output formats have different field names + service_name = (service.get('Name') or + service.get('Names') or + service.get('name') or + service.get('Service', 'unknown')) + + # If we're dealing with container output, Names might be a string + if isinstance(service_name, str): + service_names = [service_name] + else: + service_names = service_name if isinstance(service_name, list) else [str(service_name)] + + # Check if any of the service names contain 'camoufox' + is_camoufox_service = any('camoufox' in name.lower() for name in service_names) + + if not is_camoufox_service: + continue + + camoufox_services_found += 1 + logger.info(f"Checking service: {service_names[0] if service_names else 'unknown'}") + + # Use the first service name for health check + name_to_check = service_names[0] if service_names else 'unknown' + + # Check if service is running + if not check_service_health(name_to_check): + all_healthy = False + continue + + # Check service status from docker output + service_status = (service.get('State') or + service.get('status') or + service.get('Status') or + 'unknown') + service_health = (service.get('Health') or + service.get('health') or + 'unknown') + + logger.info(f"Service {name_to_check} - Status: {service_status}, Health: {service_health}") + + if service_status not in ['running', 'Running']: + logger.error(f"Service {name_to_check} is not running (status: {service_status})") + all_healthy = False + elif service_health not in ['healthy', 'unknown', '']: # unknown or empty is OK for services without healthcheck + logger.warning(f"Service {name_to_check} health is {service_health}") + + if camoufox_services_found == 0: + logger.warning("No camoufox services found in the service list") + return False + + logger.info(f"Successfully verified {camoufox_services_found} camoufox service(s)") + return all_healthy + +def main(): + """Main entry point""" + logger.info("Camoufox Service Verification Script") + logger.info("=" * 40) + + # Try to detect project path + import os + project_paths = [ + "/srv/airflow_dl_worker", # Worker node + "/srv/airflow_master", # Master node + "/app", # Container path + "." 
# Current directory + ] + + project_path = None + for path in project_paths: + if os.path.exists(path): + project_path = path + break + + if not project_path: + logger.error("Could not determine project path") + return 1 + + logger.info(f"Using project path: {project_path}") + + try: + success = verify_camoufox_services(project_path) + if success: + logger.info("✅ All camoufox services verification PASSED") + return 0 + else: + logger.error("❌ Camoufox services verification FAILED") + return 1 + except Exception as e: + logger.error(f"Unexpected error during verification: {e}", exc_info=True) + return 1 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/ansible/tasks/docker_health_check.yml b/ansible/tasks/docker_health_check.yml new file mode 100644 index 0000000..077eddd --- /dev/null +++ b/ansible/tasks/docker_health_check.yml @@ -0,0 +1,35 @@ +--- +- name: Check if Docker daemon is running + systemd: + name: docker + state: started + enabled: yes + become: yes + register: docker_service + +- name: Restart Docker if it was not running + systemd: + name: docker + state: restarted + become: yes + when: docker_service.changed + +- name: Wait for Docker to be ready + command: docker info + register: docker_info + until: docker_info.rc == 0 + retries: 10 + delay: 3 + become: yes + ignore_errors: yes + +- name: Check Docker networks + command: docker network ls + register: docker_networks + become: yes + +- name: Ensure airflow_proxynet network exists + docker_network: + name: airflow_proxynet + state: present + become: yes diff --git a/ansible/tasks/verify_camoufox.yml b/ansible/tasks/verify_camoufox.yml new file mode 100644 index 0000000..b574cf9 --- /dev/null +++ b/ansible/tasks/verify_camoufox.yml @@ -0,0 +1,38 @@ +--- +- name: Copy camoufox verification script to worker + copy: + src: scripts/verify_camoufox_services.py + dest: "{{ airflow_worker_dir }}/verify_camoufox_services.py" + mode: '0755' + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + when: inventory_hostname in groups['airflow_workers'] + +- name: Run camoufox service verification + command: python3 verify_camoufox_services.py + args: + chdir: "{{ airflow_worker_dir }}" + environment: + PATH: "{{ ansible_env.PATH }}:/usr/local/bin" + register: verification_result + become: yes + become_user: "{{ ssh_user }}" + when: inventory_hostname in groups['airflow_workers'] + ignore_errors: yes + +- name: Check verification results + debug: + msg: | + Camoufox verification {{ 'PASSED' if verification_result.rc == 0 else 'FAILED' }} + Output: {{ verification_result.stdout }} + Errors: {{ verification_result.stderr }} + when: inventory_hostname in groups['airflow_workers'] and verification_result is defined + +- name: Fail deployment if camoufox verification failed + fail: + msg: "Camoufox service verification failed. Check service status and network connectivity." + when: > + inventory_hostname in groups['airflow_workers'] and + verification_result is defined and + verification_result.rc != 0 diff --git a/ansible/templates/.env.j2 b/ansible/templates/.env.j2 index 768e3e7..91e117a 100644 --- a/ansible/templates/.env.j2 +++ b/ansible/templates/.env.j2 @@ -1,4 +1,7 @@ # This file is managed by Ansible. +# Set the timezone for all services to ensure consistency in logs. 
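+# NOTE: TZ is set statically here; it is expected to match the host_timezone
+# variable (Europe/Moscow) that the playbooks apply at the OS level.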
+TZ=Europe/Moscow + HOSTNAME="{{ inventory_hostname }}" SERVICE_ROLE={{ service_role }} {% if server_identity is defined %} @@ -18,8 +21,11 @@ REDIS_PORT={{ redis_port }} # Master-specific settings {% if 'master' in service_role or 'management' in service_role %} AIRFLOW_ADMIN_PASSWORD="{{ vault_airflow_admin_password }}" +FLOWER_PASSWORD="{{ vault_flower_password }}" AIRFLOW_VAR_MASTER_HOST_IP={{ hostvars[groups['airflow_master'][0]].ansible_host }} -MASTER_HOST_IP={{ hostvars[groups['airflow_master'][0]].ansible_host }} +# MASTER_HOST_IP is not needed on the master node itself for ytdlp-ops, +# as it connects to Redis via the internal Docker service name 'redis'. +# It is defined for workers to connect back to the master. # Camoufox is not used on master, but the config generator expects the variable. CAMOUFOX_PROXIES= @@ -27,6 +33,7 @@ CAMOUFOX_PROXIES= # Worker-specific settings {% if 'worker' in service_role %} +AIRFLOW_PROJ_DIR={{ airflow_worker_dir }} MASTER_HOST_IP={{ hostvars[groups['airflow_master'][0]].ansible_host }} # --- Envoy & Worker Configuration --- diff --git a/ansible/templates/shadowsocks-compose.yml.j2 b/ansible/templates/shadowsocks-compose.yml.j2 new file mode 100644 index 0000000..6ae640a --- /dev/null +++ b/ansible/templates/shadowsocks-compose.yml.j2 @@ -0,0 +1,21 @@ +name: "shadowsocks-proxies" +services: +{% for proxy_name, proxy_config in shadowsocks_proxies.items() %} + {{ proxy_name }}: + image: {{ shadowsocks_image }} + container_name: {{ proxy_name }} + restart: always + ports: + - "127.0.0.1:{{ proxy_config.local_port }}:{{ proxy_config.local_port }}/tcp" + - "127.0.0.1:{{ proxy_config.local_port }}:{{ proxy_config.local_port }}/udp" + volumes: + - ./config_ssp_{{ proxy_config.local_port }}/:/etc/shadowsocks-rust/:ro + networks: + - default + - {{ docker_network_name }} +{% endfor %} + +networks: + {{ docker_network_name }}: + name: {{ docker_network_name }} + external: true diff --git a/cluster.green.yml b/cluster.green.yml new file mode 100644 index 0000000..f9818c6 --- /dev/null +++ b/cluster.green.yml @@ -0,0 +1,105 @@ +global_vars: + # Docker image versions + ytdlp_ops_image: "pangramia/ytdlp-ops-server:latest" + airflow_image_name: "pangramia/ytdlp-ops-airflow:latest" + + # Default ports + postgres_port: 5432 + ytdlp_base_port: 9090 + envoy_port: 9080 + envoy_admin_port: 9901 + management_service_port: 9091 + camoufox_base_vnc_port: 5901 + + # Default UID + airflow_uid: 1003 + + # Default directories + airflow_master_dir: "/srv/airflow_master" + airflow_worker_dir: "/srv/airflow_dl_worker" + + # Docker network name + docker_network_name: "airflow_proxynet" + + # Default usernames + ssh_user: "alex_p" + ansible_user: "alex_p" + + # Default group + deploy_group: "ytdl" + + # Default file permissions + dir_permissions: "0755" + file_permissions: "0644" + + # Default rsync options + rsync_default_opts: + - "--no-owner" + - "--no-group" + - "--no-times" + - "--copy-links" + - "--copy-unsafe-links" + - "--exclude=.git*" + - "--exclude=__pycache__" + - "--exclude=*.pyc" + - "--exclude=*.log" + - "--exclude=.DS_Store" + + # Docker-Hub credentials + dockerhub_user: "pangramia" + + # Host timezone + host_timezone: "Europe/Moscow" + + # Shadowsocks cipher method + shadowsocks_cipher_method: "aes-256-gcm" + + # Shadowsocks image + shadowsocks_image: "ghcr.io/shadowsocks/sslocal-rust:v1.22.0" + + # Shadowsocks config options + shadowsocks_local_address: "0.0.0.0" + shadowsocks_timeout: 20 + shadowsocks_fast_open: true + shadowsocks_mode: "tcp_and_udp" + +# 
Global list of all available proxies to be deployed everywhere. +# The key (e.g., 'sslocal-rust-1087') becomes the service name. +shadowsocks_proxies: + + sslocal-rust-1087: + server: 91.103.252.51 + server_port: 8388 + local_port: 1087 + vault_password_key: vault_ss_password_1 + + sslocal-rust-1086: + server: 62.60.178.45 + server_port: 8388 + local_port: 1086 + vault_password_key: vault_ss_password_2 + + + sslocal-rust-1081: + server: 79.137.207.43 + server_port: 8388 + local_port: 1081 + vault_password_key: vault_ss_password_2 + +master: + af-green: + ip: 89.253.223.97 + port: 22 + proxies: + - "socks5://sslocal-rust-1087:1087" + +workers: + dl003: + ip: 62.60.245.103 + proxies: + - "socks5://sslocal-rust-1087:1087" + + dl001: + ip: 109.107.189.106 + proxies: + - "socks5://sslocal-rust-1087:1087" diff --git a/cluster.test.yml b/cluster.test.yml new file mode 100644 index 0000000..69db96a --- /dev/null +++ b/cluster.test.yml @@ -0,0 +1,101 @@ +global_vars: + # Docker image versions + ytdlp_ops_image: "pangramia/ytdlp-ops-server:latest" + airflow_image_name: "pangramia/ytdlp-ops-airflow:latest" + + # Default ports + postgres_port: 5432 + ytdlp_base_port: 9090 + envoy_port: 9080 + envoy_admin_port: 9901 + management_service_port: 9091 + camoufox_base_vnc_port: 5901 + + # Default UID + airflow_uid: 1003 + + # Default directories + airflow_master_dir: "/srv/airflow_master" + airflow_worker_dir: "/srv/airflow_dl_worker" + + # Docker network name + docker_network_name: "airflow_proxynet" + + # Default usernames + ssh_user: "alex_p" + ansible_user: "alex_p" + + # Default group + deploy_group: "ytdl" + + # Default file permissions + dir_permissions: "0755" + file_permissions: "0644" + + # Default rsync options + rsync_default_opts: + - "--no-owner" + - "--no-group" + - "--no-times" + - "--copy-links" + - "--copy-unsafe-links" + - "--exclude=.git*" + - "--exclude=__pycache__" + - "--exclude=*.pyc" + - "--exclude=*.log" + - "--exclude=.DS_Store" + + # Docker-Hub credentials + dockerhub_user: "pangramia" + + # Host timezone + host_timezone: "Europe/Moscow" + + # Shadowsocks cipher method + shadowsocks_cipher_method: "aes-256-gcm" + + # Shadowsocks image + shadowsocks_image: "ghcr.io/shadowsocks/sslocal-rust:v1.22.0" + + # Shadowsocks config options + shadowsocks_local_address: "0.0.0.0" + shadowsocks_timeout: 20 + shadowsocks_fast_open: true + shadowsocks_mode: "tcp_and_udp" + +# Global list of all available proxies to be deployed everywhere. +# The key (e.g., 'sslocal-rust-1087') becomes the service name. 
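+# Each proxy entry takes: server (IP), server_port, a host-unique local_port,
+# and vault_password_key naming a secret in group_vars/all/vault.yml.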
+shadowsocks_proxies: + + sslocal-rust-1087: + server: 91.103.252.51 + server_port: 8388 + local_port: 1087 + vault_password_key: vault_ss_password_1 + + sslocal-rust-1086: + server: 62.60.178.45 + server_port: 8388 + local_port: 1086 + vault_password_key: vault_ss_password_2 + + + sslocal-rust-1081: + server: 79.137.207.43 + server_port: 8388 + local_port: 1081 + vault_password_key: vault_ss_password_2 + +master: + af-test: + ip: 89.253.223.97 + port: 22 + proxies: + - "socks5://sslocal-rust-1086:1086" + - "socks5://sslocal-rust-1081:1081" +workers: + dl002: + ip: 62.60.178.54 + proxies: + - "socks5://sslocal-rust-1081:1081" + - "socks5://sslocal-rust-1086:1086" diff --git a/cluster.yml b/cluster.yml deleted file mode 100644 index b5bd126..0000000 --- a/cluster.yml +++ /dev/null @@ -1,8 +0,0 @@ -master: - af-green: 89.253.221.173 - -workers: - dl003: - ip: 62.60.245.103 - proxies: - - "socks5://sslocal-rust-1087:1087" diff --git a/get_info_json_client.py b/get_info_json_client.py new file mode 100644 index 0000000..ba4393c --- /dev/null +++ b/get_info_json_client.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python3 +""" +Client script to get info.json from the Thrift service. + +Usage: + python get_info_json_client.py URL [options] + +Options: + --host HOST Thrift server host + --port PORT Thrift server port + --profile NAME Profile name (accountId) to use + --client CLIENT Player client to use (e.g., web, ios, android) + --output FILE Output file path + --machine-id ID Client machine identifier (defaults to hostname) + --verbose Enable verbose output +""" + +import argparse +import json +import os +import sys +import logging +from typing import Dict, Any, Optional + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger('info_json_client') + +# Import Thrift modules +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +from thrift.transport import TTransport +from pangramia.yt.common.ttypes import TokenUpdateMode +from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException +from yt_ops_services.client_utils import get_thrift_client + +def parse_args(): + """Parse command line arguments""" + parser = argparse.ArgumentParser(description='Get info.json from Thrift service') + parser.add_argument('url', help='YouTube URL or video ID') + parser.add_argument('--host', default='127.0.0.1', help="Thrift server host. Using 127.0.0.1 avoids harmless connection errors when the local Envoy proxy only listens on IPv4.") + parser.add_argument('--port', type=int, default=9080, help='Thrift server port') + parser.add_argument('--profile', default='default_profile', help='The profile name (accountId) to use for the request.') + parser.add_argument('--client', help='Specific client to use (e.g., web, ios, android). Overrides server default.') + parser.add_argument('--output', help='Output file path for the info.json. If not provided, prints to stdout.') + parser.add_argument('--machine-id', help='Identifier for the client machine.
Defaults to hostname.') + parser.add_argument('--verbose', action='store_true', help='Enable verbose output') + return parser.parse_args() + +def main(): + """Main entry point""" + args = parse_args() + + # Set log level + if args.verbose: + logger.setLevel(logging.DEBUG) + + transport = None + try: + # Create Thrift client + client, transport = get_thrift_client(args.host, args.port) + + # Get token data, which includes the info.json + logger.info(f"Requesting info.json for URL '{args.url}' using profile '{args.profile}'") + + # Prepare arguments for the Thrift call + machine_id = args.machine_id + if not machine_id: + import socket + machine_id = socket.gethostname() + logger.info(f"No machine ID provided, using hostname: {machine_id}") + + thrift_args = { + 'accountId': args.profile, + 'updateType': TokenUpdateMode.AUTO, + 'url': args.url, + 'clients': args.client, + 'machineId': machine_id + } + if args.client: + logger.info(f"Requesting to use specific client: {args.client}") + else: + logger.info("No specific client requested, server will use its default.") + + token_data = client.getOrRefreshToken(**thrift_args) + + if not token_data or not hasattr(token_data, 'infoJson') or not token_data.infoJson: + logger.error("Server did not return valid info.json data.") + print("Error: Server did not return valid info.json data.", file=sys.stderr) + return 1 + + info_json_str = token_data.infoJson + + # Check if the returned info.json is an error report + try: + info_data = json.loads(info_json_str) + if isinstance(info_data, dict) and 'error' in info_data: + error_code = info_data.get('errorCode', 'N/A') + error_message = info_data.get('message', info_data.get('error', 'Unknown error')) + logger.error(f"Server returned an error in info.json (Code: {error_code}): {error_message}") + print(f"Error from server (Code: {error_code}): {error_message}", file=sys.stderr) + # Optionally print the full error JSON + if args.verbose: + print(json.dumps(info_data, indent=2), file=sys.stderr) + return 1 + except json.JSONDecodeError: + logger.error(f"Failed to parse info.json from server: {info_json_str[:200]}...") + print("Error: Failed to parse the info.json response from the server.", file=sys.stderr) + return 1 + + logger.info(f"Successfully retrieved info.json ({len(info_json_str)} bytes)") + + # Write to output file if specified, otherwise print to stdout + if args.output: + try: + with open(args.output, 'w', encoding='utf-8') as f: + # Pretty-print the JSON to the file + json.dump(info_data, f, indent=2) + logger.info(f"Wrote info.json to {args.output}") + print(f"Successfully saved info.json to {args.output}") + except IOError as e: + logger.error(f"Failed to write to output file {args.output}: {e}") + print(f"Error: Failed to write to output file {args.output}: {e}", file=sys.stderr) + return 1 + else: + # Pretty-print the JSON to stdout + print(json.dumps(info_data, indent=2)) + + return 0 + except (PBServiceException, PBUserException) as e: + logger.error(f"A Thrift error occurred: {e.message}", exc_info=args.verbose) + print(f"Error: {e.message}", file=sys.stderr) + if hasattr(e, 'context') and e.context: + print(f"Context: {e.context}", file=sys.stderr) + return 1 + except TTransport.TTransportException as e: + logger.error(f"Connection to server failed: {e}", exc_info=args.verbose) + print(f"Error: Connection to server at {args.host}:{args.port} failed.", file=sys.stderr) + return 1 + except Exception as e: + logger.exception(f"An unexpected error occurred: {e}") + print(f"An unexpected 
diff --git a/proxy_manager_client.py b/proxy_manager_client.py
new file mode 100644
index 0000000..19b57e0
--- /dev/null
+++ b/proxy_manager_client.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+"""
+Client script to manage proxies in the YTTokenOpService.
+
+This script allows you to list, ban, unban, and reset proxies that are managed
+by a ytdlp-ops-server instance via Redis.
+"""
+
+import argparse
+import sys
+import os
+import logging
+from pathlib import Path
+import datetime
+
+# Configure logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+logger = logging.getLogger(__name__)
+
+try:
+    from thrift.transport import TTransport
+    from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
+    from pangramia.yt.common.constants import ErrorCode
+    from tabulate import tabulate
+    from yt_ops_services.client_utils import get_thrift_client, format_timestamp
+except ImportError as e:
+    print(f"Error importing required modules: {e}")
+    print("Please ensure you have installed dependencies by running: pip install -e .")
+    sys.exit(1)
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Manage proxies for the YTDLP Operations Server.\n\n"
+                    "This script allows you to list, ban, unban, and reset proxies that are managed\n"
+                    "by a ytdlp-ops-server instance via Redis. It provides a command-line interface\n"
+                    "to interact with the proxy management features of the server.",
+        epilog="Usage examples:\n"
+               "  # List statuses for a specific server identity\n"
+               "  python proxy_manager_client.py list --server-identity ytdlp-ops-airflow-service\n\n"
+               "  # Ban a proxy for a specific server\n"
+               "  python proxy_manager_client.py ban --server-identity ytdlp-ops-airflow-service --proxy-url socks5://proxy.example.com:1080\n\n"
+               "  # Unban a proxy\n"
+               "  python proxy_manager_client.py unban --server-identity ytdlp-ops-airflow-service --proxy-url socks5://proxy.example.com:1080\n\n"
+               "  # Reset all proxies for a server to ACTIVE\n"
+               "  python proxy_manager_client.py reset --server-identity ytdlp-ops-airflow-service",
+        formatter_class=argparse.RawTextHelpFormatter
+    )
+    parser.add_argument('--host', default=os.getenv('YTDLP_HOST', '127.0.0.1'), help="Server host (default: 127.0.0.1 or YTDLP_HOST env). Using 127.0.0.1 avoids harmless connection errors when the local Envoy proxy only listens on IPv4.")
+    parser.add_argument('--port', type=int, default=int(os.getenv('YTDLP_PORT', '9080')), help='Server port (default: 9080 or YTDLP_PORT env)')
+
+    subparsers = parser.add_subparsers(dest='command', required=True, help='Available commands')
+
+    # List command
+    list_parser = subparsers.add_parser(
+        'list',
+        help='List proxy statuses for a given server identity.',
+        description="List the status of all proxies associated with a specific server identity.\n"
+                    "The status includes:\n"
+                    "- Server: The server identity.\n"
+                    "- Proxy URL: The URL of the proxy.\n"
+                    "- Status: ACTIVE or BANNED.\n"
+                    "- Success: Count of successful uses.\n"
+                    "- Failures: Count of failed uses.\n"
+                    "- Last Success: Timestamp of the last successful use.\n"
+                    "- Last Failure: Timestamp of the last failed use.",
+        formatter_class=argparse.RawTextHelpFormatter
+    )
+    list_parser.add_argument('--server-identity', type=str, help='The identity of the server to query. If not provided, shows status for the connected server instance.')
+
+    # Ban command
+    ban_parser = subparsers.add_parser(
+        'ban',
+        help='Ban a specific proxy for a server.',
+        description="Manually set a proxy's status to BANNED for a specific server identity.\n"
+                    "A banned proxy will not be used for future requests by that server instance\n"
+                    "until it is unbanned or reset.",
+        formatter_class=argparse.RawTextHelpFormatter
+    )
+    ban_parser.add_argument('--server-identity', type=str, required=True, help='The identity of the server where the proxy should be banned.')
+    ban_parser.add_argument('--proxy-url', type=str, required=True, help="The full URL of the proxy to ban (e.g., 'socks5://host:port').")
+
+    # Unban command
+    unban_parser = subparsers.add_parser(
+        'unban',
+        help='Unban a specific proxy for a server.',
+        description="Manually set a proxy's status to ACTIVE for a specific server identity.\n"
+                    "This will allow the server instance to use the proxy for future requests.",
+        formatter_class=argparse.RawTextHelpFormatter
+    )
+    unban_parser.add_argument('--server-identity', type=str, required=True, help='The identity of the server where the proxy should be unbanned.')
+    unban_parser.add_argument('--proxy-url', type=str, required=True, help="The full URL of the proxy to unban (e.g., 'socks5://host:port').")
+
+    # Reset command
+    reset_parser = subparsers.add_parser(
+        'reset',
+        help='Reset all proxy statuses for a server to ACTIVE.',
+        description="Reset the status of all proxies associated with a specific server identity to ACTIVE.\n"
+                    "This is useful for clearing all bans and making all configured proxies available again.",
+        formatter_class=argparse.RawTextHelpFormatter
+    )
+    reset_parser.add_argument('--server-identity', type=str, required=True, help='The identity of the server whose proxies should be reset.')
+
+    args = parser.parse_args()
+
+    client, transport = None, None
+    try:
+        client, transport = get_thrift_client(args.host, args.port)
+
+        if args.command == 'list':
+            logger.info(f"Getting proxy statuses for server: {args.server_identity or 'local server'}")
+            statuses = client.getProxyStatus(args.server_identity)
+            if not statuses:
+                print("\nThe server reported no proxy statuses.")
+                print("This can happen if no proxies are configured, or if all configured proxies failed their initial health check on server startup.\n")
+            else:
+                # Determine which proxy is next in rotation for each server identity
+                next_proxies = {s.serverIdentity: s.proxyUrl for s in statuses if '(next)' in s.status}
+
+                status_list = []
+                for s in statuses:
+                    is_next = next_proxies.get(s.serverIdentity) == s.proxyUrl
+                    status_list.append({
+                        "Server": s.serverIdentity,
+                        "Proxy URL": f"{s.proxyUrl} ->" if is_next else s.proxyUrl,
+                        "Status": s.status.replace(" (next)", ""),
+                        "Success": s.successCount,
+                        "Failures": s.failureCount,
+                        "Last Success": format_timestamp(s.lastSuccessTimestamp),
+                        "Last Failure": format_timestamp(s.lastFailureTimestamp),
+                    })
+                print("\n--- Proxy Statuses ---")
+                print(tabulate(status_list, headers="keys", tablefmt="grid"))
+                print("----------------------\n")
+
+        elif args.command == 'ban':
+            logger.info(f"Banning proxy '{args.proxy_url}' for server '{args.server_identity}'...")
+            success = client.banProxy(args.proxy_url, args.server_identity)
+            if success:
+                print(f"Successfully banned proxy '{args.proxy_url}' for server '{args.server_identity}'.")
+            else:
+                print("Failed to ban proxy. Check server logs for details.")
+                sys.exit(1)
+
+        elif args.command == 'unban':
+            logger.info(f"Unbanning proxy '{args.proxy_url}' for server '{args.server_identity}'...")
+            success = client.unbanProxy(args.proxy_url, args.server_identity)
+            if success:
+                print(f"Successfully unbanned proxy '{args.proxy_url}' for server '{args.server_identity}'.")
+            else:
+                print("Failed to unban proxy. Check server logs for details.")
+                sys.exit(1)
+
+        elif args.command == 'reset':
+            logger.info(f"Resetting all proxy statuses for server '{args.server_identity}'...")
+            success = client.resetAllProxyStatuses(args.server_identity)
+            if success:
+                print(f"Successfully reset all proxy statuses for server '{args.server_identity}'.")
+            else:
+                print("Failed to reset all proxy statuses. Check server logs for details.")
+                sys.exit(1)
+
+    except (PBServiceException, PBUserException) as e:
+        if hasattr(e, 'errorCode') and e.errorCode == ErrorCode.NOT_IMPLEMENTED:
+            logger.error(f"Action '{args.command}' is not implemented by the server. It may be running in the wrong service mode.")
+            print(f"Error: The server does not support the action '{args.command}'.")
+            print("Please check that the server is running in 'all-in-one' or 'management' mode.")
+        else:
+            logger.error(f"Thrift error performing action '{args.command}': {e.message}", exc_info=True)
+            print(f"Error: {e.message}")
+        sys.exit(1)
+    except TTransport.TTransportException as e:
+        # The logger.error is not needed here because TSocket already logs connection errors.
+        print(f"Error: Connection to server at {args.host}:{args.port} failed. Is the server running?")
+        print(f"Details: {e}")
+        sys.exit(1)
+    except Exception as e:
+        logger.error(f"An unexpected error occurred: {e}", exc_info=True)
+        print(f"An unexpected error occurred: {e}")
+        sys.exit(1)
+    finally:
+        if transport and transport.isOpen():
+            transport.close()
+            logger.info("Thrift connection closed.")
+
+
+if __name__ == "__main__":
+    main()
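Note: the same management endpoints can be scripted without the CLI wrapper. A short sketch; host, port, server identity, and proxy URL are placeholder values reused from the epilog examples above, and the method signatures come from thrift_model/services/yt_management.thrift further down in this patch:

    from yt_ops_services.client_utils import get_thrift_client

    client, transport = get_thrift_client("127.0.0.1", 9080)
    try:
        # Ban one proxy, then dump the refreshed status counters.
        client.banProxy("socks5://proxy.example.com:1080", "ytdlp-ops-airflow-service")
        for s in client.getProxyStatus("ytdlp-ops-airflow-service"):
            print(s.proxyUrl, s.status, s.successCount, s.failureCount)
    finally:
        transport.close()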
+""" + +import logging +import sys +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple, Union + +# --- Python Path Setup --- +project_root = Path(__file__).parent.absolute() +# Add project root to sys.path (needed for the 'pangramia' symlink) +if str(project_root) not in sys.path: sys.path.insert(0, str(project_root)) +# --- End Python Path Setup --- + +logger = logging.getLogger(__name__) + +def patch_thrift_exceptions(): + """ + Patch Thrift-generated exception classes to make them compatible with Airflow's secret masking. + """ + try: + from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException + + # Save original __setattr__ methods + original_service_setattr = PBServiceException.__setattr__ + original_user_setattr = PBUserException.__setattr__ + + # Define a new __setattr__ method that allows modifying any attribute + def new_service_setattr(self, name, value): + logger.debug(f"Setting attribute {name} on PBServiceException") + object.__setattr__(self, name, value) + + def new_user_setattr(self, name, value): + logger.debug(f"Setting attribute {name} on PBUserException") + object.__setattr__(self, name, value) + + # Apply the patch to both exception classes + PBServiceException.__setattr__ = new_service_setattr + PBUserException.__setattr__ = new_user_setattr + + logger.info("Successfully patched Thrift exception classes for Airflow compatibility") + + # Verify the patch + try: + test_exception = PBServiceException(message="Test") + test_exception.args = ("Test",) # Try to modify an attribute + logger.info("Verified Thrift exception patch is working correctly") + except Exception as e: + logger.error(f"Thrift exception patch verification failed: {e}") + except ImportError as e: + logger.warning(f"Could not import Thrift exception classes: {e}") + logger.warning("Airflow error handling may not work properly with Thrift exceptions") + except Exception as e: + logger.error(f"Error patching Thrift exception classes: {e}") + +# Apply the patch when this module is imported +patch_thrift_exceptions() diff --git a/thrift_model/data/common.thrift b/thrift_model/data/common.thrift deleted file mode 100644 index 27babc1..0000000 --- a/thrift_model/data/common.thrift +++ /dev/null @@ -1,131 +0,0 @@ -namespace py pangramia.yt.common -namespace java com.pangramia.yt.common - -typedef string JobID -typedef string Timestamp - - -/** - * Standard error codes for service exceptions. 
diff --git a/thrift_model/data/common.thrift b/thrift_model/data/common.thrift
deleted file mode 100644
index 27babc1..0000000
--- a/thrift_model/data/common.thrift
+++ /dev/null
@@ -1,131 +0,0 @@
-namespace py pangramia.yt.common
-namespace java com.pangramia.yt.common
-
-typedef string JobID
-typedef string Timestamp
-
-
-/**
- * Standard error codes for service exceptions.
- */
-enum ErrorCode {
-  UNKNOWN = 0,
-  NOT_IMPLEMENTED = 1,
-  INTERNAL_ERROR = 2,
-  INVALID_REQUEST = 3,
-  PROXY_UNAVAILABLE = 4,
-  ACCOUNT_UNAVAILABLE = 5,
-  BOT_DETECTED = 6,
-  BOT_DETECTION_SIGN_IN_REQUIRED = 7
-}
-
-
-enum JobState {
-  SUCCESS,
-  FAIL,
-  BOT_FORBIDDEN_ON_URL_ACCESS,
-  BOT_FORBIDDEN_ON_FILE_DOWNLOAD,
-  BOT_CAPTCHA,
-  BOT_AUTH_RELOGIN_REQUIRED,
-  BOT_AUTH_SMS_REQUIRED,
-  BOT_AUTH_DEVICE_QR_REQUIRED,
-  BOT_ACCOUNT_BANNED,
-  BOT_IP_BANNED
-}
-
-struct JobTokenData {
-  1: optional string infoJson,
-  2: optional string ytdlpCommand,
-  3: optional string socks,
-  4: optional JobID jobId,
-  5: optional string url,
-  6: optional string cookiesBlob,
-}
-
-
-enum TokenUpdateMode {
-  AUTOREFRESH_AND_REMAIN_ANONYMOUS,
-  AUTOREFRESH_AND_ALLOW_AUTH,
-  AUTOREFRESH_AND_ONLY_AUTH,
-  CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH,
-  CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS,
-  CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH,
-  AUTO,  // AUTOREFRESH_AND_ONLY_AUTH
-}
-
-
-struct AccountData {
-  1: required string username,
-  2: required string password,
-  3: optional string countryCode
-}
-
-struct ProxyData {
-  1: required string proxyUrl,
-  2: optional string countryCode
-}
-
-
-enum AccountPairState {
-  ACTIVE,
-  PAUSED,
-  REMOVED,
-  IN_PROGRESS,
-  ALL
-}
-
-
-struct AccountPairWithState {
-  1: required string accountId,
-  2: required string proxyId,
-  3: optional AccountPairState accountPairState,
-  4: optional string machineId,
-}
-
-struct JobData {
-  1: required string jobId,
-  2: required string url,
-  3: required string cookiesBlob,
-  4: required string potoken,
-  5: required string visitorId,
-  6: required string ytdlpCommand,
-  7: required string createdTime,
-  8: required map<string, string> telemetry,
-  9: required JobState state,
-  10: optional string errorMessage,
-  11: optional string socks5Id
-}
-
-struct RichCollectionPagination {
-  1: required bool hasNext,
-  2: required i32 totalCount,
-  3: required i32 page,
-  4: required i32 pageSize
-}
-
-struct RichCollectionJobData {
-  1: required list<JobData> items,
-  2: required RichCollectionPagination pagination
-}
-
-struct ProxyStatus {
-  1: string proxyUrl,
-  2: string status,
-  3: i64 successCount,
-  4: i64 failureCount,
-  5: optional string lastFailureTimestamp,
-  6: optional string lastSuccessTimestamp,
-  7: optional string serverIdentity
-}
-
-struct AccountStatus {
-  1: string accountId,
-  2: string status,
-  3: i64 successCount,
-  4: i64 failureCount,
-  5: optional string lastFailureTimestamp,
-  6: optional string lastSuccessTimestamp,
-  7: optional string lastUsedProxy,
-  8: optional string lastUsedMachine
-}
-
diff --git a/thrift_model/data/exceptions.thrift b/thrift_model/data/exceptions.thrift
deleted file mode 100644
index 2e0370e..0000000
--- a/thrift_model/data/exceptions.thrift
+++ /dev/null
@@ -1,14 +0,0 @@
-namespace py pangramia.yt.exceptions
-namespace java com.pangramia.yt.exceptions
-
-exception PBServiceException {
-  1: required string message,
-  2: optional string errorCode,
-  3: optional map<string, string> context
-}
-
-exception PBUserException {
-  1: required string message,
-  2: optional string errorCode,
-  3: optional map<string, string> context
-}
diff --git a/thrift_model/services/base_service.thrift b/thrift_model/services/base_service.thrift
deleted file mode 100644
index bce4461..0000000
--- a/thrift_model/services/base_service.thrift
+++ /dev/null
@@ -1,19 +0,0 @@
-namespace py pangramia.base_service
-namespace java com.pangramia.base_service
-
-include "../data/common.thrift"
-include "../data/exceptions.thrift"
-
-service BaseService {
-  // Common health check method
-  bool ping() throws (1: exceptions.PBServiceException serviceExp,
-                      2: exceptions.PBUserException userExp),
-
-  // Common error reporting
-  bool reportError(1: string message,
-                   2: map<string, string> details) throws (1: exceptions.PBServiceException serviceExp,
-                                                           2: exceptions.PBUserException userExp)
-
-  // Add this to fix AsyncProcessor issues
-  oneway void shutdown()
-}
diff --git a/thrift_model/services/yt_admin_ops.thrift b/thrift_model/services/yt_admin_ops.thrift
deleted file mode 100644
index 5b2b71a..0000000
--- a/thrift_model/services/yt_admin_ops.thrift
+++ /dev/null
@@ -1,63 +0,0 @@
-namespace py pangramia.yt.admin_ops
-namespace java com.pangramia.yt.admin_ops
-
-include "../data/common.thrift"
-include "../data/exceptions.thrift"
-include "base_service.thrift"
-
-// Proxy and Account management
-service YTAccountsOpService extends base_service.BaseService {
-
-  // AccountPairs
-  bool addAccountPair(1: string accountId, 2: string proxyId, 3: string machineId, 4: common.ProxyData proxyData, 5: optional common.AccountData accountData)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  common.AccountPairWithState getPair(1: string machineId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool pair(1: string accountId, 2: string proxyId, 3: string machineId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool unpair(1: string accountId, 2: string proxyId, 3: string machineId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  list<common.AccountPairWithState> listAccountPairs(1: optional common.AccountPairState filter)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  // ManageAccounts
-  bool addAccount(1: string accountId, 2: optional common.AccountData accountData)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool suspendAccount(1: string accountId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool resumeAccount(1: string accountId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool removeAccount(1: string accountId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  list<string> listActiveAccounts()
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  // ManageProxy
-  bool addProxy(1: string proxyId, 2: common.ProxyData proxyData)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool suspendProxy(1: string proxyId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool resumeProxy(1: string proxyId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  bool removeProxy(1: string proxyId)
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-
-  list<string> listActiveProxies()
-      throws (1: exceptions.PBServiceException serviceExp,
-              2: exceptions.PBUserException userExp),
-}
diff --git a/thrift_model/services/yt_management.thrift b/thrift_model/services/yt_management.thrift
deleted file mode 100644
index 728c9db..0000000
--- a/thrift_model/services/yt_management.thrift
+++ /dev/null
@@ -1,27 +0,0 @@
-namespace py pangramia.yt.management
-namespace java com.pangramia.yt.management
-
-include "../data/common.thrift"
-include "../data/exceptions.thrift"
-include "base_service.thrift"
-
-// Service for managing the state of shared resources like proxies and accounts.
-// This service is intended to be run as a single, authoritative instance.
-service YTManagementService extends base_service.BaseService {
-
-  // --- Proxy Management Methods ---
-  list<common.ProxyStatus> getProxyStatus(1: optional string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool banProxy(1: string proxyUrl, 2: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool unbanProxy(1: string proxyUrl, 2: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool resetAllProxyStatuses(1: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool banAllProxies(1: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool deleteProxyFromRedis(1: string proxyUrl, 2: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  i32 deleteAllProxiesFromRedis(1: optional string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-
-  // --- Account Management Methods ---
-  list<common.AccountStatus> getAccountStatus(1: optional string accountId, 2: optional string accountPrefix) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool banAccount(1: string accountId, 2: optional string reason) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool unbanAccount(1: string accountId, 2: optional string reason) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  bool deleteAccountFromRedis(1: string accountId) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp),
-  i32 deleteAllAccountsFromRedis(1: optional string accountPrefix) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp)
-}
diff --git a/thrift_model/services/yt_tokens_ops.thrift b/thrift_model/services/yt_tokens_ops.thrift
deleted file mode 100644
index 82e3ed9..0000000
--- a/thrift_model/services/yt_tokens_ops.thrift
+++ /dev/null
@@ -1,40 +0,0 @@
-namespace py pangramia.yt.tokens_ops
-namespace java com.pangramia.yt.tokens_ops
-
-include "../data/common.thrift"
-include "../data/exceptions.thrift"
-include "yt_management.thrift"
-
-// The unified service that combines token operations and management functions.
-// The server implementation will decide which functions are active based on its role.
-service YTTokenOpService extends yt_management.YTManagementService {
-
-  common.JobTokenData getOrRefreshTokenWithReport (1: string accountId,
-                                                   2: string oldUrl,
-                                                   3: common.JobState status,
-                                                   4: optional string details,
-                                                   5: optional string jobId,
-                                                   6: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
-                                                   7: optional string url,
-                                                   8: optional string clients) throws (1: exceptions.PBServiceException serviceExp,
-                                                                                       2: exceptions.PBUserException userExp)
-
-  common.JobTokenData getOrRefreshToken (1: string accountId,
-                                         2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
-                                         3: optional string url,
-                                         4: optional string clients,
-                                         5: optional string machineId) throws (1: exceptions.PBServiceException serviceExp,
-                                                                               2: exceptions.PBUserException userExp)
-
-  common.JobTokenData getLatestToken (1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
-                                                                   2: exceptions.PBUserException userExp),
-  common.JobTokenData refreshToken (1: string accountId,
-                                    2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
-                                    3: optional string url) throws (1: exceptions.PBServiceException serviceExp,
-                                                                    2: exceptions.PBUserException userExp)
-  bool reportState (1: string url,
-                    2: common.JobState status,
-                    3: optional string details,
-                    4: optional string jobId) throws (1: exceptions.PBServiceException serviceExp,
-                                                      2: exceptions.PBUserException userExp)
-}
diff --git a/tools/create-deployment-bundle.sh b/tools/create-deployment-bundle.sh
new file mode 100755
index 0000000..2a21263
--- /dev/null
+++ b/tools/create-deployment-bundle.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+#
+# Creates a clean deployment bundle of the project in a specified local directory.
+#
+# This script is designed to be run from the root of the project directory.
+# It uses rsync with a "whitelist" of files and directories to ensure only
+# artifacts required for deployment are included in the bundle.
+#
+# Usage:
+#   ./tools/create-deployment-bundle.sh [DESTINATION_PATH]
+#
+# If DESTINATION_PATH is not provided, it defaults to /opt/yt-ops-deploys/yt-ops-services.
+
+set -e  # Exit immediately if a command exits with a non-zero status.
+set -u  # Treat unset variables as an error.
+
+# --- Configuration ---
+# The root directory of the project on the local machine.
+SOURCE_DIR="."
+# Default destination for the deployment bundle. Can be overridden by the first argument.
+DEFAULT_DEST_DIR="/opt/yt-ops-deploys/yt-ops-services"
+DEST_DIR="${1:-$DEFAULT_DEST_DIR}"
+
+# --- rsync command ---
+echo ">>> Creating deployment bundle from '$SOURCE_DIR' to '$DEST_DIR'..."
+
+# Ensure the parent directory of the destination exists.
+# This requires sudo if the user doesn't have permissions for the parent path.
+if [ ! -d "$(dirname "$DEST_DIR")" ]; then
+    echo "Parent directory of destination does not exist. Attempting to create with sudo..."
+    sudo mkdir -p "$(dirname "$DEST_DIR")"
+    sudo chown "$USER" "$(dirname "$DEST_DIR")"
+fi
+
+# Create temporary files listing what to include and what to exclude.
+# This is a "whitelist" approach, ensuring only necessary files are bundled.
+# This list is generated by analyzing the Ansible playbooks to determine
+# exactly which files and directories are required for deployment.
+INCLUDE_FILE=$(mktemp)
+EXCLUDE_FILE=$(mktemp)
+trap 'rm -f -- "$INCLUDE_FILE" "$EXCLUDE_FILE"' EXIT
+
+# Define files and directories to exclude from the bundle.
+cat > "$EXCLUDE_FILE" <<'EOF'
+# (exclude patterns lost in this copy of the patch)
+EOF
+
+# Define the whitelist of files and directories to include in the bundle.
+cat > "$INCLUDE_FILE" <<'EOF'
+# (include whitelist lost in this copy of the patch)
+EOF
+
+# Copy the whitelisted files into the bundle directory. The exact flags were
+# lost in this copy of the patch; per the header comments, the script drives
+# rsync from the include/exclude lists built above.
+rsync -a --delete \
+    --include-from="$INCLUDE_FILE" \
+    --exclude-from="$EXCLUDE_FILE" \
+    "$SOURCE_DIR/" "$DEST_DIR/"
+
+echo ">>> Deployment bundle created successfully at '$DEST_DIR'."
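Note: per the usage header, a typical invocation is ./tools/create-deployment-bundle.sh /tmp/yt-ops-bundle; omitting the argument falls back to the default /opt/yt-ops-deploys/yt-ops-services.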
diff --git a/tools/generate-inventory.py b/tools/generate-inventory.py
index e805121..5ebf313 100755
--- a/tools/generate-inventory.py
+++ b/tools/generate-inventory.py
@@ -20,15 +20,21 @@ def generate_inventory(cluster_config, inventory_path):
 
     # Master group
     f.write("[airflow_master]\n")
-    for hostname, ip in cluster_config['master'].items():
-        f.write(f"{hostname} ansible_host={ip}\n")
+    for hostname, config in cluster_config['master'].items():
+        line = f"{hostname} ansible_host={config['ip']}"
+        if 'port' in config:
+            line += f" ansible_port={config['port']}"
+        f.write(line + "\n")
     f.write("\n")
 
     # Workers group
     f.write("[airflow_workers]\n")
     for hostname, config in cluster_config['workers'].items():
-        f.write(f"{hostname} ansible_host={config['ip']}\n")
+        line = f"{hostname} ansible_host={config['ip']}"
+        if 'port' in config:
+            line += f" ansible_port={config['port']}"
+        f.write(line + "\n")
 
 def generate_host_vars(cluster_config, host_vars_dir):
     """Generate host-specific variables"""
@@ -46,21 +52,19 @@ def generate_host_vars(cluster_config, host_vars_dir):
         except Exception as e:
             print(f'Failed to delete {file_path}. Reason: {e}')
 
-    # Get master IP for Redis configuration
-    master_ip = list(cluster_config['master'].values())[0]
+    # Get master IP for Redis configuration from the new structure
+    master_ip = list(cluster_config['master'].values())[0]['ip']
 
-    # Generate master host variables
-    for hostname, ip in cluster_config['master'].items():
-        host_vars_file = os.path.join(host_vars_dir, f"{hostname}.yml")
-        with open(host_vars_file, 'w') as f:
-            f.write("---\n")
-            f.write(f"# Variables for {hostname}\n")
-            f.write(f"master_host_ip: {ip}\n")
-            f.write(f"redis_port: 52909\n")
+    # Get global proxy definitions
+    shadowsocks_proxies = cluster_config.get('shadowsocks_proxies', {})
 
-    # Generate worker proxy variables
-    for hostname, config in cluster_config['workers'].items():
+    # Combine master and worker nodes for processing
+    all_nodes = {**cluster_config['master'], **cluster_config['workers']}
+
+    for hostname, config in all_nodes.items():
         host_vars_file = os.path.join(host_vars_dir, f"{hostname}.yml")
+
+        # Per-node list of proxies to USE
         worker_proxies = config.get('proxies', [])
 
         with open(host_vars_file, 'w') as f:
@@ -68,6 +72,18 @@ def generate_host_vars(cluster_config, host_vars_dir):
             f.write(f"# Variables for {hostname}\n")
             f.write(f"master_host_ip: {master_ip}\n")
             f.write("redis_port: 52909\n")
+
+            # Write the global proxy definitions for deployment
+            if shadowsocks_proxies:
+                f.write("shadowsocks_proxies:\n")
+                for name, proxy_config in shadowsocks_proxies.items():
+                    f.write(f"  {name}:\n")
+                    f.write(f"    server: \"{proxy_config['server']}\"\n")
+                    f.write(f"    server_port: {proxy_config['server_port']}\n")
+                    f.write(f"    local_port: {proxy_config['local_port']}\n")
+                    f.write(f"    vault_password_key: \"{proxy_config['vault_password_key']}\"\n")
+
+            # Write the per-node list of proxies to USE
             if worker_proxies:
                 f.write("worker_proxies:\n")
                 for proxy in worker_proxies:
@@ -87,23 +103,26 @@ def generate_group_vars(cluster_config, group_vars_dir):
     all_vars_file = os.path.join(all_vars_dir, "generated_vars.yml")
     if os.path.exists(all_vars_file):
         os.remove(all_vars_file)
+
+    global_vars = cluster_config.get('global_vars', {})
     external_ips = cluster_config.get('external_access_ips', [])
 
     # Get master IP for Redis configuration
-    master_ip = list(cluster_config['master'].values())[0]
+    master_ip = list(cluster_config['master'].values())[0]['ip']
+
+    # Prepare data for YAML dump
+    generated_data = {
+        'master_host_ip': master_ip,
+        'redis_port': 52909,
+        'external_access_ips': external_ips if external_ips else []
+    }
+    generated_data.update(global_vars)
 
     with open(all_vars_file, 'w') as f:
         f.write("---\n")
        f.write("# This file is auto-generated by tools/generate-inventory.py\n")
         f.write("# Do not edit – your changes will be overwritten.\n")
-        f.write(f"master_host_ip: {master_ip}\n")
-        f.write("redis_port: 52909\n")
-        f.write("external_access_ips:\n")
-        if external_ips:
-            for ip in external_ips:
-                f.write(f"  - \"{ip}\"\n")
-        else:
-            f.write("  []\n")
+        yaml.dump(generated_data, f, default_flow_style=False)
 
 def main():
     if len(sys.argv) != 2:
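Note: given the cluster config at the top of this patch, the updated generator now emits one host_vars file per node. A hand-reconstructed sketch of the expected output for worker dl002 (illustrative, abridged to a single proxy definition; the real file would list every entry in shadowsocks_proxies):

    # ansible/host_vars/dl002.yml (illustrative)
    master_host_ip: 89.253.223.97
    redis_port: 52909
    shadowsocks_proxies:
      sslocal-rust-1081:
        server: "79.137.207.43"
        server_port: 8388
        local_port: 1081
        vault_password_key: "vault_ss_password_2"
    worker_proxies:
      - "socks5://sslocal-rust-1081:1081"
      - "socks5://sslocal-rust-1086:1086"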
diff --git a/tools/host_vars/dl-master.yml b/tools/host_vars/dl-master.yml
deleted file mode 100644
index f553382..0000000
--- a/tools/host_vars/dl-master.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Master server specific variables
-hostname: "dl-master"
-service_role: "management"
-
-# Ports
-ytdlp_base_port: 9090
-management_service_port: 9091
-
-# Redis / Postgres run on the master itself
-redis_host: "localhost"
-
-# All secrets live in vault – only the master needs them
-# These will be replaced with encrypted values
-redis_password: "rOhTAIlTFFylXsjhqwxnYxDChFc"
-postgres_password: "pgdb_pwd_A7bC2xY9zE1wV5uP"
-airflow_admin_password: "2r234sdfrt3q454arq45q355"
-
-server_identity: "ytdlp-ops-service-mgmt"
-ansible_user: "alex_p"
diff --git a/tools/host_vars/dl-worker-001.yml b/tools/host_vars/dl-worker-001.yml
deleted file mode 100644
index a3410de..0000000
--- a/tools/host_vars/dl-worker-001.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-# Worker server specific variables
-hostname: "dl-worker-001"
-service_role: "worker"
-
-# Master server connection
-master_host_ip: "89.253.223.97"
-redis_host: "89.253.223.97"
-
-# Ports
-ytdlp_base_port: 9090
-envoy_port: 9080
-envoy_admin_port: 9901
-management_service_port: 9091
-
-# Camoufox configuration
-camoufox_proxies: "socks5://172.17.0.1:1087"
-camoufox_base_vnc_port: 5901
-vnc_password: "vnc_pwd_Z5xW8cV2bN4mP7lK"
-
-# Account management
-account_active_duration_min: 7
-account_cooldown_duration_min: 30
-
-# Server identity
-server_identity: "ytdlp-ops-service-worker"
-ansible_user: "alex_p"
diff --git a/tools/sync-to-tower.sh b/tools/sync-to-tower.sh
new file mode 100755
index 0000000..06a8e83
--- /dev/null
+++ b/tools/sync-to-tower.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+#
+# Syncs the project directory to a remote "tower" host for deployment orchestration.
+#
+# This script is designed to be run from the root of the project directory.
+# It excludes generated files, local data, logs, and other non-essential files
+# to ensure a clean copy of the source code and configuration templates is synced.
+
+set -e  # Exit immediately if a command exits with a non-zero status.
+set -u  # Treat unset variables as an error.
+
+# --- Configuration ---
+# IMPORTANT: Update these variables to match your environment.
+#
+# The remote host to sync to (e.g., user@hostname)
+REMOTE_HOST="user@your-tower-host.com"
+# The destination path on the remote host
+REMOTE_PATH="/path/to/your/project"
+# The root directory of the project on the local machine.
+SOURCE_DIR="."
+
+# --- rsync command ---
+echo ">>> Syncing project from '$SOURCE_DIR' to '$REMOTE_HOST:$REMOTE_PATH'..."
+
+# Use an array for exclude options for clarity and to handle spaces correctly.
+# Patterns must not carry inner quotes: "--exclude='*.pyc'" would make rsync
+# look for file names containing literal quote characters.
+# This list is based on an analysis of the project structure and generated artifacts.
+EXCLUDE_OPTS=(
+    "--exclude=.git"
+    "--exclude=__pycache__"
+    "--exclude=*.pyc"
+    "--exclude=*.log"
+    "--exclude=.DS_Store"
+    "--exclude=.vault_pass"
+    "--exclude=.env"
+    "--exclude=ansible/inventory.ini"
+    "--exclude=ansible/host_vars/"
+    "--exclude=ansible/group_vars/all/generated_vars.yml"
+    "--exclude=postgres-data/"
+    "--exclude=redis-data/"
+    "--exclude=minio-data/"
+    "--exclude=logs/"
+    "--exclude=downloadfiles/"
+    "--exclude=addfiles/"
+    "--exclude=token_generator/node_modules/"
+    # Exclude files generated on remote hosts by Ansible/config-generator
+    "--exclude=airflow/configs/envoy.yaml"
+    "--exclude=airflow/configs/docker-compose.camoufox.yaml"
+    "--exclude=airflow/configs/camoufox_endpoints.json"
+    # Exclude local development notes
+    "--exclude=TODO-*.md"
+)
+
+# The rsync command:
+#   -a: archive mode (recursive, preserves permissions, etc.)
+#   -v: verbose
+#   -z: compress file data during the transfer
+#   --delete: delete extraneous files from the destination directory
+rsync -avz --delete \
+    "${EXCLUDE_OPTS[@]}" \
+    "$SOURCE_DIR/" \
+    "$REMOTE_HOST:$REMOTE_PATH/"
+
+echo ">>> Sync complete."
diff --git a/yt_ops_services/__init__.py b/yt_ops_services/__init__.py
new file mode 100644
index 0000000..22bf027
--- /dev/null
+++ b/yt_ops_services/__init__.py
@@ -0,0 +1,3 @@
+from .version import VERSION
+# Package initialization; reuse the VERSION file so the two cannot drift.
+__version__ = VERSION
diff --git a/yt_ops_services/client_utils.py b/yt_ops_services/client_utils.py
new file mode 100644
index 0000000..089de29
--- /dev/null
+++ b/yt_ops_services/client_utils.py
@@ -0,0 +1,36 @@
+import logging
+import datetime
+from thrift.transport import TSocket, TTransport
+from thrift.protocol import TBinaryProtocol
+from pangramia.yt.tokens_ops import YTTokenOpService
+
+logger = logging.getLogger(__name__)
+
+def get_thrift_client(host: str, port: int, timeout_ms: int = 30000):
+    """
+    Helper function to create and connect a Thrift client.
+    Returns a tuple of (client, transport).
+    """
+    logger.info(f"Connecting to Thrift server at {host}:{port}...")
+    transport = TSocket.TSocket(host, port)
+    transport.setTimeout(timeout_ms)
+    transport = TTransport.TFramedTransport(transport)
+    protocol = TBinaryProtocol.TBinaryProtocol(transport)
+    client = YTTokenOpService.Client(protocol)
+    transport.open()
+    logger.info("Connection successful.")
+    return client, transport
+
+def format_timestamp(ts_str: str) -> str:
+    """Formats a string timestamp into a human-readable date string."""
+    if not ts_str:
+        return ""
+    try:
+        ts_float = float(ts_str)
+        # Handle cases where timestamp might be 0 or negative
+        if ts_float <= 0:
+            return ""
+        dt_obj = datetime.datetime.fromtimestamp(ts_float)
+        return dt_obj.strftime('%Y-%m-%d %H:%M:%S')
+    except (ValueError, TypeError):
+        return ts_str  # Return original string if conversion fails
diff --git a/yt_ops_services/version.py b/yt_ops_services/version.py
new file mode 100644
index 0000000..b4c109c
--- /dev/null
+++ b/yt_ops_services/version.py
@@ -0,0 +1,9 @@
+import os
+
+def get_version():
+    """Reads the version from the VERSION file in the project root."""
+    version_path = os.path.join(os.path.dirname(__file__), '..', 'VERSION')
+    with open(version_path, 'r') as f:
+        return f.read().strip()
+
+VERSION = get_version()
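Note: a quick connectivity check for the new client_utils helper. It assumes a server listening on 127.0.0.1:9080; ping() comes from BaseService, which YTTokenOpService ultimately extends per the thrift definitions removed above:

    from yt_ops_services.client_utils import get_thrift_client

    client, transport = get_thrift_client("127.0.0.1", 9080)
    try:
        print("server alive:", client.ping())
    finally:
        transport.close()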