diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fda177f --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +.aider* +*/.DS_Store diff --git a/.vault_pass b/.vault_pass new file mode 100644 index 0000000..89a0cd2 --- /dev/null +++ b/.vault_pass @@ -0,0 +1 @@ +ytdlp-ops diff --git a/README.md b/README.md new file mode 100644 index 0000000..22f80b4 --- /dev/null +++ b/README.md @@ -0,0 +1,79 @@ +# Airflow/YT-DLP Cluster Deployment + +This repository contains Ansible playbooks and configuration files for deploying a distributed Airflow cluster with YT-DLP services. + +## Prerequisites + +1. Install Ansible on your control machine +2. Ensure SSH access to all target nodes +3. Set up your vault password in `.vault_pass` file + +## Initial Setup + +Generate the inventory and configuration files from your cluster definition: + +```bash +./tools/generate-inventory.py cluster.test.yml +cd ansible +``` + +## Full Deployment + +### Deploy entire cluster with proxies (recommended for new setups): + +```bash +ansible-playbook playbook-full-with-proxies.yml +``` + +### Deploy cluster without proxies: + +```bash +ansible-playbook playbook-full.yml +``` + +## Targeted Deployments + +### Deploy only to master node: + +```bash +ansible-playbook playbook-master.yml --limit="af-test" +``` + +### Deploy only to worker nodes: + +```bash +ansible-playbook playbook-worker.yml +``` + +## Deploy Specific Steps + +To start at a specific task (useful for debugging or partial deployments): + +```bash +ansible-playbook playbook-master.yml --limit="af-test" --start-at-task="Prepare Caddy asset extraction directory" +``` + +## Debug Deployments + +Run with dry-run and verbose output for debugging: + +```bash +ansible-playbook playbook-full.yml --check --diff -vv +``` + +## DAGs Only Deployment + +To update only DAG files and configurations: + +```bash +ansible-playbook playbook-dags.yml +``` + +## Vault Management + +All sensitive data is encrypted with Ansible Vault. The vault password should be stored in `.vault_pass` file in the project root. + +To edit vault files: +```bash +ansible-vault edit group_vars/all/vault.yml +``` diff --git a/airflow/.dockerignore b/airflow/.dockerignore index 312f1f7..6037927 100644 --- a/airflow/.dockerignore +++ b/airflow/.dockerignore @@ -1,5 +1,6 @@ redis-data minio-data +postgres-data logs downloadfiles addfiles diff --git a/airflow/Dockerfile b/airflow/Dockerfile index 8ea5aec..9a48fc6 100644 --- a/airflow/Dockerfile +++ b/airflow/Dockerfile @@ -58,7 +58,10 @@ RUN pip install --no-cache-dir \ apache-airflow-providers-http \ apache-airflow-providers-amazon \ "botocore>=1.34.118" \ - psycopg2-binary "gunicorn==20.1.0" + psycopg2-binary \ + "gunicorn==20.1.0" \ + "python-ffmpeg==2.0.12" \ + "ffprobe3" # --- Install the custom yt_ops_services package --- # Copy all the necessary source code for the package. diff --git a/airflow/Dockerfile.caddy b/airflow/Dockerfile.caddy index 83e1076..5d37c33 100644 --- a/airflow/Dockerfile.caddy +++ b/airflow/Dockerfile.caddy @@ -1,30 +1,10 @@ -# Stage 1: Extract static assets from the Airflow image -FROM pangramia/ytdlp-ops-airflow:latest AS asset-extractor - -# Switch to root to create and write to the /assets directory -USER root - -# Create a temporary directory for extracted assets -WORKDIR /assets - -# Copy static assets from the Airflow image. -# This dynamically finds the paths to flask_appbuilder and airflow static assets -# to be resilient to version changes. 
-RUN cp -R $(python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static"))') ./appbuilder && \ - cp -R $(python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))') ./dist - -# Pre-compress the static assets using gzip -# This improves performance by allowing Caddy to serve compressed files directly. -RUN find ./appbuilder -type f -print0 | xargs -0 gzip -k -9 && \ - find ./dist -type f -print0 | xargs -0 gzip -k -9 - - -# Stage 2: Build the final Caddy image +# Build the final Caddy image FROM caddy:2-alpine -# Copy the pre-compressed static assets from the first stage -COPY --from=asset-extractor /assets/appbuilder /usr/share/caddy/static/appbuilder -COPY --from=asset-extractor /assets/dist /usr/share/caddy/static/dist +# Copy the pre-compressed static assets from the build context. +# These assets are extracted from the main Airflow image by the Ansible playbook. +COPY caddy_build_assets/appbuilder /usr/share/caddy/static/appbuilder +COPY caddy_build_assets/dist /usr/share/caddy/static/dist # Copy the Caddyfile configuration. The build context is the project root, # so the path is relative to that. diff --git a/airflow/README-proxy.RU.md b/airflow/README-proxy.RU.md deleted file mode 100644 index e7ed166..0000000 --- a/airflow/README-proxy.RU.md +++ /dev/null @@ -1,249 +0,0 @@
# Proxy and Account Management Strategy

This document describes the intelligent resource-management strategy (for proxies and accounts) used by `ytdlp-ops-server`. The goal of this system is to maximize the success rate of operations, minimize bans, and provide fault tolerance.

The server can run in different roles to support a distributed architecture, separating management tasks from token-generation tasks.

---

## Service Roles and Architecture

The server is designed to run in one of three roles, selected with the `--service-role` flag:

- **`management`**: A single lightweight service instance responsible for all management API calls.
  - **Purpose**: Provides a centralized entry point for monitoring and managing the state of all proxies and accounts in the system.
  - **Behavior**: Exposes only management functions (`getProxyStatus`, `banAccount`, etc.). Calls to token-generation functions will fail.
  - **Deployment**: Runs as a single container (`ytdlp-ops-management`) and exposes its port directly on the host (e.g., port `9091`), bypassing Envoy.

- **`worker`**: The main workhorse for generating tokens and `info.json`.
  - **Purpose**: Handles all token-generation requests.
  - **Behavior**: Implements the full API, but its management functions are limited to its own `server_identity`.
  - **Deployment**: Runs as a scalable service (`ytdlp-ops-worker`) behind an Envoy load balancer (e.g., port `9080`).

- **`all-in-one`** (default): A single instance that performs both management and worker functions. Ideal for local development or small deployments.

This architecture enables a robust, federated system in which workers manage their resources locally while a central service provides a global view for management and monitoring.

---

## 1. Account Lifecycle Management (Cooldown / Resting)
**Goal:** Prevent accounts from being overused and subsequently banned by giving them "rest" periods after intensive work.

### How it works:
An account's lifecycle consists of three states:
- **`ACTIVE`**: The account is active and used for tasks. Its activity timer starts on its first successful use.
- **`RESTING`**: If an account has been `ACTIVE` for longer than the configured limit, the `AccountManager` automatically moves it into the "resting" state. While resting, the Airflow worker will not pick it for new tasks.
- **Return to `ACTIVE`**: Once the rest period is over, the `AccountManager` automatically returns the account to `ACTIVE`, making it available again.

### Configuration:
These parameters are set when starting `ytdlp-ops-server`.

- `--account-active-duration-min`: The "work time" in **minutes** that an account may stay continuously active before moving to `RESTING`.
  - **Default:** `30` (minutes).
- `--account-cooldown-duration-min`: The "rest time" in **minutes** that an account must spend in the `RESTING` state.
  - **Default:** `60` (minutes).

**Where to configure:**
The parameters are passed as command-line arguments when the server starts. With Docker Compose this is done in `airflow/docker-compose-ytdlp-ops.yaml`:
```yaml
  command:
    # ... other parameters
    - "--account-active-duration-min"
    - "${ACCOUNT_ACTIVE_DURATION_MIN:-30}"
    - "--account-cooldown-duration-min"
    - "${ACCOUNT_COOLDOWN_DURATION_MIN:-60}"
```
You can change the defaults by setting the `ACCOUNT_ACTIVE_DURATION_MIN` and `ACCOUNT_COOLDOWN_DURATION_MIN` environment variables in your `.env` file.

**Relevant files:**
- `server_fix/account_manager.py`: Contains the core state-switching logic.
- `ytdlp_ops_server_fix.py`: Parses the command-line arguments.
- `airflow/docker-compose-ytdlp-ops.yaml`: Passes the arguments to the server container.

---

## 2. Smart Ban Strategy

**Goal:** Avoid banning good proxies without reason. The problem is often the account, not the proxy it runs through.

### How it works:

#### Stage 1: Ban the Account First
- When a serious, ban-worthy error occurs (e.g., `BOT_DETECTED` or `SOCKS5_CONNECTION_FAILED`), the system sanctions **only the account** that triggered the error.
- For the proxy, the error is merely recorded as a single failure; the proxy itself is **not banned** and stays in rotation.

#### Stage 2: Ban the Proxy via a Sliding Window
- A proxy is banned automatically only if it shows **systematic failures with DIFFERENT accounts** within a short period of time.
- This is a reliable indicator that the proxy itself is the problem. The server-side `ProxyManager` tracks this and bans such a proxy automatically (see the sketch after the configuration list below).

### Configuration:
These parameters are **hard-coded** as constants; changing them requires editing the file.

**Where to configure:**
- **File:** `server_fix/proxy_manager.py`
- **Constants** in the `ProxyManager` class:
  - `FAILURE_WINDOW_SECONDS`: The time window, in seconds, used for failure analysis.
    - **Default:** `3600` (1 hour).
  - `FAILURE_THRESHOLD_COUNT`: The minimum total number of failures before the check kicks in.
    - **Default:** `3`.
  - `FAILURE_THRESHOLD_UNIQUE_ACCOUNTS`: The minimum number of **unique accounts** that must have failed for the proxy to be banned.
    - **Default:** `3`.
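As an illustration, here is a minimal sketch of that sliding-window check (a sketch only: the class name and the in-memory bookkeeping are assumptions for readability; the real `ProxyManager` in `server_fix/proxy_manager.py` keeps its state in Redis):

```python
import time
from collections import defaultdict

FAILURE_WINDOW_SECONDS = 3600          # analysis window (1 hour)
FAILURE_THRESHOLD_COUNT = 3            # minimum total failures in the window
FAILURE_THRESHOLD_UNIQUE_ACCOUNTS = 3  # minimum distinct failing accounts

class SlidingWindowBanCheck:
    """Ban a proxy only when several DIFFERENT accounts fail through it."""

    def __init__(self):
        # proxy_url -> list of (timestamp, account_id) failure events
        self._failures = defaultdict(list)

    def record_failure(self, proxy_url: str, account_id: str) -> bool:
        """Record one failure; return True if the proxy should now be banned."""
        now = time.time()
        events = self._failures[proxy_url]
        events.append((now, account_id))
        # Keep only the events that are still inside the sliding window.
        events[:] = [(ts, acc) for ts, acc in events
                     if now - ts <= FAILURE_WINDOW_SECONDS]
        unique_accounts = {acc for _, acc in events}
        return (len(events) >= FAILURE_THRESHOLD_COUNT
                and len(unique_accounts) >= FAILURE_THRESHOLD_UNIQUE_ACCOUNTS)
```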
**Relevant files:**
- `server_fix/proxy_manager.py`: Contains the sliding-window logic and the constants.
- `airflow/dags/ytdlp_ops_worker_per_url.py`: The `handle_bannable_error_callable` function implements the "account only" ban policy.

---

### Decoding Account Statuses

You can view the status of all accounts with the `ytdlp_mgmt_proxy_account` DAG. The statuses have the following meanings:

- **`ACTIVE`**: The account is healthy and available for use. By default, an account is considered `ACTIVE` if it has no explicit status set.
- **`BANNED`**: The account is temporarily disabled due to repeated failures (e.g., `BOT_DETECTED` errors) or was banned manually. The status shows the time remaining until it automatically returns to `ACTIVE` (e.g., `BANNED (active in 55m)`).
- **`RESTING`**: The account was used for an extended period and is in a mandatory "rest" period to prevent burnout. The status shows the time remaining until it returns to `ACTIVE` (e.g., `RESTING (active in 25m)`).
- **(Empty Status)**: In older versions, an account that had only failures (and no successes) could show an empty status. This has been fixed; such accounts are now correctly shown as `ACTIVE`.

---

## 3. The End-to-End Rotation Flow: How It All Works Together

This section describes, step by step, how a worker obtains an account and a proxy for a single task, tying together all the management strategies described above.

1. **Worker Initialization (`ytdlp_ops_worker_per_url`)**
   - The DAG starts, triggered either by the orchestrator or by a previous successful run of itself.
   - The `pull_url_from_redis` task pulls a URL from the `_inbox` queue in Redis.

2. **Account Selection (Airflow Worker)**
   - The `assign_account` task runs.
   - It generates the full list of candidate account IDs from the `account_pool` parameter (e.g., `my_prefix_01` through `my_prefix_50`).
   - It connects to Redis and checks the status of every account in that list.
   - It builds a new temporary list containing only the accounts that are **not** `BANNED` or `RESTING`.
   - If the resulting list of active accounts is empty, the worker fails (unless auto-creation is enabled).
   - It then picks one account from the filtered active list using **`random.choice()`**.
   - The chosen `account_id` is passed to the next task.

3. **Proxy Selection (`ytdlp-ops-server`)**
   - The `get_token` task runs and sends the randomly chosen `account_id` in a Thrift RPC call to `ytdlp-ops-server`.
   - On the server, the `ProxyManager` is asked for a proxy.
   - The `ProxyManager`:
     a. Refreshes its internal state by loading all proxy statuses from Redis.
     b. Filters the list down to proxies with `ACTIVE` status.
     c. Applies the sliding-window ban policy, potentially banning proxies that have failed too often recently.
     d. Picks the next available proxy from the active list using a **round-robin** index.
     e. Returns the chosen `proxy_url`.

4. **Execution and Reporting**
   - The server now has both the `account_id` (from Airflow) and the `proxy_url` (from its `ProxyManager`).
   - It proceeds with token generation using these resources.
   - On completion (success or failure), it reports the result to Redis, updating the statuses of the specific account and proxy that were used. This feeds their failure counters, rest timers, and so on for the next run.

This separation of responsibilities is key:
- **The Airflow worker (the `assign_account` task)** is responsible for **randomly selecting an active account**, while preserving "affinity" (reusing the same account after a success).
- **The `ytdlp-ops-server`** is responsible for **round-robin selection of an active proxy**.

---

## 4. Automatic Account Bans Based on Failure Count

**Goal:** Automatically rotate out accounts that keep producing errors unrelated to bans (e.g., a wrong password or authorization problems).

### How it works:
- The `AccountManager` tracks the number of **consecutive** failures for each account.
- A successful operation resets the counter.
- If the number of consecutive failures reaches the configured threshold, the account is automatically banned for a set duration.

### Configuration:
These parameters are set in the `AccountManager` constructor.

**Where to configure:**
- **File:** `server_fix/account_manager.py`
- **Parameters** of `AccountManager.__init__`:
  - `failure_threshold`: The number of consecutive failures before a ban.
    - **Default:** `5`.
  - `ban_duration_s`: The ban duration in seconds.
    - **Default:** `3600` (1 hour).

---

## 5. Monitoring and Recovery

### How to Check Statuses
The **`ytdlp_mgmt_proxy_account`** DAG is the primary tool for monitoring the health of your resources. It connects directly to the **management service** to perform actions.

- **DAG ID:** `ytdlp_mgmt_proxy_account`
- **How to use:** Trigger the DAG from the Airflow UI. Make sure the `management_host` and `management_port` parameters point at your `ytdlp-ops-management` service instance. For a full overview, set:
  - `entity`: `all`
  - `action`: `list`
- **Result:** The DAG log shows tables with the current status of every account and proxy. Accounts in the `BANNED` or `RESTING` state show the time remaining until they become active again (e.g., `RESTING (active in 45m)`). For proxies, the one that is `(next)` in rotation for a given worker is highlighted.

### What Happens if All Accounts Are Banned or Resting?
If the entire account pool becomes unavailable (all accounts `BANNED` or `RESTING`), the system pauses by default (a sketch of this check follows the list below).
- The `ytdlp_ops_worker_per_url` DAG fails with an `AirflowException` at the `assign_account` step, because the pool of active accounts is empty.
- This stops the processing loops. The system stays paused until accounts are unbanned manually or their ban/rest timers expire; after that you can restart the processing loops with the `ytdlp_ops_orchestrator` DAG.
- The `ytdlp_ops_worker_per_url` DAG graph now explicitly shows tasks such as `assign_account`, `get_token`, `ban_account`, `retry_get_token`, etc., making the execution flow and failure points easier to see.
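A minimal sketch of that exhaustion check in the `assign_account` step (illustrative only: the function and the `get_status` helper are hypothetical names, and the auto-creation path described below is omitted):

```python
import random

from airflow.exceptions import AirflowException


def pick_active_account(account_ids, get_status):
    """Pick a random account that is neither BANNED nor RESTING.

    `account_ids` is the expanded pool (e.g. my_prefix_01 .. my_prefix_50);
    `get_status` returns the status string stored in Redis, or None.
    """
    active = [acc for acc in account_ids
              if (get_status(acc) or "ACTIVE") not in ("BANNED", "RESTING")]
    if not active:
        # The whole pool is banned/resting: fail the run and pause the loop.
        raise AirflowException("No active accounts available in the pool")
    return random.choice(active)
```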
The system can be configured to create new accounts automatically so that processing never halts completely.

#### Automatic Account Creation on Exhaustion
- **Goal**: Keep the processing pipeline running even when every account in the main pool is temporarily banned or resting.
- **How it works**: If the `auto_create_new_accounts_on_exhaustion` parameter is set to `True` and the account pool is defined by a prefix (rather than an explicit list), the system generates a new unique account ID whenever it finds the active pool empty.
- **Naming of new accounts**: New accounts are created in the form `{prefix}-auto-{unique_id}`.
- **Configuration**:
  - **Parameter**: `auto_create_new_accounts_on_exhaustion`
  - **Where to configure**: In the `ytdlp_ops_orchestrator` DAG configuration at trigger time.
  - **Default**: `True`.

---

## 6. Failure Handling and Retry Policy

**Goal:** Provide flexible control over how the system behaves when a worker hits a ban-worthy error (e.g., `BOT_DETECTED`).

### How it works
When a worker's `get_token` task fails with a ban-worthy error, the system's behavior is determined by the `on_bannable_failure` policy, which can be configured when triggering `ytdlp_ops_orchestrator`.

### Configuration
- **Parameter**: `on_bannable_failure`
- **Where to configure**: In the `ytdlp_ops_orchestrator` DAG configuration.
- **Options**:
  - `stop_loop` (strictest):
    - The account that was used is banned.
    - The URL is marked as failed in the `_fail` hash in Redis.
    - The worker's processing loop **stops**; that processing "line" becomes inactive.
  - `retry_with_new_account` (default, most fault-tolerant):
    - The account that caused the failure is banned.
    - The worker immediately retries **the same URL** with a fresh, unused account from the pool.
    - If the retry succeeds, the worker continues its loop with the next URL.
    - If the retry fails as well, the second account **and the proxy that was used** are also banned, and the worker's loop stops.
  - `retry_and_ban_account_only`:
    - Like `retry_with_new_account`, but on the second failure **only the second account** is banned, not the proxy.
    - Useful when you trust your proxies but want to churn through failing accounts aggressively.
  - `retry_without_ban` (most lenient):
    - The worker retries with a new account, but **neither accounts nor proxies are ever banned**.
    - Useful for debugging, or when you are confident the failures are transient and not caused by resource problems.

This policy lets the system tolerate individual account failures without losing URLs, while providing granular control over when to ban accounts and/or proxies if the problem persists.

---

## 7. Worker DAG Logic (`ytdlp_ops_worker_per_url`)

This DAG is the workhorse of the system. It is designed as a self-perpetuating loop that processes one URL per run. The failure-handling and retry logic is now explicitly visible in the DAG's task graph (a sketch of the self-trigger step follows the task list below).

### Tasks and their purpose:

- **`pull_url_from_redis`**: Pulls one URL from the `_inbox` queue in Redis. If the queue is empty, the DAG finishes with a `skipped` status, stopping this processing "line".
- **`assign_account`**: Selects an account for the task. It supports **account affinity**, reusing the account from the previous successful run in its "line". On a first run, or after a failed run, it picks a random active account.
- **`get_token`**: The primary attempt to obtain tokens and `info.json` by calling `ytdlp-ops-server`.
- **`handle_bannable_error_branch`**: A branching task that runs when `get_token` fails. It inspects the error and decides the next step based on the `on_bannable_failure` policy.
- **`ban_account_and_prepare_for_retry`**: If a retry is allowed, this task bans the failed account and selects a new one.
- **`retry_get_token`**: A second attempt to obtain the token using the new account.
- **`ban_second_account_and_proxy`**: If the retry fails as well, this task bans the second account and the proxy that was used.
- **`download_and_probe`**: If `get_token` or `retry_get_token` succeeds, this task uses `yt-dlp` to download the media and `ffmpeg` to verify file integrity.
- **`mark_url_as_success`**: If `download_and_probe` succeeds, this task records the successful result in the `_result` hash in Redis.
- **`handle_generic_failure`**: If any task fails with an unrecoverable error, this task records detailed error information in the `_fail` hash in Redis.
- **`decide_what_to_do_next`**: The final branching task, which decides whether to continue the loop (`trigger_self_run`), stop it gracefully (`stop_loop`), or mark it as failed (`fail_loop`).
- **`trigger_self_run`**: The task that actually triggers the next DAG run, creating a continuous loop.
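For illustration, a minimal sketch of that self-trigger step (assuming Airflow 2.x; the `conf` payload shown here is a guess at how the line's account affinity could be carried forward, not the DAG's actual contract):

```python
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

# The final task simply triggers a fresh run of the same DAG, so each run
# processes one URL and then hands the loop off to its successor.
trigger_self_run = TriggerDagRunOperator(
    task_id="trigger_self_run",
    trigger_dag_id="ytdlp_ops_worker_per_url",  # the DAG triggers itself
    conf={"account_id": "{{ ti.xcom_pull(task_ids='assign_account') }}"},
    wait_for_completion=False,  # fire-and-forget keeps the loop alive
)
```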
diff --git a/airflow/bgutil-diff.txt b/airflow/bgutil-diff.txt deleted file mode 100644 index 6647512..0000000 --- a/airflow/bgutil-diff.txt +++ /dev/null @@ -1,407 +0,0 @@ -Diff to getpot_bgutil_http - - def _validate_get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs): - if client != 'ios': - raise UnsupportedRequest(f'Client {client} is not supported') - - base_url = ydl.get_info_extractor('Youtube')._configuration_arg( - 'getpot_bgutil_baseurl', ['http://127.0.0.1:4416'], casesense=True)[0] - - # Validate visitor data format for ios client - if visitor_data and not visitor_data.startswith('Cg'): - raise UnsupportedRequest('Invalid visitor data format for ios client') - - if not data_sync_id and not visitor_data: - raise UnsupportedRequest( - 'One of [data_sync_id, visitor_data] must be passed') ->>>>>>> 559b875 (feat: Add support for pre-provided ios PO tokens and client-specific validation) - try: - self.logger.trace( - f'Checking server availability at {self._base_url}/ping') - response = json.load(self._request_webpage(Request( - f'{self._base_url}/ping', extensions={'timeout': self._GET_SERVER_VSN_TIMEOUT}, proxies={'all': None}), - note=False)) - except TransportError as e: - # the server may be down - script_path_provided = self.ie._configuration_arg( - ie_key='youtubepot-bgutilscript', key='script_path', default=[None])[0] is not None - - warning_base = f'Error reaching GET {self._base_url}/ping (caused by {e.__class__.__name__}). 
' - if script_path_provided: # server down is expected, log info - self._info_and_raise( - warning_base + 'This is expected if you are using the script method.') - else: - self._warn_and_raise( - warning_base + f'Please make sure that the server is reachable at {self._base_url}.') - - return - except HTTPError as e: - # may be an old server, don't raise - self.logger.warning( - f'HTTP Error reaching GET /ping (caused by {e!r})', once=True) - return - except json.JSONDecodeError as e: - # invalid server - self._warn_and_raise( - f'Error parsing ping response JSON (caused by {e!r})') - return - except Exception as e: - self._warn_and_raise( - f'Unknown error reaching GET /ping (caused by {e!r})', raise_from=e) - return - else: - self._check_version(response.get('version', ''), name='HTTP server') - self._server_available = True - return True - finally: - self._last_server_check = time.time() - -<<<<<<< HEAD - def is_available(self): - return self._server_available or self._last_server_check + 60 < int(time.time()) - - def _real_request_pot( - self, - request: PoTokenRequest, - ) -> PoTokenResponse: - if not self._check_server_availability(request): - raise PoTokenProviderRejectedRequest( - f'{self.PROVIDER_NAME} server is not available') - - # used for CI check - self.logger.trace('Generating POT via HTTP server') -======= - def _validate_get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs): - if client != 'ios': - raise UnsupportedRequest(f'Client {client} is not supported') - - base_url = ydl.get_info_extractor('Youtube')._configuration_arg( - 'getpot_bgutil_baseurl', ['http://127.0.0.1:4416'], casesense=True)[0] - - # Validate visitor data format for ios client - if visitor_data and not visitor_data.startswith('Cg'): - raise UnsupportedRequest('Invalid visitor data format for ios client') - - if not data_sync_id and not visitor_data: - raise UnsupportedRequest( - 'One of [data_sync_id, visitor_data] must be passed') ->>>>>>> 559b875 (feat: Add support for pre-provided ios PO tokens and client-specific validation) - try: - self.logger.trace( - f'Checking server availability at {self._base_url}/ping') - response = json.load(self._request_webpage(Request( - f'{self._base_url}/ping', extensions={'timeout': self._GET_SERVER_VSN_TIMEOUT}, proxies={'all': None}), - note=False)) - except TransportError as e: - # the server may be down - script_path_provided = self.ie._configuration_arg( - ie_key='youtubepot-bgutilscript', key='script_path', default=[None])[0] is not None - - warning_base = f'Error reaching GET {self._base_url}/ping (caused by {e.__class__.__name__}). 
' - if script_path_provided: # server down is expected, log info - self._info_and_raise( - warning_base + 'This is expected if you are using the script method.') - else: - self._warn_and_raise( - warning_base + f'Please make sure that the server is reachable at {self._base_url}.') - - return - except HTTPError as e: - # may be an old server, don't raise - self.logger.warning( - f'HTTP Error reaching GET /ping (caused by {e!r})', once=True) - return - except json.JSONDecodeError as e: - # invalid server - self._warn_and_raise( - f'Error parsing ping response JSON (caused by {e!r})') - return - except Exception as e: - self._warn_and_raise( - f'Unknown error reaching GET /ping (caused by {e!r})', raise_from=e) - return - else: - self._check_version(response.get('version', ''), name='HTTP server') - self._server_available = True - return True - finally: - self._last_server_check = time.time() - -<<<<<<< HEAD - def is_available(self): - return self._server_available or self._last_server_check + 60 < int(time.time()) - - def _real_request_pot( - self, - request: PoTokenRequest, - ) -> PoTokenResponse: - if not self._check_server_availability(request): - raise PoTokenProviderRejectedRequest( - f'{self.PROVIDER_NAME} server is not available') - - # used for CI check - self.logger.trace('Generating POT via HTTP server') -======= - def _get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs) -> str: - # Check if we have a pre-provided token - if client == 'ios' and kwargs.get('po_token'): - self._logger.info('Using provided ios PO token') - return kwargs['po_token'] - - self._logger.info(f'Generating POT via HTTP server for {client} client') - if ((proxy := select_proxy('https://jnn-pa.googleapis.com', self.proxies)) - != select_proxy('https://youtube.com', self.proxies)): - self._logger.warning( - 'Proxies for https://youtube.com and https://jnn-pa.googleapis.com are different. ' - 'This is likely to cause subsequent errors.') ->>>>>>> 559b875 (feat: Add support for pre-provided ios PO tokens and client-specific validation) - - try: - response = self._request_webpage( - request=Request( - f'{self._base_url}/get_pot', data=json.dumps({ - 'content_binding': get_webpo_content_binding(request)[0], - 'proxy': request.request_proxy, - 'bypass_cache': request.bypass_cache, - 'source_address': request.request_source_address, - 'disable_tls_verification': not request.request_verify_tls, - }).encode(), headers={'Content-Type': 'application/json'}, - extensions={'timeout': self._GETPOT_TIMEOUT}, proxies={'all': None}), - note=f'Generating a {request.context.value} PO Token for ' - f'{request.internal_client_name} client via bgutil HTTP server', - ) - except Exception as e: - raise PoTokenProviderError( - f'Error reaching POST /get_pot (caused by {e!r})') from e - - try: - response_json = json.load(response) - except Exception as e: - raise PoTokenProviderError( - f'Error parsing response JSON (caused by {e!r}). response = {response.read().decode()}') from e - - if error_msg := response_json.get('error'): - raise PoTokenProviderError(error_msg) - if 'poToken' not in response_json: - raise PoTokenProviderError( - f'Server did not respond with a poToken. 
Received response: {json.dumps(response_json)}') - - po_token = response_json['poToken'] - self.logger.trace(f'Generated POT: {po_token}') - return PoTokenResponse(po_token=po_token) - - -@register_preference(BgUtilHTTPPTP) -def bgutil_HTTP_getpot_preference(provider, request): - return 100 - - -__all__ = [BgUtilHTTPPTP.__name__, - bgutil_HTTP_getpot_preference.__name__] - - -------------------------- -Diff to getpot_bgutil_script.py - - - -from __future__ import annotations - -import contextlib -import functools -import json -import os.path -import re -import shutil -import subprocess - -from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding -from yt_dlp.utils import Popen - -with contextlib.suppress(ImportError): - from yt_dlp_plugins.extractor.getpot_bgutil import BgUtilPTPBase - -from yt_dlp.extractor.youtube.pot.provider import ( - PoTokenProviderError, - PoTokenRequest, - PoTokenResponse, - register_preference, - register_provider, -) - - -@register_provider -class BgUtilScriptPTP(BgUtilPTPBase): - PROVIDER_NAME = 'bgutil:script' - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._check_script = functools.cache(self._check_script_impl) - - @functools.cached_property - def _script_path(self): - script_path = self._configuration_arg( - 'script_path', casesense=True, default=[None])[0] - - if script_path: - return os.path.expandvars(script_path) - - # check deprecated arg - deprecated_script_path = self.ie._configuration_arg( - ie_key='youtube', key='getpot_bgutil_script', default=[None])[0] - - if deprecated_script_path: - self._warn_and_raise( - "'youtube:getpot_bgutil_script' extractor arg is deprecated, use 'youtubepot-bgutilscript:script_path' instead") - - # default if no arg was passed - home = os.path.expanduser('~') - default_path = os.path.join( - home, 'bgutil-ytdlp-pot-provider', 'server', 'build', 'generate_once.js') - self.logger.debug( - f'No script path passed, defaulting to {default_path}') - return default_path - -<<<<<<< HEAD - def is_available(self): - return self._check_script(self._script_path) - - @functools.cached_property - def _node_path(self): - node_path = shutil.which('node') - if node_path is None: - self.logger.trace('node is not in PATH') - vsn = self._check_node_version(node_path) - if vsn: - self.logger.trace(f'Node version: {vsn}') - return node_path - - def _check_script_impl(self, script_path): -======= - def _validate_get_pot(self, client: str, ydl: YoutubeDL, visitor_data=None, data_sync_id=None, player_url=None, **kwargs): - script_path = ydl.get_info_extractor('Youtube')._configuration_arg( - 'getpot_bgutil_script', [self._default_script_path], casesense=True)[0] - - # If a specific client is requested, validate it's supported - requested_client = ydl.params.get('extractor_args', {}).get('youtube', {}).get('formats') - if requested_client and client != requested_client: - raise UnsupportedRequest(f'Skipping {client} as {requested_client} was specifically requested') - - if not data_sync_id and not visitor_data: - raise UnsupportedRequest( - 'One of [data_sync_id, visitor_data] must be passed') ->>>>>>> 046a994 (refactor: support client-specific requests via extractor_args in POT providers) - if not os.path.isfile(script_path): - self.logger.debug( - f"Script path doesn't exist: {script_path}") - return False - if os.path.basename(script_path) != 'generate_once.js': - self.logger.warning( - 'Incorrect script passed to extractor args. 
Path to generate_once.js required', once=True) - return False - node_path = self._node_path - if not node_path: - return False - stdout, stderr, returncode = Popen.run( - [self._node_path, script_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, - timeout=self._GET_SERVER_VSN_TIMEOUT) - if returncode: - self.logger.warning( - f'Failed to check script version. ' - f'Script returned {returncode} exit status. ' - f'Script stdout: {stdout}; Script stderr: {stderr}', - once=True) - return False - else: - self._check_version(stdout.strip(), name='script') - return True - - def _check_node_version(self, node_path): - try: - stdout, stderr, returncode = Popen.run( - [node_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, - timeout=self._GET_SERVER_VSN_TIMEOUT) - stdout = stdout.strip() - mobj = re.match(r'v(\d+)\.(\d+)\.(\d+)', stdout) - if returncode or not mobj: - raise ValueError - node_vsn = tuple(map(int, mobj.groups())) - if node_vsn >= self._MIN_NODE_VSN: - return node_vsn - raise RuntimeError - except RuntimeError: - min_vsn_str = 'v' + '.'.join(str(v) for v in self._MIN_NODE_VSN) - self.logger.warning( - f'Node version too low. ' - f'(got {stdout}, but at least {min_vsn_str} is required)') - except (subprocess.TimeoutExpired, ValueError): - self.logger.warning( - f'Failed to check node version. ' - f'Node returned {returncode} exit status. ' - f'Node stdout: {stdout}; Node stderr: {stderr}') - - def _real_request_pot( - self, - request: PoTokenRequest, - ) -> PoTokenResponse: - # used for CI check - self.logger.trace( - f'Generating POT via script: {self._script_path}') - - command_args = [self._node_path, self._script_path] - if proxy := request.request_proxy: - command_args.extend(['-p', proxy]) - command_args.extend(['-c', get_webpo_content_binding(request)[0]]) - if request.bypass_cache: - command_args.append('--bypass-cache') - if request.request_source_address: - command_args.extend( - ['--source-address', request.request_source_address]) - if request.request_verify_tls is False: - command_args.append('--disable-tls-verification') - - self.logger.info( - f'Generating a {request.context.value} PO Token for ' - f'{request.internal_client_name} client via bgutil script', - ) - self.logger.debug( - f'Executing command to get POT via script: {" ".join(command_args)}') - - try: - stdout, stderr, returncode = Popen.run( - command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, - timeout=self._GETPOT_TIMEOUT) - except subprocess.TimeoutExpired as e: - raise PoTokenProviderError( - f'_get_pot_via_script failed: Timeout expired when trying to run script (caused by {e!r})') - except Exception as e: - raise PoTokenProviderError( - f'_get_pot_via_script failed: Unable to run script (caused by {e!r})') from e - - msg = f'stdout:\n{stdout.strip()}' - if stderr.strip(): # Empty strings are falsy - msg += f'\nstderr:\n{stderr.strip()}' - self.logger.trace(msg) - if returncode: - raise PoTokenProviderError( - f'_get_pot_via_script failed with returncode {returncode}') - - try: - # The JSON response is always the last line - script_data_resp = json.loads(stdout.splitlines()[-1]) - except json.JSONDecodeError as e: - raise PoTokenProviderError( - f'Error parsing JSON response from _get_pot_via_script (caused by {e!r})') from e - if 'poToken' not in script_data_resp: - raise PoTokenProviderError( - 'The script did not respond with a po_token') - return PoTokenResponse(po_token=script_data_resp['poToken']) - - 
-@register_preference(BgUtilScriptPTP) -def bgutil_script_getpot_preference(provider, request): - return 1 - - -__all__ = [BgUtilScriptPTP.__name__, - bgutil_script_getpot_preference.__name__] diff --git a/airflow/camoufox/Dockerfile b/airflow/camoufox/Dockerfile index f65f13c..207faca 100644 --- a/airflow/camoufox/Dockerfile +++ b/airflow/camoufox/Dockerfile @@ -28,6 +28,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libnss3 libnspr4 libdbus-1-3 libatk1.0-0 libatk-bridge2.0-0 libcups2 libdrm2 libxkbcommon0 libxcomposite1 libxdamage1 libxfixes3 libxrandr2 libgbm1 libpango-1.0-0 libcairo2 libasound2 \ libgtk-3-0 libx11-xcb1 fonts-liberation tzdata \ xauth util-linux x11-xserver-utils \ + curl \ && \ # Configure timezone ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ diff --git a/airflow/camoufox/camoufox_server.py b/airflow/camoufox/camoufox_server.py index 1fe2528..c4c9b4d 100644 --- a/airflow/camoufox/camoufox_server.py +++ b/airflow/camoufox/camoufox_server.py @@ -80,7 +80,7 @@ def monitor_resources(server_ports, proxy_url): process_cpu = current_process.cpu_percent() # Update active connections using psutil - all_connections = current_process.net_connections(kind='inet') + all_connections = psutil.net_connections(kind='inet') new_active_connections = defaultdict(int) for conn in all_connections: if conn.status == psutil.CONN_ESTABLISHED and conn.laddr.port in server_ports: diff --git a/airflow/config/.DS_Store b/airflow/config/.DS_Store new file mode 100644 index 0000000..5008ddf Binary files /dev/null and b/airflow/config/.DS_Store differ diff --git a/airflow/config/airflow.cfg b/airflow/config/airflow.cfg new file mode 100644 index 0000000..96f558d --- /dev/null +++ b/airflow/config/airflow.cfg @@ -0,0 +1,3167 @@ +[core] +# The folder where your airflow pipelines live, most likely a +# subfolder in a code repository. This path must be absolute. +# +# Variable: AIRFLOW__CORE__DAGS_FOLDER +# +dags_folder = /opt/airflow/dags + +# Hostname by providing a path to a callable, which will resolve the hostname. +# The format is "package.function". +# +# For example, default value ``airflow.utils.net.getfqdn`` means that result from patched +# version of `socket.getfqdn() `__, +# see related `CPython Issue `__. +# +# No argument should be required in the function specified. +# If using IP address as hostname is preferred, use value ``airflow.utils.net.get_host_ip_address`` +# +# Variable: AIRFLOW__CORE__HOSTNAME_CALLABLE +# +hostname_callable = airflow.utils.net.getfqdn + +# A callable to check if a python file has airflow dags defined or not and should +# return ``True`` if it has dags otherwise ``False``. +# If this is not provided, Airflow uses its own heuristic rules. +# +# The function should have the following signature +# +# .. code-block:: python +# +# def func_name(file_path: str, zip_file: zipfile.ZipFile | None = None) -> bool: ... +# +# Variable: AIRFLOW__CORE__MIGHT_CONTAIN_DAG_CALLABLE +# +might_contain_dag_callable = airflow.utils.file.might_contain_dag_via_default_heuristic + +# Default timezone in case supplied date times are naive +# can be `UTC` (default), `system`, or any `IANA ` +# timezone string (e.g. Europe/Amsterdam) +# +# Variable: AIRFLOW__CORE__DEFAULT_TIMEZONE +# +default_timezone = utc + +# The executor class that airflow should use. 
Choices include +# ``SequentialExecutor``, ``LocalExecutor``, ``CeleryExecutor``, +# ``KubernetesExecutor``, ``CeleryKubernetesExecutor``, ``LocalKubernetesExecutor`` or the +# full import path to the class when using a custom executor. +# +# Variable: AIRFLOW__CORE__EXECUTOR +# +executor = CeleryExecutor + +# The auth manager class that airflow should use. Full import path to the auth manager class. +# +# Variable: AIRFLOW__CORE__AUTH_MANAGER +# +auth_manager = airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager + +# This defines the maximum number of task instances that can run concurrently per scheduler in +# Airflow, regardless of the worker count. Generally this value, multiplied by the number of +# schedulers in your cluster, is the maximum number of task instances with the running +# state in the metadata database. Setting this value to zero allows unlimited parallelism. +# +# Variable: AIRFLOW__CORE__PARALLELISM +# +parallelism = 32 + +# The maximum number of task instances allowed to run concurrently in each DAG. To calculate +# the number of tasks that is running concurrently for a DAG, add up the number of running +# tasks for all DAG runs of the DAG. This is configurable at the DAG level with ``max_active_tasks``, +# which is defaulted as ``[core] max_active_tasks_per_dag``. +# +# An example scenario when this would be useful is when you want to stop a new dag with an early +# start date from stealing all the executor slots in a cluster. +# +# Variable: AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG +# +max_active_tasks_per_dag = 16 + +# Are DAGs paused by default at creation +# +# Variable: AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION +# +dags_are_paused_at_creation = True + +# The maximum number of active DAG runs per DAG. The scheduler will not create more DAG runs +# if it reaches the limit. This is configurable at the DAG level with ``max_active_runs``, +# which is defaulted as ``[core] max_active_runs_per_dag``. +# +# Variable: AIRFLOW__CORE__MAX_ACTIVE_RUNS_PER_DAG +# +max_active_runs_per_dag = 16 + +# (experimental) The maximum number of consecutive DAG failures before DAG is automatically paused. +# This is also configurable per DAG level with ``max_consecutive_failed_dag_runs``, +# which is defaulted as ``[core] max_consecutive_failed_dag_runs_per_dag``. +# If not specified, then the value is considered as 0, +# meaning that the dags are never paused out by default. +# +# Variable: AIRFLOW__CORE__MAX_CONSECUTIVE_FAILED_DAG_RUNS_PER_DAG +# +max_consecutive_failed_dag_runs_per_dag = 0 + +# The name of the method used in order to start Python processes via the multiprocessing module. +# This corresponds directly with the options available in the Python docs: +# `multiprocessing.set_start_method +# `__ +# must be one of the values returned by `multiprocessing.get_all_start_methods() +# `__. +# +# Example: mp_start_method = fork +# +# Variable: AIRFLOW__CORE__MP_START_METHOD +# +# mp_start_method = + +# Whether to load the DAG examples that ship with Airflow. 
It's good to +# get started, but you probably want to set this to ``False`` in a production +# environment +# +# Variable: AIRFLOW__CORE__LOAD_EXAMPLES +# +load_examples = False + +# Path to the folder containing Airflow plugins +# +# Variable: AIRFLOW__CORE__PLUGINS_FOLDER +# +plugins_folder = /opt/airflow/plugins + +# Should tasks be executed via forking of the parent process +# +# * ``False``: Execute via forking of the parent process +# * ``True``: Spawning a new python process, slower than fork, but means plugin changes picked +# up by tasks straight away +# +# Variable: AIRFLOW__CORE__EXECUTE_TASKS_NEW_PYTHON_INTERPRETER +# +execute_tasks_new_python_interpreter = False + +# Secret key to save connection passwords in the db +# +# Variable: AIRFLOW__CORE__FERNET_KEY +# +fernet_key = + +# Whether to disable pickling dags +# +# Variable: AIRFLOW__CORE__DONOT_PICKLE +# +donot_pickle = True + +# How long before timing out a python file import +# +# Variable: AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT +# +dagbag_import_timeout = 30.0 + +# Should a traceback be shown in the UI for dagbag import errors, +# instead of just the exception message +# +# Variable: AIRFLOW__CORE__DAGBAG_IMPORT_ERROR_TRACEBACKS +# +dagbag_import_error_tracebacks = True + +# If tracebacks are shown, how many entries from the traceback should be shown +# +# Variable: AIRFLOW__CORE__DAGBAG_IMPORT_ERROR_TRACEBACK_DEPTH +# +dagbag_import_error_traceback_depth = 2 + +# How long before timing out a DagFileProcessor, which processes a dag file +# +# Variable: AIRFLOW__CORE__DAG_FILE_PROCESSOR_TIMEOUT +# +dag_file_processor_timeout = 50 + +# The class to use for running task instances in a subprocess. +# Choices include StandardTaskRunner, CgroupTaskRunner or the full import path to the class +# when using a custom task runner. +# +# Variable: AIRFLOW__CORE__TASK_RUNNER +# +task_runner = StandardTaskRunner + +# If set, tasks without a ``run_as_user`` argument will be run with this user +# Can be used to de-elevate a sudo user running Airflow when executing tasks +# +# Variable: AIRFLOW__CORE__DEFAULT_IMPERSONATION +# +default_impersonation = + +# What security module to use (for example kerberos) +# +# Variable: AIRFLOW__CORE__SECURITY +# +security = + +# Turn unit test mode on (overwrites many configuration options with test +# values at runtime) +# +# Variable: AIRFLOW__CORE__UNIT_TEST_MODE +# +unit_test_mode = False + +# Whether to enable pickling for xcom (note that this is insecure and allows for +# RCE exploits). +# +# Variable: AIRFLOW__CORE__ENABLE_XCOM_PICKLING +# +enable_xcom_pickling = False + +# What classes can be imported during deserialization. This is a multi line value. +# The individual items will be parsed as a pattern to a glob function. +# Python built-in classes (like dict) are always allowed. +# +# Variable: AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES +# +allowed_deserialization_classes = airflow.* + +# What classes can be imported during deserialization. This is a multi line value. +# The individual items will be parsed as regexp patterns. +# This is a secondary option to ``[core] allowed_deserialization_classes``. 
+# +# Variable: AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES_REGEXP +# +allowed_deserialization_classes_regexp = + +# When a task is killed forcefully, this is the amount of time in seconds that +# it has to cleanup after it is sent a SIGTERM, before it is SIGKILLED +# +# Variable: AIRFLOW__CORE__KILLED_TASK_CLEANUP_TIME +# +killed_task_cleanup_time = 60 + +# Whether to override params with dag_run.conf. If you pass some key-value pairs +# through ``airflow dags backfill -c`` or +# ``airflow dags trigger -c``, the key-value pairs will override the existing ones in params. +# +# Variable: AIRFLOW__CORE__DAG_RUN_CONF_OVERRIDES_PARAMS +# +dag_run_conf_overrides_params = True + +# If enabled, Airflow will only scan files containing both ``DAG`` and ``airflow`` (case-insensitive). +# +# Variable: AIRFLOW__CORE__DAG_DISCOVERY_SAFE_MODE +# +dag_discovery_safe_mode = True + +# The pattern syntax used in the +# `.airflowignore +# `__ +# files in the DAG directories. Valid values are ``regexp`` or ``glob``. +# +# Variable: AIRFLOW__CORE__DAG_IGNORE_FILE_SYNTAX +# +dag_ignore_file_syntax = regexp + +# The number of retries each task is going to have by default. Can be overridden at dag or task level. +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_RETRIES +# +default_task_retries = 3 + +# The number of seconds each task is going to wait by default between retries. Can be overridden at +# dag or task level. +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_RETRY_DELAY +# +default_task_retry_delay = 300 + +# The maximum delay (in seconds) each task is going to wait by default between retries. +# This is a global setting and cannot be overridden at task or DAG level. +# +# Variable: AIRFLOW__CORE__MAX_TASK_RETRY_DELAY +# +max_task_retry_delay = 86400 + +# The weighting method used for the effective total priority weight of the task +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_WEIGHT_RULE +# +default_task_weight_rule = downstream + +# Maximum possible time (in seconds) that task will have for execution of auxiliary processes +# (like listeners, mini scheduler...) after task is marked as success.. +# +# Variable: AIRFLOW__CORE__TASK_SUCCESS_OVERTIME +# +task_success_overtime = 20 + +# The default task execution_timeout value for the operators. Expected an integer value to +# be passed into timedelta as seconds. If not specified, then the value is considered as None, +# meaning that the operators are never timed out by default. +# +# Variable: AIRFLOW__CORE__DEFAULT_TASK_EXECUTION_TIMEOUT +# +default_task_execution_timeout = 3600 + +# Updating serialized DAG can not be faster than a minimum interval to reduce database write rate. +# +# Variable: AIRFLOW__CORE__MIN_SERIALIZED_DAG_UPDATE_INTERVAL +# +min_serialized_dag_update_interval = 30 + +# If ``True``, serialized DAGs are compressed before writing to DB. +# +# .. note:: +# +# This will disable the DAG dependencies view +# +# Variable: AIRFLOW__CORE__COMPRESS_SERIALIZED_DAGS +# +compress_serialized_dags = False + +# Fetching serialized DAG can not be faster than a minimum interval to reduce database +# read rate. This config controls when your DAGs are updated in the Webserver +# +# Variable: AIRFLOW__CORE__MIN_SERIALIZED_DAG_FETCH_INTERVAL +# +min_serialized_dag_fetch_interval = 10 + +# Maximum number of Rendered Task Instance Fields (Template Fields) per task to store +# in the Database. +# All the template_fields for each of Task Instance are stored in the Database. 
+# Keeping this number small may cause an error when you try to view ``Rendered`` tab in +# TaskInstance view for older tasks. +# +# Variable: AIRFLOW__CORE__MAX_NUM_RENDERED_TI_FIELDS_PER_TASK +# +max_num_rendered_ti_fields_per_task = 30 + +# On each dagrun check against defined SLAs +# +# Variable: AIRFLOW__CORE__CHECK_SLAS +# +check_slas = True + +# Path to custom XCom class that will be used to store and resolve operators results +# +# Example: xcom_backend = path.to.CustomXCom +# +# Variable: AIRFLOW__CORE__XCOM_BACKEND +# +xcom_backend = airflow.models.xcom.BaseXCom + +# By default Airflow plugins are lazily-loaded (only loaded when required). Set it to ``False``, +# if you want to load plugins whenever 'airflow' is invoked via cli or loaded from module. +# +# Variable: AIRFLOW__CORE__LAZY_LOAD_PLUGINS +# +lazy_load_plugins = True + +# By default Airflow providers are lazily-discovered (discovery and imports happen only when required). +# Set it to ``False``, if you want to discover providers whenever 'airflow' is invoked via cli or +# loaded from module. +# +# Variable: AIRFLOW__CORE__LAZY_DISCOVER_PROVIDERS +# +lazy_discover_providers = True + +# Hide sensitive **Variables** or **Connection extra json keys** from UI +# and task logs when set to ``True`` +# +# .. note:: +# +# Connection passwords are always hidden in logs +# +# Variable: AIRFLOW__CORE__HIDE_SENSITIVE_VAR_CONN_FIELDS +# +hide_sensitive_var_conn_fields = False + +# A comma-separated list of extra sensitive keywords to look for in variables names or connection's +# extra JSON. +# +# Variable: AIRFLOW__CORE__SENSITIVE_VAR_CONN_NAMES +# +sensitive_var_conn_names = + +# Task Slot counts for ``default_pool``. This setting would not have any effect in an existing +# deployment where the ``default_pool`` is already created. For existing deployments, users can +# change the number of slots using Webserver, API or the CLI +# +# Variable: AIRFLOW__CORE__DEFAULT_POOL_TASK_SLOT_COUNT +# +default_pool_task_slot_count = 128 + +# The maximum list/dict length an XCom can push to trigger task mapping. If the pushed list/dict has a +# length exceeding this value, the task pushing the XCom will be failed automatically to prevent the +# mapped tasks from clogging the scheduler. +# +# Variable: AIRFLOW__CORE__MAX_MAP_LENGTH +# +max_map_length = 1024 + +# The default umask to use for process when run in daemon mode (scheduler, worker, etc.) +# +# This controls the file-creation mode mask which determines the initial value of file permission bits +# for newly created files. +# +# This value is treated as an octal-integer. +# +# Variable: AIRFLOW__CORE__DAEMON_UMASK +# +daemon_umask = 0o002 + +# Class to use as dataset manager. +# +# Example: dataset_manager_class = airflow.datasets.manager.DatasetManager +# +# Variable: AIRFLOW__CORE__DATASET_MANAGER_CLASS +# +# dataset_manager_class = + +# Kwargs to supply to dataset manager. +# +# Example: dataset_manager_kwargs = {"some_param": "some_value"} +# +# Variable: AIRFLOW__CORE__DATASET_MANAGER_KWARGS +# +# dataset_manager_kwargs = + +# Dataset URI validation should raise an exception if it is not compliant with AIP-60. +# By default this configuration is false, meaning that Airflow 2.x only warns the user. +# In Airflow 3, this configuration will be removed, unconditionally enabling strict validation. +# +# Variable: AIRFLOW__CORE__STRICT_DATASET_URI_VALIDATION +# +strict_dataset_uri_validation = False + +# (experimental) Whether components should use Airflow Internal API for DB connectivity. 
+# +# Variable: AIRFLOW__CORE__DATABASE_ACCESS_ISOLATION +# +database_access_isolation = False + +# (experimental) Airflow Internal API url. +# Only used if ``[core] database_access_isolation`` is ``True``. +# +# Example: internal_api_url = http://localhost:8080 +# +# Variable: AIRFLOW__CORE__INTERNAL_API_URL +# +# internal_api_url = + +# Secret key used to authenticate internal API clients to core. It should be as random as possible. +# However, when running more than 1 instances of webserver / internal API services, make sure all +# of them use the same ``secret_key`` otherwise calls will fail on authentication. +# The authentication token generated using the secret key has a short expiry time though - make +# sure that time on ALL the machines that you run airflow components on is synchronized +# (for example using ntpd) otherwise you might get "forbidden" errors when the logs are accessed. +# +# Variable: AIRFLOW__CORE__INTERNAL_API_SECRET_KEY +# +internal_api_secret_key = tCnTbEabdFBDLHWoT/LxLw== + +# The ability to allow testing connections across Airflow UI, API and CLI. +# Supported options: ``Disabled``, ``Enabled``, ``Hidden``. Default: Disabled +# Disabled - Disables the test connection functionality and disables the Test Connection button in UI. +# Enabled - Enables the test connection functionality and shows the Test Connection button in UI. +# Hidden - Disables the test connection functionality and hides the Test Connection button in UI. +# Before setting this to Enabled, make sure that you review the users who are able to add/edit +# connections and ensure they are trusted. Connection testing can be done maliciously leading to +# undesired and insecure outcomes. +# See `Airflow Security Model: Capabilities of authenticated UI users +# `__ +# for more details. +# +# Variable: AIRFLOW__CORE__TEST_CONNECTION +# +test_connection = Disabled + +# The maximum length of the rendered template field. If the value to be stored in the +# rendered template field exceeds this size, it's redacted. +# +# Variable: AIRFLOW__CORE__MAX_TEMPLATED_FIELD_LENGTH +# +max_templated_field_length = 4096 + +host_docker_socket = /var/run/docker.sock + +[database] +# Path to the ``alembic.ini`` file. You can either provide the file path relative +# to the Airflow home directory or the absolute path if it is located elsewhere. +# +# Variable: AIRFLOW__DATABASE__ALEMBIC_INI_FILE_PATH +# +alembic_ini_file_path = alembic.ini + +# The SQLAlchemy connection string to the metadata database. +# SQLAlchemy supports many different database engines. +# See: `Set up a Database Backend: Database URI +# `__ +# for more details. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN +# +# This is configured via the AIRFLOW__DATABASE__SQL_ALCHEMY_CONN environment variable +# in the docker-compose files, as it differs between master and workers. +# A dummy value is set here to ensure the env var override is picked up. +sql_alchemy_conn = postgresql://dummy:dummy@dummy/dummy + +# Extra engine specific keyword args passed to SQLAlchemy's create_engine, as a JSON-encoded value +# +# Example: sql_alchemy_engine_args = {"arg1": true} +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_ENGINE_ARGS +# +# sql_alchemy_engine_args = + +# The encoding for the databases +# +# Variable: AIRFLOW__DATABASE__SQL_ENGINE_ENCODING +# +sql_engine_encoding = utf-8 + +# Collation for ``dag_id``, ``task_id``, ``key``, ``external_executor_id`` columns +# in case they have different encoding. 
+# By default this collation is the same as the database collation, however for ``mysql`` and ``mariadb`` +# the default is ``utf8mb3_bin`` so that the index sizes of our index keys will not exceed +# the maximum size of allowed index when collation is set to ``utf8mb4`` variant, see +# `GitHub Issue Comment `__ +# for more details. +# +# Variable: AIRFLOW__DATABASE__SQL_ENGINE_COLLATION_FOR_IDS +# +# sql_engine_collation_for_ids = + +# If SQLAlchemy should pool database connections. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_ENABLED +# +sql_alchemy_pool_enabled = True + +# The SQLAlchemy pool size is the maximum number of database connections +# in the pool. 0 indicates no limit. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_SIZE +# +sql_alchemy_pool_size = 20 + +# The maximum overflow size of the pool. +# When the number of checked-out connections reaches the size set in pool_size, +# additional connections will be returned up to this limit. +# When those additional connections are returned to the pool, they are disconnected and discarded. +# It follows then that the total number of simultaneous connections the pool will allow +# is **pool_size** + **max_overflow**, +# and the total number of "sleeping" connections the pool will allow is pool_size. +# max_overflow can be set to ``-1`` to indicate no overflow limit; +# no limit will be placed on the total number of concurrent connections. Defaults to ``10``. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_MAX_OVERFLOW +# +sql_alchemy_max_overflow = 30 + +# The SQLAlchemy pool recycle is the number of seconds a connection +# can be idle in the pool before it is invalidated. This config does +# not apply to sqlite. If the number of DB connections is ever exceeded, +# a lower config value will allow the system to recover faster. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_RECYCLE +# +sql_alchemy_pool_recycle = 1800 + +# Check connection at the start of each connection pool checkout. +# Typically, this is a simple statement like "SELECT 1". +# See `SQLAlchemy Pooling: Disconnect Handling - Pessimistic +# `__ +# for more details. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_PRE_PING +# +sql_alchemy_pool_pre_ping = True + +# The schema to use for the metadata database. +# SQLAlchemy supports databases with the concept of multiple schemas. +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_SCHEMA +# +sql_alchemy_schema = + +# Import path for connect args in SQLAlchemy. Defaults to an empty dict. +# This is useful when you want to configure db engine args that SQLAlchemy won't parse +# in connection string. This can be set by passing a dictionary containing the create engine parameters. +# For more details about passing create engine parameters (keepalives variables, timeout etc) +# in Postgres DB Backend see `Setting up a PostgreSQL Database +# `__ +# e.g ``connect_args={"timeout":30}`` can be defined in ``airflow_local_settings.py`` and +# can be imported as shown below +# +# Example: sql_alchemy_connect_args = airflow_local_settings.connect_args +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_CONNECT_ARGS +# +# sql_alchemy_connect_args = + +# Important Warning: Use of sql_alchemy_session_maker Highly Discouraged +# Import path for function which returns 'sqlalchemy.orm.sessionmaker'. +# Improper configuration of sql_alchemy_session_maker can lead to serious issues, +# including data corruption, unrecoverable application crashes. 
Please review the SQLAlchemy +# documentation for detailed guidance on proper configuration and best practices. +# +# Example: sql_alchemy_session_maker = airflow_local_settings._sessionmaker +# +# Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_SESSION_MAKER +# +# sql_alchemy_session_maker = + +# Whether to load the default connections that ship with Airflow when ``airflow db init`` is called. +# It's good to get started, but you probably want to set this to ``False`` in a production environment. +# +# Variable: AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS +# +load_default_connections = True + +# Number of times the code should be retried in case of DB Operational Errors. +# Not all transactions will be retried as it can cause undesired state. +# Currently it is only used in ``DagFileProcessor.process_file`` to retry ``dagbag.sync_to_db``. +# +# Variable: AIRFLOW__DATABASE__MAX_DB_RETRIES +# +max_db_retries = 3 + +# Whether to run alembic migrations during Airflow start up. Sometimes this operation can be expensive, +# and the users can assert the correct version through other means (e.g. through a Helm chart). +# Accepts ``True`` or ``False``. +# +# Variable: AIRFLOW__DATABASE__CHECK_MIGRATIONS +# +check_migrations = True + +[logging] +# The folder where airflow should store its log files. +# This path must be absolute. +# There are a few existing configurations that assume this is set to the default. +# If you choose to override this you may need to update the +# ``[logging] dag_processor_manager_log_location`` and +# ``[logging] child_process_log_directory settings`` as well. +# +# Variable: AIRFLOW__LOGGING__BASE_LOG_FOLDER +# +base_log_folder = /opt/airflow/logs + +# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. +# Set this to ``True`` if you want to enable remote logging. +# +# Variable: AIRFLOW__LOGGING__REMOTE_LOGGING +# +remote_logging = True + +# Users must supply an Airflow connection id that provides access to the storage +# location. Depending on your remote logging service, this may only be used for +# reading logs, not writing them. +# +# Variable: AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID +# +remote_log_conn_id = minio_default + +# Whether the local log files for GCS, S3, WASB and OSS remote logging should be deleted after +# they are uploaded to the remote location. +# +# Variable: AIRFLOW__LOGGING__DELETE_LOCAL_LOGS +# +delete_local_logs = False + +# Path to Google Credential JSON file. If omitted, authorization based on `the Application Default +# Credentials +# `__ will +# be used. +# +# Variable: AIRFLOW__LOGGING__GOOGLE_KEY_PATH +# +google_key_path = + +# Storage bucket URL for remote logging +# S3 buckets should start with **s3://** +# Cloudwatch log groups should start with **cloudwatch://** +# GCS buckets should start with **gs://** +# WASB buckets should start with **wasb** just to help Airflow select correct handler +# Stackdriver logs should start with **stackdriver://** +# +# Variable: AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER +# +remote_base_log_folder = s3://airflow-logs/ + +# The remote_task_handler_kwargs param is loaded into a dictionary and passed to the ``__init__`` +# of remote task handler and it overrides the values provided by Airflow config. For example if you set +# ``delete_local_logs=False`` and you provide ``{"delete_local_copy": true}``, then the local +# log files will be deleted after they are uploaded to remote location. 
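+#
+# Separately, the ``remote_log_conn_id = minio_default`` connection above can
+# be supplied entirely through the environment using Airflow's
+# ``AIRFLOW_CONN_{CONN_ID}`` convention. A minimal sketch -- the endpoint and
+# credentials are placeholders, not this deployment's real values:
+#
+# .. code-block:: python
+#
+#     import os
+#
+#     # The percent-encoded extra carries the custom S3 endpoint for MinIO.
+#     os.environ["AIRFLOW_CONN_MINIO_DEFAULT"] = (
+#         "aws://minio-access-key:minio-secret-key@"
+#         "/?endpoint_url=http%3A%2F%2Fminio%3A9000"
+#     )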
+# +# Example: remote_task_handler_kwargs = {"delete_local_copy": true} +# +# Variable: AIRFLOW__LOGGING__REMOTE_TASK_HANDLER_KWARGS +# +remote_task_handler_kwargs = + +# Use server-side encryption for logs stored in S3 +# +# Variable: AIRFLOW__LOGGING__ENCRYPT_S3_LOGS +# +encrypt_s3_logs = False + +# Logging level. +# +# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``. +# +# Variable: AIRFLOW__LOGGING__LOGGING_LEVEL +# +logging_level = INFO + +# Logging level for celery. If not set, it uses the value of logging_level +# +# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``. +# +# Variable: AIRFLOW__LOGGING__CELERY_LOGGING_LEVEL +# +celery_logging_level = + +# Logging level for Flask-appbuilder UI. +# +# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``. +# +# Variable: AIRFLOW__LOGGING__FAB_LOGGING_LEVEL +# +fab_logging_level = WARNING + +# Logging class +# Specify the class that will specify the logging configuration +# This class has to be on the python classpath +# +# Example: logging_config_class = my.path.default_local_settings.LOGGING_CONFIG +# +# Variable: AIRFLOW__LOGGING__LOGGING_CONFIG_CLASS +# +logging_config_class = airflow_local_settings.LOGGING_CONFIG + +# Flag to enable/disable Colored logs in Console +# Colour the logs when the controlling terminal is a TTY. +# +# Variable: AIRFLOW__LOGGING__COLORED_CONSOLE_LOG +# +colored_console_log = True + +# Log format for when Colored logs is enabled +# +# Variable: AIRFLOW__LOGGING__COLORED_LOG_FORMAT +# +colored_log_format = [%%(blue)s%%(asctime)s%%(reset)s] {%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s + +# Specifies the class utilized by Airflow to implement colored logging +# +# Variable: AIRFLOW__LOGGING__COLORED_FORMATTER_CLASS +# +colored_formatter_class = airflow.utils.log.colored_log.CustomTTYColoredFormatter + +# Format of Log line +# +# Variable: AIRFLOW__LOGGING__LOG_FORMAT +# +log_format = [%%(asctime)s] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s + +# Defines the format of log messages for simple logging configuration +# +# Variable: AIRFLOW__LOGGING__SIMPLE_LOG_FORMAT +# +simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s + +# Where to send dag parser logs. If "file", logs are sent to log files defined by child_process_log_directory. +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_LOG_TARGET +# +dag_processor_log_target = file + +# Format of Dag Processor Log line +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_LOG_FORMAT +# +dag_processor_log_format = [%%(asctime)s] [SOURCE:DAG_PROCESSOR] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s + +# Determines the formatter class used by Airflow for structuring its log messages +# The default formatter class is timezone-aware, which means that timestamps attached to log entries +# will be adjusted to reflect the local timezone of the Airflow instance +# +# Variable: AIRFLOW__LOGGING__LOG_FORMATTER_CLASS +# +log_formatter_class = airflow.utils.log.timezone_aware.TimezoneAware + +# An import path to a function to add adaptations of each secret added with +# ``airflow.utils.log.secrets_masker.mask_secret`` to be masked in log messages. The given function +# is expected to require a single parameter: the secret to be adapted. It may return a +# single adaptation of the secret or an iterable of adaptations to each be masked as secrets. 
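+#
+# A minimal sketch of such an adapter, assuming the URL-encoded form of each
+# secret should be masked too (the function name is arbitrary):
+#
+# .. code-block:: python
+#
+#     import urllib.parse
+#
+#     def mask_variants(secret: str):
+#         # Each returned adaptation is masked in addition to the secret.
+#         return [urllib.parse.quote(secret)]
+#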
+# The original secret will be masked as well as any adaptations returned. +# +# Example: secret_mask_adapter = urllib.parse.quote +# +# Variable: AIRFLOW__LOGGING__SECRET_MASK_ADAPTER +# +secret_mask_adapter = + +secret_mask_exception_args = False + +# Specify prefix pattern like mentioned below with stream handler ``TaskHandlerWithCustomFormatter`` +# +# Example: task_log_prefix_template = {{ti.dag_id}}-{{ti.task_id}}-{{execution_date}}-{{ti.try_number}} +# +# Variable: AIRFLOW__LOGGING__TASK_LOG_PREFIX_TEMPLATE +# +task_log_prefix_template = {{ ti.dag_id }}-{{ ti.task_id }}-{{ ti.run_id }} + +# Formatting for how airflow generates file names/paths for each task run. +# +# Variable: AIRFLOW__LOGGING__LOG_FILENAME_TEMPLATE +# +log_filename_template = dag_id={{ ti.dag_id }}/run_id={{ ti.run_id }}/task_id={{ ti.task_id }}/{%% if ti.map_index >= 0 %%}map_index={{ ti.map_index }}/{%% endif %%}attempt={{ try_number }}.log + +# Formatting for how airflow generates file names for log +# +# Variable: AIRFLOW__LOGGING__LOG_PROCESSOR_FILENAME_TEMPLATE +# +log_processor_filename_template = {{ filename }}.log + +# Full path of dag_processor_manager logfile. +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_MANAGER_LOG_LOCATION +# +dag_processor_manager_log_location = /opt/airflow/logs/dag_processor_manager/dag_processor_manager.log + +# Whether DAG processor manager will write logs to stdout +# +# Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_MANAGER_LOG_STDOUT +# +dag_processor_manager_log_stdout = False + +# Name of handler to read task instance logs. +# Defaults to use ``task`` handler. +# +# Variable: AIRFLOW__LOGGING__TASK_LOG_READER +# +task_log_reader = task + +# A comma\-separated list of third-party logger names that will be configured to print messages to +# consoles\. +# +# Example: extra_logger_names = connexion,sqlalchemy +# +# Variable: AIRFLOW__LOGGING__EXTRA_LOGGER_NAMES +# +extra_logger_names = + +# When you start an Airflow worker, Airflow starts a tiny web server +# subprocess to serve the workers local log files to the airflow main +# web server, who then builds pages and sends them to users. This defines +# the port on which the logs are served. It needs to be unused, and open +# visible from the main web server to connect into the workers. +# +# Variable: AIRFLOW__LOGGING__WORKER_LOG_SERVER_PORT +# +worker_log_server_port = 8793 + +# Port to serve logs from for triggerer. +# See ``[logging] worker_log_server_port`` description for more info. +# +# Variable: AIRFLOW__LOGGING__TRIGGER_LOG_SERVER_PORT +# +trigger_log_server_port = 8794 + +# We must parse timestamps to interleave logs between trigger and task. To do so, +# we need to parse timestamps in log files. In case your log format is non-standard, +# you may provide import path to callable which takes a string log line and returns +# the timestamp (datetime.datetime compatible). +# +# Example: interleave_timestamp_parser = path.to.my_func +# +# Variable: AIRFLOW__LOGGING__INTERLEAVE_TIMESTAMP_PARSER +# +# interleave_timestamp_parser = + +# Permissions in the form or of octal string as understood by chmod. The permissions are important +# when you use impersonation, when logs are written by a different user than airflow. The most secure +# way of configuring it in this case is to add both users to the same group and make it the default +# group of both users. Group-writeable logs are default in airflow, but you might decide that you are +# OK with having the logs other-writeable, in which case you should set it to ``0o777``. 
You might +# decide to add more security if you do not use impersonation and change it to ``0o755`` to make it +# only owner-writeable. You can also make it just readable only for owner by changing it to ``0o700`` +# if all the access (read/write) for your logs happens from the same user. +# +# Example: file_task_handler_new_folder_permissions = 0o775 +# +# Variable: AIRFLOW__LOGGING__FILE_TASK_HANDLER_NEW_FOLDER_PERMISSIONS +# +file_task_handler_new_folder_permissions = 0o775 + +# Permissions in the form or of octal string as understood by chmod. The permissions are important +# when you use impersonation, when logs are written by a different user than airflow. The most secure +# way of configuring it in this case is to add both users to the same group and make it the default +# group of both users. Group-writeable logs are default in airflow, but you might decide that you are +# OK with having the logs other-writeable, in which case you should set it to ``0o666``. You might +# decide to add more security if you do not use impersonation and change it to ``0o644`` to make it +# only owner-writeable. You can also make it just readable only for owner by changing it to ``0o600`` +# if all the access (read/write) for your logs happens from the same user. +# +# Example: file_task_handler_new_file_permissions = 0o664 +# +# Variable: AIRFLOW__LOGGING__FILE_TASK_HANDLER_NEW_FILE_PERMISSIONS +# +file_task_handler_new_file_permissions = 0o664 + +# By default Celery sends all logs into stderr. +# If enabled any previous logging handlers will get *removed*. +# With this option AirFlow will create new handlers +# and send low level logs like INFO and WARNING to stdout, +# while sending higher severity logs to stderr. +# +# Variable: AIRFLOW__LOGGING__CELERY_STDOUT_STDERR_SEPARATION +# +celery_stdout_stderr_separation = False + +# If enabled, Airflow may ship messages to task logs from outside the task run context, e.g. from +# the scheduler, executor, or callback execution context. This can help in circumstances such as +# when there's something blocking the execution of the task and ordinarily there may be no task +# logs at all. +# This is set to ``True`` by default. If you encounter issues with this feature +# (e.g. scheduler performance issues) it can be disabled. +# +# Variable: AIRFLOW__LOGGING__ENABLE_TASK_CONTEXT_LOGGER +# +enable_task_context_logger = True + +# A comma separated list of keywords related to errors whose presence should display the line in red +# color in UI +# +# Variable: AIRFLOW__LOGGING__COLOR_LOG_ERROR_KEYWORDS +# +color_log_error_keywords = error,exception + +# A comma separated list of keywords related to warning whose presence should display the line in yellow +# color in UI +# +# Variable: AIRFLOW__LOGGING__COLOR_LOG_WARNING_KEYWORDS +# +color_log_warning_keywords = warn + +[metrics] +# `StatsD `__ integration settings. + +# If true, ``[metrics] metrics_allow_list`` and ``[metrics] metrics_block_list`` will use +# regex pattern matching anywhere within the metric name instead of only prefix matching +# at the start of the name. +# +# Variable: AIRFLOW__METRICS__METRICS_USE_PATTERN_MATCH +# +metrics_use_pattern_match = False + +# Configure an allow list (comma separated string) to send only certain metrics. +# If ``[metrics] metrics_use_pattern_match`` is ``false``, match only the exact metric name prefix. +# If ``[metrics] metrics_use_pattern_match`` is ``true``, provide regex patterns to match. 
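+#
+# The difference between the two matching modes can be sketched in a few
+# lines (this mirrors the behaviour described above, not Airflow's internal
+# implementation):
+#
+# .. code-block:: python
+#
+#     import re
+#
+#     name = "scheduler.heartbeat"
+#
+#     # metrics_use_pattern_match = False: plain prefix matching.
+#     assert name.startswith("scheduler")
+#
+#     # metrics_use_pattern_match = True: regex matched anywhere in the name.
+#     assert re.search(r"heartbeat|timeout", name)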
+# +# Example: metrics_allow_list = "scheduler,executor,dagrun,pool,triggerer,celery" or "^scheduler,^executor,heartbeat|timeout" +# +# Variable: AIRFLOW__METRICS__METRICS_ALLOW_LIST +# +metrics_allow_list = + +# Configure a block list (comma separated string) to block certain metrics from being emitted. +# If ``[metrics] metrics_allow_list`` and ``[metrics] metrics_block_list`` are both configured, +# ``[metrics] metrics_block_list`` is ignored. +# +# If ``[metrics] metrics_use_pattern_match`` is ``false``, match only the exact metric name prefix. +# +# If ``[metrics] metrics_use_pattern_match`` is ``true``, provide regex patterns to match. +# +# Example: metrics_block_list = "scheduler,executor,dagrun,pool,triggerer,celery" or "^scheduler,^executor,heartbeat|timeout" +# +# Variable: AIRFLOW__METRICS__METRICS_BLOCK_LIST +# +metrics_block_list = + +# Enables sending metrics to StatsD. +# +# Variable: AIRFLOW__METRICS__STATSD_ON +# +statsd_on = False + +# Specifies the host address where the StatsD daemon (or server) is running +# +# Variable: AIRFLOW__METRICS__STATSD_HOST +# +statsd_host = localhost + +# Specifies the port on which the StatsD daemon (or server) is listening to +# +# Variable: AIRFLOW__METRICS__STATSD_PORT +# +statsd_port = 8125 + +# Defines the namespace for all metrics sent from Airflow to StatsD +# +# Variable: AIRFLOW__METRICS__STATSD_PREFIX +# +statsd_prefix = airflow + +# A function that validate the StatsD stat name, apply changes to the stat name if necessary and return +# the transformed stat name. +# +# The function should have the following signature +# +# .. code-block:: python +# +# def func_name(stat_name: str) -> str: ... +# +# Variable: AIRFLOW__METRICS__STAT_NAME_HANDLER +# +stat_name_handler = + +# To enable datadog integration to send airflow metrics. +# +# Variable: AIRFLOW__METRICS__STATSD_DATADOG_ENABLED +# +statsd_datadog_enabled = False + +# List of datadog tags attached to all metrics(e.g: ``key1:value1,key2:value2``) +# +# Variable: AIRFLOW__METRICS__STATSD_DATADOG_TAGS +# +statsd_datadog_tags = + +# Set to ``False`` to disable metadata tags for some of the emitted metrics +# +# Variable: AIRFLOW__METRICS__STATSD_DATADOG_METRICS_TAGS +# +statsd_datadog_metrics_tags = True + +# If you want to utilise your own custom StatsD client set the relevant +# module path below. +# Note: The module path must exist on your +# `PYTHONPATH ` +# for Airflow to pick it up +# +# Variable: AIRFLOW__METRICS__STATSD_CUSTOM_CLIENT_PATH +# +# statsd_custom_client_path = + +# If you want to avoid sending all the available metrics tags to StatsD, +# you can configure a block list of prefixes (comma separated) to filter out metric tags +# that start with the elements of the list (e.g: ``job_id,run_id``) +# +# Example: statsd_disabled_tags = job_id,run_id,dag_id,task_id +# +# Variable: AIRFLOW__METRICS__STATSD_DISABLED_TAGS +# +statsd_disabled_tags = job_id,run_id + +# To enable sending Airflow metrics with StatsD-Influxdb tagging convention. +# +# Variable: AIRFLOW__METRICS__STATSD_INFLUXDB_ENABLED +# +statsd_influxdb_enabled = False + +# Enables sending metrics to OpenTelemetry. +# +# Variable: AIRFLOW__METRICS__OTEL_ON +# +otel_on = False + +# Specifies the hostname or IP address of the OpenTelemetry Collector to which Airflow sends +# metrics and traces. +# +# Variable: AIRFLOW__METRICS__OTEL_HOST +# +otel_host = localhost + +# Specifies the port of the OpenTelemetry Collector that is listening to. 
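+#
+# A sketch for the ``stat_name_handler`` option above, assuming you want to
+# normalise names before they reach StatsD (the function name and rules are
+# illustrative):
+#
+# .. code-block:: python
+#
+#     import re
+#
+#     def normalize_stat_name(stat_name: str) -> str:
+#         # Lower-case and keep only alphanumerics, dots, dashes, underscores.
+#         return re.sub(r"[^0-9a-z_.-]", "_", stat_name.lower())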
+# +# Variable: AIRFLOW__METRICS__OTEL_PORT +# +otel_port = 8889 + +# The prefix for the Airflow metrics. +# +# Variable: AIRFLOW__METRICS__OTEL_PREFIX +# +otel_prefix = airflow + +# Defines the interval, in milliseconds, at which Airflow sends batches of metrics and traces +# to the configured OpenTelemetry Collector. +# +# Variable: AIRFLOW__METRICS__OTEL_INTERVAL_MILLISECONDS +# +otel_interval_milliseconds = 60000 + +# If ``True``, all metrics are also emitted to the console. Defaults to ``False``. +# +# Variable: AIRFLOW__METRICS__OTEL_DEBUGGING_ON +# +otel_debugging_on = False + +# The default service name of traces. +# +# Variable: AIRFLOW__METRICS__OTEL_SERVICE +# +otel_service = Airflow + +# If ``True``, SSL will be enabled. Defaults to ``False``. +# To establish an HTTPS connection to the OpenTelemetry collector, +# you need to configure the SSL certificate and key within the OpenTelemetry collector's +# ``config.yml`` file. +# +# Variable: AIRFLOW__METRICS__OTEL_SSL_ACTIVE +# +otel_ssl_active = False + +[traces] +# Distributed traces integration settings. + +# Enables sending traces to OpenTelemetry. +# +# Variable: AIRFLOW__TRACES__OTEL_ON +# +otel_on = False + +# Specifies the hostname or IP address of the OpenTelemetry Collector to which Airflow sends +# traces. +# +# Variable: AIRFLOW__TRACES__OTEL_HOST +# +otel_host = localhost + +# Specifies the port of the OpenTelemetry Collector that is listening to. +# +# Variable: AIRFLOW__TRACES__OTEL_PORT +# +otel_port = 8889 + +# The default service name of traces. +# +# Variable: AIRFLOW__TRACES__OTEL_SERVICE +# +otel_service = Airflow + +# If True, all traces are also emitted to the console. Defaults to False. +# +# Variable: AIRFLOW__TRACES__OTEL_DEBUGGING_ON +# +otel_debugging_on = False + +# If True, SSL will be enabled. Defaults to False. +# To establish an HTTPS connection to the OpenTelemetry collector, +# you need to configure the SSL certificate and key within the OpenTelemetry collector's +# config.yml file. +# +# Variable: AIRFLOW__TRACES__OTEL_SSL_ACTIVE +# +otel_ssl_active = False + +# If True, after the task is complete, the full task log messages will be added as the +# span events, chunked by 64k size. defaults to False. +# +# Variable: AIRFLOW__TRACES__OTEL_TASK_LOG_EVENT +# +otel_task_log_event = False + +[secrets] +# Full class name of secrets backend to enable (will precede env vars and metastore in search path) +# +# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend +# +# Variable: AIRFLOW__SECRETS__BACKEND +# +backend = + +# The backend_kwargs param is loaded into a dictionary and passed to ``__init__`` +# of secrets backend class. See documentation for the secrets backend you are using. +# JSON is expected. +# +# Example for AWS Systems Manager ParameterStore: +# ``{"connections_prefix": "/airflow/connections", "profile_name": "default"}`` +# +# Variable: AIRFLOW__SECRETS__BACKEND_KWARGS +# +backend_kwargs = + +# .. note:: |experimental| +# +# Enables local caching of Variables, when parsing DAGs only. +# Using this option can make dag parsing faster if Variables are used in top level code, at the expense +# of longer propagation time for changes. +# Please note that this cache concerns only the DAG parsing step. There is no caching in place when DAG +# tasks are run. +# +# Variable: AIRFLOW__SECRETS__USE_CACHE +# +use_cache = False + +# .. 
note:: |experimental| +# +# When the cache is enabled, this is the duration for which we consider an entry in the cache to be +# valid. Entries are refreshed if they are older than this many seconds. +# It means that when the cache is enabled, this is the maximum amount of time you need to wait to see a +# Variable change take effect. +# +# Variable: AIRFLOW__SECRETS__CACHE_TTL_SECONDS +# +cache_ttl_seconds = 900 + +[cli] +# In what way should the cli access the API. The LocalClient will use the +# database directly, while the json_client will use the api running on the +# webserver +# +# Variable: AIRFLOW__CLI__API_CLIENT +# +api_client = airflow.api.client.local_client + +# If you set web_server_url_prefix, do NOT forget to append it here, ex: +# ``endpoint_url = http://localhost:8080/myroot`` +# So api will look like: ``http://localhost:8080/myroot/api/experimental/...`` +# +# Variable: AIRFLOW__CLI__ENDPOINT_URL +# +endpoint_url = http://localhost:8080 + +[debug] +# Used only with ``DebugExecutor``. If set to ``True`` DAG will fail with first +# failed task. Helpful for debugging purposes. +# +# Variable: AIRFLOW__DEBUG__FAIL_FAST +# +fail_fast = False + +[api] +# Enables the deprecated experimental API. Please note that these API endpoints do not have +# access control. An authenticated user has full access. +# +# .. warning:: +# +# This `Experimental REST API +# `__ is +# deprecated since version 2.0. Please consider using +# `the Stable REST API +# `__. +# For more information on migration, see +# `RELEASE_NOTES.rst `_ +# +# Variable: AIRFLOW__API__ENABLE_EXPERIMENTAL_API +# +enable_experimental_api = False + +# Comma separated list of auth backends to authenticate users of the API. See +# `Security: API +# `__ for possible values. +# ("airflow.api.auth.backend.default" allows all requests for historic reasons) +# +# Variable: AIRFLOW__API__AUTH_BACKENDS +# +auth_backends = airflow.api.auth.backend.session + +# Used to set the maximum page limit for API requests. If limit passed as param +# is greater than maximum page limit, it will be ignored and maximum page limit value +# will be set as the limit +# +# Variable: AIRFLOW__API__MAXIMUM_PAGE_LIMIT +# +maximum_page_limit = 100 + +# Used to set the default page limit when limit param is zero or not provided in API +# requests. Otherwise if positive integer is passed in the API requests as limit, the +# smallest number of user given limit or maximum page limit is taken as limit. +# +# Variable: AIRFLOW__API__FALLBACK_PAGE_LIMIT +# +fallback_page_limit = 100 + +# The intended audience for JWT token credentials used for authorization. This value must match on the client and server sides. If empty, audience will not be tested. +# +# Example: google_oauth2_audience = project-id-random-value.apps.googleusercontent.com +# +# Variable: AIRFLOW__API__GOOGLE_OAUTH2_AUDIENCE +# +google_oauth2_audience = + +# Path to Google Cloud Service Account key file (JSON). If omitted, authorization based on +# `the Application Default Credentials +# `__ will +# be used. +# +# Example: google_key_path = /files/service-account-json +# +# Variable: AIRFLOW__API__GOOGLE_KEY_PATH +# +google_key_path = + +# Used in response to a preflight request to indicate which HTTP +# headers can be used when making the actual request. This header is +# the server side response to the browser's +# Access-Control-Request-Headers header. 
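+#
+# One way to see what the webserver advertises is to issue a CORS preflight
+# request yourself. A minimal sketch using ``requests`` (the URL and origin
+# are placeholders):
+#
+# .. code-block:: python
+#
+#     import requests
+#
+#     resp = requests.options(
+#         "http://localhost:8080/api/v1/dags",
+#         headers={
+#             "Origin": "http://example.com",
+#             "Access-Control-Request-Method": "GET",
+#             "Access-Control-Request-Headers": "content-type",
+#         },
+#     )
+#     print(resp.headers.get("Access-Control-Allow-Headers"))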
+# +# Variable: AIRFLOW__API__ACCESS_CONTROL_ALLOW_HEADERS +# +access_control_allow_headers = + +# Specifies the method or methods allowed when accessing the resource. +# +# Variable: AIRFLOW__API__ACCESS_CONTROL_ALLOW_METHODS +# +access_control_allow_methods = + +# Indicates whether the response can be shared with requesting code from the given origins. +# Separate URLs with space. +# +# Variable: AIRFLOW__API__ACCESS_CONTROL_ALLOW_ORIGINS +# +access_control_allow_origins = + +# Indicates whether the **xcomEntries** endpoint supports the **deserialize** +# flag. If set to ``False``, setting this flag in a request would result in a +# 400 Bad Request error. +# +# Variable: AIRFLOW__API__ENABLE_XCOM_DESERIALIZE_SUPPORT +# +enable_xcom_deserialize_support = False + +[lineage] +# what lineage backend to use +# +# Variable: AIRFLOW__LINEAGE__BACKEND +# +backend = + +[operators] +# The default owner assigned to each new operator, unless +# provided explicitly or passed via ``default_args`` +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_OWNER +# +default_owner = airflow + +# The default value of attribute "deferrable" in operators and sensors. +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_DEFERRABLE +# +default_deferrable = false + +# Indicates the default number of CPU units allocated to each operator when no specific CPU request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_CPUS +# +default_cpus = 1 + +# Indicates the default number of RAM allocated to each operator when no specific RAM request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_RAM +# +default_ram = 512 + +# Indicates the default number of disk storage allocated to each operator when no specific disk request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_DISK +# +default_disk = 512 + +# Indicates the default number of GPUs allocated to each operator when no specific GPUs request +# is specified in the operator's configuration +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_GPUS +# +default_gpus = 0 + +# Default queue that tasks get assigned to and that worker listen on. +# +# Variable: AIRFLOW__OPERATORS__DEFAULT_QUEUE +# +default_queue = default + +# Is allowed to pass additional/unused arguments (args, kwargs) to the BaseOperator operator. +# If set to ``False``, an exception will be thrown, +# otherwise only the console message will be displayed. +# +# Variable: AIRFLOW__OPERATORS__ALLOW_ILLEGAL_ARGUMENTS +# +allow_illegal_arguments = False + +[webserver] +# The message displayed when a user attempts to execute actions beyond their authorised privileges. +# +# Variable: AIRFLOW__WEBSERVER__ACCESS_DENIED_MESSAGE +# +access_denied_message = Access is Denied + +# Path of webserver config file used for configuring the webserver parameters +# +# Variable: AIRFLOW__WEBSERVER__CONFIG_FILE +# +config_file = /opt/airflow/webserver_config.py + +# The base url of your website: Airflow cannot guess what domain or CNAME you are using. +# This is used to create links in the Log Url column in the Browse - Task Instances menu, +# as well as in any automated emails sent by Airflow that contain links to your webserver. +# +# Variable: AIRFLOW__WEBSERVER__BASE_URL +# +base_url = http://localhost:8080 + +# Default timezone to display all dates in the UI, can be UTC, system, or +# any IANA timezone string (e.g. **Europe/Amsterdam**). 
If left empty the
+# default value of core/default_timezone will be used
+#
+# Example: default_ui_timezone = America/New_York
+#
+# Variable: AIRFLOW__WEBSERVER__DEFAULT_UI_TIMEZONE
+#
+default_ui_timezone = UTC
+
+# The IP specified when starting the web server
+#
+# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_HOST
+#
+web_server_host = 0.0.0.0
+
+# The port on which to run the web server
+#
+# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_PORT
+#
+web_server_port = 8080
+
+# Paths to the SSL certificate and key for the web server. When both are
+# provided SSL will be enabled. This does not change the web server port.
+#
+# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_SSL_CERT
+#
+web_server_ssl_cert =
+
+# Paths to the SSL certificate and key for the web server. When both are
+# provided SSL will be enabled. This does not change the web server port.
+#
+# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_SSL_KEY
+#
+web_server_ssl_key =
+
+# The type of backend used to store web session data, can be ``database`` or ``securecookie``. For the
+# ``database`` backend, sessions are stored in the database and they can be
+# managed there (for example when you reset the password of a user, all sessions for that user are
+# deleted). For the ``securecookie`` backend, sessions are stored in encrypted cookies on the client
+# side. The ``securecookie`` mechanism is 'lighter' than the database backend, but sessions are not deleted
+# when you reset the password of a user, which means that other than waiting for expiry time, the only
+# way to invalidate all sessions for a user is to change secret_key and restart the webserver (which
+# also invalidates and logs out all other users' sessions).
+#
+# When you are using the ``database`` backend, make sure to keep your database session table small
+# by periodically running the ``airflow db clean --table session`` command, especially if you have
+# automated API calls that will create a new session for each call rather than reuse the sessions
+# stored in browser cookies.
+#
+# Example: session_backend = securecookie
+#
+# Variable: AIRFLOW__WEBSERVER__SESSION_BACKEND
+#
+session_backend = database
+
+# Number of seconds the webserver waits before killing a gunicorn master that doesn't respond
+#
+# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_MASTER_TIMEOUT
+#
+web_server_master_timeout = 120
+
+# Number of seconds the gunicorn webserver waits before timing out on a worker
+#
+# Variable: AIRFLOW__WEBSERVER__WEB_SERVER_WORKER_TIMEOUT
+#
+web_server_worker_timeout = 120
+
+# Number of workers to refresh at a time. When set to 0, worker refresh is
+# disabled. When nonzero, airflow periodically refreshes webserver workers by
+# bringing up new ones and killing old ones.
+#
+# Variable: AIRFLOW__WEBSERVER__WORKER_REFRESH_BATCH_SIZE
+#
+worker_refresh_batch_size = 1
+
+# Number of seconds to wait before refreshing a batch of workers.
+#
+# Variable: AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL
+#
+worker_refresh_interval = 6000
+
+# If set to ``True``, Airflow will track files in the plugins_folder directory. When it detects changes,
+# it then reloads gunicorn. If set to ``True``, gunicorn starts without preloading, which is slower,
+# uses more memory, and may cause race conditions. Avoid setting this to ``True`` in production.
+#
+# Variable: AIRFLOW__WEBSERVER__RELOAD_ON_PLUGIN_CHANGE
+#
+reload_on_plugin_change = False
+
+# Secret key used to run your flask app. It should be as random as possible.
However, when running +# more than 1 instances of webserver, make sure all of them use the same ``secret_key`` otherwise +# one of them will error with "CSRF session token is missing". +# The webserver key is also used to authorize requests to Celery workers when logs are retrieved. +# The token generated using the secret key has a short expiry time though - make sure that time on +# ALL the machines that you run airflow components on is synchronized (for example using ntpd) +# otherwise you might get "forbidden" errors when the logs are accessed. +# +# Variable: AIRFLOW__WEBSERVER__SECRET_KEY +# +secret_key = tCnTbEabdFBDLHWoT/LxLw== + +# Number of workers to run the Gunicorn web server +# +# Variable: AIRFLOW__WEBSERVER__WORKERS +# +workers = 1 + +# The worker class gunicorn should use. Choices include +# ``sync`` (default), ``eventlet``, ``gevent``. +# +# .. warning:: +# +# When using ``gevent`` you might also want to set the ``_AIRFLOW_PATCH_GEVENT`` +# environment variable to ``"1"`` to make sure gevent patching is done as early as possible. +# +# Be careful to set ``_AIRFLOW_PATCH_GEVENT`` only on the web server as gevent patching may +# affect the scheduler behavior via the ``multiprocessing`` sockets module and cause crash. +# +# See related Issues / PRs for more details: +# +# * https://github.com/benoitc/gunicorn/issues/2796 +# * https://github.com/apache/airflow/issues/8212 +# * https://github.com/apache/airflow/pull/28283 +# +# Variable: AIRFLOW__WEBSERVER__WORKER_CLASS +# +worker_class = gevent + +# Log files for the gunicorn webserver. '-' means log to stderr. +# +# Variable: AIRFLOW__WEBSERVER__ACCESS_LOGFILE +# +access_logfile = - + +# Log files for the gunicorn webserver. '-' means log to stderr. +# +# Variable: AIRFLOW__WEBSERVER__ERROR_LOGFILE +# +error_logfile = - + +# Access log format for gunicorn webserver. +# default format is ``%%(h)s %%(l)s %%(u)s %%(t)s "%%(r)s" %%(s)s %%(b)s "%%(f)s" "%%(a)s"`` +# See `Gunicorn Settings: 'access_log_format' Reference +# `__ for more details +# +# Variable: AIRFLOW__WEBSERVER__ACCESS_LOGFORMAT +# +access_logformat = + +# Expose the configuration file in the web server. Set to ``non-sensitive-only`` to show all values +# except those that have security implications. ``True`` shows all values. ``False`` hides the +# configuration completely. +# +# Variable: AIRFLOW__WEBSERVER__EXPOSE_CONFIG +# +expose_config = False + +# Expose hostname in the web server +# +# Variable: AIRFLOW__WEBSERVER__EXPOSE_HOSTNAME +# +expose_hostname = False + +# Expose stacktrace in the web server +# +# Variable: AIRFLOW__WEBSERVER__EXPOSE_STACKTRACE +# +expose_stacktrace = False + +# Default DAG view. Valid values are: ``grid``, ``graph``, ``duration``, ``gantt``, ``landing_times`` +# +# Variable: AIRFLOW__WEBSERVER__DAG_DEFAULT_VIEW +# +dag_default_view = grid + +# Default DAG orientation. Valid values are: +# ``LR`` (Left->Right), ``TB`` (Top->Bottom), ``RL`` (Right->Left), ``BT`` (Bottom->Top) +# +# Variable: AIRFLOW__WEBSERVER__DAG_ORIENTATION +# +dag_orientation = LR + +# Sorting order in grid view. Valid values are: ``topological``, ``hierarchical_alphabetical`` +# +# Variable: AIRFLOW__WEBSERVER__GRID_VIEW_SORTING_ORDER +# +grid_view_sorting_order = topological + +# The amount of time (in secs) webserver will wait for initial handshake +# while fetching logs from other worker machine +# +# Variable: AIRFLOW__WEBSERVER__LOG_FETCH_TIMEOUT_SEC +# +log_fetch_timeout_sec = 10 + +# Time interval (in secs) to wait before next log fetching. 
+# +# Variable: AIRFLOW__WEBSERVER__LOG_FETCH_DELAY_SEC +# +log_fetch_delay_sec = 5 + +# Distance away from page bottom to enable auto tailing. +# +# Variable: AIRFLOW__WEBSERVER__LOG_AUTO_TAILING_OFFSET +# +log_auto_tailing_offset = 30 + +# Animation speed for auto tailing log display. +# +# Variable: AIRFLOW__WEBSERVER__LOG_ANIMATION_SPEED +# +log_animation_speed = 1000 + +# By default, the webserver shows paused DAGs. Flip this to hide paused +# DAGs by default +# +# Variable: AIRFLOW__WEBSERVER__HIDE_PAUSED_DAGS_BY_DEFAULT +# +hide_paused_dags_by_default = False + +# Consistent page size across all listing views in the UI +# +# Variable: AIRFLOW__WEBSERVER__PAGE_SIZE +# +page_size = 100 + +# Define the color of navigation bar +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_COLOR +# +navbar_color = #fff + +# Define the color of text in the navigation bar +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_TEXT_COLOR +# +navbar_text_color = #51504f + +# Define the color of navigation bar links when hovered +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_HOVER_COLOR +# +navbar_hover_color = #eee + +# Define the color of text in the navigation bar when hovered +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_TEXT_HOVER_COLOR +# +navbar_text_hover_color = #51504f + +# Define the color of the logo text +# +# Variable: AIRFLOW__WEBSERVER__NAVBAR_LOGO_TEXT_COLOR +# +navbar_logo_text_color = #51504f + +# Default dagrun to show in UI +# +# Variable: AIRFLOW__WEBSERVER__DEFAULT_DAG_RUN_DISPLAY_NUMBER +# +default_dag_run_display_number = 25 + +# Enable werkzeug ``ProxyFix`` middleware for reverse proxy +# +# Variable: AIRFLOW__WEBSERVER__ENABLE_PROXY_FIX +# +enable_proxy_fix = True + +# Number of values to trust for ``X-Forwarded-For``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_FOR +# +proxy_fix_x_for = 1 + +# Number of values to trust for ``X-Forwarded-Proto``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_PROTO +# +proxy_fix_x_proto = 1 + +# Number of values to trust for ``X-Forwarded-Host``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_HOST +# +proxy_fix_x_host = 1 + +# Number of values to trust for ``X-Forwarded-Port``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_PORT +# +proxy_fix_x_port = 1 + +# Number of values to trust for ``X-Forwarded-Prefix``. +# See `Werkzeug: X-Forwarded-For Proxy Fix +# `__ for more details. +# +# Variable: AIRFLOW__WEBSERVER__PROXY_FIX_X_PREFIX +# +proxy_fix_x_prefix = 1 + +# Set secure flag on session cookie +# +# Variable: AIRFLOW__WEBSERVER__COOKIE_SECURE +# +cookie_secure = False + +# Set samesite policy on session cookie +# +# Variable: AIRFLOW__WEBSERVER__COOKIE_SAMESITE +# +cookie_samesite = Lax + +# Default setting for wrap toggle on DAG code and TI log views. 
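+#
+# For reference, ``enable_proxy_fix = True`` above wraps the Flask WSGI app
+# in Werkzeug's ``ProxyFix`` middleware, roughly as sketched below with the
+# ``proxy_fix_x_*`` values as trust counts (illustrative, not Airflow's exact
+# wiring):
+#
+# .. code-block:: python
+#
+#     from flask import Flask
+#     from werkzeug.middleware.proxy_fix import ProxyFix
+#
+#     app = Flask(__name__)
+#     # Trust exactly one hop of X-Forwarded-* headers, as configured above.
+#     app.wsgi_app = ProxyFix(
+#         app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1, x_prefix=1
+#     )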
+# +# Variable: AIRFLOW__WEBSERVER__DEFAULT_WRAP +# +default_wrap = False + +# Allow the UI to be rendered in a frame +# +# Variable: AIRFLOW__WEBSERVER__X_FRAME_ENABLED +# +x_frame_enabled = True + +# Send anonymous user activity to your analytics tool +# choose from ``google_analytics``, ``segment``, ``metarouter``, or ``matomo`` +# +# Variable: AIRFLOW__WEBSERVER__ANALYTICS_TOOL +# +# analytics_tool = + +# Unique ID of your account in the analytics tool +# +# Variable: AIRFLOW__WEBSERVER__ANALYTICS_ID +# +# analytics_id = + +# Your instances url, only applicable to Matomo. +# +# Example: analytics_url = https://your.matomo.instance.com/ +# +# Variable: AIRFLOW__WEBSERVER__ANALYTICS_URL +# +# analytics_url = + +# 'Recent Tasks' stats will show for old DagRuns if set +# +# Variable: AIRFLOW__WEBSERVER__SHOW_RECENT_STATS_FOR_COMPLETED_RUNS +# +show_recent_stats_for_completed_runs = True + +# The UI cookie lifetime in minutes. User will be logged out from UI after +# ``[webserver] session_lifetime_minutes`` of non-activity +# +# Variable: AIRFLOW__WEBSERVER__SESSION_LIFETIME_MINUTES +# +session_lifetime_minutes = 43200 + +# Sets a custom page title for the DAGs overview page and site title for all pages +# +# Variable: AIRFLOW__WEBSERVER__INSTANCE_NAME +# +# instance_name = + +# Whether the custom page title for the DAGs overview page contains any Markup language +# +# Variable: AIRFLOW__WEBSERVER__INSTANCE_NAME_HAS_MARKUP +# +instance_name_has_markup = False + +# How frequently, in seconds, the DAG data will auto-refresh in graph or grid view +# when auto-refresh is turned on +# +# Variable: AIRFLOW__WEBSERVER__AUTO_REFRESH_INTERVAL +# +auto_refresh_interval = 3 + +# Boolean for displaying warning for publicly viewable deployment +# +# Variable: AIRFLOW__WEBSERVER__WARN_DEPLOYMENT_EXPOSURE +# +warn_deployment_exposure = True + +# Comma separated string of view events to exclude from dag audit view. +# All other events will be added minus the ones passed here. +# The audit logs in the db will not be affected by this parameter. +# +# Example: audit_view_excluded_events = cli_task_run,running,success +# +# Variable: AIRFLOW__WEBSERVER__AUDIT_VIEW_EXCLUDED_EVENTS +# +# audit_view_excluded_events = + +# Comma separated string of view events to include in dag audit view. +# If passed, only these events will populate the dag audit view. +# The audit logs in the db will not be affected by this parameter. +# +# Example: audit_view_included_events = dagrun_cleared,failed +# +# Variable: AIRFLOW__WEBSERVER__AUDIT_VIEW_INCLUDED_EVENTS +# +# audit_view_included_events = + +# Boolean for running SwaggerUI in the webserver. +# +# Variable: AIRFLOW__WEBSERVER__ENABLE_SWAGGER_UI +# +enable_swagger_ui = True + +# Boolean for running Internal API in the webserver. +# +# Variable: AIRFLOW__WEBSERVER__RUN_INTERNAL_API +# +run_internal_api = False + +# The caching algorithm used by the webserver. Must be a valid hashlib function name. +# +# Example: caching_hash_method = sha256 +# +# Variable: AIRFLOW__WEBSERVER__CACHING_HASH_METHOD +# +caching_hash_method = md5 + +# Behavior of the trigger DAG run button for DAGs without params. ``False`` to skip and trigger +# without displaying a form to add a **dag_run.conf**, ``True`` to always display the form. +# The form is displayed always if parameters are defined. +# +# Variable: AIRFLOW__WEBSERVER__SHOW_TRIGGER_FORM_IF_NO_PARAMS +# +show_trigger_form_if_no_params = False + +# Number of recent DAG run configurations in the selector on the trigger web form. 
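+#
+# As a sanity check on ``session_lifetime_minutes = 43200`` above:
+#
+# .. code-block:: python
+#
+#     # 43200 minutes == 30 days of UI session lifetime.
+#     assert 43200 / 60 / 24 == 30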
+# +# Example: num_recent_configurations_for_trigger = 10 +# +# Variable: AIRFLOW__WEBSERVER__NUM_RECENT_CONFIGURATIONS_FOR_TRIGGER +# +num_recent_configurations_for_trigger = 5 + +# A DAG author is able to provide any raw HTML into ``doc_md`` or params description in +# ``description_md`` for text formatting. This is including potentially unsafe javascript. +# Displaying the DAG or trigger form in web UI provides the DAG author the potential to +# inject malicious code into clients browsers. To ensure the web UI is safe by default, +# raw HTML is disabled by default. If you trust your DAG authors, you can enable HTML +# support in markdown by setting this option to ``True``. +# +# This parameter also enables the deprecated fields ``description_html`` and +# ``custom_html_form`` in DAG params until the feature is removed in a future version. +# +# Example: allow_raw_html_descriptions = False +# +# Variable: AIRFLOW__WEBSERVER__ALLOW_RAW_HTML_DESCRIPTIONS +# +allow_raw_html_descriptions = False + +# The maximum size of the request payload (in MB) that can be sent. +# +# Variable: AIRFLOW__WEBSERVER__ALLOWED_PAYLOAD_SIZE +# +allowed_payload_size = 1.0 + +# Require confirmation when changing a DAG in the web UI. This is to prevent accidental changes +# to a DAG that may be running on sensitive environments like production. +# When set to ``True``, confirmation dialog will be shown when a user tries to Pause/Unpause, +# Trigger a DAG +# +# Variable: AIRFLOW__WEBSERVER__REQUIRE_CONFIRMATION_DAG_CHANGE +# +require_confirmation_dag_change = False + +# The maximum size in bytes any non-file form field may be in a multipart/form-data body. +# If this limit is exceeded, a 413 RequestEntityTooLarge error is raised by webserver. +# +# Variable: AIRFLOW__WEBSERVER__MAX_FORM_MEMORY_SIZE +# +max_form_memory_size = 500000 + +# The maximum number of fields that may be present in a multipart/form-data body. +# If this limit is exceeded, a 413 RequestEntityTooLarge error is raised by webserver. +# +# Variable: AIRFLOW__WEBSERVER__MAX_FORM_PARTS +# +max_form_parts = 1000 + +[email] +# Configuration email backend and whether to +# send email alerts on retry or failure + +# Email backend to use +# +# Variable: AIRFLOW__EMAIL__EMAIL_BACKEND +# +email_backend = airflow.utils.email.send_email_smtp + +# Email connection to use +# +# Variable: AIRFLOW__EMAIL__EMAIL_CONN_ID +# +email_conn_id = smtp_default + +# Whether email alerts should be sent when a task is retried +# +# Variable: AIRFLOW__EMAIL__DEFAULT_EMAIL_ON_RETRY +# +default_email_on_retry = True + +# Whether email alerts should be sent when a task failed +# +# Variable: AIRFLOW__EMAIL__DEFAULT_EMAIL_ON_FAILURE +# +default_email_on_failure = True + +# File that will be used as the template for Email subject (which will be rendered using Jinja2). +# If not set, Airflow uses a base template. +# +# Example: subject_template = /path/to/my_subject_template_file +# +# Variable: AIRFLOW__EMAIL__SUBJECT_TEMPLATE +# +# subject_template = + +# File that will be used as the template for Email content (which will be rendered using Jinja2). +# If not set, Airflow uses a base template. +# +# Example: html_content_template = /path/to/my_html_content_template_file +# +# Variable: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE +# +# html_content_template = + +# Email address that will be used as sender address. 
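+#
+# The ``[email]`` defaults above apply when a task opts into alerting. A
+# minimal per-DAG sketch (the DAG id and address are placeholders):
+#
+# .. code-block:: python
+#
+#     from airflow import DAG
+#
+#     dag = DAG(
+#         "example_alerts",
+#         default_args={
+#             "email": ["oncall@example.com"],  # placeholder address
+#             "email_on_failure": True,  # falls back to default_email_on_failure
+#             "email_on_retry": False,
+#         },
+#     )
+#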
+# It can either be raw email or the complete address in a format ``Sender Name <sender@email.com>``
+#
+# Example: from_email = Airflow <airflow@example.com>
+#
+# Variable: AIRFLOW__EMAIL__FROM_EMAIL
+#
+# from_email =
+
+# ssl context to use when using SMTP and IMAP SSL connections. By default, the context is "default",
+# which sets it to ``ssl.create_default_context()``; this provides the right balance between
+# compatibility and security, but it requires that certificates in your operating system are
+# updated and that your SMTP/IMAP servers have valid certificates with corresponding public
+# keys installed on your machines. You can switch it to "none" if you want to disable checking
+# of the certificates, but this is not recommended, as it allows MITM (man-in-the-middle) attacks
+# if your infrastructure is not sufficiently secured. It should only be set temporarily while you
+# are fixing your certificate configuration. This can typically be done by upgrading to a newer
+# version of the operating system you run Airflow components on, by upgrading/refreshing proper
+# certificates in the OS, or by updating certificates for your mail servers.
+#
+# Example: ssl_context = default
+#
+# Variable: AIRFLOW__EMAIL__SSL_CONTEXT
+#
+ssl_context = default
+
+[smtp]
+# If you want airflow to send emails on retries and failures, and you want to use
+# the airflow.utils.email.send_email_smtp function, you have to configure an
+# smtp server here
+
+# Specifies the host server address used by Airflow when sending out email notifications via SMTP.
+#
+# Variable: AIRFLOW__SMTP__SMTP_HOST
+#
+smtp_host = localhost
+
+# Determines whether to use the STARTTLS command when connecting to the SMTP server.
+#
+# Variable: AIRFLOW__SMTP__SMTP_STARTTLS
+#
+smtp_starttls = True
+
+# Determines whether to use an SSL connection when talking to the SMTP server.
+#
+# Variable: AIRFLOW__SMTP__SMTP_SSL
+#
+smtp_ssl = False
+
+# Username to authenticate when connecting to smtp server.
+#
+# Example: smtp_user = airflow
+#
+# Variable: AIRFLOW__SMTP__SMTP_USER
+#
+# smtp_user =
+
+# Password to authenticate when connecting to smtp server.
+#
+# Example: smtp_password = airflow
+#
+# Variable: AIRFLOW__SMTP__SMTP_PASSWORD
+#
+# smtp_password =
+
+# Defines the port number on which Airflow connects to the SMTP server to send email notifications.
+#
+# Variable: AIRFLOW__SMTP__SMTP_PORT
+#
+smtp_port = 25
+
+# Specifies the default **from** email address used when Airflow sends email notifications.
+#
+# Variable: AIRFLOW__SMTP__SMTP_MAIL_FROM
+#
+smtp_mail_from = airflow@example.com
+
+# Determines the maximum time (in seconds) the Apache Airflow system will wait for a
+# connection to the SMTP server to be established.
+#
+# Variable: AIRFLOW__SMTP__SMTP_TIMEOUT
+#
+smtp_timeout = 30
+
+# Defines the maximum number of times Airflow will attempt to connect to the SMTP server.
+#
+# Variable: AIRFLOW__SMTP__SMTP_RETRY_LIMIT
+#
+smtp_retry_limit = 5
+
+[sentry]
+# `Sentry `__ integration. Here you can supply
+# additional configuration options based on the Python platform.
+# See `Python / Configuration / Basic Options
+# `__ for more details.
+# Unsupported options: ``integrations``, ``in_app_include``, ``in_app_exclude``,
+# ``ignore_errors``, ``before_breadcrumb``, ``transport``.
+
+# Enable error reporting to Sentry
+#
+# Variable: AIRFLOW__SENTRY__SENTRY_ON
+#
+sentry_on = false
+
+#
+# Variable: AIRFLOW__SENTRY__SENTRY_DSN
+#
+sentry_dsn =
+
+# Dotted path to a before_send function that the sentry SDK should be configured to use.
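+#
+# A minimal sketch of such a function, following the Sentry SDK's
+# ``before_send`` convention (event in, event or ``None`` out); the
+# filtering rule is only an example:
+#
+# .. code-block:: python
+#
+#     def before_send(event, hint):
+#         # Returning None drops the event; anything else is sent as-is.
+#         if event.get("level") == "debug":
+#             return None
+#         return event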
+# +# Variable: AIRFLOW__SENTRY__BEFORE_SEND +# +# before_send = + +[scheduler] +# Task instances listen for external kill signal (when you clear tasks +# from the CLI or the UI), this defines the frequency at which they should +# listen (in seconds). +# +# Variable: AIRFLOW__SCHEDULER__JOB_HEARTBEAT_SEC +# +job_heartbeat_sec = 5 + +# The scheduler constantly tries to trigger new tasks (look at the +# scheduler section in the docs for more information). This defines +# how often the scheduler should run (in seconds). +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEARTBEAT_SEC +# +scheduler_heartbeat_sec = 5 + +# The frequency (in seconds) at which the LocalTaskJob should send heartbeat signals to the +# scheduler to notify it's still alive. If this value is set to 0, the heartbeat interval will default +# to the value of ``[scheduler] scheduler_zombie_task_threshold``. +# +# Variable: AIRFLOW__SCHEDULER__LOCAL_TASK_JOB_HEARTBEAT_SEC +# +local_task_job_heartbeat_sec = 0 + +# The number of times to try to schedule each DAG file +# -1 indicates unlimited number +# +# Variable: AIRFLOW__SCHEDULER__NUM_RUNS +# +num_runs = -1 + +# Controls how long the scheduler will sleep between loops, but if there was nothing to do +# in the loop. i.e. if it scheduled something then it will start the next loop +# iteration straight away. +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_IDLE_SLEEP_TIME +# +scheduler_idle_sleep_time = 1 + +# Number of seconds after which a DAG file is parsed. The DAG file is parsed every +# ``[scheduler] min_file_process_interval`` number of seconds. Updates to DAGs are reflected after +# this interval. Keeping this number low will increase CPU usage. +# +# Variable: AIRFLOW__SCHEDULER__MIN_FILE_PROCESS_INTERVAL +# +min_file_process_interval = 60 + +# How often (in seconds) to check for stale DAGs (DAGs which are no longer present in +# the expected files) which should be deactivated, as well as datasets that are no longer +# referenced and should be marked as orphaned. +# +# Variable: AIRFLOW__SCHEDULER__PARSING_CLEANUP_INTERVAL +# +parsing_cleanup_interval = 60 + +# How long (in seconds) to wait after we have re-parsed a DAG file before deactivating stale +# DAGs (DAGs which are no longer present in the expected files). The reason why we need +# this threshold is to account for the time between when the file is parsed and when the +# DAG is loaded. The absolute maximum that this could take is ``[core] dag_file_processor_timeout``, +# but when you have a long timeout configured, it results in a significant delay in the +# deactivation of stale dags. +# +# Variable: AIRFLOW__SCHEDULER__STALE_DAG_THRESHOLD +# +stale_dag_threshold = 50 + +# How often (in seconds) to scan the DAGs directory for new files. Default to 5 minutes. +# +# Variable: AIRFLOW__SCHEDULER__DAG_DIR_LIST_INTERVAL +# +dag_dir_list_interval = 600 + +# How often should stats be printed to the logs. Setting to 0 will disable printing stats +# +# Variable: AIRFLOW__SCHEDULER__PRINT_STATS_INTERVAL +# +print_stats_interval = 30 + +# How often (in seconds) should pool usage stats be sent to StatsD (if statsd_on is enabled) +# +# Variable: AIRFLOW__SCHEDULER__POOL_METRICS_INTERVAL +# +pool_metrics_interval = 5.0 + +# If the last scheduler heartbeat happened more than ``[scheduler] scheduler_health_check_threshold`` +# ago (in seconds), scheduler is considered unhealthy. +# This is used by the health check in the **/health** endpoint and in ``airflow jobs check`` CLI +# for SchedulerJob. 
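+#
+# The **/health** endpoint mentioned above can be probed with a few lines of
+# Python (the URL is a placeholder for this deployment's webserver; the
+# response layout shown is typical for Airflow 2 but worth verifying against
+# your version):
+#
+# .. code-block:: python
+#
+#     import json
+#     import urllib.request
+#
+#     with urllib.request.urlopen("http://localhost:8080/health") as resp:
+#         health = json.load(resp)
+#
+#     # e.g. {"metadatabase": {"status": "healthy"}, "scheduler": {...}}
+#     print(health["scheduler"]["status"])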
+# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEALTH_CHECK_THRESHOLD +# +scheduler_health_check_threshold = 30 + +# When you start a scheduler, airflow starts a tiny web server +# subprocess to serve a health check if this is set to ``True`` +# +# Variable: AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK +# +enable_health_check = True + +# When you start a scheduler, airflow starts a tiny web server +# subprocess to serve a health check on this host +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEALTH_CHECK_SERVER_HOST +# +scheduler_health_check_server_host = 0.0.0.0 + +# When you start a scheduler, airflow starts a tiny web server +# subprocess to serve a health check on this port +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_HEALTH_CHECK_SERVER_PORT +# +scheduler_health_check_server_port = 8974 + +# How often (in seconds) should the scheduler check for orphaned tasks and SchedulerJobs +# +# Variable: AIRFLOW__SCHEDULER__ORPHANED_TASKS_CHECK_INTERVAL +# +orphaned_tasks_check_interval = 300.0 + +# Determines the directory where logs for the child processes of the scheduler will be stored +# +# Variable: AIRFLOW__SCHEDULER__CHILD_PROCESS_LOG_DIRECTORY +# +child_process_log_directory = /opt/airflow/logs/scheduler + +# Local task jobs periodically heartbeat to the DB. If the job has +# not heartbeat in this many seconds, the scheduler will mark the +# associated task instance as failed and will re-schedule the task. +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULER_ZOMBIE_TASK_THRESHOLD +# +scheduler_zombie_task_threshold = 300 + +# How often (in seconds) should the scheduler check for zombie tasks. +# +# Variable: AIRFLOW__SCHEDULER__ZOMBIE_DETECTION_INTERVAL +# +zombie_detection_interval = 10.0 + +# Turn off scheduler catchup by setting this to ``False``. +# Default behavior is unchanged and +# Command Line Backfills still work, but the scheduler +# will not do scheduler catchup if this is ``False``, +# however it can be set on a per DAG basis in the +# DAG definition (catchup) +# +# Variable: AIRFLOW__SCHEDULER__CATCHUP_BY_DEFAULT +# +catchup_by_default = True + +# Setting this to ``True`` will make first task instance of a task +# ignore depends_on_past setting. A task instance will be considered +# as the first task instance of a task when there is no task instance +# in the DB with an execution_date earlier than it., i.e. no manual marking +# success will be needed for a newly added task to be scheduled. +# +# Variable: AIRFLOW__SCHEDULER__IGNORE_FIRST_DEPENDS_ON_PAST_BY_DEFAULT +# +ignore_first_depends_on_past_by_default = True + +# This changes the batch size of queries in the scheduling main loop. +# This should not be greater than ``[core] parallelism``. +# If this is too high, SQL query performance may be impacted by +# complexity of query predicate, and/or excessive locking. +# Additionally, you may hit the maximum allowable query length for your db. +# Set this to 0 to use the value of ``[core] parallelism`` +# +# Variable: AIRFLOW__SCHEDULER__MAX_TIS_PER_QUERY +# +max_tis_per_query = 16 + +# Should the scheduler issue ``SELECT ... FOR UPDATE`` in relevant queries. +# If this is set to ``False`` then you should not run more than a single +# scheduler at once +# +# Variable: AIRFLOW__SCHEDULER__USE_ROW_LEVEL_LOCKING +# +use_row_level_locking = True + +# Max number of DAGs to create DagRuns for per scheduler loop. 
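+#
+# As noted for ``catchup_by_default`` above, catchup can be overridden per
+# DAG. A minimal sketch using the Airflow 2.4+ ``schedule`` argument (the
+# DAG id and schedule are placeholders):
+#
+# .. code-block:: python
+#
+#     import pendulum
+#     from airflow import DAG
+#
+#     dag = DAG(
+#         "example_no_catchup",
+#         schedule="@daily",
+#         start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
+#         catchup=False,  # overrides catchup_by_default for this DAG
+#     )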
+# +# Variable: AIRFLOW__SCHEDULER__MAX_DAGRUNS_TO_CREATE_PER_LOOP +# +max_dagruns_to_create_per_loop = 10 + +# How many DagRuns should a scheduler examine (and lock) when scheduling +# and queuing tasks. +# +# Variable: AIRFLOW__SCHEDULER__MAX_DAGRUNS_PER_LOOP_TO_SCHEDULE +# +max_dagruns_per_loop_to_schedule = 20 + +# Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the +# same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other +# dags in some circumstances +# +# Variable: AIRFLOW__SCHEDULER__SCHEDULE_AFTER_TASK_EXECUTION +# +schedule_after_task_execution = True + +# The scheduler reads dag files to extract the airflow modules that are going to be used, +# and imports them ahead of time to avoid having to re-do it for each parsing process. +# This flag can be set to ``False`` to disable this behavior in case an airflow module needs +# to be freshly imported each time (at the cost of increased DAG parsing time). +# +# Variable: AIRFLOW__SCHEDULER__PARSING_PRE_IMPORT_MODULES +# +parsing_pre_import_modules = True + +# The scheduler can run multiple processes in parallel to parse dags. +# This defines how many processes will run. +# +# Variable: AIRFLOW__SCHEDULER__PARSING_PROCESSES +# +parsing_processes = 2 + +# One of ``modified_time``, ``random_seeded_by_host`` and ``alphabetical``. +# The scheduler will list and sort the dag files to decide the parsing order. +# +# * ``modified_time``: Sort by modified time of the files. This is useful on large scale to parse the +# recently modified DAGs first. +# * ``random_seeded_by_host``: Sort randomly across multiple Schedulers but with same order on the +# same host. This is useful when running with Scheduler in HA mode where each scheduler can +# parse different DAG files. +# * ``alphabetical``: Sort by filename +# +# Variable: AIRFLOW__SCHEDULER__FILE_PARSING_SORT_MODE +# +file_parsing_sort_mode = modified_time + +# Whether the dag processor is running as a standalone process or it is a subprocess of a scheduler +# job. +# +# Variable: AIRFLOW__SCHEDULER__STANDALONE_DAG_PROCESSOR +# +standalone_dag_processor = False + +# Only applicable if ``[scheduler] standalone_dag_processor`` is true and callbacks are stored +# in database. Contains maximum number of callbacks that are fetched during a single loop. +# +# Variable: AIRFLOW__SCHEDULER__MAX_CALLBACKS_PER_LOOP +# +max_callbacks_per_loop = 20 + +# Only applicable if ``[scheduler] standalone_dag_processor`` is true. +# Time in seconds after which dags, which were not updated by Dag Processor are deactivated. +# +# Variable: AIRFLOW__SCHEDULER__DAG_STALE_NOT_SEEN_DURATION +# +dag_stale_not_seen_duration = 600 + +# Turn off scheduler use of cron intervals by setting this to ``False``. +# DAGs submitted manually in the web UI or with trigger_dag will still run. +# +# Variable: AIRFLOW__SCHEDULER__USE_JOB_SCHEDULE +# +use_job_schedule = True + +# Allow externally triggered DagRuns for Execution Dates in the future +# Only has effect if schedule_interval is set to None in DAG +# +# Variable: AIRFLOW__SCHEDULER__ALLOW_TRIGGER_IN_FUTURE +# +allow_trigger_in_future = False + +# How often to check for expired trigger requests that have not run yet. +# +# Variable: AIRFLOW__SCHEDULER__TRIGGER_TIMEOUT_CHECK_INTERVAL +# +trigger_timeout_check_interval = 15 + +# Amount of time a task can be in the queued state before being retried or set to failed. 
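+#
+# A task stuck in QUEUED is only noticed when the periodic check runs, so the
+# practical worst case is roughly this timeout plus one check interval (both
+# values are the ones set below):
+#
+# .. code-block:: python
+#
+#     task_queued_timeout = 300.0                 # seconds
+#     task_queued_timeout_check_interval = 120.0  # seconds
+#
+#     # Approximate upper bound before a stuck task is failed or retried.
+#     assert task_queued_timeout + task_queued_timeout_check_interval == 420.0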
+# +# Variable: AIRFLOW__SCHEDULER__TASK_QUEUED_TIMEOUT +# +task_queued_timeout = 300.0 + +# How often to check for tasks that have been in the queued state for +# longer than ``[scheduler] task_queued_timeout``. +# +# Variable: AIRFLOW__SCHEDULER__TASK_QUEUED_TIMEOUT_CHECK_INTERVAL +# +task_queued_timeout_check_interval = 120.0 + +# The run_id pattern used to verify the validity of user input to the run_id parameter when +# triggering a DAG. This pattern cannot change the pattern used by scheduler to generate run_id +# for scheduled DAG runs or DAG runs triggered without changing the run_id parameter. +# +# Variable: AIRFLOW__SCHEDULER__ALLOWED_RUN_ID_PATTERN +# +allowed_run_id_pattern = ^[A-Za-z0-9_.~:+-]+$ + +# Whether to create DAG runs that span an interval or one single point in time for cron schedules, when +# a cron string is provided to ``schedule`` argument of a DAG. +# +# * ``True``: **CronDataIntervalTimetable** is used, which is suitable +# for DAGs with well-defined data interval. You get contiguous intervals from the end of the previous +# interval up to the scheduled datetime. +# * ``False``: **CronTriggerTimetable** is used, which is closer to the behavior of cron itself. +# +# Notably, for **CronTriggerTimetable**, the logical date is the same as the time the DAG Run will +# try to schedule, while for **CronDataIntervalTimetable**, the logical date is the beginning of +# the data interval, but the DAG Run will try to schedule at the end of the data interval. +# +# Variable: AIRFLOW__SCHEDULER__CREATE_CRON_DATA_INTERVALS +# +create_cron_data_intervals = True + +[triggerer] +# How many triggers a single Triggerer will run at once, by default. +# +# Variable: AIRFLOW__TRIGGERER__DEFAULT_CAPACITY +# +default_capacity = 1000 + +# How often to heartbeat the Triggerer job to ensure it hasn't been killed. +# +# Variable: AIRFLOW__TRIGGERER__JOB_HEARTBEAT_SEC +# +job_heartbeat_sec = 5 + +# If the last triggerer heartbeat happened more than ``[triggerer] triggerer_health_check_threshold`` +# ago (in seconds), triggerer is considered unhealthy. +# This is used by the health check in the **/health** endpoint and in ``airflow jobs check`` CLI +# for TriggererJob. +# +# Variable: AIRFLOW__TRIGGERER__TRIGGERER_HEALTH_CHECK_THRESHOLD +# +triggerer_health_check_threshold = 30 + +[kerberos] +# Location of your ccache file once kinit has been performed. +# +# Variable: AIRFLOW__KERBEROS__CCACHE +# +ccache = /tmp/airflow_krb5_ccache + +# gets augmented with fqdn +# +# Variable: AIRFLOW__KERBEROS__PRINCIPAL +# +principal = airflow + +# Determines the frequency at which initialization or re-initialization processes occur. +# +# Variable: AIRFLOW__KERBEROS__REINIT_FREQUENCY +# +reinit_frequency = 3600 + +# Path to the kinit executable +# +# Variable: AIRFLOW__KERBEROS__KINIT_PATH +# +kinit_path = kinit + +# Designates the path to the Kerberos keytab file for the Airflow user +# +# Variable: AIRFLOW__KERBEROS__KEYTAB +# +keytab = airflow.keytab + +# Allow to disable ticket forwardability. +# +# Variable: AIRFLOW__KERBEROS__FORWARDABLE +# +forwardable = True + +# Allow to remove source IP from token, useful when using token behind NATted Docker host. +# +# Variable: AIRFLOW__KERBEROS__INCLUDE_IP +# +include_ip = True + +[sensors] +# Sensor default timeout, 7 days by default (7 * 24 * 60 * 60). +# +# Variable: AIRFLOW__SENSORS__DEFAULT_TIMEOUT +# +default_timeout = 604800 + +[aws] +# This section contains settings for Amazon Web Services (AWS) integration. 
+ +# session_factory = +cloudwatch_task_handler_json_serializer = airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize_legacy + +[aws_batch_executor] +# This section only applies if you are using the AwsBatchExecutor in +# Airflow's ``[core]`` configuration. +# For more information on any of these execution parameters, see the link below: +# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.submit_job +# For boto3 credential management, see +# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html + +conn_id = aws_default +# region_name = +max_submit_job_attempts = 3 +check_health_on_startup = True +# job_name = +# job_queue = +# job_definition = +# submit_job_kwargs = + +[aws_ecs_executor] +# This section only applies if you are using the AwsEcsExecutor in +# Airflow's ``[core]`` configuration. +# For more information on any of these execution parameters, see the link below: +# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs/client/run_task.html +# For boto3 credential management, see +# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html + +conn_id = aws_default +# region_name = +assign_public_ip = False +# cluster = +# capacity_provider_strategy = +# container_name = +# launch_type = +platform_version = LATEST +# security_groups = +# subnets = +# task_definition = +max_run_task_attempts = 3 +# run_task_kwargs = +check_health_on_startup = True + +[aws_auth_manager] +# This section only applies if you are using the AwsAuthManager. In other words, if you set +# ``[core] auth_manager = airflow.providers.amazon.aws.auth_manager.aws_auth_manager.AwsAuthManager`` in +# Airflow's configuration. + +enable = False +conn_id = aws_default +# region_name = +# saml_metadata_url = +# avp_policy_store_id = + +[celery_kubernetes_executor] +# This section only applies if you are using the ``CeleryKubernetesExecutor`` in +# ``[core]`` section above + +# Define when to send a task to ``KubernetesExecutor`` when using ``CeleryKubernetesExecutor``. +# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``), +# the task is executed via ``KubernetesExecutor``, +# otherwise via ``CeleryExecutor`` +# +# Variable: AIRFLOW__CELERY_KUBERNETES_EXECUTOR__KUBERNETES_QUEUE +# +kubernetes_queue = kubernetes + +[celery] +# This section only applies if you are using the CeleryExecutor in +# ``[core]`` section above + +# The app name that will be used by celery +# +# Variable: AIRFLOW__CELERY__CELERY_APP_NAME +# +celery_app_name = airflow.providers.celery.executors.celery_executor + +# The concurrency that will be used when starting workers with the +# ``airflow celery worker`` command. This defines the number of task instances that +# a worker will take, so size up your workers based on the resources on +# your worker box and the nature of your tasks +# +# Variable: AIRFLOW__CELERY__WORKER_CONCURRENCY +# +worker_concurrency = 32 + +# The maximum and minimum number of pool processes that will be used to dynamically resize +# the pool based on load.Enable autoscaling by providing max_concurrency,min_concurrency +# with the ``airflow celery worker`` command (always keep minimum processes, +# but grow to maximum if necessary). +# Pick these numbers based on resources on worker box and the nature of the task. +# If autoscale option is available, worker_concurrency will be ignored. 
+# https://docs.celeryq.dev/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale +# +# Example: worker_autoscale = 16,12 +# +# Variable: AIRFLOW__CELERY__WORKER_AUTOSCALE +# +# worker_autoscale = + +# Used to increase the number of tasks that a worker prefetches which can improve performance. +# The number of processes multiplied by worker_prefetch_multiplier is the number of tasks +# that are prefetched by a worker. A value greater than 1 can result in tasks being unnecessarily +# blocked if there are multiple workers and one worker prefetches tasks that sit behind long +# running tasks while another worker has unutilized processes that are unable to process the already +# claimed blocked tasks. +# https://docs.celeryq.dev/en/stable/userguide/optimizing.html#prefetch-limits +# +# Variable: AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER +# +worker_prefetch_multiplier = 2 + +# Specify if remote control of the workers is enabled. +# In some cases when the broker does not support remote control, Celery creates lots of +# ``.*reply-celery-pidbox`` queues. You can prevent this by setting this to false. +# However, with this disabled Flower won't work. +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html#broker-overview +# +# Variable: AIRFLOW__CELERY__WORKER_ENABLE_REMOTE_CONTROL +# +worker_enable_remote_control = true + +# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally +# a sqlalchemy database. Refer to the Celery documentation for more information. +# +# Variable: AIRFLOW__CELERY__BROKER_URL +# +# This will be configured via environment variables, as it differs between master and workers. +# broker_url = + +# The Celery result_backend. When a job finishes, it needs to update the +# metadata of the job. Therefore it will post a message on a message bus, +# or insert it into a database (depending of the backend) +# This status is used by the scheduler to update the state of the task +# The use of a database is highly recommended +# When not specified, sql_alchemy_conn with a db+ scheme prefix will be used +# https://docs.celeryq.dev/en/latest/userguide/configuration.html#task-result-backend-settings +# +# Example: result_backend = db+postgresql://postgres:airflow@postgres/airflow +# +# Variable: AIRFLOW__CELERY__RESULT_BACKEND +# +# The result_backend is intentionally left blank. +# When blank, Airflow's CeleryExecutor defaults to using the value from +# `sql_alchemy_conn` as the result backend, which is the recommended setup. +result_backend = + +# Optional configuration dictionary to pass to the Celery result backend SQLAlchemy engine. +# +# Example: result_backend_sqlalchemy_engine_options = {"pool_recycle": 1800} +# +# Variable: AIRFLOW__CELERY__RESULT_BACKEND_SQLALCHEMY_ENGINE_OPTIONS +# +result_backend_sqlalchemy_engine_options = + +# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start +# it ``airflow celery flower``. 
This defines the IP that Celery Flower runs on +# +# Variable: AIRFLOW__CELERY__FLOWER_HOST +# +flower_host = 0.0.0.0 + +# The root URL for Flower +# +# Example: flower_url_prefix = /flower +# +# Variable: AIRFLOW__CELERY__FLOWER_URL_PREFIX +# +flower_url_prefix = + +# This defines the port that Celery Flower runs on +# +# Variable: AIRFLOW__CELERY__FLOWER_PORT +# +flower_port = 5555 + +# Securing Flower with Basic Authentication +# Accepts user:password pairs separated by a comma +# +# Example: flower_basic_auth = user1:password1,user2:password2 +# +# Variable: AIRFLOW__CELERY__FLOWER_BASIC_AUTH +# +flower_basic_auth = + +# How many processes CeleryExecutor uses to sync task state. +# 0 means to use max(1, number of cores - 1) processes. +# +# Variable: AIRFLOW__CELERY__SYNC_PARALLELISM +# +sync_parallelism = 0 + +# Import path for celery configuration options +# +# Variable: AIRFLOW__CELERY__CELERY_CONFIG_OPTIONS +# +celery_config_options = airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG + +# +# Variable: AIRFLOW__CELERY__SSL_ACTIVE +# +ssl_active = False + +# Path to the client key. +# +# Variable: AIRFLOW__CELERY__SSL_KEY +# +ssl_key = + +# Path to the client certificate. +# +# Variable: AIRFLOW__CELERY__SSL_CERT +# +ssl_cert = + +# Path to the CA certificate. +# +# Variable: AIRFLOW__CELERY__SSL_CACERT +# +ssl_cacert = + +# Celery Pool implementation. +# Choices include: ``prefork`` (default), ``eventlet``, ``gevent`` or ``solo``. +# See: +# https://docs.celeryq.dev/en/latest/userguide/workers.html#concurrency +# https://docs.celeryq.dev/en/latest/userguide/concurrency/eventlet.html +# +# Variable: AIRFLOW__CELERY__POOL +# +pool = prefork + +# The number of seconds to wait before timing out ``send_task_to_executor`` or +# ``fetch_celery_task_state`` operations. +# +# Variable: AIRFLOW__CELERY__OPERATION_TIMEOUT +# +operation_timeout = 1.0 + +task_acks_late = True +# Celery task will report its status as 'started' when the task is executed by a worker. +# This is used in Airflow to keep track of the running tasks and if a Scheduler is restarted +# or run in HA mode, it can adopt the orphan tasks launched by previous SchedulerJob. +# +# Variable: AIRFLOW__CELERY__TASK_TRACK_STARTED +# +task_track_started = True + +# The Maximum number of retries for publishing task messages to the broker when failing +# due to ``AirflowTaskTimeout`` error before giving up and marking Task as failed. +# +# Variable: AIRFLOW__CELERY__TASK_PUBLISH_MAX_RETRIES +# +task_publish_max_retries = 3 + +# Worker initialisation check to validate Metadata Database connection +# +# Variable: AIRFLOW__CELERY__WORKER_PRECHECK +# +worker_precheck = False + +# Extra celery configs to include in the celery worker. +# Any of the celery config can be added to this config and it +# will be applied while starting the celery worker. e.g. {"worker_max_tasks_per_child": 10} +# See also: +# https://docs.celeryq.dev/en/stable/userguide/configuration.html#configuration-and-defaults +# +# Variable: AIRFLOW__CELERY__EXTRA_CELERY_CONFIG +# +extra_celery_config = {} + +[celery_broker_transport_options] +# This section is for specifying options which can be passed to the +# underlying celery broker transport. See: +# https://docs.celeryq.dev/en/latest/userguide/configuration.html#std:setting-broker_transport_options + +# The visibility timeout defines the number of seconds to wait for the worker +# to acknowledge the task before the message is redelivered to another worker. 
+# Make sure to increase the visibility timeout to match the time of the longest +# ETA you're planning to use. +# visibility_timeout is only supported for Redis and SQS celery brokers. +# See: +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#visibility-timeout +# +# Example: visibility_timeout = 21600 +# +# Variable: AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__VISIBILITY_TIMEOUT +# +# visibility_timeout = + +# The sentinel_kwargs parameter allows passing additional options to the Sentinel client. +# In a typical scenario where Redis Sentinel is used as the broker and Redis servers are +# password-protected, the password needs to be passed through this parameter. Although its +# type is string, it is required to pass a string that conforms to the dictionary format. +# See: +# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#configuration +# +# Example: sentinel_kwargs = {"password": "password_for_redis_server"} +# +# Variable: AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__SENTINEL_KWARGS +# +# sentinel_kwargs = + +[local_kubernetes_executor] +# This section only applies if you are using the ``LocalKubernetesExecutor`` in +# ``[core]`` section above + +# Define when to send a task to ``KubernetesExecutor`` when using ``LocalKubernetesExecutor``. +# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``), +# the task is executed via ``KubernetesExecutor``, +# otherwise via ``LocalExecutor`` +# +# Variable: AIRFLOW__LOCAL_KUBERNETES_EXECUTOR__KUBERNETES_QUEUE +# +kubernetes_queue = kubernetes + +[kubernetes_executor] +# Kwargs to override the default urllib3 Retry used in the kubernetes API client +# +# Example: api_client_retry_configuration = { "total": 3, "backoff_factor": 0.5 } +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__API_CLIENT_RETRY_CONFIGURATION +# +api_client_retry_configuration = + +# Flag to control the information added to kubernetes executor logs for better traceability +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__LOGS_TASK_METADATA +# +logs_task_metadata = False + +# Path to the YAML pod file that forms the basis for KubernetesExecutor workers. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__POD_TEMPLATE_FILE +# +pod_template_file = + +# The repository of the Kubernetes Image for the Worker to Run +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_CONTAINER_REPOSITORY +# +worker_container_repository = + +# The tag of the Kubernetes Image for the Worker to Run +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_CONTAINER_TAG +# +worker_container_tag = + +# The Kubernetes namespace where airflow workers should be created. Defaults to ``default`` +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__NAMESPACE +# +namespace = default + +# If True, all worker pods will be deleted upon termination +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_WORKER_PODS +# +delete_worker_pods = True + +# If False (and delete_worker_pods is True), +# failed worker pods will not be deleted so users can investigate them. +# This only prevents removal of worker pods where the worker itself failed, +# not when the task it ran failed. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_WORKER_PODS_ON_FAILURE +# +delete_worker_pods_on_failure = False + +worker_pod_pending_fatal_container_state_reasons = CreateContainerConfigError,ErrImagePull,CreateContainerError,ImageInspectError, InvalidImageName +# Number of Kubernetes Worker Pod creation calls per scheduler loop. 
+# Note that the current default of "1" will only launch a single pod +# per-heartbeat. It is HIGHLY recommended that users increase this +# number to match the tolerance of their kubernetes cluster for +# better performance. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_PODS_CREATION_BATCH_SIZE +# +worker_pods_creation_batch_size = 1 + +# Allows users to launch pods in multiple namespaces. +# Will require creating a cluster-role for the scheduler, +# or use multi_namespace_mode_namespace_list configuration. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__MULTI_NAMESPACE_MODE +# +multi_namespace_mode = False + +# If multi_namespace_mode is True while scheduler does not have a cluster-role, +# give the list of namespaces where the scheduler will schedule jobs +# Scheduler needs to have the necessary permissions in these namespaces. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__MULTI_NAMESPACE_MODE_NAMESPACE_LIST +# +multi_namespace_mode_namespace_list = + +# Use the service account kubernetes gives to pods to connect to kubernetes cluster. +# It's intended for clients that expect to be running inside a pod running on kubernetes. +# It will raise an exception if called from a process not running in a kubernetes environment. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__IN_CLUSTER +# +in_cluster = True + +# When running with in_cluster=False change the default cluster_context or config_file +# options to Kubernetes client. Leave blank these to use default behaviour like ``kubectl`` has. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__CLUSTER_CONTEXT +# +# cluster_context = + +# Path to the kubernetes configfile to be used when ``in_cluster`` is set to False +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__CONFIG_FILE +# +# config_file = + +# Keyword parameters to pass while calling a kubernetes client core_v1_api methods +# from Kubernetes Executor provided as a single line formatted JSON dictionary string. +# List of supported params are similar for all core_v1_apis, hence a single config +# variable for all apis. See: +# https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__KUBE_CLIENT_REQUEST_ARGS +# +kube_client_request_args = + +# Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client +# ``core_v1_api`` method when using the Kubernetes Executor. +# This should be an object and can contain any of the options listed in the ``v1DeleteOptions`` +# class defined here: +# https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19 +# +# Example: delete_option_kwargs = {"grace_period_seconds": 10} +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_OPTION_KWARGS +# +delete_option_kwargs = + +# Enables TCP keepalive mechanism. This prevents Kubernetes API requests to hang indefinitely +# when idle connection is time-outed on services like cloud load balancers or firewalls. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__ENABLE_TCP_KEEPALIVE +# +enable_tcp_keepalive = True + +# When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has +# been idle for `tcp_keep_idle` seconds. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_IDLE +# +tcp_keep_idle = 120 + +# When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond +# to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds. 
+# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_INTVL +# +tcp_keep_intvl = 30 + +# When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond +# to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt number` of times before +# a connection is considered to be broken. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_CNT +# +tcp_keep_cnt = 6 + +# Set this to false to skip verifying SSL certificate of Kubernetes python client. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__VERIFY_SSL +# +verify_ssl = True + +# How often in seconds to check for task instances stuck in "queued" status without a pod +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_PODS_QUEUED_CHECK_INTERVAL +# +worker_pods_queued_check_interval = 60 + +# Path to a CA certificate to be used by the Kubernetes client to verify the server's SSL certificate. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__SSL_CA_CERT +# +ssl_ca_cert = + +# The Maximum number of retries for queuing the task to the kubernetes scheduler when +# failing due to Kube API exceeded quota errors before giving up and marking task as failed. +# -1 for unlimited times. +# +# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TASK_PUBLISH_MAX_RETRIES +# +task_publish_max_retries = 0 + +[common.io] +# Common IO configuration section + +# Path to a location on object storage where XComs can be stored in url format. +# +# Example: xcom_objectstorage_path = s3://conn_id@bucket/path +# +# Variable: AIRFLOW__COMMON.IO__XCOM_OBJECTSTORAGE_PATH +# +xcom_objectstorage_path = + +# Threshold in bytes for storing XComs in object storage. -1 means always store in the +# database. 0 means always store in object storage. Any positive number means +# it will be stored in object storage if the size of the value is greater than the threshold. +# +# Example: xcom_objectstorage_threshold = 1000000 +# +# Variable: AIRFLOW__COMMON.IO__XCOM_OBJECTSTORAGE_THRESHOLD +# +xcom_objectstorage_threshold = -1 + +# Compression algorithm to use when storing XComs in object storage. Supported algorithms +# are a.o.: snappy, zip, gzip, bz2, and lzma. If not specified, no compression will be used. +# Note that the compression algorithm must be available in the Python installation (e.g. +# python-snappy for snappy). Zip, gz, bz2 are available by default. +# +# Example: xcom_objectstorage_compression = gz +# +# Variable: AIRFLOW__COMMON.IO__XCOM_OBJECTSTORAGE_COMPRESSION +# +xcom_objectstorage_compression = + + + +[fab] +# This section contains configs specific to FAB provider. + +# Boolean for enabling rate limiting on authentication endpoints. +# +# Variable: AIRFLOW__FAB__AUTH_RATE_LIMITED +# +auth_rate_limited = True + +# Rate limit for authentication endpoints. +# +# Variable: AIRFLOW__FAB__AUTH_RATE_LIMIT +# +auth_rate_limit = 5 per 40 second + +# Update FAB permissions and sync security manager roles +# on webserver startup +# +# Variable: AIRFLOW__FAB__UPDATE_FAB_PERMS +# +update_fab_perms = True + +[imap] +# Options for IMAP provider. + +# ssl_context = + +[azure_remote_logging] +# Configuration that needs to be set for enable remote logging in Azure Blob Storage + +remote_wasb_log_container = airflow-logs + +[openlineage] +# This section applies settings for OpenLineage integration. +# More about configuration and it's precedence can be found at +# https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/guides/user.html#transport-setup + +# Disable sending events without uninstalling the OpenLineage Provider by setting this to true. 
+# +# Variable: AIRFLOW__OPENLINEAGE__DISABLED +# +disabled = False + +# Exclude some Operators from emitting OpenLineage events by passing a string of semicolon separated +# full import paths of Operators to disable. +# +# Example: disabled_for_operators = airflow.providers.standard.operators.bash.BashOperator; airflow.providers.standard.operators.python.PythonOperator +# +# Variable: AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS +# +disabled_for_operators = + +# If this setting is enabled, OpenLineage integration won't collect and emit metadata, +# unless you explicitly enable it per `DAG` or `Task` using `enable_lineage` method. +# +# Variable: AIRFLOW__OPENLINEAGE__SELECTIVE_ENABLE +# +selective_enable = False + +# Set namespace that the lineage data belongs to, so that if you use multiple OpenLineage producers, +# events coming from them will be logically separated. +# +# Example: namespace = my_airflow_instance_1 +# +# Variable: AIRFLOW__OPENLINEAGE__NAMESPACE +# +# namespace = + +# Register custom OpenLineage Extractors by passing a string of semicolon separated full import paths. +# +# Example: extractors = full.path.to.ExtractorClass;full.path.to.AnotherExtractorClass +# +# Variable: AIRFLOW__OPENLINEAGE__EXTRACTORS +# +# extractors = + +# Register custom run facet functions by passing a string of semicolon separated full import paths. +# +# Example: custom_run_facets = full.path.to.custom_facet_function;full.path.to.another_custom_facet_function +# +# Variable: AIRFLOW__OPENLINEAGE__CUSTOM_RUN_FACETS +# +custom_run_facets = + +# Specify the path to the YAML configuration file. +# This ensures backwards compatibility with passing config through the `openlineage.yml` file. +# +# Example: config_path = full/path/to/openlineage.yml +# +# Variable: AIRFLOW__OPENLINEAGE__CONFIG_PATH +# +config_path = + +# Pass OpenLineage Client transport configuration as JSON string. It should contain type of the +# transport and additional options (different for each transport type). For more details see: +# https://openlineage.io/docs/client/python/#built-in-transport-types +# +# Currently supported types are: +# +# * HTTP +# * Kafka +# * Console +# * File +# +# Example: transport = {"type": "http", "url": "http://localhost:5000", "endpoint": "api/v1/lineage"} +# +# Variable: AIRFLOW__OPENLINEAGE__TRANSPORT +# +transport = + +# Disable the inclusion of source code in OpenLineage events by setting this to `true`. +# By default, several Operators (e.g. Python, Bash) will include their source code in the events +# unless disabled. +# +# Variable: AIRFLOW__OPENLINEAGE__DISABLE_SOURCE_CODE +# +disable_source_code = False + +# Number of processes to utilize for processing DAG state changes +# in an asynchronous manner within the scheduler process. +# +# Variable: AIRFLOW__OPENLINEAGE__DAG_STATE_CHANGE_PROCESS_POOL_SIZE +# +dag_state_change_process_pool_size = 1 + +# Maximum amount of time (in seconds) that OpenLineage can spend executing metadata extraction. +# +# Variable: AIRFLOW__OPENLINEAGE__EXECUTION_TIMEOUT +# +execution_timeout = 10 + +# If true, OpenLineage event will include full task info - potentially containing large fields. +# +# Variable: AIRFLOW__OPENLINEAGE__INCLUDE_FULL_TASK_INFO +# +include_full_task_info = False + +# If true, OpenLineage events will include information useful for debugging - potentially +# containing large fields e.g. all installed packages and their versions. 
+# +# Variable: AIRFLOW__OPENLINEAGE__DEBUG_MODE +# +debug_mode = False + +# Automatically inject OpenLineage's parent job (namespace, job name, run id) information into Spark +# application properties for supported Operators. +# +# Variable: AIRFLOW__OPENLINEAGE__SPARK_INJECT_PARENT_JOB_INFO +# +spark_inject_parent_job_info = False + +[smtp_provider] +# Options for SMTP provider. + +# ssl context to use when using SMTP and IMAP SSL connections. By default, the context is "default" +# which sets it to ``ssl.create_default_context()`` which provides the right balance between +# compatibility and security, it however requires that certificates in your operating system are +# updated and that SMTP/IMAP servers of yours have valid certificates that have corresponding public +# keys installed on your machines. You can switch it to "none" if you want to disable checking +# of the certificates, but it is not recommended as it allows MITM (man-in-the-middle) attacks +# if your infrastructure is not sufficiently secured. It should only be set temporarily while you +# are fixing your certificate configuration. This can be typically done by upgrading to newer +# version of the operating system you run Airflow components on,by upgrading/refreshing proper +# certificates in the OS or by updating certificates for your mail servers. +# +# If you do not set this option explicitly, it will use Airflow "email.ssl_context" configuration, +# but if this configuration is not present, it will use "default" value. +# +# Example: ssl_context = default +# +# Variable: AIRFLOW__SMTP_PROVIDER__SSL_CONTEXT +# +# ssl_context = + +# Allows overriding of the standard templated email subject line when the SmtpNotifier is used. +# Must provide a path to the template. +# +# Example: templated_email_subject_path = path/to/override/email_subject.html +# +# Variable: AIRFLOW__SMTP_PROVIDER__TEMPLATED_EMAIL_SUBJECT_PATH +# +# templated_email_subject_path = + +# Allows overriding of the standard templated email path when the SmtpNotifier is used. Must provide +# a path to the template. +# +# Example: templated_html_content_path = path/to/override/email.html +# +# Variable: AIRFLOW__SMTP_PROVIDER__TEMPLATED_HTML_CONTENT_PATH +# +# templated_html_content_path = + +[docker] +docker_url = unix://var/run/docker.sock diff --git a/airflow/configs/.env.master b/airflow/configs/.env.master new file mode 100644 index 0000000..9288246 --- /dev/null +++ b/airflow/configs/.env.master @@ -0,0 +1,4 @@ +# This file should be generated from ansible/templates/.env.ytdlp.j2 +# Do not edit manually - your changes will be overwritten. +# +# To generate this file, run the Ansible playbook that processes the templates. diff --git a/airflow/configs/.env.worker b/airflow/configs/.env.worker new file mode 100644 index 0000000..9288246 --- /dev/null +++ b/airflow/configs/.env.worker @@ -0,0 +1,4 @@ +# This file should be generated from ansible/templates/.env.ytdlp.j2 +# Do not edit manually - your changes will be overwritten. +# +# To generate this file, run the Ansible playbook that processes the templates. 
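Each option in the airflow.cfg above carries a `Variable:` comment naming the environment variable that overrides it; the generated `.env` files and the compose templates below rely on this mechanism to inject per-host values. A minimal sketch of the naming convention (the helper function is ours for illustration, not an Airflow API):

```python
import os

def airflow_env_var(section: str, key: str) -> str:
    # Airflow checks AIRFLOW__{SECTION}__{KEY} in the environment before
    # falling back to the value parsed from airflow.cfg.
    return f"AIRFLOW__{section.upper()}__{key.upper()}"

# e.g. override [celery] worker_concurrency on one host without editing airflow.cfg:
os.environ[airflow_env_var("celery", "worker_concurrency")] = "16"
assert airflow_env_var("scheduler", "parsing_processes") == "AIRFLOW__SCHEDULER__PARSING_PROCESSES"
```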
diff --git a/airflow/configs/docker-compose-dl.yaml.j2 b/airflow/configs/docker-compose-dl.yaml.j2 index fd93f4d..41c3633 100644 --- a/airflow/configs/docker-compose-dl.yaml.j2 +++ b/airflow/configs/docker-compose-dl.yaml.j2 @@ -28,8 +28,8 @@ x-airflow-common: environment: &airflow-common-env - AIRFLOW__CORE__PARALLELISM: 64 - AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 32 + AIRFLOW__CORE__PARALLELISM: 128 + AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64 AIRFLOW__SCHEDULER__PARSING_PROCESSES: 4 AIRFLOW__WEBSERVER__WORKERS: 5 AIRFLOW__WEBSERVER__WORKER_CLASS: "gevent" @@ -49,8 +49,8 @@ x-airflow-common: # Backend connections - These should point to the master node # Set MASTER_HOST_IP, POSTGRES_PASSWORD, and REDIS_PASSWORD in your .env file AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow - IRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow - AIRFLOW__CELERY__BROKER_URL: redis://:${REDIS_PASSWORD}@${MASTER_HOST_IP}:52909/0 + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow + AIRFLOW__CELERY__BROKER_URL: redis://:${REDIS_PASSWORD}@${MASTER_HOST_IP}:{{ redis_port }}/0 # Remote Logging - connection is configured directly via environment variables #_PIP_ADDITIONAL_REQUIREMENTS: ${{ '{' }}_PIP_ADDITIONAL_REQUIREMENTS:- apache-airflow-providers-docker apache-airflow-providers-http thrift>=0.16.0,<=0.20.0 backoff>=2.2.1 python-dotenv==1.0.1 psutil>=5.9.0 apache-airflow-providers-amazon{{ '}' }} @@ -75,8 +75,8 @@ x-airflow-common: - ${AIRFLOW_PROJ_DIR:-.}/downloadfiles:/opt/airflow/downloadfiles - ${AIRFLOW_PROJ_DIR:-.}/addfiles:/opt/airflow/addfiles - ${AIRFLOW_PROJ_DIR:-.}/inputfiles:/opt/airflow/inputfiles - # Use AIRFLOW_UID from .env file to fix permission issues. - user: "${AIRFLOW_UID:-50000}" + # Use AIRFLOW_UID from .env file to fix permission issues. GID is set to 0 for compatibility with the Airflow image. + user: "${{ '{' }}AIRFLOW_UID:-50000{{ '}' }}:0" services: airflow-worker: @@ -108,7 +108,9 @@ services: AIRFLOW__CELERY__WORKER_QUEUES: "queue-dl,queue-dl-${HOSTNAME:-dl001}" AIRFLOW__CELERY__WORKER_TAGS: "dl" AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1" - AIRFLOW__CELERY__WORKER_CONCURRENCY: ${AIRFLOW_WORKER_DOWNLOAD_CONCURRENCY:-16} + # Use autoscaling to adjust number of workers based on load. + # Format is max_concurrency,min_concurrency. + AIRFLOW__CELERY__WORKER_AUTOSCALE: "16,4" # Use prefork pool for better compatibility with blocking libraries. 
AIRFLOW__CELERY__POOL: "prefork"
      AIRFLOW__CELERY__TASK_ACKS_LATE: "False"
diff --git a/airflow/configs/docker-compose-master.yaml.j2 b/airflow/configs/docker-compose-master.yaml.j2
index 1b7cb5d..35419cb 100644
--- a/airflow/configs/docker-compose-master.yaml.j2
+++ b/airflow/configs/docker-compose-master.yaml.j2
@@ -66,10 +66,10 @@ x-airflow-common:
     - proxynet
   environment:
     &airflow-common-env
-    AIRFLOW__CORE__PARALLELISM: 64
-    AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 32
+    AIRFLOW__CORE__PARALLELISM: 128
+    AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64
     AIRFLOW__SCHEDULER__PARSING_PROCESSES: 4
-    AIRFLOW__WEBSERVER__WORKER_CLASS: gevent
+    AIRFLOW__WEBSERVER__WORKER_CLASS: sync
     AIRFLOW__WEBSERVER__WORKERS: 8
     AIRFLOW__LOGGING__SECRET_MASK_EXCEPTION_ARGS: 'false'
@@ -101,7 +101,7 @@ x-airflow-common:
     AIRFLOW__LOGGING__REMOTE_LOGGING: 'true'
     AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER: "s3://airflow-logs"
     AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID: minio_default
     AIRFLOW__LOGGING__ENCRYPT_S3_LOGS: 'false'
     AIRFLOW__CORE__LOCAL_SETTINGS_PATH: "/opt/airflow/config/custom_task_hooks.py"
   volumes:
     - ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/dags:/opt/airflow/dags
@@ -310,17 +310,17 @@ services:
     ports:
       - "8080:8080"
     depends_on:
-      - airflow-webserver
+      airflow-webserver:
+        condition: service_started
     restart: always

   airflow-webserver:
     <<: *airflow-common
     command: webserver
+    expose:
+      - "8080"
     environment:
       <<: *airflow-common-env
-      # Trigger gevent monkeypatching for webserver.
-      # See: https://github.com/apache/airflow/pull/28283
-      _AIRFLOW_PATCH_GEVENT: "1"
     healthcheck:
       test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
       interval: 30s
@@ -511,14 +511,14 @@ services:
         - -c
         - airflow

-  # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up
-  # or by explicitly targeted on the command line e.g. docker-compose up flower.
-  # See: https://docs.docker.com/compose/profiles/
   flower:
     <<: *airflow-common
     command: celery flower
     ports:
       - "5555:5555"
+    environment:
+      <<: *airflow-common-env
+      FLOWER_BASIC_AUTH: "flower:${{ '{' }}FLOWER_PASSWORD{{ '}' }}"
     healthcheck:
       test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
       interval: 30s
@@ -530,8 +530,6 @@ services:
       <<: *airflow-common-depends-on
       airflow-init:
         condition: service_completed_successfully
-    profiles:
-      - flower

   docker-socket-proxy:
     profiles:
diff --git a/airflow/configs/docker-compose-ytdlp-ops.yaml.j2 b/airflow/configs/docker-compose-ytdlp-ops.yaml.j2
index bec7764..98007cd 100644
--- a/airflow/configs/docker-compose-ytdlp-ops.yaml.j2
+++ b/airflow/configs/docker-compose-ytdlp-ops.yaml.j2
@@ -4,17 +4,21 @@ include:
   # This automatically includes the generated camoufox service definitions and dependencies.
   # It simplifies the docker-compose command, as you no longer need to specify both files with -f.
   # The file is generated by the config-generator service and will be created even if empty.
- - docker-compose.camoufox.yaml + - ./configs/docker-compose.camoufox.yaml {% endif %} services: envoy: image: envoyproxy/envoy:v1.29-latest + {% if service_role != 'management' %} + container_name: envoy-thrift-lb-${HOSTNAME} + {% else %} container_name: envoy-thrift-lb + {% endif %} restart: unless-stopped volumes: # Mount the generated config file from the host - - ./envoy.yaml:/etc/envoy/envoy.yaml:ro + - ./configs/envoy.yaml:/etc/envoy/envoy.yaml:ro ports: # This is the single public port for all Thrift traffic - "${ENVOY_PORT:-9080}:${ENVOY_PORT:-9080}" @@ -37,12 +41,12 @@ services: {% endif %} # Ports are no longer exposed directly. Envoy will connect to them on the internal network. env_file: - - ./.env # Path is relative to the compose file location (configs directory) + - ./.env # Path is relative to the project directory volumes: - context-data:/app/context-data {% if service_role != 'management' %} # Mount the generated endpoints file to make it available to the server - - ../camoufox/camoufox_endpoints.json:/app/config/camoufox_endpoints.json:ro + - ./configs/camoufox_endpoints.json:/app/config/camoufox_endpoints.json:ro {% endif %} # Mount the plugin source code for live updates without rebuilding the image. # Assumes the plugin source is in a 'bgutil-ytdlp-pot-provider' directory @@ -95,8 +99,6 @@ services: volumes: context-data: - name: context-data - external: true {% if service_role == 'management' or not camoufox_proxies %} networks: diff --git a/airflow/configs/docker-compose.camoufox.yaml.j2 b/airflow/configs/docker-compose.camoufox.yaml.j2 index 387eb74..7cd4cc4 100644 --- a/airflow/configs/docker-compose.camoufox.yaml.j2 +++ b/airflow/configs/docker-compose.camoufox.yaml.j2 @@ -5,16 +5,17 @@ # and adds the necessary dependencies to the main services. 
 services:
 {% for proxy in camoufox_proxies %}
+{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %}
 {% set container_base_port = camoufox_port + loop.index0 * worker_count %}
 {% set host_base_port = container_base_port %}
-  camoufox-{{ loop.index }}:
+  camoufox-{{ proxy_port }}-{{ loop.index }}:
     build:
       context: ../camoufox
       dockerfile: Dockerfile
       args:
         VNC_PASSWORD: "{{ vnc_password }}"
     image: camoufox:latest
-    container_name: ytdlp-ops-camoufox-{{ loop.index }}-1
+    container_name: ytdlp-ops-camoufox-{{ proxy_port }}-{{ loop.index }}-1
     restart: unless-stopped
     shm_size: '2gb' # Mitigates browser crashes due to shared memory limitations
     ports:
@@ -27,7 +28,7 @@ services:
       - CAMOUFOX_RESTART_THRESHOLD_MB=1500
     volumes:
       - /tmp/.X11-unix:/tmp/.X11-unix:rw
-      - camoufox-data-{{ loop.index }}:/app/context-data
+      - camoufox-data-{{ proxy_port }}-{{ loop.index }}:/app/context-data
       - camoufox-browser-cache:/root/.cache/ms-playwright # Persist browser binaries
     command: [
       "--ws-host", "0.0.0.0",
@@ -62,7 +63,8 @@ services:
     restart: "no"
     depends_on:
 {% for proxy in camoufox_proxies %}
-      - camoufox-{{ loop.index }}
+{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %}
+      - camoufox-{{ proxy_port }}-{{ loop.index }}
 {% endfor %}
     networks:
       - proxynet
@@ -70,7 +72,8 @@ services:
 volumes:
 {% for proxy in camoufox_proxies %}
-  camoufox-data-{{ loop.index }}:
+{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %}
+  camoufox-data-{{ proxy_port }}-{{ loop.index }}:
 {% endfor %}
 {% if camoufox_proxies %}
   camoufox-browser-cache:
diff --git a/airflow/configs/docker-compose.config-generate.yaml b/airflow/configs/docker-compose.config-generate.yaml
index aaeefca..42ea4ea 100644
--- a/airflow/configs/docker-compose.config-generate.yaml
+++ b/airflow/configs/docker-compose.config-generate.yaml
@@ -1,6 +1,5 @@
-# This file is used to generate the necessary configuration files for the main application stack.
-# It should be run as a one-off command before starting the main services.
-# Example: docker-compose -f airflow/docker-compose.config-generate.yaml run --rm config-generator
+version: '3.8'
+
 services:
   config-generator:
     image: python:3.12-slim
@@ -9,6 +8,6 @@ services:
       - ./.env
     volumes:
       # Mount the entire project directory to access scripts and write output files
-      - ./:/app
+      - ../:/app
     command: >
       sh -c "pip install jinja2 && python3 /app/generate_envoy_config.py"
diff --git a/airflow/dags/.DS_Store b/airflow/dags/.DS_Store
new file mode 100644
index 0000000..5008ddf
Binary files /dev/null and b/airflow/dags/.DS_Store differ
diff --git a/airflow/dags/README.ru.md b/airflow/dags/README.ru.md
new file mode 100644
index 0000000..92df83c
--- /dev/null
+++ b/airflow/dags/README.ru.md
@@ -0,0 +1,88 @@
+# YTDLP Airflow DAGs: Architecture and Overview
+
+This document describes the architecture and purpose of the DAGs used to download videos from YouTube. The system is built around a continuous, self-sustaining loop for parallel, fault-tolerant processing.
+
+## Core Processing Loop
+
+Processing is carried out by two main DAGs that work as a pair: an orchestrator and a worker.
+
+### `ytdlp_ops_orchestrator` (the "ignition system")
+
+- **Purpose:** This DAG acts as the "ignition system" that starts processing. It is triggered manually to launch a specified number of parallel worker loops.
+- **How it works:**
+  - It does **not** process URLs itself.
+  - Its only job is to launch the configured number of `ytdlp_ops_worker_per_url` DAG runs.
+  - It passes all the necessary configuration (account pool, Redis connection, etc.) to the workers.
+
+### `ytdlp_ops_worker_per_url` (self-sustaining worker)
+
+- **Purpose:** This DAG processes a single URL and is designed to run in a continuous loop.
+- **How it works:**
+  1. **Start:** The initial run is triggered by `ytdlp_ops_orchestrator`.
+  2. **Fetching work:** The worker pops one URL from the `_inbox` queue in Redis. If the queue is empty, the worker run finishes and its processing "line" stops.
+  3. **Processing:** It talks to the `ytdlp-ops-server` service to obtain `info.json` and a proxy, then downloads the video.
+  4. **Continue or stop:**
+     - **On success:** It triggers a new instance of itself, creating a continuous loop that processes the next URL.
+     - **On failure:** The loop is interrupted (if `stop_on_failure` is set to `True`), stopping this processing "line". This prevents one problematic URL or account from halting the entire system.
+
+## Management DAGs
+
+### `ytdlp_mgmt_proxy_account`
+
+- **Purpose:** The primary tool for monitoring and managing the state of the resources used by `ytdlp-ops-server`.
+- **Functionality:**
+  - **View statuses:** Shows the current status of all proxies and accounts (e.g. `ACTIVE`, `BANNED`, `RESTING`).
+  - **Proxy management:** Lets you manually ban, unban, or reset the status of a proxy.
+  - **Account management:** Lets you manually ban or unban accounts.
+
+### `ytdlp_mgmt_queues`
+
+- **Purpose:** Provides a set of tools for managing the Redis queues used by the processing pipeline.
+- **Functionality (via the `action` parameter):**
+  - `add_videos`: Add one or more YouTube URLs to the queue.
+  - `clear_queue`: Clear (delete) the specified Redis key.
+  - `list_contents`: Inspect the contents of a Redis key (list or hash).
+  - `check_status`: Check the overall state of the queues (type, size).
+  - `requeue_failed`: Move all URLs from the `_fail` failure queue back to the `_inbox` queue for reprocessing (see the sketch at the end of this document).
+
+## Resource Management Strategy (Proxies and Accounts)
+
+The system uses an intelligent strategy to manage the lifecycle and state of accounts and proxies in order to maximize the success rate and minimize bans.
+
+- **Account lifecycle ("cooldown"):**
+  - To prevent "burnout", accounts are automatically moved into a `RESTING` state after a period of heavy use.
+  - Once the rest period expires, they automatically return to `ACTIVE` and become available to workers again.
+
+- **Smart ban strategy:**
+  - **Ban the account first:** When a serious error occurs (e.g. `BOT_DETECTED`), the system penalizes **only the account** that caused the failure. The proxy keeps working.
+  - **Sliding-window proxy bans:** A proxy is banned automatically only if it shows **systematic failures with DIFFERENT accounts** within a short time window. This is a reliable indicator that the proxy itself is the problem.
+
+- **Monitoring:**
+  - The `ytdlp_mgmt_proxy_account` DAG is the primary monitoring tool. It shows the current status of all resources, including the time remaining until banned or resting accounts become active again.
+  - The execution graph of the `ytdlp_ops_worker_per_url` DAG now explicitly shows steps such as `assign_account`, `get_token`, `ban_account`, and `retry_get_token`, which makes debugging much easier to follow.
+
+## External Services
+
+### `ytdlp-ops-server` (Thrift service)
+
+- **Purpose:** An external service that provides the authentication data (tokens, cookies, proxy) needed to download videos.
+- **Interaction:** The worker DAG (`ytdlp_ops_worker_per_url`) calls this service before starting a download to obtain the data `yt-dlp` needs.
+
+## Worker DAG Logic (`ytdlp_ops_worker_per_url`)
+
+This DAG is the workhorse of the system. It is designed as a self-sustaining loop that processes one URL per run.
+
+### Tasks and their purpose:
+
+- **`pull_url_from_redis`**: Pops one URL from the `_inbox` queue in Redis. If the queue is empty, the DAG finishes with a `skipped` status, stopping this processing "line".
+- **`assign_account`**: Picks an account for the job. It reuses the account that succeeded in the previous run of its "line" (account affinity); on the first run it picks a random account.
+- **`get_token`**: The core task. It calls `ytdlp-ops-server` to obtain `info.json`.
+- **`handle_bannable_error_branch`**: If `get_token` fails with a ban-worthy error, this branching task decides what to do next based on the `on_bannable_failure` policy.
+- **`ban_account_and_prepare_for_retry`**: If the policy allows a retry, this task bans the failed account and picks a new one for the retry.
+- **`retry_get_token`**: Makes a second attempt to obtain a token with the new account.
+- **`ban_second_account_and_proxy`**: If the second attempt also fails, this task bans the second account and the proxy that was used.
+- **`download_and_probe`**: If `get_token` (or `retry_get_token`) succeeded, this task uses `yt-dlp` to download the media and `ffmpeg` to verify the integrity of the downloaded file.
+- **`mark_url_as_success`**: If `download_and_probe` succeeded, this task records the result in the `_result` hash in Redis.
+- **`handle_generic_failure`**: If any of the main tasks fails with an unrecoverable error, this task records detailed error information in the `_fail` hash in Redis.
+- **`decide_what_to_do_next`**: A branching task that runs after success or failure. It decides whether to continue the loop.
+- **`trigger_self_run`**: The task that actually triggers the next DAG instance, creating the continuous loop (a sketch of this pattern follows below).
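+## Sketches (Illustrative)
+
+A minimal sketch of the self-triggering loop pattern described above, assuming Airflow 2.x and the stock `TriggerDagRunOperator`. The DAG id and `conf` keys here are illustrative; the real implementation lives in `ytdlp_ops_worker_per_url.py`.
+
+```python
+from datetime import datetime
+
+from airflow import DAG
+from airflow.operators.trigger_dagrun import TriggerDagRunOperator
+
+with DAG(
+    dag_id="worker_loop_sketch",      # hypothetical id, not the real DAG
+    start_date=datetime(2024, 1, 1),
+    schedule_interval=None,           # only ever triggered, never scheduled
+    catchup=False,
+    params={"queue_name": "video_queue"},
+) as dag:
+    # After a URL is fully processed (success or reported failure), the worker
+    # re-triggers itself so its "line" keeps pulling from the _inbox queue.
+    trigger_self_run = TriggerDagRunOperator(
+        task_id="trigger_self_run",
+        trigger_dag_id="worker_loop_sketch",             # points back at this same DAG
+        conf={"queue_name": "{{ params.queue_name }}"},  # carry loop settings forward
+    )
+```
+
+And a sketch of what `requeue_failed` amounts to, assuming (as the worker code suggests) that `_inbox` is a Redis list and `_fail` is a hash keyed by URL:
+
+```python
+import redis
+
+def requeue_failed(client: redis.Redis, queue_name: str = "video_queue") -> int:
+    """Move every URL from the _fail hash back into the _inbox list."""
+    fail_key, inbox_key = f"{queue_name}_fail", f"{queue_name}_inbox"
+    moved = 0
+    for url in client.hkeys(fail_key):
+        client.rpush(inbox_key, url)  # make the URL available to workers again
+        client.hdel(fail_key, url)    # drop the stale failure record
+        moved += 1
+    return moved
+```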
diff --git a/airflow/dags/__pycache__/ytdlp_service_deploy.cpython-312.pyc b/airflow/dags/__pycache__/ytdlp_service_deploy.cpython-312.pyc deleted file mode 100644 index 778532a..0000000 Binary files a/airflow/dags/__pycache__/ytdlp_service_deploy.cpython-312.pyc and /dev/null differ diff --git a/airflow/dags/__pycache__/ytdlp_service_test.cpython-312.pyc b/airflow/dags/__pycache__/ytdlp_service_test.cpython-312.pyc deleted file mode 100644 index 248433d..0000000 Binary files a/airflow/dags/__pycache__/ytdlp_service_test.cpython-312.pyc and /dev/null differ diff --git a/airflow/dags/__pycache__/ytdlp_token_dag.cpython-312.pyc b/airflow/dags/__pycache__/ytdlp_token_dag.cpython-312.pyc deleted file mode 100644 index d59e820..0000000 Binary files a/airflow/dags/__pycache__/ytdlp_token_dag.cpython-312.pyc and /dev/null differ diff --git a/airflow/dags/ytdlp_mgmt_queues.py b/airflow/dags/ytdlp_mgmt_queues.py index d54acea..be3b573 100644 --- a/airflow/dags/ytdlp_mgmt_queues.py +++ b/airflow/dags/ytdlp_mgmt_queues.py @@ -634,7 +634,7 @@ with DAG( ), # --- Params for 'list_contents' --- "queue_to_list": Param( - 'video_queue_inbox,video_queue_fail', + 'video_queue_inbox,video_queue_result,video_queue_fail', type="string", title="[list_contents] Queues to List", description="Comma-separated list of exact Redis key names to list.", diff --git a/airflow/dags/ytdlp_ops_worker_per_url.py b/airflow/dags/ytdlp_ops_worker_per_url.py index b9383a3..d931522 100644 --- a/airflow/dags/ytdlp_ops_worker_per_url.py +++ b/airflow/dags/ytdlp_ops_worker_per_url.py @@ -238,9 +238,15 @@ def handle_bannable_error_branch(task_id_to_check: str, **context): logger.error(f"Task {task_id_to_check} failed without error details. Marking as fatal.") return 'handle_fatal_error' + error_message = error_details.get('error_message', '').strip() error_code = error_details.get('error_code', '').strip() policy = params.get('on_bannable_failure', 'retry_with_new_account') + # Check if this is an age confirmation error - should not stop the loop + if "Sign in to confirm your age" in error_message or "confirm your age" in error_message.lower(): + logger.info(f"Age confirmation error detected for '{task_id_to_check}'. This is a content restriction, not a bot detection issue.") + return 'handle_age_restriction_error' + # Fatal Thrift connection errors that should stop all processing. if error_code == 'TRANSPORT_ERROR': logger.error(f"Fatal Thrift connection error from '{task_id_to_check}'. 
Stopping processing.") @@ -574,8 +580,15 @@ def report_failure_and_continue(**context): try: client = _get_redis_client(params['redis_conn_id']) - client.hset(f"{params['queue_name']}_result", url, json.dumps(result_data)) - logger.info(f"Stored failure result for URL '{url}'.") + result_queue = f"{params['queue_name']}_result" + fail_queue = f"{params['queue_name']}_fail" + + with client.pipeline() as pipe: + pipe.hset(result_queue, url, json.dumps(result_data)) + pipe.hset(fail_queue, url, json.dumps(result_data)) + pipe.execute() + + logger.info(f"Stored failure result for URL '{url}' in '{result_queue}' and '{fail_queue}'.") except Exception as e: logger.error(f"Could not report failure to Redis: {e}", exc_info=True) @@ -610,8 +623,8 @@ def handle_fatal_error(**context): # Report failure to Redis so the URL can be reprocessed later try: result_data = { - 'status': 'failed', - 'end_time': time.time(), + 'status': 'failed', + 'end_time': time.time(), 'url': url, 'dag_run_id': context['dag_run'].run_id, 'error': 'fatal_error', @@ -619,8 +632,15 @@ def handle_fatal_error(**context): 'error_details': error_details } client = _get_redis_client(params['redis_conn_id']) - client.hset(f"{params['queue_name']}_result", url, json.dumps(result_data)) - logger.info(f"Stored fatal error result for URL '{url}' in Redis for later reprocessing.") + result_queue = f"{params['queue_name']}_result" + fail_queue = f"{params['queue_name']}_fail" + + with client.pipeline() as pipe: + pipe.hset(result_queue, url, json.dumps(result_data)) + pipe.hset(fail_queue, url, json.dumps(result_data)) + pipe.execute() + + logger.info(f"Stored fatal error result for URL '{url}' in '{result_queue}' and '{fail_queue}' for later reprocessing.") except Exception as e: logger.error(f"Could not report fatal error to Redis: {e}", exc_info=True) @@ -669,8 +689,14 @@ def handle_retry_failure_branch(task_id_to_check: str, **context): if not error_details: return 'handle_fatal_error' + error_message = error_details.get('error_message', '').strip() error_code = error_details.get('error_code', '').strip() + # Check if this is an age confirmation error - should not stop the loop + if "Sign in to confirm your age" in error_message or "confirm your age" in error_message.lower(): + logger.info(f"Age confirmation error detected on retry from '{task_id_to_check}'. Reporting failure and continuing loop.") + return 'report_failure_and_continue' + if error_code == 'TRANSPORT_ERROR': logger.error(f"Fatal Thrift connection error on retry from '{task_id_to_check}'.") return 'handle_fatal_error' @@ -715,6 +741,61 @@ def coalesce_token_data(get_token_result=None, retry_get_token_result=None): # This should not be reached if trigger_rule='one_success' is working correctly. raise AirflowException("Could not find a successful token result from any attempt.") + +@task(trigger_rule='one_failed') +def handle_age_restriction_error(**context): + """ + Handles age restriction errors specifically. These are content restrictions + that cannot be bypassed by using different accounts, so we report the failure + and continue the processing loop rather than stopping it. 
+ """ + params = context['params'] + ti = context['task_instance'] + url = params.get('url_to_process', 'unknown') + + # Collect error details + error_details = {} + first_token_task_id = 'get_token' + retry_token_task_id = 'retry_get_token' + + first_token_error = ti.xcom_pull(task_ids=first_token_task_id, key='error_details') + retry_token_error = ti.xcom_pull(task_ids=retry_token_task_id, key='error_details') + + # Use the most recent error details + if retry_token_error: + error_details = retry_token_error + elif first_token_error: + error_details = first_token_error + + logger.error(f"Age restriction error for URL '{url}'. This content requires age confirmation and cannot be bypassed.") + + # Report failure to Redis so the URL can be marked as failed + try: + result_data = { + 'status': 'failed', + 'end_time': time.time(), + 'url': url, + 'dag_run_id': context['dag_run'].run_id, + 'error': 'age_restriction', + 'error_message': 'Content requires age confirmation', + 'error_details': error_details + } + client = _get_redis_client(params['redis_conn_id']) + result_queue = f"{params['queue_name']}_result" + fail_queue = f"{params['queue_name']}_fail" + + with client.pipeline() as pipe: + pipe.hset(result_queue, url, json.dumps(result_data)) + pipe.hset(fail_queue, url, json.dumps(result_data)) + pipe.execute() + + logger.info(f"Stored age restriction error for URL '{url}' in '{result_queue}' and '{fail_queue}'.") + except Exception as e: + logger.error(f"Could not report age restriction error to Redis: {e}", exc_info=True) + + # This is NOT a fatal error for the processing loop - we just continue with the next URL + + # ============================================================================= # DAG Definition with TaskGroups # ============================================================================= @@ -755,6 +836,7 @@ with DAG( fatal_error_task = handle_fatal_error() report_failure_task = report_failure_and_continue() continue_loop_task = continue_processing_loop() + age_restriction_task = handle_age_restriction_error() # --- Task Group 1: Initial Attempt --- with TaskGroup("initial_attempt", tooltip="Initial token acquisition attempt") as initial_attempt_group: @@ -770,7 +852,7 @@ with DAG( ) first_token_attempt >> initial_branch_task - initial_branch_task >> [fatal_error_task, ban_and_report_immediately_task] + initial_branch_task >> [fatal_error_task, ban_and_report_immediately_task, age_restriction_task] # --- Task Group 2: Retry Logic --- with TaskGroup("retry_logic", tooltip="Retry logic with account management") as retry_logic_group: @@ -820,7 +902,7 @@ with DAG( direct_retry_account_task >> coalesced_retry_data coalesced_retry_data >> retry_token_task retry_token_task >> retry_branch_task - retry_branch_task >> [fatal_error_task, report_failure_task, ban_after_retry_report_task] + retry_branch_task >> [fatal_error_task, report_failure_task, ban_after_retry_report_task, age_restriction_task] ban_after_retry_report_task >> report_failure_task # --- Task Group 3: Download and Processing --- @@ -849,10 +931,13 @@ with DAG( # --- DAG Dependencies between TaskGroups --- # Initial attempt can lead to retry logic or direct failure - initial_branch_task >> [retry_logic_group, fatal_error_task, ban_and_report_immediately_task] + initial_branch_task >> [retry_logic_group, fatal_error_task, ban_and_report_immediately_task, age_restriction_task] # Retry logic leads to download processing on success or failure reporting on failure retry_branch_task >> [download_processing_group, 
 
     # Ban and report immediately leads to failure reporting
     ban_and_report_immediately_task >> report_failure_task
+
+    # Age restriction error leads to failure reporting and continues the loop
+    age_restriction_task >> continue_loop_task
diff --git a/airflow/generate_envoy_config.py b/airflow/generate_envoy_config.py
index e5251b7..ff446c3 100644
--- a/airflow/generate_envoy_config.py
+++ b/airflow/generate_envoy_config.py
@@ -113,6 +113,8 @@ def generate_configs():
     # The templates are in the 'configs' directory.
     env = Environment(loader=FileSystemLoader(configs_dir), trim_blocks=True, lstrip_blocks=True)
+    # Make the helper function available to Jinja2 templates
+    env.globals['_get_port_from_proxy_url'] = _get_port_from_proxy_url
 
     # Get service role from environment to determine what to generate
     service_role = os.getenv('SERVICE_ROLE', 'all-in-one')
@@ -165,11 +167,14 @@ def generate_configs():
     for i, proxy in enumerate(camoufox_proxies):
         proxy_port = _get_port_from_proxy_url(proxy['url'])
         if proxy_port:
+            # Use the correct container name pattern that matches the docker-compose template
+            # The container name in the template is: ytdlp-ops-camoufox-{{ proxy_port }}-{{ loop.index }}-1
+            container_name = f"ytdlp-ops-camoufox-{proxy_port}-{i+1}-1"
             container_base_port = camoufox_port + i * worker_count
             endpoints = []
             for j in range(worker_count):
                 port = container_base_port + j
-                endpoints.append(f"ws://{camoufox_backend_prefix}{i+1}:{port}/mypath")
+                endpoints.append(f"ws://{container_name}:{port}/mypath")
 
             endpoints_map[proxy_port] = {
                 "ws_endpoints": endpoints
diff --git a/airflow/requirements.txt b/airflow/requirements.txt
deleted file mode 100644
index 0d0b044..0000000
--- a/airflow/requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-thrift>=0.16.0,<=0.20.0
-backoff>=2.2.1
-python-dotenv==1.0.1
-psutil>=5.9.0
-docker>=6.0.0
-apache-airflow-providers-docker
-redis
-ffprobe3
-ffmpeg-python
\ No newline at end of file
diff --git a/airflow/roles/airflow-master/tasks/main.yml b/airflow/roles/airflow-master/tasks/main.yml
index 12d63b0..479c8d7 100644
--- a/airflow/roles/airflow-master/tasks/main.yml
+++ b/airflow/roles/airflow-master/tasks/main.yml
@@ -9,85 +9,222 @@
     path: "{{ airflow_master_dir }}"
     state: directory
     owner: "{{ ssh_user }}"
-    group: ytdl
+    group: "{{ deploy_group }}"
     mode: '0755'
   become: yes
   when: not master_dir_stat.stat.exists
 
+- name: Ensure Airflow master configs directory exists
+  file:
+    path: "{{ airflow_master_dir }}/configs"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  become: yes
+
+- name: Ensure Airflow master config directory exists
+  file:
+    path: "{{ airflow_master_dir }}/config"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  become: yes
+
+- name: Ensure Airflow operational directories exist with correct permissions
+  file:
+    path: "{{ airflow_master_dir }}/{{ item }}"
+    state: directory
+    owner: "{{ airflow_uid }}"
+    group: "{{ deploy_group }}"
+    mode: '0775'
+  become: yes
+  loop:
+    - "dags"
+    - "logs"
+    - "plugins"
+    - "downloadfiles"
+    - "addfiles"
+    - "inputfiles"
+
 - name: Check if source directories exist
   stat:
-    path: "{{ playbook_dir }}/../{{ item }}"
+    path: "../{{ item }}"
   register: source_dirs
   loop:
-    - "airflow/inputfiles"
     - "airflow/plugins"
     - "airflow/addfiles"
     - "airflow/bgutil-ytdlp-pot-provider"
 
+- name: "Log: Syncing Airflow core files"
+  debug:
+    msg: "Syncing DAGs, configs, and Python source code to the master node."
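
Stepping back to the DAG changes above: every failure is now mirrored into a dedicated `{queue_name}_fail` hash alongside the usual `{queue_name}_result` hash, so an external reprocessing job can scan outstanding failures without walking the full result set. A minimal consumer sketch, assuming redis-py, a hypothetical queue name `video_urls`, and that the inbound queue is a plain Redis list of the same name (none of this is part of the playbooks):

```python
import json

import redis

QUEUE = "video_urls"  # hypothetical; the DAG reads this from params['queue_name']

client = redis.Redis(host="localhost", port=6379, decode_responses=True)

# The DAG writes each failure to both <queue>_result and <queue>_fail,
# so the fail hash alone is enough to drive requeueing.
for url, raw in client.hgetall(f"{QUEUE}_fail").items():
    record = json.loads(raw)
    if record.get("error") == "age_restriction":
        # Content-level restriction: retrying with another account will not
        # help, so leave it in the hash for reporting instead of requeueing.
        continue
    client.rpush(QUEUE, url)           # assumed inbound queue shape (a list)
    client.hdel(f"{QUEUE}_fail", url)  # clear the failure marker once requeued
```

Keeping `_result` as the complete audit trail while `_fail` holds only outstanding failures is presumably the point of the double write; the Redis pipeline keeps the two hashes consistent.
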
+
 - name: Sync Airflow master files
   synchronize:
-    src: "{{ playbook_dir }}/../{{ item }}"
+    src: "../{{ item }}"
     dest: "{{ airflow_master_dir }}/"
     archive: yes
     recursive: yes
     delete: yes
+    rsync_path: "sudo rsync"
     rsync_opts: "{{ rsync_default_opts }}"
   loop:
     - "airflow/Dockerfile"
-    - "airflow/docker-compose-master.yaml"
-    - "airflow/dags/"
-    - "airflow/config/"
+    - "airflow/Dockerfile.caddy"
+    - "airflow/.dockerignore"
+    - "airflow/dags"
+    - "airflow/inputfiles"
     - "setup.py"
-    - "yt_ops_services/"
-    - "thrift_model/"
+    - "yt_ops_services"
+    - "thrift_model"
     - "VERSION"
-    - "airflow/init-airflow.sh"
-    - "airflow/nginx.conf"
+    - "airflow/update-yt-dlp.sh"
     - "get_info_json_client.py"
     - "proxy_manager_client.py"
+    - "utils"
+
+- name: Copy custom Python config files to master
+  copy:
+    src: "../airflow/config/{{ item }}"
+    dest: "{{ airflow_master_dir }}/config/{{ item }}"
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0644'
+  become: yes
+  loop:
+    - "custom_task_hooks.py"
+    - "airflow_local_settings.py"
+
+- name: Ensure any existing airflow.cfg directory is removed
+  file:
+    path: "{{ airflow_master_dir }}/config/airflow.cfg"
+    state: absent
+  become: yes
+  ignore_errors: yes
+
+- name: Copy airflow.cfg to master
+  copy:
+    src: "../airflow/airflow.cfg"
+    dest: "{{ airflow_master_dir }}/config/airflow.cfg"
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0644'
+  become: yes
+
+- name: Sync Airflow master config files
+  synchronize:
+    src: "../airflow/configs/{{ item }}"
+    dest: "{{ airflow_master_dir }}/configs/"
+    archive: yes
+    recursive: yes
+    rsync_path: "sudo rsync"
+    rsync_opts: "{{ rsync_default_opts }}"
+  loop:
+    - "nginx.conf"
+    - "Caddyfile"
 
 - name: Sync optional directories if they exist
   synchronize:
-    src: "{{ playbook_dir }}/../{{ item }}/"
-    dest: "{{ airflow_master_dir }}/{{ item | basename }}/"
+    src: "../{{ item.item }}/"
+    dest: "{{ airflow_master_dir }}/{{ item.item | basename }}/"
     archive: yes
     recursive: yes
     delete: yes
+    rsync_path: "sudo rsync"
     rsync_opts: "{{ rsync_default_opts }}"
-  loop:
-    - "airflow/inputfiles"
-    - "airflow/plugins"
-    - "airflow/addfiles"
-    - "airflow/bgutil-ytdlp-pot-provider"
-  when: source_dirs.results | selectattr('item', 'equalto', item) | map(attribute='stat.exists') | first
+  loop: "{{ source_dirs.results }}"
+  when: item.stat.exists
 
 - name: Sync pangramia thrift files
   synchronize:
-    src: "{{ playbook_dir }}/../thrift_model/gen_py/pangramia/"
+    src: "../thrift_model/gen_py/pangramia/"
     dest: "{{ airflow_master_dir }}/pangramia/"
     archive: yes
     recursive: yes
     delete: yes
+    rsync_path: "sudo rsync"
     rsync_opts: "{{ rsync_default_opts }}"
 
-- name: Create .env file for Airflow master service
+- name: Template docker-compose file for master
   template:
-    src: "../../templates/.env.master.j2"
-    dest: "{{ airflow_master_dir }}/.env"
+    src: "{{ playbook_dir }}/../airflow/configs/docker-compose-master.yaml.j2"
+    dest: "{{ airflow_master_dir }}/configs/docker-compose-master.yaml"
     mode: "{{ file_permissions }}"
     owner: "{{ ssh_user }}"
-    group: ytdl
-  vars:
-    service_role: "master"
+    group: "{{ deploy_group }}"
+  become: yes
+
+- name: Template Redis connection file
+  template:
+    src: "../airflow/config/redis_default_conn.json.j2"
+    dest: "{{ airflow_master_dir }}/config/redis_default_conn.json"
+    mode: "{{ file_permissions }}"
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+  become: yes
+
+- name: Template Minio connection file for master
+  template:
+    src: "../airflow/config/minio_default_conn.json.j2"
+    dest: "{{ airflow_master_dir }}/config/minio_default_conn.json"
+    mode: "{{ file_permissions }}"
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+  become: yes
+
+- name: Ensure config directory is group-writable for Airflow initialization
+  file:
+    path: "{{ airflow_master_dir }}/config"
+    state: directory
+    mode: '0775'
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+  become: yes
+
+- name: Ensure airflow.cfg is group-writable for Airflow initialization
+  file:
+    path: "{{ airflow_master_dir }}/config/airflow.cfg"
+    state: file
+    mode: '0664'
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+  become: yes
 
 - name: Create symlink for docker-compose.yaml
   file:
-    src: "{{ airflow_master_dir }}/docker-compose-master.yaml"
+    src: "{{ airflow_master_dir }}/configs/docker-compose-master.yaml"
     dest: "{{ airflow_master_dir }}/docker-compose.yaml"
     state: link
     owner: "{{ ssh_user }}"
-    group: ytdl
+    group: "{{ deploy_group }}"
     force: yes
+    follow: no
+
+- name: Ensure correct permissions for build context
+  file:
+    path: "{{ airflow_master_dir }}"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    recurse: yes
+  become: yes
+
+- name: Ensure postgres-data directory exists on master and has correct permissions
+  file:
+    path: "{{ airflow_master_dir }}/postgres-data"
+    state: directory
+    owner: "999" # UID for the 'postgres' user in the official postgres image
+    group: "999" # GID for the 'postgres' group in the official postgres image
+    mode: '0700'
+  become: yes
+
+- name: Set group-writable and setgid permissions on master logs directory contents
+  shell: |
+    find {{ airflow_master_dir }}/logs -type d -exec chmod g+rws {} +
+    find {{ airflow_master_dir }}/logs -type f -exec chmod g+rw {} +
+  become: yes
 
 - name: Verify Dockerfile exists in build directory
   stat:
@@ -99,26 +236,96 @@
     msg: "Dockerfile not found in {{ airflow_master_dir }}. Cannot build image."
   when: not dockerfile_stat.stat.exists
 
+- name: "Log: Building Airflow Docker image"
+  debug:
+    msg: "Building the main Airflow Docker image ({{ airflow_image_name }}) locally on the master node. This may take a few minutes."
+
 - name: Build Airflow master image
   community.docker.docker_image:
     name: "{{ airflow_image_name }}"
     build:
       path: "{{ airflow_master_dir }}"
-      dockerfile: "Dockerfile"
+      dockerfile: "Dockerfile" # Explicitly specify the Dockerfile name
     source: build
     force_source: true
+  when: not fast_deploy | default(false)
 
-- name: Run Airflow init script
-  shell:
-    cmd: "chmod +x init-airflow.sh && ./init-airflow.sh"
-    chdir: "{{ airflow_master_dir }}"
+- name: "Log: Preparing assets for Caddy image"
+  debug:
+    msg: "Extracting static assets from the Airflow image to build the Caddy reverse proxy."
+  when: not fast_deploy | default(false)
+
+- name: Prepare Caddy asset extraction directory
+  file:
+    path: "{{ airflow_master_dir }}/caddy_build_assets"
+    state: "{{ item }}"
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  loop:
+    - absent
+    - directory
   become: yes
-  become_user: "{{ ssh_user }}"
+  when: not fast_deploy | default(false)
+
+- name: Ensure subdirectories exist with correct permissions
+  file:
+    path: "{{ airflow_master_dir }}/caddy_build_assets/{{ item }}"
+    state: directory
+    owner: "{{ ssh_user }}"
+    group: "{{ deploy_group }}"
+    mode: '0755'
+  loop:
+    - "appbuilder"
+    - "dist"
+  become: yes
+  when: not fast_deploy | default(false)
+
+- name: Extract static assets from Airflow image for Caddy build
+  shell: |
+    set -e
+    CONTAINER_ID=$(docker create {{ airflow_image_name }})
+    # Dynamically find paths inside the container
+    APPBUILDER_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static", "appbuilder"))')
+    AIRFLOW_DIST_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))')
+    # Copy assets from container to host
+    docker cp "${CONTAINER_ID}:${APPBUILDER_PATH}/." "./caddy_build_assets/appbuilder"
+    docker cp "${CONTAINER_ID}:${AIRFLOW_DIST_PATH}/." "./caddy_build_assets/dist"
+    docker rm -f $CONTAINER_ID
+    # Pre-compress assets
+    find ./caddy_build_assets/appbuilder -type f -print0 | xargs -0 gzip -k -9
+    find ./caddy_build_assets/dist -type f -print0 | xargs -0 gzip -k -9
+  args:
+    chdir: "{{ airflow_master_dir }}"
+    executable: /bin/bash
+  become: yes
+  register: asset_extraction
+  changed_when: asset_extraction.rc == 0
+  when: not fast_deploy | default(false)
+
+- name: "Log: Building Caddy reverse proxy image"
+  debug:
+    msg: "Building the Caddy image (pangramia/ytdlp-ops-caddy:latest) to serve static assets."
+
+- name: Build Caddy image
+  community.docker.docker_image:
+    name: "pangramia/ytdlp-ops-caddy:latest"
+    build:
+      path: "{{ airflow_master_dir }}"
+      dockerfile: "Dockerfile.caddy"
+    source: build
+    force_source: true
+  when: not fast_deploy | default(false)
+
+- name: "Log: Starting Airflow services"
+  debug:
+    msg: "Starting Airflow core services (webserver, scheduler, etc.) on the master node using docker-compose."
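
The asset-extraction task above shells out to `docker create`/`docker run`/`docker cp`. For readers who find the shell pipeline hard to follow, here is a rough equivalent of the appbuilder half using the `docker` Python SDK (illustrative only; it is not part of the role, and the `dist` assets would be handled the same way):

```python
import gzip
import io
import pathlib
import shutil
import tarfile

import docker

IMAGE = "pangramia/ytdlp-ops-airflow:latest"
client = docker.from_env()

# Ask Python inside the image where the assets live, mirroring the
# dynamic path discovery in the shell task.
expr = (
    "import os, flask_appbuilder; "
    "print(os.path.join(os.path.dirname(flask_appbuilder.__file__), 'static', 'appbuilder'))"
)
appbuilder_path = client.containers.run(
    IMAGE, ["python", "-c", expr], entrypoint="", remove=True
).decode().strip()

# Pull the directory out of a stopped container as a tar stream
# (the SDK's counterpart to `docker cp`).
container = client.containers.create(IMAGE)
try:
    bits, _stat = container.get_archive(appbuilder_path)
    archive = io.BytesIO(b"".join(bits))
    with tarfile.open(fileobj=archive) as tar:
        # The archive is rooted at "appbuilder/", so extracting into the
        # build-assets directory yields caddy_build_assets/appbuilder/.
        tar.extractall("caddy_build_assets")
finally:
    container.remove(force=True)

# Pre-compress so Caddy can serve the .gz files directly.
for path in pathlib.Path("caddy_build_assets/appbuilder").rglob("*"):
    if path.is_file() and path.suffix != ".gz":
        with open(path, "rb") as src, gzip.open(f"{path}.gz", "wb", compresslevel=9) as dst:
            shutil.copyfileobj(src, dst)
```

The shell version has one practical advantage: it runs with the same `docker` binary and privileges the rest of the role already depends on, so the SDK variant is best treated as documentation.
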
 
 - name: Start Airflow master service
   community.docker.docker_compose_v2:
     project_src: "{{ airflow_master_dir }}"
     files:
-      - "docker-compose-master.yaml"
+      - "configs/docker-compose-master.yaml"
     state: present
     remove_orphans: true
+    pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}"
diff --git a/airflow/roles/airflow-worker/tasks/main.yml b/airflow/roles/airflow-worker/tasks/main.yml
deleted file mode 100644
index 4c63e07..0000000
--- a/airflow/roles/airflow-worker/tasks/main.yml
+++ /dev/null
@@ -1,103 +0,0 @@
----
-- name: Check if Airflow worker deployment directory exists
-  stat:
-    path: "{{ airflow_worker_dir }}"
-  register: worker_dir_stat
-
-- name: Ensure Airflow worker deployment directory exists
-  file:
-    path: "{{ airflow_worker_dir }}"
-    state: directory
-    owner: "{{ ssh_user }}"
-    group: ytdl
-    mode: '0755'
-  become: yes
-  when: not worker_dir_stat.stat.exists
-
-- name: Sync Airflow worker files
-  synchronize:
-    src: "{{ playbook_dir }}/../{{ item }}"
-    dest: "{{ airflow_worker_dir }}/"
-    archive: yes
-    recursive: yes
-    delete: yes
-    rsync_opts: "{{ rsync_default_opts }}"
-  loop:
-    - "airflow/Dockerfile"
-    - "airflow/docker-compose-dl.yaml"
-    - "airflow/dags/"
-    - "airflow/config/"
-    - "setup.py"
-    - "yt_ops_services/"
-    - "thrift_model/"
-    - "VERSION"
-    - "airflow/init-airflow.sh"
-    - "get_info_json_client.py"
-    - "proxy_manager_client.py"
-    - "token_generator/"
-    - "utils/"
-
-- name: Check if inputfiles directory exists
-  stat:
-    path: "{{ playbook_dir }}/../airflow/inputfiles"
-  register: inputfiles_stat
-
-- name: Sync inputfiles directory if it exists
-  synchronize:
-    src: "{{ playbook_dir }}/../airflow/inputfiles/"
-    dest: "{{ airflow_worker_dir }}/inputfiles/"
-    archive: yes
-    recursive: yes
-    delete: yes
-    rsync_opts: "{{ rsync_default_opts }}"
-  when: inputfiles_stat.stat.exists
-
-- name: Sync pangramia thrift files
-  synchronize:
-    src: "{{ playbook_dir }}/../thrift_model/gen_py/pangramia/"
-    dest: "{{ airflow_worker_dir }}/pangramia/"
-    archive: yes
-    recursive: yes
-    delete: yes
-    rsync_opts: "{{ rsync_default_opts }}"
-
-- name: Create .env file for Airflow worker service
-  template:
-    src: "../../templates/.env.worker.j2"
-    dest: "{{ airflow_worker_dir }}/.env"
-    mode: "{{ file_permissions }}"
-    owner: "{{ ssh_user }}"
-    group: ytdl
-  vars:
-    service_role: "worker"
-
-- name: Create symlink for docker-compose.yaml
-  file:
-    src: "{{ airflow_worker_dir }}/docker-compose-dl.yaml"
-    dest: "{{ airflow_worker_dir }}/docker-compose.yaml"
-    state: link
-    owner: "{{ ssh_user }}"
-    group: ytdl
-
-- name: Build Airflow worker image
-  community.docker.docker_image:
-    name: "{{ airflow_image_name }}"
-    build:
-      path: "{{ airflow_worker_dir }}"
-    source: build
-    force_source: true
-
-- name: Run Airflow init script
-  shell:
-    cmd: "chmod +x init-airflow.sh && ./init-airflow.sh"
-    chdir: "{{ airflow_worker_dir }}"
-  become: yes
-  become_user: "{{ ssh_user }}"
-
-- name: Start Airflow worker service
-  community.docker.docker_compose_v2:
-    project_src: "{{ airflow_worker_dir }}"
-    files:
-      - "docker-compose-dl.yaml"
-    state: present
-    remove_orphans: true
diff --git a/airflow/scripts/minio-init.sh b/airflow/scripts/minio-init.sh
deleted file mode 100644
index 3d62c30..0000000
--- a/airflow/scripts/minio-init.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-set -e
-
-# Wait for MinIO to be ready
-until (mc alias set local http://minio:9000 admin 0153093693-0009) do
-  echo 'Waiting for MinIO...'
-  sleep 1
-done
-
-# Create bucket if it doesn't exist
-if ! 
mc ls local/airflow-logs >/dev/null 2>&1; then - mc mb local/airflow-logs - mc anonymous set download local/airflow-logs - echo 'MinIO bucket initialized' -else - echo 'MinIO bucket already exists' -fi diff --git a/airflow/ytdlp-ops-auth/__pycache__/thrift_exceptions_patch.cpython-311.pyc b/airflow/ytdlp-ops-auth/__pycache__/thrift_exceptions_patch.cpython-311.pyc deleted file mode 100644 index 00632a2..0000000 Binary files a/airflow/ytdlp-ops-auth/__pycache__/thrift_exceptions_patch.cpython-311.pyc and /dev/null differ diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py deleted file mode 100644 index b6cf1f4..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/BaseService.py +++ /dev/null @@ -1,564 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(object): - def ping(self): - pass - - def reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - pass - - def shutdown(self): - pass - - -class Client(Iface): - def __init__(self, iprot, oprot=None): - self._iprot = self._oprot = iprot - if oprot is not None: - self._oprot = oprot - self._seqid = 0 - - def ping(self): - self.send_ping() - return self.recv_ping() - - def send_ping(self): - self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid) - args = ping_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_ping(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = ping_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result") - - def reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - self.send_reportError(message, details) - return self.recv_reportError() - - def send_reportError(self, message, details): - self._oprot.writeMessageBegin('reportError', TMessageType.CALL, self._seqid) - args = reportError_args() - args.message = message - args.details = details - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportError(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportError_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - 
raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportError failed: unknown result") - - def shutdown(self): - self.send_shutdown() - - def send_shutdown(self): - self._oprot.writeMessageBegin('shutdown', TMessageType.ONEWAY, self._seqid) - args = shutdown_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - -class Processor(Iface, TProcessor): - def __init__(self, handler): - self._handler = handler - self._processMap = {} - self._processMap["ping"] = Processor.process_ping - self._processMap["reportError"] = Processor.process_reportError - self._processMap["shutdown"] = Processor.process_shutdown - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_ping(self, seqid, iprot, oprot): - args = ping_args() - args.read(iprot) - iprot.readMessageEnd() - result = ping_result() - try: - result.success = self._handler.ping() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("ping", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportError(self, seqid, iprot, oprot): - args = reportError_args() - args.read(iprot) - iprot.readMessageEnd() - result = reportError_result() - try: - result.success = self._handler.reportError(args.message, args.details) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportError", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_shutdown(self, seqid, iprot, oprot): - args = shutdown_args() - 
args.read(iprot) - iprot.readMessageEnd() - try: - self._handler.shutdown() - except TTransport.TTransportException: - raise - except Exception: - logging.exception('Exception in oneway handler') - -# HELPER FUNCTIONS AND STRUCTURES - - -class ping_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_args) -ping_args.thrift_spec = ( -) - - -class ping_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) 
-all_structs.append(ping_result) -ping_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportError_args(object): - """ - Attributes: - - message - - details - - """ - - - def __init__(self, message=None, details=None,): - self.message = message - self.details = details - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.MAP: - self.details = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.details[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_args') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.MAP, 2) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.details)) - for kiter7, viter8 in self.details.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_args) -reportError_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.MAP, 'details', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2 -) - - -class reportError_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, 
self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_result) -reportError_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class shutdown_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('shutdown_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(shutdown_args) -shutdown_args.thrift_spec = ( -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py deleted file mode 100644 index f8be3f5..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = 
['ttypes', 'constants', 'BaseService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py deleted file mode 100644 index 3bfb47f..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/base_service/ttypes.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py deleted file mode 100644 index 609fd61..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/YTAccountsOpService.py +++ /dev/null @@ -1,3491 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - """ - Parameters: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - pass - - def getPair(self, machineId): - """ - Parameters: - - machineId - - """ - pass - - def pair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - pass - - def unpair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - pass - - def listAccountPairs(self, filter): - """ - Parameters: - - filter - - """ - pass - - def addAccount(self, accountId, accountData): - """ - Parameters: - - accountId - - accountData - - """ - pass - - def suspendAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def resumeAccount(self, accountId): - """ - Parameters: - - 
accountId - - """ - pass - - def removeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def listActiveAccounts(self): - pass - - def addProxy(self, proxyId, proxyData): - """ - Parameters: - - proxyId - - proxyData - - """ - pass - - def suspendProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def resumeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def removeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def listActiveProxies(self): - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - """ - Parameters: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - self.send_addAccountPair(accountId, proxyId, machineId, proxyData, accountData) - return self.recv_addAccountPair() - - def send_addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - self._oprot.writeMessageBegin('addAccountPair', TMessageType.CALL, self._seqid) - args = addAccountPair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.proxyData = proxyData - args.accountData = accountData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addAccountPair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addAccountPair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccountPair failed: unknown result") - - def getPair(self, machineId): - """ - Parameters: - - machineId - - """ - self.send_getPair(machineId) - return self.recv_getPair() - - def send_getPair(self, machineId): - self._oprot.writeMessageBegin('getPair', TMessageType.CALL, self._seqid) - args = getPair_args() - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getPair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getPair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getPair failed: unknown result") - - def pair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - self.send_pair(accountId, proxyId, machineId) - return self.recv_pair() - - def send_pair(self, accountId, proxyId, machineId): - self._oprot.writeMessageBegin('pair', TMessageType.CALL, self._seqid) - args = pair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def 
recv_pair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = pair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "pair failed: unknown result") - - def unpair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - self.send_unpair(accountId, proxyId, machineId) - return self.recv_unpair() - - def send_unpair(self, accountId, proxyId, machineId): - self._oprot.writeMessageBegin('unpair', TMessageType.CALL, self._seqid) - args = unpair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_unpair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = unpair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "unpair failed: unknown result") - - def listAccountPairs(self, filter): - """ - Parameters: - - filter - - """ - self.send_listAccountPairs(filter) - return self.recv_listAccountPairs() - - def send_listAccountPairs(self, filter): - self._oprot.writeMessageBegin('listAccountPairs', TMessageType.CALL, self._seqid) - args = listAccountPairs_args() - args.filter = filter - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listAccountPairs(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listAccountPairs_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listAccountPairs failed: unknown result") - - def addAccount(self, accountId, accountData): - """ - Parameters: - - accountId - - accountData - - """ - self.send_addAccount(accountId, accountData) - return self.recv_addAccount() - - def send_addAccount(self, accountId, accountData): - self._oprot.writeMessageBegin('addAccount', TMessageType.CALL, self._seqid) - args = addAccount_args() - args.accountId = accountId - args.accountData = accountData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not 
None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccount failed: unknown result") - - def suspendAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_suspendAccount(accountId) - return self.recv_suspendAccount() - - def send_suspendAccount(self, accountId): - self._oprot.writeMessageBegin('suspendAccount', TMessageType.CALL, self._seqid) - args = suspendAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_suspendAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = suspendAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "suspendAccount failed: unknown result") - - def resumeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_resumeAccount(accountId) - return self.recv_resumeAccount() - - def send_resumeAccount(self, accountId): - self._oprot.writeMessageBegin('resumeAccount', TMessageType.CALL, self._seqid) - args = resumeAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_resumeAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = resumeAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "resumeAccount failed: unknown result") - - def removeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_removeAccount(accountId) - return self.recv_removeAccount() - - def send_removeAccount(self, accountId): - self._oprot.writeMessageBegin('removeAccount', TMessageType.CALL, self._seqid) - args = removeAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_removeAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = removeAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "removeAccount failed: unknown result") - - def listActiveAccounts(self): - self.send_listActiveAccounts() - return self.recv_listActiveAccounts() - - def send_listActiveAccounts(self): - self._oprot.writeMessageBegin('listActiveAccounts', TMessageType.CALL, self._seqid) - args = listActiveAccounts_args() - args.write(self._oprot) - 
self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listActiveAccounts(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listActiveAccounts_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listActiveAccounts failed: unknown result") - - def addProxy(self, proxyId, proxyData): - """ - Parameters: - - proxyId - - proxyData - - """ - self.send_addProxy(proxyId, proxyData) - return self.recv_addProxy() - - def send_addProxy(self, proxyId, proxyData): - self._oprot.writeMessageBegin('addProxy', TMessageType.CALL, self._seqid) - args = addProxy_args() - args.proxyId = proxyId - args.proxyData = proxyData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addProxy failed: unknown result") - - def suspendProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_suspendProxy(proxyId) - return self.recv_suspendProxy() - - def send_suspendProxy(self, proxyId): - self._oprot.writeMessageBegin('suspendProxy', TMessageType.CALL, self._seqid) - args = suspendProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_suspendProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = suspendProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "suspendProxy failed: unknown result") - - def resumeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_resumeProxy(proxyId) - return self.recv_resumeProxy() - - def send_resumeProxy(self, proxyId): - self._oprot.writeMessageBegin('resumeProxy', TMessageType.CALL, self._seqid) - args = resumeProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_resumeProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = resumeProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise 
result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "resumeProxy failed: unknown result") - - def removeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_removeProxy(proxyId) - return self.recv_removeProxy() - - def send_removeProxy(self, proxyId): - self._oprot.writeMessageBegin('removeProxy', TMessageType.CALL, self._seqid) - args = removeProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_removeProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = removeProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "removeProxy failed: unknown result") - - def listActiveProxies(self): - self.send_listActiveProxies() - return self.recv_listActiveProxies() - - def send_listActiveProxies(self): - self._oprot.writeMessageBegin('listActiveProxies', TMessageType.CALL, self._seqid) - args = listActiveProxies_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listActiveProxies(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listActiveProxies_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listActiveProxies failed: unknown result") - - -class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor): - def __init__(self, handler): - pangramia.base_service.BaseService.Processor.__init__(self, handler) - self._processMap["addAccountPair"] = Processor.process_addAccountPair - self._processMap["getPair"] = Processor.process_getPair - self._processMap["pair"] = Processor.process_pair - self._processMap["unpair"] = Processor.process_unpair - self._processMap["listAccountPairs"] = Processor.process_listAccountPairs - self._processMap["addAccount"] = Processor.process_addAccount - self._processMap["suspendAccount"] = Processor.process_suspendAccount - self._processMap["resumeAccount"] = Processor.process_resumeAccount - self._processMap["removeAccount"] = Processor.process_removeAccount - self._processMap["listActiveAccounts"] = Processor.process_listActiveAccounts - self._processMap["addProxy"] = Processor.process_addProxy - self._processMap["suspendProxy"] = Processor.process_suspendProxy - self._processMap["resumeProxy"] = Processor.process_resumeProxy - self._processMap["removeProxy"] = Processor.process_removeProxy - self._processMap["listActiveProxies"] = Processor.process_listActiveProxies - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - 
iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_addAccountPair(self, seqid, iprot, oprot): - args = addAccountPair_args() - args.read(iprot) - iprot.readMessageEnd() - result = addAccountPair_result() - try: - result.success = self._handler.addAccountPair(args.accountId, args.proxyId, args.machineId, args.proxyData, args.accountData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addAccountPair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getPair(self, seqid, iprot, oprot): - args = getPair_args() - args.read(iprot) - iprot.readMessageEnd() - result = getPair_result() - try: - result.success = self._handler.getPair(args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getPair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_pair(self, seqid, iprot, oprot): - args = pair_args() - args.read(iprot) - iprot.readMessageEnd() - result = pair_result() - try: - result.success = self._handler.pair(args.accountId, args.proxyId, args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("pair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - 
oprot.trans.flush() - - def process_unpair(self, seqid, iprot, oprot): - args = unpair_args() - args.read(iprot) - iprot.readMessageEnd() - result = unpair_result() - try: - result.success = self._handler.unpair(args.accountId, args.proxyId, args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("unpair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listAccountPairs(self, seqid, iprot, oprot): - args = listAccountPairs_args() - args.read(iprot) - iprot.readMessageEnd() - result = listAccountPairs_result() - try: - result.success = self._handler.listAccountPairs(args.filter) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listAccountPairs", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_addAccount(self, seqid, iprot, oprot): - args = addAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = addAccount_result() - try: - result.success = self._handler.addAccount(args.accountId, args.accountData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_suspendAccount(self, seqid, iprot, oprot): - args = suspendAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = suspendAccount_result() - try: - result.success = self._handler.suspendAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except 
pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("suspendAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_resumeAccount(self, seqid, iprot, oprot): - args = resumeAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = resumeAccount_result() - try: - result.success = self._handler.resumeAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("resumeAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_removeAccount(self, seqid, iprot, oprot): - args = removeAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = removeAccount_result() - try: - result.success = self._handler.removeAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("removeAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listActiveAccounts(self, seqid, iprot, oprot): - args = listActiveAccounts_args() - args.read(iprot) - iprot.readMessageEnd() - result = listActiveAccounts_result() - try: - result.success = self._handler.listActiveAccounts() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = 
TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listActiveAccounts", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_addProxy(self, seqid, iprot, oprot): - args = addProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = addProxy_result() - try: - result.success = self._handler.addProxy(args.proxyId, args.proxyData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_suspendProxy(self, seqid, iprot, oprot): - args = suspendProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = suspendProxy_result() - try: - result.success = self._handler.suspendProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("suspendProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_resumeProxy(self, seqid, iprot, oprot): - args = resumeProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = resumeProxy_result() - try: - result.success = self._handler.resumeProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("resumeProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_removeProxy(self, 
seqid, iprot, oprot): - args = removeProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = removeProxy_result() - try: - result.success = self._handler.removeProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("removeProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listActiveProxies(self, seqid, iprot, oprot): - args = listActiveProxies_args() - args.read(iprot) - iprot.readMessageEnd() - result = listActiveProxies_result() - try: - result.success = self._handler.listActiveProxies() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listActiveProxies", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - -# HELPER FUNCTIONS AND STRUCTURES - - -class addAccountPair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None, proxyData=None, accountData=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - self.proxyData = proxyData - self.accountData = accountData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - 
self.proxyData.read(iprot) - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRUCT: - self.accountData = pangramia.yt.common.ttypes.AccountData() - self.accountData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccountPair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 4) - self.proxyData.write(oprot) - oprot.writeFieldEnd() - if self.accountData is not None: - oprot.writeFieldBegin('accountData', TType.STRUCT, 5) - self.accountData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccountPair_args) -addAccountPair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 - (4, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ), # 4 - (5, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ), # 5 -) - - -class addAccountPair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, 
self.thrift_spec])) - return - oprot.writeStructBegin('addAccountPair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccountPair_result) -addAccountPair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getPair_args(object): - """ - Attributes: - - machineId - - """ - - - def __init__(self, machineId=None,): - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getPair_args') - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 1) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getPair_args) -getPair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'machineId', 'UTF8', None, ), # 1 -) - - -class getPair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = 
iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.AccountPairWithState() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getPair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getPair_result) -getPair_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.AccountPairWithState, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class pair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('pair_args') - if self.accountId is 
not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(pair_args) -pair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 -) - - -class pair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('pair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(pair_result) -pair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', 
[pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class unpair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('unpair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(unpair_args) -unpair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 -) - - -class unpair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - 
break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('unpair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(unpair_result) -unpair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listAccountPairs_args(object): - """ - Attributes: - - filter - - """ - - - def __init__(self, filter=None,): - self.filter = filter - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.I32: - self.filter = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listAccountPairs_args') - if self.filter is not None: - oprot.writeFieldBegin('filter', TType.I32, 1) - oprot.writeI32(self.filter) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listAccountPairs_args) -listAccountPairs_args.thrift_spec = ( - None, # 0 - (1, TType.I32, 'filter', None, None, ), # 1 -) - - -class listAccountPairs_result(object): - """ - Attributes: - - success - - serviceExp - - 
userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype3, _size0) = iprot.readListBegin() - for _i4 in range(_size0): - _elem5 = pangramia.yt.common.ttypes.AccountPairWithState() - _elem5.read(iprot) - self.success.append(_elem5) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listAccountPairs_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRUCT, len(self.success)) - for iter6 in self.success: - iter6.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listAccountPairs_result) -listAccountPairs_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRUCT, [pangramia.yt.common.ttypes.AccountPairWithState, None], False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addAccount_args(object): - """ - Attributes: - - accountId - - accountData - - """ - - - def __init__(self, accountId=None, accountData=None,): - self.accountId = accountId - self.accountData = accountData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid 
== 2: - if ftype == TType.STRUCT: - self.accountData = pangramia.yt.common.ttypes.AccountData() - self.accountData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.accountData is not None: - oprot.writeFieldBegin('accountData', TType.STRUCT, 2) - self.accountData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccount_args) -addAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ), # 2 -) - - -class addAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == 
other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccount_result) -addAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class suspendAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendAccount_args) -suspendAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class suspendAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendAccount_result') - if self.success is not None: 
- oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendAccount_result) -suspendAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class resumeAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeAccount_args) -resumeAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class resumeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == 
TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeAccount_result) -resumeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class removeAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_args) -removeAccount_args.thrift_spec 
= ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class removeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_result) -removeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listActiveAccounts_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, 
other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_args) -listActiveAccounts_args.thrift_spec = ( -) - - -class listActiveAccounts_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype10, _size7) = iprot.readListBegin() - for _i11 in range(_size7): - _elem12 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem12) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter13 in self.success: - oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_result) -listActiveAccounts_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addProxy_args(object): - """ - Attributes: - - proxyId - - proxyData - - """ - - - def __init__(self, proxyId=None, proxyData=None,): - self.proxyId = proxyId - self.proxyData = proxyData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, 
[self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - self.proxyData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 2) - self.proxyData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addProxy_args) -addProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 - (2, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ), # 2 -) - - -class addProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - 
oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addProxy_result) -addProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class suspendProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendProxy_args) -suspendProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class suspendProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - 
iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendProxy_result) -suspendProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class resumeProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeProxy_args) -resumeProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class resumeProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, 
TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeProxy_result) -resumeProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class removeProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeProxy_args) -removeProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class removeProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeProxy_result) -removeProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listActiveProxies_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveProxies_args') - 
oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveProxies_args) -listActiveProxies_args.thrift_spec = ( -) - - -class listActiveProxies_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype17, _size14) = iprot.readListBegin() - for _i18 in range(_size14): - _elem19 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem19) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveProxies_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter20 in self.success: - oprot.writeString(iter20.encode('utf-8') if sys.version_info[0] == 2 else iter20) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveProxies_result) -listActiveProxies_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py 
b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py deleted file mode 100644 index 00b4776..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'YTAccountsOpService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py deleted file mode 100644 index de828aa..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/admin_ops/ttypes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes -import pangramia.base_service.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py deleted file mode 100644 index a23d813..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/common/ttypes.py +++ /dev/null @@ -1,905 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class 
JobState(object): - SUCCESS = 0 - FAIL = 1 - BOT_FORBIDDEN_ON_URL_ACCESS = 2 - BOT_FORBIDDEN_ON_FILE_DOWNLOAD = 3 - BOT_CAPTCHA = 4 - BOT_AUTH_RELOGIN_REQUIRED = 5 - BOT_AUTH_SMS_REQUIRED = 6 - BOT_AUTH_DEVICE_QR_REQUIRED = 7 - BOT_ACCOUNT_BANNED = 8 - BOT_IP_BANNED = 9 - - _VALUES_TO_NAMES = { - 0: "SUCCESS", - 1: "FAIL", - 2: "BOT_FORBIDDEN_ON_URL_ACCESS", - 3: "BOT_FORBIDDEN_ON_FILE_DOWNLOAD", - 4: "BOT_CAPTCHA", - 5: "BOT_AUTH_RELOGIN_REQUIRED", - 6: "BOT_AUTH_SMS_REQUIRED", - 7: "BOT_AUTH_DEVICE_QR_REQUIRED", - 8: "BOT_ACCOUNT_BANNED", - 9: "BOT_IP_BANNED", - } - - _NAMES_TO_VALUES = { - "SUCCESS": 0, - "FAIL": 1, - "BOT_FORBIDDEN_ON_URL_ACCESS": 2, - "BOT_FORBIDDEN_ON_FILE_DOWNLOAD": 3, - "BOT_CAPTCHA": 4, - "BOT_AUTH_RELOGIN_REQUIRED": 5, - "BOT_AUTH_SMS_REQUIRED": 6, - "BOT_AUTH_DEVICE_QR_REQUIRED": 7, - "BOT_ACCOUNT_BANNED": 8, - "BOT_IP_BANNED": 9, - } - - -class TokenUpdateMode(object): - AUTOREFRESH_AND_REMAIN_ANONYMOUS = 0 - AUTOREFRESH_AND_ALLOW_AUTH = 1 - AUTOREFRESH_AND_ONLY_AUTH = 2 - CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH = 3 - CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS = 4 - CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH = 5 - AUTO = 6 - - _VALUES_TO_NAMES = { - 0: "AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 1: "AUTOREFRESH_AND_ALLOW_AUTH", - 2: "AUTOREFRESH_AND_ONLY_AUTH", - 3: "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH", - 4: "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 5: "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH", - 6: "AUTO", - } - - _NAMES_TO_VALUES = { - "AUTOREFRESH_AND_REMAIN_ANONYMOUS": 0, - "AUTOREFRESH_AND_ALLOW_AUTH": 1, - "AUTOREFRESH_AND_ONLY_AUTH": 2, - "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH": 3, - "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS": 4, - "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH": 5, - "AUTO": 6, - } - - -class AccountPairState(object): - ACTIVE = 0 - PAUSED = 1 - REMOVED = 2 - IN_PROGRESS = 3 - ALL = 4 - - _VALUES_TO_NAMES = { - 0: "ACTIVE", - 1: "PAUSED", - 2: "REMOVED", - 3: "IN_PROGRESS", - 4: "ALL", - } - - _NAMES_TO_VALUES = { - "ACTIVE": 0, - "PAUSED": 1, - "REMOVED": 2, - "IN_PROGRESS": 3, - "ALL": 4, - } - - -class JobTokenData(object): - """ - Attributes: - - infoJson - - ytdlpCommand - - socks - - jobId - - url - - cookiesBlob - - """ - - - def __init__(self, infoJson=None, ytdlpCommand=None, socks=None, jobId=None, url=None, cookiesBlob=None,): - self.infoJson = infoJson - self.ytdlpCommand = ytdlpCommand - self.socks = socks - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.infoJson = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.socks = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if 
sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobTokenData') - if self.infoJson is not None: - oprot.writeFieldBegin('infoJson', TType.STRING, 1) - oprot.writeString(self.infoJson.encode('utf-8') if sys.version_info[0] == 2 else self.infoJson) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 2) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.socks is not None: - oprot.writeFieldBegin('socks', TType.STRING, 3) - oprot.writeString(self.socks.encode('utf-8') if sys.version_info[0] == 2 else self.socks) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 5) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 6) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountData(object): - """ - Attributes: - - username - - password - - countryCode - - """ - - - def __init__(self, username=None, password=None, countryCode=None,): - self.username = username - self.password = password - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.username = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.password = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if 
sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountData') - if self.username is not None: - oprot.writeFieldBegin('username', TType.STRING, 1) - oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username) - oprot.writeFieldEnd() - if self.password is not None: - oprot.writeFieldBegin('password', TType.STRING, 2) - oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 3) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.username is None: - raise TProtocolException(message='Required field username is unset!') - if self.password is None: - raise TProtocolException(message='Required field password is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class ProxyData(object): - """ - Attributes: - - proxyUrl - - countryCode - - """ - - - def __init__(self, proxyUrl=None, countryCode=None,): - self.proxyUrl = proxyUrl - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ProxyData') - if self.proxyUrl is not None: - oprot.writeFieldBegin('proxyUrl', TType.STRING, 1) - oprot.writeString(self.proxyUrl.encode('utf-8') if sys.version_info[0] == 2 else self.proxyUrl) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 2) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.proxyUrl is None: - raise TProtocolException(message='Required field proxyUrl is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, 
value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountPairWithState(object): - """ - Attributes: - - accountId - - proxyId - - accountPairState - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, accountPairState=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.accountPairState = accountPairState - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.accountPairState = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountPairWithState') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.accountPairState is not None: - oprot.writeFieldBegin('accountPairState', TType.I32, 3) - oprot.writeI32(self.accountPairState) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 4) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.accountId is None: - raise TProtocolException(message='Required field accountId is unset!') - if self.proxyId is None: - raise TProtocolException(message='Required field proxyId is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class JobData(object): - """ - Attributes: - - jobId - - url - - cookiesBlob - - potoken - - visitorId - - ytdlpCommand - - createdTime - - telemetry - - state - - errorMessage - 
- socks5Id - - """ - - - def __init__(self, jobId=None, url=None, cookiesBlob=None, potoken=None, visitorId=None, ytdlpCommand=None, createdTime=None, telemetry=None, state=None, errorMessage=None, socks5Id=None,): - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - self.potoken = potoken - self.visitorId = visitorId - self.ytdlpCommand = ytdlpCommand - self.createdTime = createdTime - self.telemetry = telemetry - self.state = state - self.errorMessage = errorMessage - self.socks5Id = socks5Id - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.potoken = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.visitorId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.createdTime = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 8: - if ftype == TType.MAP: - self.telemetry = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.telemetry[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - elif fid == 9: - if ftype == TType.I32: - self.state = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 10: - if ftype == TType.STRING: - self.errorMessage = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 11: - if ftype == TType.STRING: - self.socks5Id = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobData') - if self.jobId is not None: - 
oprot.writeFieldBegin('jobId', TType.STRING, 1) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 2) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 3) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - if self.potoken is not None: - oprot.writeFieldBegin('potoken', TType.STRING, 4) - oprot.writeString(self.potoken.encode('utf-8') if sys.version_info[0] == 2 else self.potoken) - oprot.writeFieldEnd() - if self.visitorId is not None: - oprot.writeFieldBegin('visitorId', TType.STRING, 5) - oprot.writeString(self.visitorId.encode('utf-8') if sys.version_info[0] == 2 else self.visitorId) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 6) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.createdTime is not None: - oprot.writeFieldBegin('createdTime', TType.STRING, 7) - oprot.writeString(self.createdTime.encode('utf-8') if sys.version_info[0] == 2 else self.createdTime) - oprot.writeFieldEnd() - if self.telemetry is not None: - oprot.writeFieldBegin('telemetry', TType.MAP, 8) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.telemetry)) - for kiter7, viter8 in self.telemetry.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - if self.state is not None: - oprot.writeFieldBegin('state', TType.I32, 9) - oprot.writeI32(self.state) - oprot.writeFieldEnd() - if self.errorMessage is not None: - oprot.writeFieldBegin('errorMessage', TType.STRING, 10) - oprot.writeString(self.errorMessage.encode('utf-8') if sys.version_info[0] == 2 else self.errorMessage) - oprot.writeFieldEnd() - if self.socks5Id is not None: - oprot.writeFieldBegin('socks5Id', TType.STRING, 11) - oprot.writeString(self.socks5Id.encode('utf-8') if sys.version_info[0] == 2 else self.socks5Id) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.jobId is None: - raise TProtocolException(message='Required field jobId is unset!') - if self.url is None: - raise TProtocolException(message='Required field url is unset!') - if self.cookiesBlob is None: - raise TProtocolException(message='Required field cookiesBlob is unset!') - if self.potoken is None: - raise TProtocolException(message='Required field potoken is unset!') - if self.visitorId is None: - raise TProtocolException(message='Required field visitorId is unset!') - if self.ytdlpCommand is None: - raise TProtocolException(message='Required field ytdlpCommand is unset!') - if self.createdTime is None: - raise TProtocolException(message='Required field createdTime is unset!') - if self.telemetry is None: - raise TProtocolException(message='Required field telemetry is unset!') - if self.state is None: - raise TProtocolException(message='Required field state is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def 
__eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionPagination(object): - """ - Attributes: - - hasNext - - totalCount - - page - - pageSize - - """ - - - def __init__(self, hasNext=None, totalCount=None, page=None, pageSize=None,): - self.hasNext = hasNext - self.totalCount = totalCount - self.page = page - self.pageSize = pageSize - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.BOOL: - self.hasNext = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.totalCount = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.page = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.I32: - self.pageSize = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionPagination') - if self.hasNext is not None: - oprot.writeFieldBegin('hasNext', TType.BOOL, 1) - oprot.writeBool(self.hasNext) - oprot.writeFieldEnd() - if self.totalCount is not None: - oprot.writeFieldBegin('totalCount', TType.I32, 2) - oprot.writeI32(self.totalCount) - oprot.writeFieldEnd() - if self.page is not None: - oprot.writeFieldBegin('page', TType.I32, 3) - oprot.writeI32(self.page) - oprot.writeFieldEnd() - if self.pageSize is not None: - oprot.writeFieldBegin('pageSize', TType.I32, 4) - oprot.writeI32(self.pageSize) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.hasNext is None: - raise TProtocolException(message='Required field hasNext is unset!') - if self.totalCount is None: - raise TProtocolException(message='Required field totalCount is unset!') - if self.page is None: - raise TProtocolException(message='Required field page is unset!') - if self.pageSize is None: - raise TProtocolException(message='Required field pageSize is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionJobData(object): - """ - Attributes: - - items - - pagination - - """ - - - def __init__(self, items=None, pagination=None,): - self.items = items - self.pagination = pagination - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.LIST: - self.items = [] - (_etype12, _size9) = 
iprot.readListBegin() - for _i13 in range(_size9): - _elem14 = JobData() - _elem14.read(iprot) - self.items.append(_elem14) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.pagination = RichCollectionPagination() - self.pagination.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionJobData') - if self.items is not None: - oprot.writeFieldBegin('items', TType.LIST, 1) - oprot.writeListBegin(TType.STRUCT, len(self.items)) - for iter15 in self.items: - iter15.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.pagination is not None: - oprot.writeFieldBegin('pagination', TType.STRUCT, 2) - self.pagination.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.items is None: - raise TProtocolException(message='Required field items is unset!') - if self.pagination is None: - raise TProtocolException(message='Required field pagination is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(JobTokenData) -JobTokenData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'infoJson', 'UTF8', None, ), # 1 - (2, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 2 - (3, TType.STRING, 'socks', 'UTF8', None, ), # 3 - (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4 - (5, TType.STRING, 'url', 'UTF8', None, ), # 5 - (6, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 6 -) -all_structs.append(AccountData) -AccountData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'username', 'UTF8', None, ), # 1 - (2, TType.STRING, 'password', 'UTF8', None, ), # 2 - (3, TType.STRING, 'countryCode', 'UTF8', None, ), # 3 -) -all_structs.append(ProxyData) -ProxyData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyUrl', 'UTF8', None, ), # 1 - (2, TType.STRING, 'countryCode', 'UTF8', None, ), # 2 -) -all_structs.append(AccountPairWithState) -AccountPairWithState.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.I32, 'accountPairState', None, None, ), # 3 - (4, TType.STRING, 'machineId', 'UTF8', None, ), # 4 -) -all_structs.append(JobData) -JobData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'jobId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'url', 'UTF8', None, ), # 2 - (3, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 3 - (4, TType.STRING, 'potoken', 'UTF8', None, ), # 4 - (5, TType.STRING, 'visitorId', 'UTF8', None, ), # 5 - (6, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 6 - (7, TType.STRING, 'createdTime', 'UTF8', None, ), # 7 - (8, TType.MAP, 'telemetry', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 8 - (9, TType.I32, 'state', None, None, ), # 9 - (10, TType.STRING, 'errorMessage', 'UTF8', None, ), # 10 - (11, TType.STRING, 'socks5Id', 'UTF8', None, ), # 11 -) -all_structs.append(RichCollectionPagination) -RichCollectionPagination.thrift_spec = ( - None, # 0 - (1, TType.BOOL, 
'hasNext', None, None, ), # 1 - (2, TType.I32, 'totalCount', None, None, ), # 2 - (3, TType.I32, 'page', None, None, ), # 3 - (4, TType.I32, 'pageSize', None, None, ), # 4 -) -all_structs.append(RichCollectionJobData) -RichCollectionJobData.thrift_spec = ( - None, # 0 - (1, TType.LIST, 'items', (TType.STRUCT, [JobData, None], False), None, ), # 1 - (2, TType.STRUCT, 'pagination', [RichCollectionPagination, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py deleted file mode 100644 index e930913..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/exceptions/ttypes.py +++ /dev/null @@ -1,254 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class PBServiceException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBServiceException, self).__setattr__('message', message) - super(PBServiceException, self).__setattr__('errorCode', errorCode) - super(PBServiceException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = 
iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBServiceException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter7, viter8 in self.context.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class PBUserException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBUserException, self).__setattr__('message', message) - super(PBUserException, self).__setattr__('errorCode', errorCode) - super(PBUserException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() 
- else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype10, _vtype11, _size9) = iprot.readMapBegin() - for _i13 in range(_size9): - _key14 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val15 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key14] = _val15 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBUserException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter16, viter17 in self.context.items(): - oprot.writeString(kiter16.encode('utf-8') if sys.version_info[0] == 2 else kiter16) - oprot.writeString(viter17.encode('utf-8') if sys.version_info[0] == 2 else viter17) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(PBServiceException) -PBServiceException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -all_structs.append(PBUserException) -PBUserException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py deleted file mode 100644 index 8589aee..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/YTTokenOpService.py +++ /dev/null @@ -1,1360 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift 
import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - pass - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - self.send_getOrRefreshTokenWithReport(accountId, oldUrl, status, details, jobId, updateType, url) - return self.recv_getOrRefreshTokenWithReport() - - def send_getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshTokenWithReport', TMessageType.CALL, self._seqid) - args = getOrRefreshTokenWithReport_args() - args.accountId = accountId - args.oldUrl = oldUrl - args.status = status - args.details = details - args.jobId = jobId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshTokenWithReport(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshTokenWithReport_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshTokenWithReport failed: unknown result") - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_getOrRefreshToken(accountId, updateType, url) - return self.recv_getOrRefreshToken() - - def send_getOrRefreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshToken', TMessageType.CALL, self._seqid) - args = getOrRefreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() 
- x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshToken failed: unknown result") - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_getLatestToken(accountId) - return self.recv_getLatestToken() - - def send_getLatestToken(self, accountId): - self._oprot.writeMessageBegin('getLatestToken', TMessageType.CALL, self._seqid) - args = getLatestToken_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getLatestToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getLatestToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getLatestToken failed: unknown result") - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_refreshToken(accountId, updateType, url) - return self.recv_refreshToken() - - def send_refreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('refreshToken', TMessageType.CALL, self._seqid) - args = refreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_refreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = refreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "refreshToken failed: unknown result") - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - self.send_reportState(url, status, details, jobId) - return self.recv_reportState() - - def send_reportState(self, url, status, details, jobId): - self._oprot.writeMessageBegin('reportState', TMessageType.CALL, self._seqid) - args = reportState_args() - args.url = url - args.status = status - args.details = details - args.jobId = jobId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportState(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportState_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if 
result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportState failed: unknown result") - - -class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor): - def __init__(self, handler): - pangramia.base_service.BaseService.Processor.__init__(self, handler) - self._processMap["getOrRefreshTokenWithReport"] = Processor.process_getOrRefreshTokenWithReport - self._processMap["getOrRefreshToken"] = Processor.process_getOrRefreshToken - self._processMap["getLatestToken"] = Processor.process_getLatestToken - self._processMap["refreshToken"] = Processor.process_refreshToken - self._processMap["reportState"] = Processor.process_reportState - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_getOrRefreshTokenWithReport(self, seqid, iprot, oprot): - args = getOrRefreshTokenWithReport_args() - args.read(iprot) - iprot.readMessageEnd() - result = getOrRefreshTokenWithReport_result() - try: - result.success = self._handler.getOrRefreshTokenWithReport(args.accountId, args.oldUrl, args.status, args.details, args.jobId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getOrRefreshTokenWithReport", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getOrRefreshToken(self, seqid, iprot, oprot): - args = getOrRefreshToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = getOrRefreshToken_result() - try: - result.success = self._handler.getOrRefreshToken(args.accountId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal 
error') - oprot.writeMessageBegin("getOrRefreshToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getLatestToken(self, seqid, iprot, oprot): - args = getLatestToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = getLatestToken_result() - try: - result.success = self._handler.getLatestToken(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getLatestToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_refreshToken(self, seqid, iprot, oprot): - args = refreshToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = refreshToken_result() - try: - result.success = self._handler.refreshToken(args.accountId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("refreshToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportState(self, seqid, iprot, oprot): - args = reportState_args() - args.read(iprot) - iprot.readMessageEnd() - result = reportState_result() - try: - result.success = self._handler.reportState(args.url, args.status, args.details, args.jobId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportState", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - -# HELPER FUNCTIONS AND STRUCTURES - - -class getOrRefreshTokenWithReport_args(object): - """ - Attributes: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - 
url - - """ - - - def __init__(self, accountId=None, oldUrl=None, status=None, details=None, jobId=None, updateType= 6, url=None,): - self.accountId = accountId - self.oldUrl = oldUrl - self.status = status - self.details = details - self.jobId = jobId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.oldUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.status = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshTokenWithReport_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.oldUrl is not None: - oprot.writeFieldBegin('oldUrl', TType.STRING, 2) - oprot.writeString(self.oldUrl.encode('utf-8') if sys.version_info[0] == 2 else self.oldUrl) - oprot.writeFieldEnd() - if self.status is not None: - oprot.writeFieldBegin('status', TType.I32, 3) - oprot.writeI32(self.status) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.STRING, 4) - oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 5) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 6) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 7) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % 
(key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshTokenWithReport_args) -getOrRefreshTokenWithReport_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'oldUrl', 'UTF8', None, ), # 2 - (3, TType.I32, 'status', None, None, ), # 3 - (4, TType.STRING, 'details', 'UTF8', None, ), # 4 - (5, TType.STRING, 'jobId', 'UTF8', None, ), # 5 - (6, TType.I32, 'updateType', None, 6, ), # 6 - (7, TType.STRING, 'url', 'UTF8', None, ), # 7 -) - - -class getOrRefreshTokenWithReport_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshTokenWithReport_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshTokenWithReport_result) -getOrRefreshTokenWithReport_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getOrRefreshToken_args(object): - """ - Attributes: - - accountId - - updateType - - url - - """ - - - def __init__(self, accountId=None, 
updateType= 6, url=None,): - self.accountId = accountId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 2) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 3) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshToken_args) -getOrRefreshToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.I32, 'updateType', None, 6, ), # 2 - (3, TType.STRING, 'url', 'UTF8', None, ), # 3 -) - - -class getOrRefreshToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - 
iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshToken_result) -getOrRefreshToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getLatestToken_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getLatestToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getLatestToken_args) -getLatestToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class getLatestToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = 
userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getLatestToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getLatestToken_result) -getLatestToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class refreshToken_args(object): - """ - Attributes: - - accountId - - updateType - - url - - """ - - - def __init__(self, accountId=None, updateType= 6, url=None,): - self.accountId = accountId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if 
oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('refreshToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 2) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 3) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(refreshToken_args) -refreshToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.I32, 'updateType', None, 6, ), # 2 - (3, TType.STRING, 'url', 'UTF8', None, ), # 3 -) - - -class refreshToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('refreshToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def 
__ne__(self, other): - return not (self == other) -all_structs.append(refreshToken_result) -refreshToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportState_args(object): - """ - Attributes: - - url - - status - - details - - jobId - - """ - - - def __init__(self, url=None, status=None, details=None, jobId=None,): - self.url = url - self.status = status - self.details = details - self.jobId = jobId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.status = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportState_args') - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 1) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.status is not None: - oprot.writeFieldBegin('status', TType.I32, 2) - oprot.writeI32(self.status) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.STRING, 3) - oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportState_args) -reportState_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'url', 'UTF8', None, ), # 1 - (2, TType.I32, 'status', None, None, ), # 2 - (3, TType.STRING, 'details', 'UTF8', None, ), # 3 - (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4 -) - - -class reportState_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ 
- - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportState_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportState_result) -reportState_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py deleted file mode 100644 index e97f47d..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'YTTokenOpService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py 
b/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py deleted file mode 100644 index de828aa..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/pangramia/yt/tokens_ops/ttypes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes -import pangramia.base_service.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py deleted file mode 100644 index b6cf1f4..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/BaseService.py +++ /dev/null @@ -1,564 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(object): - def ping(self): - pass - - def reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - pass - - def shutdown(self): - pass - - -class Client(Iface): - def __init__(self, iprot, oprot=None): - self._iprot = self._oprot = iprot - if oprot is not None: - self._oprot = oprot - self._seqid = 0 - - def ping(self): - self.send_ping() - return self.recv_ping() - - def send_ping(self): - self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid) - args = ping_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_ping(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = ping_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result") - - def reportError(self, message, details): - """ - Parameters: - - message - - details - - """ - 
self.send_reportError(message, details) - return self.recv_reportError() - - def send_reportError(self, message, details): - self._oprot.writeMessageBegin('reportError', TMessageType.CALL, self._seqid) - args = reportError_args() - args.message = message - args.details = details - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportError(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportError_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportError failed: unknown result") - - def shutdown(self): - self.send_shutdown() - - def send_shutdown(self): - self._oprot.writeMessageBegin('shutdown', TMessageType.ONEWAY, self._seqid) - args = shutdown_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - -class Processor(Iface, TProcessor): - def __init__(self, handler): - self._handler = handler - self._processMap = {} - self._processMap["ping"] = Processor.process_ping - self._processMap["reportError"] = Processor.process_reportError - self._processMap["shutdown"] = Processor.process_shutdown - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_ping(self, seqid, iprot, oprot): - args = ping_args() - args.read(iprot) - iprot.readMessageEnd() - result = ping_result() - try: - result.success = self._handler.ping() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("ping", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportError(self, seqid, iprot, oprot): - args = reportError_args() - args.read(iprot) - iprot.readMessageEnd() - result = reportError_result() - try: - result.success = self._handler.reportError(args.message, args.details) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as 
serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportError", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_shutdown(self, seqid, iprot, oprot): - args = shutdown_args() - args.read(iprot) - iprot.readMessageEnd() - try: - self._handler.shutdown() - except TTransport.TTransportException: - raise - except Exception: - logging.exception('Exception in oneway handler') - -# HELPER FUNCTIONS AND STRUCTURES - - -class ping_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_args) -ping_args.thrift_spec = ( -) - - -class ping_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ping_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - 
oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(ping_result) -ping_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportError_args(object): - """ - Attributes: - - message - - details - - """ - - - def __init__(self, message=None, details=None,): - self.message = message - self.details = details - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.MAP: - self.details = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.details[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_args') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.MAP, 2) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.details)) - for kiter7, viter8 in self.details.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - 
return not (self == other) -all_structs.append(reportError_args) -reportError_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.MAP, 'details', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2 -) - - -class reportError_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportError_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportError_result) -reportError_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class shutdown_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('shutdown_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - 
L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(shutdown_args) -shutdown_args.thrift_spec = ( -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py deleted file mode 100644 index f8be3f5..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'BaseService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py deleted file mode 100644 index 3bfb47f..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/base_service/ttypes.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py deleted file mode 100644 index 609fd61..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py +++ /dev/null @@ -1,3491 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from 
thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - """ - Parameters: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - pass - - def getPair(self, machineId): - """ - Parameters: - - machineId - - """ - pass - - def pair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - pass - - def unpair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - pass - - def listAccountPairs(self, filter): - """ - Parameters: - - filter - - """ - pass - - def addAccount(self, accountId, accountData): - """ - Parameters: - - accountId - - accountData - - """ - pass - - def suspendAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def resumeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def removeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def listActiveAccounts(self): - pass - - def addProxy(self, proxyId, proxyData): - """ - Parameters: - - proxyId - - proxyData - - """ - pass - - def suspendProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def resumeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def removeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - pass - - def listActiveProxies(self): - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - """ - Parameters: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - self.send_addAccountPair(accountId, proxyId, machineId, proxyData, accountData) - return self.recv_addAccountPair() - - def send_addAccountPair(self, accountId, proxyId, machineId, proxyData, accountData): - self._oprot.writeMessageBegin('addAccountPair', TMessageType.CALL, self._seqid) - args = addAccountPair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.proxyData = proxyData - args.accountData = accountData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addAccountPair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addAccountPair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccountPair failed: unknown result") - - def getPair(self, machineId): - """ - Parameters: - - machineId - - """ - self.send_getPair(machineId) - return self.recv_getPair() - - def send_getPair(self, machineId): - self._oprot.writeMessageBegin('getPair', TMessageType.CALL, self._seqid) - args = getPair_args() - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getPair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if 
mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getPair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getPair failed: unknown result") - - def pair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - self.send_pair(accountId, proxyId, machineId) - return self.recv_pair() - - def send_pair(self, accountId, proxyId, machineId): - self._oprot.writeMessageBegin('pair', TMessageType.CALL, self._seqid) - args = pair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_pair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = pair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "pair failed: unknown result") - - def unpair(self, accountId, proxyId, machineId): - """ - Parameters: - - accountId - - proxyId - - machineId - - """ - self.send_unpair(accountId, proxyId, machineId) - return self.recv_unpair() - - def send_unpair(self, accountId, proxyId, machineId): - self._oprot.writeMessageBegin('unpair', TMessageType.CALL, self._seqid) - args = unpair_args() - args.accountId = accountId - args.proxyId = proxyId - args.machineId = machineId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_unpair(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = unpair_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "unpair failed: unknown result") - - def listAccountPairs(self, filter): - """ - Parameters: - - filter - - """ - self.send_listAccountPairs(filter) - return self.recv_listAccountPairs() - - def send_listAccountPairs(self, filter): - self._oprot.writeMessageBegin('listAccountPairs', TMessageType.CALL, self._seqid) - args = listAccountPairs_args() - args.filter = filter - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listAccountPairs(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listAccountPairs_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise 
TApplicationException(TApplicationException.MISSING_RESULT, "listAccountPairs failed: unknown result") - - def addAccount(self, accountId, accountData): - """ - Parameters: - - accountId - - accountData - - """ - self.send_addAccount(accountId, accountData) - return self.recv_addAccount() - - def send_addAccount(self, accountId, accountData): - self._oprot.writeMessageBegin('addAccount', TMessageType.CALL, self._seqid) - args = addAccount_args() - args.accountId = accountId - args.accountData = accountData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addAccount failed: unknown result") - - def suspendAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_suspendAccount(accountId) - return self.recv_suspendAccount() - - def send_suspendAccount(self, accountId): - self._oprot.writeMessageBegin('suspendAccount', TMessageType.CALL, self._seqid) - args = suspendAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_suspendAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = suspendAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "suspendAccount failed: unknown result") - - def resumeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_resumeAccount(accountId) - return self.recv_resumeAccount() - - def send_resumeAccount(self, accountId): - self._oprot.writeMessageBegin('resumeAccount', TMessageType.CALL, self._seqid) - args = resumeAccount_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_resumeAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = resumeAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "resumeAccount failed: unknown result") - - def removeAccount(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_removeAccount(accountId) - return self.recv_removeAccount() - - def send_removeAccount(self, accountId): - self._oprot.writeMessageBegin('removeAccount', TMessageType.CALL, self._seqid) - args = removeAccount_args() - args.accountId = 
accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_removeAccount(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = removeAccount_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "removeAccount failed: unknown result") - - def listActiveAccounts(self): - self.send_listActiveAccounts() - return self.recv_listActiveAccounts() - - def send_listActiveAccounts(self): - self._oprot.writeMessageBegin('listActiveAccounts', TMessageType.CALL, self._seqid) - args = listActiveAccounts_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listActiveAccounts(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listActiveAccounts_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listActiveAccounts failed: unknown result") - - def addProxy(self, proxyId, proxyData): - """ - Parameters: - - proxyId - - proxyData - - """ - self.send_addProxy(proxyId, proxyData) - return self.recv_addProxy() - - def send_addProxy(self, proxyId, proxyData): - self._oprot.writeMessageBegin('addProxy', TMessageType.CALL, self._seqid) - args = addProxy_args() - args.proxyId = proxyId - args.proxyData = proxyData - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_addProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = addProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "addProxy failed: unknown result") - - def suspendProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_suspendProxy(proxyId) - return self.recv_suspendProxy() - - def send_suspendProxy(self, proxyId): - self._oprot.writeMessageBegin('suspendProxy', TMessageType.CALL, self._seqid) - args = suspendProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_suspendProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = suspendProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise 
result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "suspendProxy failed: unknown result") - - def resumeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_resumeProxy(proxyId) - return self.recv_resumeProxy() - - def send_resumeProxy(self, proxyId): - self._oprot.writeMessageBegin('resumeProxy', TMessageType.CALL, self._seqid) - args = resumeProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_resumeProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = resumeProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "resumeProxy failed: unknown result") - - def removeProxy(self, proxyId): - """ - Parameters: - - proxyId - - """ - self.send_removeProxy(proxyId) - return self.recv_removeProxy() - - def send_removeProxy(self, proxyId): - self._oprot.writeMessageBegin('removeProxy', TMessageType.CALL, self._seqid) - args = removeProxy_args() - args.proxyId = proxyId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_removeProxy(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = removeProxy_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "removeProxy failed: unknown result") - - def listActiveProxies(self): - self.send_listActiveProxies() - return self.recv_listActiveProxies() - - def send_listActiveProxies(self): - self._oprot.writeMessageBegin('listActiveProxies', TMessageType.CALL, self._seqid) - args = listActiveProxies_args() - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_listActiveProxies(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = listActiveProxies_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "listActiveProxies failed: unknown result") - - -class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor): - def __init__(self, handler): - pangramia.base_service.BaseService.Processor.__init__(self, handler) - self._processMap["addAccountPair"] = Processor.process_addAccountPair - self._processMap["getPair"] = Processor.process_getPair - self._processMap["pair"] = Processor.process_pair - self._processMap["unpair"] = Processor.process_unpair - self._processMap["listAccountPairs"] = Processor.process_listAccountPairs - 
self._processMap["addAccount"] = Processor.process_addAccount - self._processMap["suspendAccount"] = Processor.process_suspendAccount - self._processMap["resumeAccount"] = Processor.process_resumeAccount - self._processMap["removeAccount"] = Processor.process_removeAccount - self._processMap["listActiveAccounts"] = Processor.process_listActiveAccounts - self._processMap["addProxy"] = Processor.process_addProxy - self._processMap["suspendProxy"] = Processor.process_suspendProxy - self._processMap["resumeProxy"] = Processor.process_resumeProxy - self._processMap["removeProxy"] = Processor.process_removeProxy - self._processMap["listActiveProxies"] = Processor.process_listActiveProxies - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_addAccountPair(self, seqid, iprot, oprot): - args = addAccountPair_args() - args.read(iprot) - iprot.readMessageEnd() - result = addAccountPair_result() - try: - result.success = self._handler.addAccountPair(args.accountId, args.proxyId, args.machineId, args.proxyData, args.accountData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addAccountPair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getPair(self, seqid, iprot, oprot): - args = getPair_args() - args.read(iprot) - iprot.readMessageEnd() - result = getPair_result() - try: - result.success = self._handler.getPair(args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getPair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_pair(self, seqid, iprot, oprot): - 
args = pair_args() - args.read(iprot) - iprot.readMessageEnd() - result = pair_result() - try: - result.success = self._handler.pair(args.accountId, args.proxyId, args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("pair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_unpair(self, seqid, iprot, oprot): - args = unpair_args() - args.read(iprot) - iprot.readMessageEnd() - result = unpair_result() - try: - result.success = self._handler.unpair(args.accountId, args.proxyId, args.machineId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("unpair", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listAccountPairs(self, seqid, iprot, oprot): - args = listAccountPairs_args() - args.read(iprot) - iprot.readMessageEnd() - result = listAccountPairs_result() - try: - result.success = self._handler.listAccountPairs(args.filter) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listAccountPairs", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_addAccount(self, seqid, iprot, oprot): - args = addAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = addAccount_result() - try: - result.success = self._handler.addAccount(args.accountId, args.accountData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - 
result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_suspendAccount(self, seqid, iprot, oprot): - args = suspendAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = suspendAccount_result() - try: - result.success = self._handler.suspendAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("suspendAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_resumeAccount(self, seqid, iprot, oprot): - args = resumeAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = resumeAccount_result() - try: - result.success = self._handler.resumeAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("resumeAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_removeAccount(self, seqid, iprot, oprot): - args = removeAccount_args() - args.read(iprot) - iprot.readMessageEnd() - result = removeAccount_result() - try: - result.success = self._handler.removeAccount(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') 
- msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("removeAccount", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listActiveAccounts(self, seqid, iprot, oprot): - args = listActiveAccounts_args() - args.read(iprot) - iprot.readMessageEnd() - result = listActiveAccounts_result() - try: - result.success = self._handler.listActiveAccounts() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listActiveAccounts", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_addProxy(self, seqid, iprot, oprot): - args = addProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = addProxy_result() - try: - result.success = self._handler.addProxy(args.proxyId, args.proxyData) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("addProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_suspendProxy(self, seqid, iprot, oprot): - args = suspendProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = suspendProxy_result() - try: - result.success = self._handler.suspendProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("suspendProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_resumeProxy(self, seqid, iprot, oprot): - args = resumeProxy_args() - args.read(iprot) - iprot.readMessageEnd() - 
result = resumeProxy_result() - try: - result.success = self._handler.resumeProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("resumeProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_removeProxy(self, seqid, iprot, oprot): - args = removeProxy_args() - args.read(iprot) - iprot.readMessageEnd() - result = removeProxy_result() - try: - result.success = self._handler.removeProxy(args.proxyId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("removeProxy", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_listActiveProxies(self, seqid, iprot, oprot): - args = listActiveProxies_args() - args.read(iprot) - iprot.readMessageEnd() - result = listActiveProxies_result() - try: - result.success = self._handler.listActiveProxies() - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("listActiveProxies", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - -# HELPER FUNCTIONS AND STRUCTURES - - -class addAccountPair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - proxyData - - accountData - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None, proxyData=None, accountData=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - self.proxyData = proxyData - self.accountData = accountData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, 
TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - self.proxyData.read(iprot) - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRUCT: - self.accountData = pangramia.yt.common.ttypes.AccountData() - self.accountData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccountPair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 4) - self.proxyData.write(oprot) - oprot.writeFieldEnd() - if self.accountData is not None: - oprot.writeFieldBegin('accountData', TType.STRUCT, 5) - self.accountData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccountPair_args) -addAccountPair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 - (4, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ), # 4 - (5, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ), # 5 -) - - -class addAccountPair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if 
iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccountPair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccountPair_result) -addAccountPair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getPair_args(object): - """ - Attributes: - - machineId - - """ - - - def __init__(self, machineId=None,): - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getPair_args') - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 1) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, 
value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getPair_args) -getPair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'machineId', 'UTF8', None, ), # 1 -) - - -class getPair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.AccountPairWithState() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getPair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getPair_result) -getPair_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.AccountPairWithState, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class pair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = 
iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('pair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(pair_args) -pair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 -) - - -class pair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('pair_result') - if self.success is not 
None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(pair_result) -pair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class unpair_args(object): - """ - Attributes: - - accountId - - proxyId - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('unpair_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 3) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, 
self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(unpair_args) -unpair_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.STRING, 'machineId', 'UTF8', None, ), # 3 -) - - -class unpair_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('unpair_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(unpair_result) -unpair_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listAccountPairs_args(object): - """ - Attributes: - - filter - - """ - - - def __init__(self, filter=None,): - self.filter = filter - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.I32: - self.filter = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None 
and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listAccountPairs_args') - if self.filter is not None: - oprot.writeFieldBegin('filter', TType.I32, 1) - oprot.writeI32(self.filter) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listAccountPairs_args) -listAccountPairs_args.thrift_spec = ( - None, # 0 - (1, TType.I32, 'filter', None, None, ), # 1 -) - - -class listAccountPairs_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype3, _size0) = iprot.readListBegin() - for _i4 in range(_size0): - _elem5 = pangramia.yt.common.ttypes.AccountPairWithState() - _elem5.read(iprot) - self.success.append(_elem5) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listAccountPairs_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRUCT, len(self.success)) - for iter6 in self.success: - iter6.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listAccountPairs_result) -listAccountPairs_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRUCT, [pangramia.yt.common.ttypes.AccountPairWithState, None], False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', 
[pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addAccount_args(object): - """ - Attributes: - - accountId - - accountData - - """ - - - def __init__(self, accountId=None, accountData=None,): - self.accountId = accountId - self.accountData = accountData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.accountData = pangramia.yt.common.ttypes.AccountData() - self.accountData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.accountData is not None: - oprot.writeFieldBegin('accountData', TType.STRUCT, 2) - self.accountData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccount_args) -addAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRUCT, 'accountData', [pangramia.yt.common.ttypes.AccountData, None], None, ), # 2 -) - - -class addAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if 
oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addAccount_result) -addAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class suspendAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendAccount_args) -suspendAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class suspendAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: 
- iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendAccount_result) -suspendAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class resumeAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - 
def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeAccount_args) -resumeAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class resumeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeAccount_result) -resumeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class removeAccount_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - 
iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_args) -removeAccount_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class removeAccount_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeAccount_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeAccount_result) -removeAccount_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class 
listActiveAccounts_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_args) -listActiveAccounts_args.thrift_spec = ( -) - - -class listActiveAccounts_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype10, _size7) = iprot.readListBegin() - for _i11 in range(_size7): - _elem12 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem12) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveAccounts_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter13 in self.success: - oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' 
% (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveAccounts_result) -listActiveAccounts_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class addProxy_args(object): - """ - Attributes: - - proxyId - - proxyData - - """ - - - def __init__(self, proxyId=None, proxyData=None,): - self.proxyId = proxyId - self.proxyData = proxyData - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.proxyData = pangramia.yt.common.ttypes.ProxyData() - self.proxyData.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.proxyData is not None: - oprot.writeFieldBegin('proxyData', TType.STRUCT, 2) - self.proxyData.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addProxy_args) -addProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 - (2, TType.STRUCT, 'proxyData', [pangramia.yt.common.ttypes.ProxyData, None], None, ), # 2 -) - - -class addProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = 
pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('addProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(addProxy_result) -addProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class suspendProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendProxy_args) -suspendProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class suspendProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - 
""" - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('suspendProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(suspendProxy_result) -suspendProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class resumeProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else 
self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeProxy_args) -resumeProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class resumeProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('resumeProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(resumeProxy_result) -resumeProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class removeProxy_args(object): - """ - Attributes: - - proxyId - - """ - - - def __init__(self, proxyId=None,): - self.proxyId = proxyId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == 
TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeProxy_args') - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 1) - oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeProxy_args) -removeProxy_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyId', 'UTF8', None, ), # 1 -) - - -class removeProxy_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('removeProxy_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(removeProxy_result) -removeProxy_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, 
TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class listActiveProxies_args(object): - - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveProxies_args') - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveProxies_args) -listActiveProxies_args.thrift_spec = ( -) - - -class listActiveProxies_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.LIST: - self.success = [] - (_etype17, _size14) = iprot.readListBegin() - for _i18 in range(_size14): - _elem19 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.success.append(_elem19) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('listActiveProxies_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.LIST, 0) - oprot.writeListBegin(TType.STRING, len(self.success)) - for iter20 in self.success: - oprot.writeString(iter20.encode('utf-8') if sys.version_info[0] == 2 else iter20) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - 
oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(listActiveProxies_result) -listActiveProxies_result.thrift_spec = ( - (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py deleted file mode 100644 index 00b4776..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'YTAccountsOpService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py deleted file mode 100644 index de828aa..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes -import pangramia.base_service.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py deleted file mode 100644 index 09a78b3..0000000 
--- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py deleted file mode 100644 index a23d813..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/common/ttypes.py +++ /dev/null @@ -1,905 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class JobState(object): - SUCCESS = 0 - FAIL = 1 - BOT_FORBIDDEN_ON_URL_ACCESS = 2 - BOT_FORBIDDEN_ON_FILE_DOWNLOAD = 3 - BOT_CAPTCHA = 4 - BOT_AUTH_RELOGIN_REQUIRED = 5 - BOT_AUTH_SMS_REQUIRED = 6 - BOT_AUTH_DEVICE_QR_REQUIRED = 7 - BOT_ACCOUNT_BANNED = 8 - BOT_IP_BANNED = 9 - - _VALUES_TO_NAMES = { - 0: "SUCCESS", - 1: "FAIL", - 2: "BOT_FORBIDDEN_ON_URL_ACCESS", - 3: "BOT_FORBIDDEN_ON_FILE_DOWNLOAD", - 4: "BOT_CAPTCHA", - 5: "BOT_AUTH_RELOGIN_REQUIRED", - 6: "BOT_AUTH_SMS_REQUIRED", - 7: "BOT_AUTH_DEVICE_QR_REQUIRED", - 8: "BOT_ACCOUNT_BANNED", - 9: "BOT_IP_BANNED", - } - - _NAMES_TO_VALUES = { - "SUCCESS": 0, - "FAIL": 1, - "BOT_FORBIDDEN_ON_URL_ACCESS": 2, - "BOT_FORBIDDEN_ON_FILE_DOWNLOAD": 3, - "BOT_CAPTCHA": 4, - "BOT_AUTH_RELOGIN_REQUIRED": 5, - "BOT_AUTH_SMS_REQUIRED": 6, - "BOT_AUTH_DEVICE_QR_REQUIRED": 7, - "BOT_ACCOUNT_BANNED": 8, - "BOT_IP_BANNED": 9, - } - - -class TokenUpdateMode(object): - AUTOREFRESH_AND_REMAIN_ANONYMOUS = 0 - AUTOREFRESH_AND_ALLOW_AUTH = 1 - AUTOREFRESH_AND_ONLY_AUTH = 2 - CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH = 3 - CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS = 4 - CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH = 5 - AUTO = 6 - - _VALUES_TO_NAMES = { - 0: "AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 1: "AUTOREFRESH_AND_ALLOW_AUTH", - 2: "AUTOREFRESH_AND_ONLY_AUTH", - 3: "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH", - 4: "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS", - 5: "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH", - 6: "AUTO", - } - - _NAMES_TO_VALUES = { - "AUTOREFRESH_AND_REMAIN_ANONYMOUS": 0, - "AUTOREFRESH_AND_ALLOW_AUTH": 1, - "AUTOREFRESH_AND_ONLY_AUTH": 2, - "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH": 3, - "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS": 4, - "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH": 5, - "AUTO": 6, - } - - -class AccountPairState(object): - ACTIVE = 0 - PAUSED = 1 - REMOVED = 2 - IN_PROGRESS = 3 - ALL = 4 - - _VALUES_TO_NAMES = { - 0: "ACTIVE", - 1: "PAUSED", - 2: "REMOVED", - 3: "IN_PROGRESS", - 4: "ALL", - } - - _NAMES_TO_VALUES = { - "ACTIVE": 0, - "PAUSED": 1, - "REMOVED": 2, - "IN_PROGRESS": 3, - "ALL": 4, - } - - -class JobTokenData(object): - """ - Attributes: - - infoJson - - ytdlpCommand - - socks - - jobId - - url - - cookiesBlob - - """ - - - def __init__(self, infoJson=None, 
ytdlpCommand=None, socks=None, jobId=None, url=None, cookiesBlob=None,): - self.infoJson = infoJson - self.ytdlpCommand = ytdlpCommand - self.socks = socks - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.infoJson = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.socks = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobTokenData') - if self.infoJson is not None: - oprot.writeFieldBegin('infoJson', TType.STRING, 1) - oprot.writeString(self.infoJson.encode('utf-8') if sys.version_info[0] == 2 else self.infoJson) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 2) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.socks is not None: - oprot.writeFieldBegin('socks', TType.STRING, 3) - oprot.writeString(self.socks.encode('utf-8') if sys.version_info[0] == 2 else self.socks) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 5) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 6) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - 
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountData(object): - """ - Attributes: - - username - - password - - countryCode - - """ - - - def __init__(self, username=None, password=None, countryCode=None,): - self.username = username - self.password = password - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.username = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.password = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountData') - if self.username is not None: - oprot.writeFieldBegin('username', TType.STRING, 1) - oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username) - oprot.writeFieldEnd() - if self.password is not None: - oprot.writeFieldBegin('password', TType.STRING, 2) - oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 3) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.username is None: - raise TProtocolException(message='Required field username is unset!') - if self.password is None: - raise TProtocolException(message='Required field password is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class ProxyData(object): - """ - Attributes: - - proxyUrl - - countryCode - - """ - - - def __init__(self, proxyUrl=None, countryCode=None,): - self.proxyUrl = proxyUrl - self.countryCode = countryCode - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.proxyUrl = iprot.readString().decode('utf-8', 
errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('ProxyData') - if self.proxyUrl is not None: - oprot.writeFieldBegin('proxyUrl', TType.STRING, 1) - oprot.writeString(self.proxyUrl.encode('utf-8') if sys.version_info[0] == 2 else self.proxyUrl) - oprot.writeFieldEnd() - if self.countryCode is not None: - oprot.writeFieldBegin('countryCode', TType.STRING, 2) - oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.proxyUrl is None: - raise TProtocolException(message='Required field proxyUrl is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class AccountPairWithState(object): - """ - Attributes: - - accountId - - proxyId - - accountPairState - - machineId - - """ - - - def __init__(self, accountId=None, proxyId=None, accountPairState=None, machineId=None,): - self.accountId = accountId - self.proxyId = proxyId - self.accountPairState = accountPairState - self.machineId = machineId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.accountPairState = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('AccountPairWithState') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.proxyId is not None: - oprot.writeFieldBegin('proxyId', TType.STRING, 2) - 
oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId) - oprot.writeFieldEnd() - if self.accountPairState is not None: - oprot.writeFieldBegin('accountPairState', TType.I32, 3) - oprot.writeI32(self.accountPairState) - oprot.writeFieldEnd() - if self.machineId is not None: - oprot.writeFieldBegin('machineId', TType.STRING, 4) - oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.accountId is None: - raise TProtocolException(message='Required field accountId is unset!') - if self.proxyId is None: - raise TProtocolException(message='Required field proxyId is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class JobData(object): - """ - Attributes: - - jobId - - url - - cookiesBlob - - potoken - - visitorId - - ytdlpCommand - - createdTime - - telemetry - - state - - errorMessage - - socks5Id - - """ - - - def __init__(self, jobId=None, url=None, cookiesBlob=None, potoken=None, visitorId=None, ytdlpCommand=None, createdTime=None, telemetry=None, state=None, errorMessage=None, socks5Id=None,): - self.jobId = jobId - self.url = url - self.cookiesBlob = cookiesBlob - self.potoken = potoken - self.visitorId = visitorId - self.ytdlpCommand = ytdlpCommand - self.createdTime = createdTime - self.telemetry = telemetry - self.state = state - self.errorMessage = errorMessage - self.socks5Id = socks5Id - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.potoken = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.visitorId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.STRING: - self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.createdTime = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 8: - if 
ftype == TType.MAP: - self.telemetry = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - self.telemetry[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - elif fid == 9: - if ftype == TType.I32: - self.state = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 10: - if ftype == TType.STRING: - self.errorMessage = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 11: - if ftype == TType.STRING: - self.socks5Id = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('JobData') - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 1) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 2) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.cookiesBlob is not None: - oprot.writeFieldBegin('cookiesBlob', TType.STRING, 3) - oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob) - oprot.writeFieldEnd() - if self.potoken is not None: - oprot.writeFieldBegin('potoken', TType.STRING, 4) - oprot.writeString(self.potoken.encode('utf-8') if sys.version_info[0] == 2 else self.potoken) - oprot.writeFieldEnd() - if self.visitorId is not None: - oprot.writeFieldBegin('visitorId', TType.STRING, 5) - oprot.writeString(self.visitorId.encode('utf-8') if sys.version_info[0] == 2 else self.visitorId) - oprot.writeFieldEnd() - if self.ytdlpCommand is not None: - oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 6) - oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand) - oprot.writeFieldEnd() - if self.createdTime is not None: - oprot.writeFieldBegin('createdTime', TType.STRING, 7) - oprot.writeString(self.createdTime.encode('utf-8') if sys.version_info[0] == 2 else self.createdTime) - oprot.writeFieldEnd() - if self.telemetry is not None: - oprot.writeFieldBegin('telemetry', TType.MAP, 8) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.telemetry)) - for kiter7, viter8 in self.telemetry.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - if self.state is not None: - oprot.writeFieldBegin('state', TType.I32, 9) - oprot.writeI32(self.state) - oprot.writeFieldEnd() - if self.errorMessage is not None: - oprot.writeFieldBegin('errorMessage', TType.STRING, 10) - oprot.writeString(self.errorMessage.encode('utf-8') if sys.version_info[0] == 2 else self.errorMessage) - oprot.writeFieldEnd() - if self.socks5Id is not None: - oprot.writeFieldBegin('socks5Id', TType.STRING, 11) - 
oprot.writeString(self.socks5Id.encode('utf-8') if sys.version_info[0] == 2 else self.socks5Id) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.jobId is None: - raise TProtocolException(message='Required field jobId is unset!') - if self.url is None: - raise TProtocolException(message='Required field url is unset!') - if self.cookiesBlob is None: - raise TProtocolException(message='Required field cookiesBlob is unset!') - if self.potoken is None: - raise TProtocolException(message='Required field potoken is unset!') - if self.visitorId is None: - raise TProtocolException(message='Required field visitorId is unset!') - if self.ytdlpCommand is None: - raise TProtocolException(message='Required field ytdlpCommand is unset!') - if self.createdTime is None: - raise TProtocolException(message='Required field createdTime is unset!') - if self.telemetry is None: - raise TProtocolException(message='Required field telemetry is unset!') - if self.state is None: - raise TProtocolException(message='Required field state is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionPagination(object): - """ - Attributes: - - hasNext - - totalCount - - page - - pageSize - - """ - - - def __init__(self, hasNext=None, totalCount=None, page=None, pageSize=None,): - self.hasNext = hasNext - self.totalCount = totalCount - self.page = page - self.pageSize = pageSize - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.BOOL: - self.hasNext = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.totalCount = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.page = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.I32: - self.pageSize = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionPagination') - if self.hasNext is not None: - oprot.writeFieldBegin('hasNext', TType.BOOL, 1) - oprot.writeBool(self.hasNext) - oprot.writeFieldEnd() - if self.totalCount is not None: - oprot.writeFieldBegin('totalCount', TType.I32, 2) - oprot.writeI32(self.totalCount) - oprot.writeFieldEnd() - if self.page is not None: - oprot.writeFieldBegin('page', TType.I32, 3) - oprot.writeI32(self.page) - oprot.writeFieldEnd() - if self.pageSize is not None: - oprot.writeFieldBegin('pageSize', TType.I32, 4) - oprot.writeI32(self.pageSize) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.hasNext is None: - raise TProtocolException(message='Required field hasNext is unset!') - if 
self.totalCount is None: - raise TProtocolException(message='Required field totalCount is unset!') - if self.page is None: - raise TProtocolException(message='Required field page is unset!') - if self.pageSize is None: - raise TProtocolException(message='Required field pageSize is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class RichCollectionJobData(object): - """ - Attributes: - - items - - pagination - - """ - - - def __init__(self, items=None, pagination=None,): - self.items = items - self.pagination = pagination - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.LIST: - self.items = [] - (_etype12, _size9) = iprot.readListBegin() - for _i13 in range(_size9): - _elem14 = JobData() - _elem14.read(iprot) - self.items.append(_elem14) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.pagination = RichCollectionPagination() - self.pagination.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('RichCollectionJobData') - if self.items is not None: - oprot.writeFieldBegin('items', TType.LIST, 1) - oprot.writeListBegin(TType.STRUCT, len(self.items)) - for iter15 in self.items: - iter15.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.pagination is not None: - oprot.writeFieldBegin('pagination', TType.STRUCT, 2) - self.pagination.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.items is None: - raise TProtocolException(message='Required field items is unset!') - if self.pagination is None: - raise TProtocolException(message='Required field pagination is unset!') - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(JobTokenData) -JobTokenData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'infoJson', 'UTF8', None, ), # 1 - (2, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 2 - (3, TType.STRING, 'socks', 'UTF8', None, ), # 3 - (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4 - (5, TType.STRING, 'url', 'UTF8', None, ), # 5 - (6, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 6 -) -all_structs.append(AccountData) -AccountData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'username', 'UTF8', None, ), # 1 - (2, TType.STRING, 'password', 'UTF8', None, ), # 2 - (3, TType.STRING, 'countryCode', 'UTF8', None, ), # 3 -) -all_structs.append(ProxyData) 
-ProxyData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'proxyUrl', 'UTF8', None, ), # 1 - (2, TType.STRING, 'countryCode', 'UTF8', None, ), # 2 -) -all_structs.append(AccountPairWithState) -AccountPairWithState.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'proxyId', 'UTF8', None, ), # 2 - (3, TType.I32, 'accountPairState', None, None, ), # 3 - (4, TType.STRING, 'machineId', 'UTF8', None, ), # 4 -) -all_structs.append(JobData) -JobData.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'jobId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'url', 'UTF8', None, ), # 2 - (3, TType.STRING, 'cookiesBlob', 'UTF8', None, ), # 3 - (4, TType.STRING, 'potoken', 'UTF8', None, ), # 4 - (5, TType.STRING, 'visitorId', 'UTF8', None, ), # 5 - (6, TType.STRING, 'ytdlpCommand', 'UTF8', None, ), # 6 - (7, TType.STRING, 'createdTime', 'UTF8', None, ), # 7 - (8, TType.MAP, 'telemetry', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 8 - (9, TType.I32, 'state', None, None, ), # 9 - (10, TType.STRING, 'errorMessage', 'UTF8', None, ), # 10 - (11, TType.STRING, 'socks5Id', 'UTF8', None, ), # 11 -) -all_structs.append(RichCollectionPagination) -RichCollectionPagination.thrift_spec = ( - None, # 0 - (1, TType.BOOL, 'hasNext', None, None, ), # 1 - (2, TType.I32, 'totalCount', None, None, ), # 2 - (3, TType.I32, 'page', None, None, ), # 3 - (4, TType.I32, 'pageSize', None, None, ), # 4 -) -all_structs.append(RichCollectionJobData) -RichCollectionJobData.thrift_spec = ( - None, # 0 - (1, TType.LIST, 'items', (TType.STRUCT, [JobData, None], False), None, ), # 1 - (2, TType.STRUCT, 'pagination', [RichCollectionPagination, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py deleted file mode 100644 index e930913..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py +++ /dev/null @@ -1,254 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import 
TProtocolException -from thrift.TRecursive import fix_spec - -import sys - -from thrift.transport import TTransport -all_structs = [] - - -class PBServiceException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBServiceException, self).__setattr__('message', message) - super(PBServiceException, self).__setattr__('errorCode', errorCode) - super(PBServiceException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype1, _vtype2, _size0) = iprot.readMapBegin() - for _i4 in range(_size0): - _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key5] = _val6 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBServiceException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter7, viter8 in self.context.items(): - oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7) - oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def __str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in 
self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - - -class PBUserException(TException): - """ - Attributes: - - message - - errorCode - - context - - """ - - - def __init__(self, message=None, errorCode=None, context=None,): - super(PBUserException, self).__setattr__('message', message) - super(PBUserException, self).__setattr__('errorCode', errorCode) - super(PBUserException, self).__setattr__('context', context) - - def __setattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __delattr__(self, *args): - raise TypeError("can't modify immutable instance") - - def __hash__(self): - return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, )) - - @classmethod - def read(cls, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None: - return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec]) - iprot.readStructBegin() - message = None - errorCode = None - context = None - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.MAP: - context = {} - (_ktype10, _vtype11, _size9) = iprot.readMapBegin() - for _i13 in range(_size9): - _key14 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - _val15 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - context[_key14] = _val15 - iprot.readMapEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - return cls( - message=message, - errorCode=errorCode, - context=context, - ) - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('PBUserException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) - oprot.writeFieldEnd() - if self.errorCode is not None: - oprot.writeFieldBegin('errorCode', TType.STRING, 2) - oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode) - oprot.writeFieldEnd() - if self.context is not None: - oprot.writeFieldBegin('context', TType.MAP, 3) - oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context)) - for kiter16, viter17 in self.context.items(): - oprot.writeString(kiter16.encode('utf-8') if sys.version_info[0] == 2 else kiter16) - oprot.writeString(viter17.encode('utf-8') if sys.version_info[0] == 2 else viter17) - oprot.writeMapEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - if self.message is None: - raise TProtocolException(message='Required field message is unset!') - return - - def 
__str__(self): - return repr(self) - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(PBServiceException) -PBServiceException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -all_structs.append(PBUserException) -PBUserException.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'message', 'UTF8', None, ), # 1 - (2, TType.STRING, 'errorCode', 'UTF8', None, ), # 2 - (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py deleted file mode 100644 index 8589aee..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py +++ /dev/null @@ -1,1360 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.base_service.BaseService -import logging -from .ttypes import * -from thrift.Thrift import TProcessor -from thrift.transport import TTransport -all_structs = [] - - -class Iface(pangramia.base_service.BaseService.Iface): - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - pass - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - pass - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - pass - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - pass - - -class Client(pangramia.base_service.BaseService.Client, Iface): - def __init__(self, iprot, oprot=None): - pangramia.base_service.BaseService.Client.__init__(self, iprot, oprot) - - def getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - """ - Parameters: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - self.send_getOrRefreshTokenWithReport(accountId, oldUrl, status, details, jobId, updateType, url) - return self.recv_getOrRefreshTokenWithReport() - - def send_getOrRefreshTokenWithReport(self, accountId, oldUrl, status, details, jobId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshTokenWithReport', TMessageType.CALL, self._seqid) - args = getOrRefreshTokenWithReport_args() - args.accountId = accountId - args.oldUrl = oldUrl - args.status = status - args.details = details - 
args.jobId = jobId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshTokenWithReport(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshTokenWithReport_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshTokenWithReport failed: unknown result") - - def getOrRefreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_getOrRefreshToken(accountId, updateType, url) - return self.recv_getOrRefreshToken() - - def send_getOrRefreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('getOrRefreshToken', TMessageType.CALL, self._seqid) - args = getOrRefreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getOrRefreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getOrRefreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getOrRefreshToken failed: unknown result") - - def getLatestToken(self, accountId): - """ - Parameters: - - accountId - - """ - self.send_getLatestToken(accountId) - return self.recv_getLatestToken() - - def send_getLatestToken(self, accountId): - self._oprot.writeMessageBegin('getLatestToken', TMessageType.CALL, self._seqid) - args = getLatestToken_args() - args.accountId = accountId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_getLatestToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = getLatestToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "getLatestToken failed: unknown result") - - def refreshToken(self, accountId, updateType, url): - """ - Parameters: - - accountId - - updateType - - url - - """ - self.send_refreshToken(accountId, updateType, url) - return self.recv_refreshToken() - - def send_refreshToken(self, accountId, updateType, url): - self._oprot.writeMessageBegin('refreshToken', TMessageType.CALL, self._seqid) - args = refreshToken_args() - args.accountId = accountId - args.updateType = updateType - args.url = url - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def 
recv_refreshToken(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = refreshToken_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "refreshToken failed: unknown result") - - def reportState(self, url, status, details, jobId): - """ - Parameters: - - url - - status - - details - - jobId - - """ - self.send_reportState(url, status, details, jobId) - return self.recv_reportState() - - def send_reportState(self, url, status, details, jobId): - self._oprot.writeMessageBegin('reportState', TMessageType.CALL, self._seqid) - args = reportState_args() - args.url = url - args.status = status - args.details = details - args.jobId = jobId - args.write(self._oprot) - self._oprot.writeMessageEnd() - self._oprot.trans.flush() - - def recv_reportState(self): - iprot = self._iprot - (fname, mtype, rseqid) = iprot.readMessageBegin() - if mtype == TMessageType.EXCEPTION: - x = TApplicationException() - x.read(iprot) - iprot.readMessageEnd() - raise x - result = reportState_result() - result.read(iprot) - iprot.readMessageEnd() - if result.success is not None: - return result.success - if result.serviceExp is not None: - raise result.serviceExp - if result.userExp is not None: - raise result.userExp - raise TApplicationException(TApplicationException.MISSING_RESULT, "reportState failed: unknown result") - - -class Processor(pangramia.base_service.BaseService.Processor, Iface, TProcessor): - def __init__(self, handler): - pangramia.base_service.BaseService.Processor.__init__(self, handler) - self._processMap["getOrRefreshTokenWithReport"] = Processor.process_getOrRefreshTokenWithReport - self._processMap["getOrRefreshToken"] = Processor.process_getOrRefreshToken - self._processMap["getLatestToken"] = Processor.process_getLatestToken - self._processMap["refreshToken"] = Processor.process_refreshToken - self._processMap["reportState"] = Processor.process_reportState - self._on_message_begin = None - - def on_message_begin(self, func): - self._on_message_begin = func - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if self._on_message_begin: - self._on_message_begin(name, type, seqid) - if name not in self._processMap: - iprot.skip(TType.STRUCT) - iprot.readMessageEnd() - x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) - oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) - x.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - return - else: - self._processMap[name](self, seqid, iprot, oprot) - return True - - def process_getOrRefreshTokenWithReport(self, seqid, iprot, oprot): - args = getOrRefreshTokenWithReport_args() - args.read(iprot) - iprot.readMessageEnd() - result = getOrRefreshTokenWithReport_result() - try: - result.success = self._handler.getOrRefreshTokenWithReport(args.accountId, args.oldUrl, args.status, args.details, args.jobId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except 
pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getOrRefreshTokenWithReport", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getOrRefreshToken(self, seqid, iprot, oprot): - args = getOrRefreshToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = getOrRefreshToken_result() - try: - result.success = self._handler.getOrRefreshToken(args.accountId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getOrRefreshToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_getLatestToken(self, seqid, iprot, oprot): - args = getLatestToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = getLatestToken_result() - try: - result.success = self._handler.getLatestToken(args.accountId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("getLatestToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_refreshToken(self, seqid, iprot, oprot): - args = refreshToken_args() - args.read(iprot) - iprot.readMessageEnd() - result = refreshToken_result() - try: - result.success = self._handler.refreshToken(args.accountId, args.updateType, args.url) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - 
logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("refreshToken", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - - def process_reportState(self, seqid, iprot, oprot): - args = reportState_args() - args.read(iprot) - iprot.readMessageEnd() - result = reportState_result() - try: - result.success = self._handler.reportState(args.url, args.status, args.details, args.jobId) - msg_type = TMessageType.REPLY - except TTransport.TTransportException: - raise - except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp: - msg_type = TMessageType.REPLY - result.serviceExp = serviceExp - except pangramia.yt.exceptions.ttypes.PBUserException as userExp: - msg_type = TMessageType.REPLY - result.userExp = userExp - except TApplicationException as ex: - logging.exception('TApplication exception in handler') - msg_type = TMessageType.EXCEPTION - result = ex - except Exception: - logging.exception('Unexpected exception in handler') - msg_type = TMessageType.EXCEPTION - result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') - oprot.writeMessageBegin("reportState", msg_type, seqid) - result.write(oprot) - oprot.writeMessageEnd() - oprot.trans.flush() - -# HELPER FUNCTIONS AND STRUCTURES - - -class getOrRefreshTokenWithReport_args(object): - """ - Attributes: - - accountId - - oldUrl - - status - - details - - jobId - - updateType - - url - - """ - - - def __init__(self, accountId=None, oldUrl=None, status=None, details=None, jobId=None, updateType= 6, url=None,): - self.accountId = accountId - self.oldUrl = oldUrl - self.status = status - self.details = details - self.jobId = jobId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.oldUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.status = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 7: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and 
self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshTokenWithReport_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.oldUrl is not None: - oprot.writeFieldBegin('oldUrl', TType.STRING, 2) - oprot.writeString(self.oldUrl.encode('utf-8') if sys.version_info[0] == 2 else self.oldUrl) - oprot.writeFieldEnd() - if self.status is not None: - oprot.writeFieldBegin('status', TType.I32, 3) - oprot.writeI32(self.status) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.STRING, 4) - oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 5) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 6) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 7) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshTokenWithReport_args) -getOrRefreshTokenWithReport_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.STRING, 'oldUrl', 'UTF8', None, ), # 2 - (3, TType.I32, 'status', None, None, ), # 3 - (4, TType.STRING, 'details', 'UTF8', None, ), # 4 - (5, TType.STRING, 'jobId', 'UTF8', None, ), # 5 - (6, TType.I32, 'updateType', None, 6, ), # 6 - (7, TType.STRING, 'url', 'UTF8', None, ), # 7 -) - - -class getOrRefreshTokenWithReport_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not 
None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshTokenWithReport_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshTokenWithReport_result) -getOrRefreshTokenWithReport_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getOrRefreshToken_args(object): - """ - Attributes: - - accountId - - updateType - - url - - """ - - - def __init__(self, accountId=None, updateType= 6, url=None,): - self.accountId = accountId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 2) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 3) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in 
self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshToken_args) -getOrRefreshToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.I32, 'updateType', None, 6, ), # 2 - (3, TType.STRING, 'url', 'UTF8', None, ), # 3 -) - - -class getOrRefreshToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getOrRefreshToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getOrRefreshToken_result) -getOrRefreshToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class getLatestToken_args(object): - """ - Attributes: - - accountId - - """ - - - def __init__(self, accountId=None,): - self.accountId = accountId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, 
fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getLatestToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(getLatestToken_args) -getLatestToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 -) - - -class getLatestToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('getLatestToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) 
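Every RPC in this generated file follows the same Thrift pattern: the client packs its parameters into a `*_args` struct, writes it with that struct's `write()`, and decodes the reply into the matching `*_result` struct, whose `success`/`serviceExp`/`userExp` fields become the return value or a raised exception. A minimal usage sketch, assuming a reachable server (host/port are placeholders) and the generated `pangramia` packages importable on `sys.path`:

```python
# Minimal sketch, assuming a reachable server (host/port are placeholders)
# and the generated "pangramia" packages importable on sys.path.
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.common.ttypes import TokenUpdateMode

socket = TSocket.TSocket('localhost', 9090)        # placeholder endpoint
transport = TTransport.TFramedTransport(socket)    # the deleted client below always uses framed transport
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = YTTokenOpService.Client(protocol)

transport.open()
try:
    # Under the hood this serializes a getOrRefreshToken_args struct and
    # decodes the reply into getOrRefreshToken_result; a populated
    # serviceExp/userExp field is re-raised as PBServiceException/PBUserException.
    token = client.getOrRefreshToken(accountId='default',
                                     updateType=TokenUpdateMode.AUTO,
                                     url='https://www.youtube.com/watch?v=vKTVLpmvznI')
    print(token.ytdlpCommand)
finally:
    transport.close()
```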
-all_structs.append(getLatestToken_result) -getLatestToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class refreshToken_args(object): - """ - Attributes: - - accountId - - updateType - - url - - """ - - - def __init__(self, accountId=None, updateType= 6, url=None,): - self.accountId = accountId - self.updateType = updateType - self.url = url - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.updateType = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('refreshToken_args') - if self.accountId is not None: - oprot.writeFieldBegin('accountId', TType.STRING, 1) - oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId) - oprot.writeFieldEnd() - if self.updateType is not None: - oprot.writeFieldBegin('updateType', TType.I32, 2) - oprot.writeI32(self.updateType) - oprot.writeFieldEnd() - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 3) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(refreshToken_args) -refreshToken_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'accountId', 'UTF8', None, ), # 1 - (2, TType.I32, 'updateType', None, 6, ), # 2 - (3, TType.STRING, 'url', 'UTF8', None, ), # 3 -) - - -class refreshToken_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = 
iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.STRUCT: - self.success = pangramia.yt.common.ttypes.JobTokenData() - self.success.read(iprot) - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('refreshToken_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.STRUCT, 0) - self.success.write(oprot) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(refreshToken_result) -refreshToken_result.thrift_spec = ( - (0, TType.STRUCT, 'success', [pangramia.yt.common.ttypes.JobTokenData, None], None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) - - -class reportState_args(object): - """ - Attributes: - - url - - status - - details - - jobId - - """ - - - def __init__(self, url=None, status=None, details=None, jobId=None,): - self.url = url - self.status = status - self.details = details - self.jobId = jobId - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.status = iprot.readI32() - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.details = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRING: - self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, 
[self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportState_args') - if self.url is not None: - oprot.writeFieldBegin('url', TType.STRING, 1) - oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url) - oprot.writeFieldEnd() - if self.status is not None: - oprot.writeFieldBegin('status', TType.I32, 2) - oprot.writeI32(self.status) - oprot.writeFieldEnd() - if self.details is not None: - oprot.writeFieldBegin('details', TType.STRING, 3) - oprot.writeString(self.details.encode('utf-8') if sys.version_info[0] == 2 else self.details) - oprot.writeFieldEnd() - if self.jobId is not None: - oprot.writeFieldBegin('jobId', TType.STRING, 4) - oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportState_args) -reportState_args.thrift_spec = ( - None, # 0 - (1, TType.STRING, 'url', 'UTF8', None, ), # 1 - (2, TType.I32, 'status', None, None, ), # 2 - (3, TType.STRING, 'details', 'UTF8', None, ), # 3 - (4, TType.STRING, 'jobId', 'UTF8', None, ), # 4 -) - - -class reportState_result(object): - """ - Attributes: - - success - - serviceExp - - userExp - - """ - - - def __init__(self, success=None, serviceExp=None, userExp=None,): - self.success = success - self.serviceExp = serviceExp - self.userExp = userExp - - def read(self, iprot): - if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 0: - if ftype == TType.BOOL: - self.success = iprot.readBool() - else: - iprot.skip(ftype) - elif fid == 1: - if ftype == TType.STRUCT: - self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot) - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRUCT: - self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot._fast_encode is not None and self.thrift_spec is not None: - oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - return - oprot.writeStructBegin('reportState_result') - if self.success is not None: - oprot.writeFieldBegin('success', TType.BOOL, 0) - oprot.writeBool(self.success) - oprot.writeFieldEnd() - if self.serviceExp is not None: - oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1) - self.serviceExp.write(oprot) - oprot.writeFieldEnd() - if self.userExp is not None: - oprot.writeFieldBegin('userExp', TType.STRUCT, 2) - self.userExp.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, 
self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) -all_structs.append(reportState_result) -reportState_result.thrift_spec = ( - (0, TType.BOOL, 'success', None, None, ), # 0 - (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1 - (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2 -) -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py deleted file mode 100644 index e97f47d..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants', 'YTTokenOpService'] diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py deleted file mode 100644 index 09a78b3..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -from .ttypes import * diff --git a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py b/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py deleted file mode 100644 index de828aa..0000000 --- a/airflow/ytdlp-ops-auth/build/lib/thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.20.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# options string: py -# - -from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -from thrift.TRecursive import fix_spec - -import sys -import pangramia.yt.common.ttypes -import pangramia.yt.exceptions.ttypes -import pangramia.base_service.ttypes - -from thrift.transport import TTransport -all_structs = [] -fix_spec(all_structs) -del all_structs diff --git a/airflow/ytdlp-ops-auth/dist/yt_ops_services-1.6.2.dev0-py3.11.egg b/airflow/ytdlp-ops-auth/dist/yt_ops_services-1.6.2.dev0-py3.11.egg deleted file mode 100644 index c3958ab..0000000 Binary files a/airflow/ytdlp-ops-auth/dist/yt_ops_services-1.6.2.dev0-py3.11.egg and /dev/null differ diff --git a/airflow/ytdlp-ops-auth/pangramia b/airflow/ytdlp-ops-auth/pangramia deleted file mode 120000 index 48d8781..0000000 --- a/airflow/ytdlp-ops-auth/pangramia +++ /dev/null @@ -1 +0,0 @@ -../../thrift_model/gen_py/pangramia \ No newline at end of file diff --git a/airflow/ytdlp-ops-auth/requirements.txt b/airflow/ytdlp-ops-auth/requirements.txt deleted file mode 100644 index 05bb5c5..0000000 --- a/airflow/ytdlp-ops-auth/requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ -flask -psutil -PySocks>=1.7.0 -python-dotenv==1.0.1 -redis>=4.0.0 -requests>=2.31.0 -tabulate>=0.9.0 -thrift>=0.16.0,<=0.20.0 -waitress -yt_dlp>=2025.3.27 -yt-dlp-get-pot==0.3.0 diff --git 
a/airflow/ytdlp-ops-auth/setup.py b/airflow/ytdlp-ops-auth/setup.py deleted file mode 100644 index 71b338a..0000000 --- a/airflow/ytdlp-ops-auth/setup.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file is no longer needed and will be removed. -# The packaging logic has been consolidated into the root setup.py file. diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO deleted file mode 100644 index 6adf735..0000000 --- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/PKG-INFO +++ /dev/null @@ -1,9 +0,0 @@ -Metadata-Version: 2.2 -Name: yt_ops_services -Version: 1.6.2.dev0 -Requires-Python: >=3.9 -Requires-Dist: thrift<=0.20.0,>=0.16.0 -Requires-Dist: python-dotenv>=1.0.0 -Requires-Dist: psutil -Dynamic: requires-dist -Dynamic: requires-python diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt deleted file mode 100644 index d7a7433..0000000 --- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/SOURCES.txt +++ /dev/null @@ -1,48 +0,0 @@ -setup.py -./pangramia/__init__.py -./pangramia/base_service/BaseService.py -./pangramia/base_service/__init__.py -./pangramia/base_service/constants.py -./pangramia/base_service/ttypes.py -./pangramia/yt/__init__.py -./pangramia/yt/admin_ops/YTAccountsOpService.py -./pangramia/yt/admin_ops/__init__.py -./pangramia/yt/admin_ops/constants.py -./pangramia/yt/admin_ops/ttypes.py -./pangramia/yt/common/__init__.py -./pangramia/yt/common/constants.py -./pangramia/yt/common/ttypes.py -./pangramia/yt/exceptions/__init__.py -./pangramia/yt/exceptions/constants.py -./pangramia/yt/exceptions/ttypes.py -./pangramia/yt/tokens_ops/YTTokenOpService.py -./pangramia/yt/tokens_ops/__init__.py -./pangramia/yt/tokens_ops/constants.py -./pangramia/yt/tokens_ops/ttypes.py -./thrift_model/__init__.py -./thrift_model/gen_py/__init__.py -./thrift_model/gen_py/pangramia/__init__.py -./thrift_model/gen_py/pangramia/base_service/BaseService.py -./thrift_model/gen_py/pangramia/base_service/__init__.py -./thrift_model/gen_py/pangramia/base_service/constants.py -./thrift_model/gen_py/pangramia/base_service/ttypes.py -./thrift_model/gen_py/pangramia/yt/__init__.py -./thrift_model/gen_py/pangramia/yt/admin_ops/YTAccountsOpService.py -./thrift_model/gen_py/pangramia/yt/admin_ops/__init__.py -./thrift_model/gen_py/pangramia/yt/admin_ops/constants.py -./thrift_model/gen_py/pangramia/yt/admin_ops/ttypes.py -./thrift_model/gen_py/pangramia/yt/common/__init__.py -./thrift_model/gen_py/pangramia/yt/common/constants.py -./thrift_model/gen_py/pangramia/yt/common/ttypes.py -./thrift_model/gen_py/pangramia/yt/exceptions/__init__.py -./thrift_model/gen_py/pangramia/yt/exceptions/constants.py -./thrift_model/gen_py/pangramia/yt/exceptions/ttypes.py -./thrift_model/gen_py/pangramia/yt/tokens_ops/YTTokenOpService.py -./thrift_model/gen_py/pangramia/yt/tokens_ops/__init__.py -./thrift_model/gen_py/pangramia/yt/tokens_ops/constants.py -./thrift_model/gen_py/pangramia/yt/tokens_ops/ttypes.py -yt_ops_services.egg-info/PKG-INFO -yt_ops_services.egg-info/SOURCES.txt -yt_ops_services.egg-info/dependency_links.txt -yt_ops_services.egg-info/requires.txt -yt_ops_services.egg-info/top_level.txt \ No newline at end of file diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- 
a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt deleted file mode 100644 index 08ca11a..0000000 --- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/requires.txt +++ /dev/null @@ -1,3 +0,0 @@ -thrift<=0.20.0,>=0.16.0 -python-dotenv>=1.0.0 -psutil diff --git a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt b/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt deleted file mode 100644 index c6a2990..0000000 --- a/airflow/ytdlp-ops-auth/yt_ops_services.egg-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -pangramia -thrift_model diff --git a/airflow/ytdlp-ops-auth/ytdlp_ops_client.log b/airflow/ytdlp-ops-auth/ytdlp_ops_client.log deleted file mode 100644 index 17030cd..0000000 --- a/airflow/ytdlp-ops-auth/ytdlp_ops_client.log +++ /dev/null @@ -1,261 +0,0 @@ -2025-04-01 14:23:28,586 - INFO - Attempting to connect to server at 85.192.30.55:9090... -2025-04-01 14:23:28,700 - INFO - Successfully connected to server -2025-04-01 14:23:28,815 - INFO - Server connection test successful -2025-04-01 14:23:28,815 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=vKTVLpmvznI -2025-04-01 14:23:32,869 - INFO - Successfully received token data from server -2025-04-01 14:23:32,870 - WARNING - infoJson attribute *MISSING* in received token_data object. -2025-04-01 14:23:32,870 - WARNING - Valid info.json was NOT received from the server. -2025-04-01 14:23:32,870 - ERROR - Failed to obtain valid info.json from the server. -2025-04-01 14:40:18,685 - INFO - Attempting to connect to server at 85.192.30.55:9090... -2025-04-01 14:40:18,800 - INFO - Successfully connected to server -2025-04-01 14:40:18,914 - INFO - Server connection test successful -2025-04-01 14:40:18,915 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=vKTVLpmvznI -2025-04-01 14:40:31,927 - INFO - Successfully received token data from server -2025-04-01 14:40:31,929 - INFO - Valid JSON with video data: Best of Dmitri Shostakovich - Essential Classical Music -2025-04-01 14:40:31,932 - INFO - Successfully saved info.json to info_json_vKTVLpmvznI_1743507631.json and latest.json to latest.json -2025-04-13 16:32:14,014 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
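The first sessions in this deleted client log show the client validating the server reply: a missing or empty `infoJson` is logged as a failure, while a parseable payload with video fields is saved to disk. A condensed sketch of that check, mirroring `get_info_json()`/`is_valid_json()` from `ytdlp_ops_client.py` (deleted further below):

```python
import json
import logging

logger = logging.getLogger(__name__)

def validate_info_json(token_data):
    """Condensed from get_info_json()/is_valid_json() in ytdlp_ops_client.py."""
    info = getattr(token_data, 'infoJson', None)
    if not info or info == '{}':
        logger.warning('infoJson attribute *MISSING* or empty in token_data')
        return None
    try:
        data = json.loads(info)
    except json.JSONDecodeError as exc:
        logger.warning('Invalid JSON: %s', exc)
        return None
    if isinstance(data, dict) and ('id' in data or 'title' in data):
        logger.info('Valid JSON with video data: %s', data.get('title', 'Unknown title'))
    return data
```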
-2025-04-13 16:32:14,129 - INFO - Successfully connected to server -2025-04-13 16:32:14,241 - INFO - Server connection test successful -2025-04-13 16:32:14,241 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 16:32:23,236 - ERROR - Unexpected error: TSocket read 0 bytes -2025-04-13 16:32:23,243 - ERROR - Traceback (most recent call last): - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 533, in main - token_data = client.getOrRefreshToken( - ^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken - return self.recv_getOrRefreshToken() - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken - (fname, mtype, rseqid) = iprot.readMessageBegin() - ^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin - sz = self.readI32() - ^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32 - buff = self.trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read - self.readFrame() - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame - buff = self.__trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read - raise TTransportException(type=TTransportException.END_OF_FILE, -thrift.transport.TTransport.TTransportException: TSocket read 0 bytes - -2025-04-13 16:33:43,822 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
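The repeated "TSocket read 0 bytes" failures that follow are all the same failure mode: the server closes the socket while the client blocks in `readMessageBegin()`, so `TSocket.read()` returns 0 bytes and the framed transport raises `TTransportException(END_OF_FILE)`. A hedged reconnect-and-retry wrapper (my own sketch, not part of this repo; `make_client` is a hypothetical factory returning a fresh open transport/client pair):

```python
import time
from thrift.transport import TTransport

def call_with_reconnect(make_client, rpc, attempts=3, delay=5.0):
    """Retry an RPC across reconnects; make_client is a hypothetical factory
    returning a fresh (open transport, client) pair."""
    for attempt in range(1, attempts + 1):
        transport, client = make_client()
        try:
            return rpc(client)
        except TTransport.TTransportException as exc:
            # END_OF_FILE is exactly what the tracebacks above raise when the
            # peer closes the socket before sending a reply frame.
            if exc.type != TTransport.TTransportException.END_OF_FILE or attempt == attempts:
                raise
            time.sleep(delay)  # back off before reconnecting
        finally:
            if transport.isOpen():
                transport.close()
```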
-2025-04-13 16:33:43,933 - INFO - Successfully connected to server -2025-04-13 16:33:44,046 - INFO - Server connection test successful -2025-04-13 16:33:44,047 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 16:33:50,906 - ERROR - Unexpected error: TSocket read 0 bytes -2025-04-13 16:33:50,908 - ERROR - Traceback (most recent call last): - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 533, in main - token_data = client.getOrRefreshToken( - ^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken - return self.recv_getOrRefreshToken() - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken - (fname, mtype, rseqid) = iprot.readMessageBegin() - ^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin - sz = self.readI32() - ^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32 - buff = self.trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read - self.readFrame() - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame - buff = self.__trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read - raise TTransportException(type=TTransportException.END_OF_FILE, -thrift.transport.TTransport.TTransportException: TSocket read 0 bytes - -2025-04-13 17:32:58,458 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
-2025-04-13 17:32:58,563 - INFO - Successfully connected to server -2025-04-13 17:32:58,668 - INFO - Server connection test successful -2025-04-13 17:32:58,668 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 17:33:07,768 - ERROR - Unexpected error: TSocket read 0 bytes -2025-04-13 17:33:07,773 - ERROR - Traceback (most recent call last): - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 533, in main - token_data = client.getOrRefreshToken( - ^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken - return self.recv_getOrRefreshToken() - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken - (fname, mtype, rseqid) = iprot.readMessageBegin() - ^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin - sz = self.readI32() - ^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32 - buff = self.trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read - self.readFrame() - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame - buff = self.__trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read - raise TTransportException(type=TTransportException.END_OF_FILE, -thrift.transport.TTransport.TTransportException: TSocket read 0 bytes - -2025-04-13 17:36:10,276 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
-2025-04-13 17:36:10,388 - INFO - Successfully connected to server -2025-04-13 17:36:10,501 - INFO - Server connection test successful -2025-04-13 17:36:10,501 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 17:36:17,597 - ERROR - Unexpected error: TSocket read 0 bytes -2025-04-13 17:36:17,606 - ERROR - Traceback (most recent call last): - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 543, in main - token_data = client.getOrRefreshToken( - ^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken - return self.recv_getOrRefreshToken() - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken - (fname, mtype, rseqid) = iprot.readMessageBegin() - ^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin - sz = self.readI32() - ^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32 - buff = self.trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read - self.readFrame() - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame - buff = self.__trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read - raise TTransportException(type=TTransportException.END_OF_FILE, -thrift.transport.TTransport.TTransportException: TSocket read 0 bytes - -2025-04-13 18:02:37,249 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
-2025-04-13 18:02:37,361 - INFO - Successfully connected to server -2025-04-13 18:02:37,478 - INFO - Server connection test successful -2025-04-13 18:02:37,478 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 18:02:42,457 - ERROR - Unexpected error: TSocket read 0 bytes -2025-04-13 18:02:42,467 - ERROR - Traceback (most recent call last): - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 533, in main - token_data = client.getOrRefreshToken( - ^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken - return self.recv_getOrRefreshToken() - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken - (fname, mtype, rseqid) = iprot.readMessageBegin() - ^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin - sz = self.readI32() - ^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32 - buff = self.trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read - self.readFrame() - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame - buff = self.__trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read - raise TTransportException(type=TTransportException.END_OF_FILE, -thrift.transport.TTransport.TTransportException: TSocket read 0 bytes - -2025-04-13 18:03:16,782 - INFO - Attempting to connect to server at 89.253.221.173:9090... 
-2025-04-13 18:03:16,890 - INFO - Successfully connected to server -2025-04-13 18:03:16,999 - INFO - Server connection test successful -2025-04-13 18:03:17,000 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 18:03:26,040 - ERROR - Unexpected error: TSocket read 0 bytes -2025-04-13 18:03:26,042 - ERROR - Traceback (most recent call last): - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/ytdlp_ops_client.py", line 533, in main - token_data = client.getOrRefreshToken( - ^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 138, in getOrRefreshToken - return self.recv_getOrRefreshToken() - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/yt-ops-services/airflow/ytdlp-ops-auth/pangramia/yt/tokens_ops/YTTokenOpService.py", line 152, in recv_getOrRefreshToken - (fname, mtype, rseqid) = iprot.readMessageBegin() - ^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 134, in readMessageBegin - sz = self.readI32() - ^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/protocol/TBinaryProtocol.py", line 217, in readI32 - buff = self.trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 283, in read - self.readFrame() - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 287, in readFrame - buff = self.__trans.readAll(4) - ^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TTransport.py", line 62, in readAll - chunk = self.read(sz - have) - ^^^^^^^^^^^^^^^^^^^^ - File "/Users/aperez/micromamba/envs/browser-use/lib/python3.11/site-packages/thrift/transport/TSocket.py", line 166, in read - raise TTransportException(type=TTransportException.END_OF_FILE, -thrift.transport.TTransport.TTransportException: TSocket read 0 bytes - -2025-04-13 18:09:56,759 - INFO - Attempting to connect to server at 89.253.221.173:9090... -2025-04-13 18:09:56,875 - INFO - Successfully connected to server -2025-04-13 18:09:56,988 - INFO - Server connection test successful -2025-04-13 18:09:56,988 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-13 18:10:05,434 - ERROR - Service exception: Bot detection triggered: ERROR: [youtube+GetPOT] i7SQ6ENOv5s: Sign in to confirm you’re not a bot. Use --cookies-from-browser or --cookies for the authentication. See https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp for how to manually pass cookies. Also see https://github.com/yt-dlp/yt-dlp/wiki/Extractors#exporting-youtube-cookies for tips on effectively exporting YouTube cookies -2025-04-14 13:45:44,486 - INFO - Attempting to connect to server at 89.253.221.173:9090...
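The `BOT_DETECTED` entries in this log carry yt-dlp's own remediation advice: supply cookies from an authenticated browser session. A minimal sketch of that remediation through the yt-dlp Python API (`cookies.txt` is a placeholder path to a Netscape-format cookie export):

```python
import yt_dlp

# 'cookies.txt' is a placeholder path to a Netscape-format cookie export
# from a logged-in browser session.
ydl_opts = {
    'cookiefile': 'cookies.txt',           # Python-API equivalent of --cookies
    # 'cookiesfrombrowser': ('firefox',),  # equivalent of --cookies-from-browser
    'quiet': True,
    'skip_download': True,
}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://www.youtube.com/watch?v=i7SQ6ENOv5s', download=False)
    print(info.get('title'))
```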
-2025-04-14 13:45:44,593 - INFO - Successfully connected to server -2025-04-14 13:45:44,702 - INFO - Server connection test successful -2025-04-14 13:45:44,702 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-14 13:45:45,560 - ERROR - Service exception: Script execution failed: file:///app/utils/tokenUtils.js:1 -import { BG, BgConfig, DescrambledChallenge } from '../node_modules/bgutils-js/dist/index.js'; // Add BgConfig, DescrambledChallenge - ^^^^^^^^ -SyntaxError: The requested module '../node_modules/bgutils-js/dist/index.js' does not provide an export named 'BgConfig' - at ModuleJob._instantiate (node:internal/modules/esm/module_job:123:21) - at async ModuleJob.run (node:internal/modules/esm/module_job:191:5) - at async ModuleLoader.import (node:internal/modules/esm/loader:337:24) - at async loadESM (node:internal/process/esm_loader:34:7) - at async handleMainPromise (node:internal/modules/run_main:106:12) - -Node.js v18.20.8 -2025-04-14 14:32:59,820 - INFO - Attempting to connect to server at 89.253.221.173:9090... -2025-04-14 14:32:59,925 - INFO - Successfully connected to server -2025-04-14 14:33:00,031 - INFO - Server connection test successful -2025-04-14 14:33:00,031 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-14 14:33:12,563 - ERROR - Service exception: Bot detection triggered: ERROR: [youtube+GetPOT] i7SQ6ENOv5s: Sign in to confirm you’re not a bot. Use --cookies-from-browser or --cookies for the authentication. See https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp for how to manually pass cookies. Also see https://github.com/yt-dlp/yt-dlp/wiki/Extractors#exporting-youtube-cookies for tips on effectively exporting YouTube cookies -2025-04-14 14:58:31,413 - INFO - Attempting to connect to server at 89.253.221.173:9090... -2025-04-14 14:58:31,518 - INFO - Successfully connected to server -2025-04-14 14:58:31,623 - INFO - Server connection test successful -2025-04-14 14:58:31,624 - INFO - Requesting token for URL: https://www.youtube.com/watch?v=i7SQ6ENOv5s&t=1012s -2025-04-14 14:58:43,453 - ERROR - Service exception: Bot detection triggered: ERROR: [youtube+GetPOT] i7SQ6ENOv5s: Sign in to confirm you’re not a bot. Use --cookies-from-browser or --cookies for the authentication. See https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp for how to manually pass cookies.
Also see https://github.com/yt-dlp/yt-dlp/wiki/Extractors#exporting-youtube-cookies for tips on effectively exporting YouTube cookies diff --git a/airflow/ytdlp-ops-auth/ytdlp_ops_client.py b/airflow/ytdlp-ops-auth/ytdlp_ops_client.py deleted file mode 100644 index f42335b..0000000 --- a/airflow/ytdlp-ops-auth/ytdlp_ops_client.py +++ /dev/null @@ -1,721 +0,0 @@ -#!/usr/bin/env python3 - -from typing import Dict, List, Optional, Any -import argparse -import csv -import datetime -import json -import os -import re -import subprocess -import sys -import time -import uuid -import traceback -import logging -import signal -from pathlib import Path -from tabulate import tabulate -import yt_dlp - -def signal_handler(sig: int, frame) -> None: - """Handle shutdown signals gracefully.""" - logger.info(f"Received signal {sig}, shutting down...") - # Clean up any resources here - sys.exit(0) - -# Register signal handlers -signal.signal(signal.SIGINT, signal_handler) -signal.signal(signal.SIGTERM, signal_handler) - -# Import the patch for Thrift exceptions -try: - import os - from thrift_exceptions_patch import patch_thrift_exceptions - # Explicitly call the patch function to ensure it's applied - patch_thrift_exceptions() - print("Applied Thrift exceptions patch for compatibility") - if 'AIRFLOW_HOME' in os.environ: - print("Running in Airflow environment - patch is essential") - else: - print("Not running in Airflow environment, but patch applied anyway for consistency") -except ImportError: - print("Could not import thrift_exceptions_patch, compatibility may be affected") - print("If running in Airflow, this may cause 'immutable instance' errors") -except Exception as e: - print(f"Error applying Thrift exceptions patch: {e}") - -# --- Python Path Setup --- -# Ensure the script can find necessary modules, especially Thrift-generated code. -# Assumes the script is run from the project root or the path is adjusted accordingly. 
-project_root = Path(__file__).parent.absolute() -gen_py_dir = project_root / "thrift_model" / "gen_py" - -# Add project root to sys.path (needed for the 'pangramia' symlink) -if str(project_root) not in sys.path: - sys.path.insert(0, str(project_root)) - -# Verify paths for debugging -# print("Project Root:", project_root) -# print("Project Root:", project_root) -# print("Gen Py Dir:", gen_py_dir) -# print("Sys Path:", sys.path) -# --- End Python Path Setup --- - -from thrift.transport import TSocket, TTransport -from thrift.protocol import TBinaryProtocol - -try: - from pangramia.yt.tokens_ops import YTTokenOpService - from pangramia.yt.common.ttypes import JobTokenData, TokenUpdateMode, JobState - from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException -except ImportError as e: - print(f"Error importing Thrift-generated modules: {e}") - print("Please ensure you have run './generate-thrift.py' successfully from the project root.") - print(f"Current sys.path includes: {gen_py_dir}") - sys.exit(1) - -# Configure logging -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(levelname)s - %(message)s', - handlers=[ - logging.StreamHandler(), - logging.FileHandler('ytdlp_ops_client.log') - ] -) -logger = logging.getLogger(__name__) - -def get_info_json(token_data): - """Get infoJson from token_data""" - if not hasattr(token_data, 'infoJson'): - logger.error("infoJson attribute missing in token_data") - raise ValueError("Server response missing infoJson") - - if not token_data.infoJson or token_data.infoJson == "{}": - logger.error("Empty infoJson received from server") - raise ValueError("Empty infoJson received from server") - - logger.info(f"Using infoJson from server response ({len(token_data.infoJson)} bytes)") - return token_data.infoJson - -def is_valid_json(json_str): - """Check if a string is valid JSON and not empty""" - if not json_str or json_str == "{}" or json_str == "": - logger.warning("Empty JSON string received") - return False - - try: - data = json.loads(json_str) - - # Check if it's an empty object - if isinstance(data, dict) and not data: - logger.warning("Empty JSON object received") - return False - - # Check if it has an error field - if isinstance(data, dict) and ('error' in data or 'errorCode' in data): - # It's valid JSON but contains an error - logger.warning(f"JSON contains error: {data.get('error', 'Unknown error')} (code: {data.get('errorCode', 'none')})") - return True - - # Check if it has at least some basic fields - if isinstance(data, dict) and ('id' in data or 'title' in data): - logger.info(f"Valid JSON with video data: {data.get('title', 'Unknown title')}") - return True - - # Check if it has token_data which is important - if isinstance(data, dict) and 'token_data' in data and data['token_data']: - logger.info("Valid JSON with token_data") - return True - - logger.warning("JSON is valid but missing expected fields") - return True - except json.JSONDecodeError as e: - logger.warning(f"Invalid JSON: {e}") - return False - except Exception as e: - logger.warning(f"Unexpected error validating JSON: {e}") - return False - -def extract_video_id(url: str) -> Optional[str]: - """Extract video ID from a YouTube URL.""" - # If it's already a video ID - if re.match(r'^[a-zA-Z0-9_-]{11}$', url): - return url - - # Handle youtu.be URLs - youtu_be_match = re.search(r'youtu\.be/([a-zA-Z0-9_-]{11})', url) - if youtu_be_match: - return youtu_be_match.group(1) - - # Handle youtube.com URLs - youtube_match = 
re.search(r'(?:youtube\.com/(?:watch\?v=|embed/|v/)|youtube\.com/.*[?&]v=)([a-zA-Z0-9_-]{11})', url) - if youtube_match: - return youtube_match.group(1) - - # Handle shorts URLs - shorts_match = re.search(r'youtube\.com/shorts/([a-zA-Z0-9_-]{11})', url) - if shorts_match: - return shorts_match.group(1) - - return None - -def list_available_formats(url: str, args: argparse.Namespace) -> Optional[List[Dict[str, Any]]]: - """List available formats for a YouTube video.""" - ydl_opts = { - 'quiet': not args.no_quiet if hasattr(args, 'no_quiet') else True, - 'no_warnings': True, - 'skip_download': True, - 'extract_flat': True, - } - - try: - with yt_dlp.YoutubeDL(ydl_opts) as ydl: - info = ydl.extract_info(url, download=False) - - if not info: - logger.error("Could not retrieve video information") - return None - - formats = info.get('formats', []) - - if not formats: - logger.warning("No formats available for this video") - return None - - # Create a table of available formats - format_table = [] - for f in formats: - format_table.append({ - 'format_id': f.get('format_id', 'unknown'), - 'ext': f.get('ext', 'unknown'), - 'resolution': f.get('resolution', 'unknown'), - 'fps': f.get('fps', 'unknown'), - 'vcodec': f.get('vcodec', 'unknown'), - 'acodec': f.get('acodec', 'unknown'), - 'filesize': f.get('filesize', 'unknown'), - 'format_note': f.get('format_note', '') - }) - - return format_table - - except Exception as e: - logger.error(f"Error listing formats: {e}") - return None -def suggest_best_formats(formats: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """Suggest best formats based on resolution and codec.""" - - best = [] - seen_resolutions = set() - - # Prioritize higher resolutions and certain codecs - preferred_codecs = ["vp9", "avc1", "av01"] # In order of preference - - for f in sorted(formats, key=lambda x: ( - -int(x.get('height', 0) or 0), # Higher resolution first - preferred_codecs.index(x.get('vcodec', '').split('.')[0]) if x.get('vcodec', '').split('.')[0] in preferred_codecs else float('inf'), # Preferred codecs - x.get('filesize', 0) or 0 # Smaller filesize - )): - resolution = f.get('resolution') - if resolution and resolution not in seen_resolutions: - best.append(f) - seen_resolutions.add(resolution) - if len(best) >= 3: # Suggest up to 3 formats - break - return best - -def load_info_json(path: str) -> Optional[Dict[str, Any]]: - """Load and validate info.json file.""" - try: - path = Path(path).resolve() - if not path.exists(): - logger.error(f"Info.json file not found: {path}") - return None - - with open(path, 'r') as f: - data = json.load(f) - - # Basic validation - if not isinstance(data, dict): - logger.error("Invalid info.json format: not a JSON object") - return None - - if 'id' not in data: - logger.warning("Info.json missing video ID") - - return data - - except Exception as e: - logger.error(f"Error loading info.json: {e}") - return None - -def save_info_json(info_json: str, video_id: str, context_dir: str) -> Optional[str]: - """Save info.json to disk and return the saved path.""" - try: - # Ensure context directory exists - Path(context_dir).mkdir(parents=True, exist_ok=True) - - # Create filename with video ID and timestamp - timestamp = int(time.time()) - output_path = Path(context_dir) / f"info_json_{video_id}_{timestamp}.json" - - # Write the file - with open(output_path, 'w') as f: - f.write(info_json) - - # Also create a symlink or copy to the standard name for compatibility - standard_path = Path(context_dir) / f"info_json_{video_id}.json" - try: - # 
Try to create a symlink first (more efficient) - if os.path.exists(standard_path): - os.remove(standard_path) - os.symlink(output_path, standard_path) - except (OSError, AttributeError): - # If symlink fails (e.g., on Windows), make a copy - with open(standard_path, 'w') as f: - f.write(info_json) - - # Save latest.json - latest_path = Path(context_dir) / "latest.json" - with open(latest_path, 'w') as f: - f.write(info_json) - - logger.info(f"Successfully saved info.json to {output_path} and latest.json to {latest_path}") - return str(output_path) - except Exception as e: - logger.error(f"Failed to save info.json: {e}") - logger.error(traceback.format_exc()) - return False - -def main(): - # Create main parser - parser = argparse.ArgumentParser(description='''YtdlpOpsService Client - -This client connects to the YTDLP Operations Server to generate tokens for YouTube videos. -The server performs SOCKS5 proxy connection testing with a 9-second timeout for early detection -of proxy issues. If a proxy connection fails, the server will immediately stop token generation -and return an error instead of trying other clients.''') - - # Add global options - parser.add_argument('--host', default=os.getenv('YTDLP_HOST', 'localhost'), - help='Server host (default: localhost or YTDLP_HOST env)') - parser.add_argument('--port', type=int, default=int(os.getenv('YTDLP_PORT', '9090')), - help='Server port (default: 9090 or YTDLP_PORT env)') - parser.add_argument('--timeout', type=int, default=30000, - help='Timeout in milliseconds (default: 30000)') - parser.add_argument('--timeout-sec', type=int, default=30, - help='Timeout in seconds (default: 30, overrides --timeout if provided)') - parser.add_argument('--context-dir', default='.', help='Context directory to save info.json (default: .)') - parser.add_argument('--load-info-json', help='Path to existing info.json file to load') - parser.add_argument('--framed-transport', action='store_true', - help='Use TFramedTransport instead of TBufferedTransport for handling very large messages') - parser.add_argument('--force-framed-transport', action='store_true', - help='Force the use of TFramedTransport (recommended for large messages)') - - # Create subparsers for commands - subparsers = parser.add_subparsers(dest='command', required=True, help='Commands') - - # getToken command - get_token_parser = subparsers.add_parser('getToken', help='Get token for a YouTube URL', - description='''Get token for a YouTube URL - -This command connects to the server to generate tokens for a YouTube video. -The server will test any configured SOCKS5 proxy with a 9-second timeout. 
-If the proxy connection fails, token generation will stop immediately with an error.''') - get_token_parser.add_argument('--url', required=True, - help='YouTube URL to process') - # --format removed, format/quality is determined by the server or embedded in the command - get_token_parser.add_argument('--account_id', default='default', - help='Account ID (default: default)') - get_token_parser.add_argument('--list-formats', action='store_true', - help='List available formats for the video') - - args = parser.parse_args() - - # Handle info.json loading - if args.load_info_json: - info_json = load_info_json(args.load_info_json) - if info_json: - print("Loaded info.json:") - print(json.dumps(info_json, indent=2)) - return - - transport = None - try: - # Ensure context directory exists and is writable - try: - Path(args.context_dir).mkdir(parents=True, exist_ok=True) - test_file = Path(args.context_dir) / "test.txt" - test_file.touch() - test_file.unlink() - except Exception as e: - logger.error(f"Could not access context directory {args.context_dir}: {e}") - print(f"Error: Could not access context directory {args.context_dir}") - sys.exit(1) - - try: - # Check if we should use framed transport for very large messages - use_framed_transport = args.framed_transport or args.force_framed_transport or os.environ.get('USE_FRAMED_TRANSPORT', '').lower() in ('1', 'true', 'yes') - logger.debug(f"Using framed transport: {use_framed_transport}") # Changed to DEBUG - - # Create socket with configurable timeout, force IPv4 - socket = TSocket.TSocket(args.host, args.port, socket_family=2) # AF_INET = 2 for IPv4 - - # Use timeout-sec if provided, otherwise use timeout in milliseconds - if args.timeout_sec is not None: - socket.setTimeout(args.timeout_sec * 1000) # Convert seconds to milliseconds - logger.debug(f"Using timeout of {args.timeout_sec} seconds") # Changed to DEBUG - else: - socket.setTimeout(args.timeout) # Use timeout from CLI in milliseconds - logger.debug(f"Using timeout of {args.timeout} milliseconds") # Changed to DEBUG - - # Always use TFramedTransport to match the server - transport = TTransport.TFramedTransport(socket) - logger.debug("Using TFramedTransport for large messages") # Changed to DEBUG - - protocol = TBinaryProtocol.TBinaryProtocol(transport) - client = YTTokenOpService.Client(protocol) - - logger.info(f"Attempting to connect to server at {args.host}:{args.port}...") - try: - transport.open() - logger.info("Successfully connected to server") - except TTransport.TTransportException as e: - logger.error(f"Connection failed: {str(e)}") - print(f"Error: Could not connect to server at {args.host}:{args.port}") - print(f"Reason: {str(e)}") - sys.exit(1) - - # Add connection test - try: - client.ping() - logger.info("Server connection test successful") - except Exception as e: - logger.error(f"Server connection test failed: {e}") - raise - except TTransport.TTransportException as e: - logger.error(f"Connection failed: {str(e)}") - logger.error(f"Could not connect to {args.host}:{args.port}") - sys.exit(1) - except Exception as e: - logger.error(f"Connection failed: {str(e)}") - logger.error(traceback.format_exc()) - sys.exit(1) - - - if args.command == 'getToken': - url = args.url - # format_codes removed - - # Handle format listing - if args.list_formats: - formats = list_available_formats(url, args) - if formats: - print("\nAvailable formats:") - print(tabulate(formats, headers="keys", showindex=True)) # Show index for format selection - - # Suggest best formats based on resolution 
- best_formats = suggest_best_formats(formats) - if best_formats: - print("\nSuggested formats:") - print(tabulate(best_formats, headers="keys")) - else: - print("No formats available or could not retrieve format information") - return - elif args.youtube_url: - url = args.youtube_url - format_code = args.format - print("Warning: --youtube-url is deprecated, use 'getToken --url' instead") - else: - print("Please provide a YouTube URL using 'getToken --url' command") - return - - # Get token for URL - try: - # Get token for URL - logger.info(f"Requesting token for URL: {url}") - token_data = client.getOrRefreshToken( - accountId=args.account_id, - updateType=TokenUpdateMode.AUTO, - url=url - ) - - if not token_data: - logger.error("Received empty token data from server") - print("Error: Received empty token data from server") - sys.exit(1) - - # Validate token data - if not hasattr(token_data, 'ytdlpCommand') or not token_data.ytdlpCommand: - logger.error("Token data missing required ytdlpCommand") - print("Error: Token data missing required ytdlpCommand") - sys.exit(1) - - logger.info("Successfully received token data from server") - - # Log all attributes of token_data for debugging - token_attrs = [attr for attr in dir(token_data) if not attr.startswith('__') and not callable(getattr(token_data, attr))] - logger.debug(f"Received token_data attributes: {token_attrs}") - - # Handle case where token_data is a dict-like object - if hasattr(token_data, 'items'): - # Convert to dict if needed - token_dict = dict(token_data.items()) - logger.debug(f"Token data as dict: {token_dict}") - - # If we have JSON data directly in the response - if isinstance(token_dict.get('infoJson', None), str): - received_info_json = token_dict['infoJson'] - elif isinstance(token_dict.get('data', None), (dict, str)): - # Try to use the data field if it exists - data = token_dict['data'] - if isinstance(data, str): - received_info_json = data - else: - received_info_json = json.dumps(data) - else: - # Create info.json from available fields - info_data = { - "id": token_dict.get('id', extract_video_id(url)), - "title": token_dict.get('title', ''), - "formats": token_dict.get('formats', []), - "timestamp": int(time.time()), - "ytdlp_command": token_dict.get('ytdlpCommand', '') - } - received_info_json = json.dumps(info_data) - else: - # Handle case where token_data is a regular object - received_info_json = getattr(token_data, 'infoJson', None) - - if received_info_json: - logger.debug(f"Received info.json data ({len(received_info_json)} bytes)") - if len(received_info_json) > 100: - logger.debug(f"Preview: {received_info_json[:100]}...") - else: - logger.warning("No valid info.json data found in response") - - except PBServiceException as e: - logger.error(f"Service exception: {e.message}") - if hasattr(e, 'errorCode'): - if e.errorCode == "BOT_DETECTED": - print(f"Error: {e.message}") - print("\nYouTube has detected bot activity. Authentication is required.") - - # Print suggestions if available - if hasattr(e, 'context') and e.context and 'suggestions' in e.context: - print("\nSuggestions:") - for i, suggestion in enumerate(e.context['suggestions'], 1): - print(f" {i}. {suggestion}") - else: - print("\nTry:") - print(" 1. Use --cookies-from-browser to pass authentication cookies") - print(" 2. Export cookies from a logged-in browser session") - print(" 3. Try a different client type (ios, android, mweb)") - print(" 4. Use a different proxy or IP address") - print(" 5. 
Try again later") - - sys.exit(1) - elif e.errorCode in ["SOCKS5_CONNECTION_FAILED", "SOCKS5_TIMEOUT", "SOCKS5_CONNECTION_REFUSED", - "SOCKS5_CONNECTION_TIMEOUT", "SOCKS5_HOST_NOT_FOUND", "SOCKS5_NETWORK_UNREACHABLE"]: - print(f"Error: {e.message}") - print("\nSOCKS5 proxy connection failed. Please check your proxy settings.") - - # Provide more specific guidance based on error code - if e.errorCode == "SOCKS5_TIMEOUT" or e.errorCode == "SOCKS5_CONNECTION_TIMEOUT": - print("The proxy server did not respond within the timeout period (9 seconds).") - print("This could indicate network congestion or a proxy server that's overloaded.") - elif e.errorCode == "SOCKS5_CONNECTION_REFUSED": - print("The proxy server actively refused the connection.") - print("This usually means the proxy server is not running or is not accepting connections on the specified port.") - elif e.errorCode == "SOCKS5_HOST_NOT_FOUND": - print("The proxy host could not be resolved.") - print("Please check that the hostname is correct and your DNS is working properly.") - elif e.errorCode == "SOCKS5_NETWORK_UNREACHABLE": - print("The network containing the proxy server is unreachable.") - print("This could indicate network routing issues or firewall restrictions.") - - print("\nPossible solutions:") - print("1. Try using a different proxy server") - print("2. Check if the proxy server is running and accessible") - print("3. Verify your network connection and firewall settings") - print("4. If using a remote proxy, check if it's accessible from your location") - - # Exit with a specific error code for proxy failures - sys.exit(2) - elif e.errorCode == "GLOBAL_TIMEOUT": - print(f"Error: {e.message}") - print("\nThe server timed out while processing your request.") - print("This could be due to:") - print("1. Slow network connection") - print("2. Server overload") - print("3. Complex video that takes too long to process") - print("\nTry again later or with a different video.") - sys.exit(3) - elif e.errorCode == "CLIENT_TIMEOUT": - print(f"Error: {e.message}") - print("\nA client-specific timeout occurred while processing your request.") - print("The server has stopped processing to avoid wasting resources.") - print("\nPossible solutions:") - print("1. Try again later when network conditions improve") - print("2. Try a different video") - print("3. 
Check your internet connection") - sys.exit(3) - else: - print(f"Error: {e.message}") - else: - print(f"Error: {e.message}") - return - except PBUserException as e: - logger.error(f"User exception: {e.message}") - print(f"Error: {e.message}") - return - except Exception as e: - logger.error(f"Unexpected error: {str(e)}") - logger.error(traceback.format_exc()) - print(f"Unexpected error: {str(e)}") - sys.exit(1) - - # Log the entire token_data object for debugging AFTER potential exceptions - logger.debug(f"Processing received token_data: {token_data}") - - # Check if valid infoJson was received from the server - info_json = None - if hasattr(token_data, 'infoJson') and token_data.infoJson and token_data.infoJson != "{}": - if is_valid_json(token_data.infoJson): - logger.debug("Valid info.json received from server.") # Changed to DEBUG - info_json = token_data.infoJson - else: - logger.warning("Received infoJson from server, but it is not valid JSON or is empty.") - else: - logger.warning("Valid info.json was NOT received from the server.") - - # Proceed only if we have valid info_json - if info_json: - # Save info.json if present in the server response - video_id = extract_video_id(url) - if not video_id: - logger.warning(f"Could not extract video ID from URL: {url}") # Keep as WARNING - video_id = f"unknown_{int(time.time())}" - - try: - info_data = json.loads(info_json) - # Check if it contains an error - if isinstance(info_data, dict) and ('error' in info_data or 'errorCode' in info_data): - error_msg = info_data.get('error', 'Unknown error') - error_code = info_data.get('errorCode', 'UNKNOWN_ERROR') - logger.warning(f"infoJson contains error: {error_msg} (code: {error_code})") - - # If it's a bot detection error, raise appropriate exception - if error_code == 'BOT_DETECTED' or 'bot' in error_msg.lower() or 'sign in' in error_msg.lower(): - raise PBUserException( - message=f"Bot detection triggered: {error_msg}", - errorCode="BOT_DETECTION", - context={ - "video_id": extract_video_id(url), - "url": url, - "suggestions": info_data.get('suggestions', ["Try different client", "Use proxy", "Wait and retry later"]) - } - ) - except json.JSONDecodeError as e: - # This case should ideally not happen due to is_valid_json check, but handle defensively - logger.error(f"Invalid JSON received despite initial check: {e}") - print(f"Error: Received invalid JSON data from server.") - info_json = None # Ensure we don't proceed - - # If info_json is still None after checks, handle the failure case - if not info_json: - logger.error("Failed to obtain valid info.json from the server.") - print("Error: No valid video information (info.json) was received from the server.") - # Optionally, print the raw ytdlp command if available - if hasattr(token_data, 'ytdlpCommand') and token_data.ytdlpCommand: - print("\nRaw command from server (may be incomplete or require info.json):") - print(token_data.ytdlpCommand) - sys.exit(1) # Exit with error - - # --- We have valid info_json, proceed with saving and command generation --- - try: - info_data = json.loads(info_json) # We know this is valid now - - # Check if it's an error response embedded in the JSON - if isinstance(info_data, dict) and "error" in info_data: - logger.error(f"Received error report from server: {info_json}") - - # Check if this is a bot detection error - if (info_data.get('errorCode') == "BOT_DETECTED" or - "bot" in info_data.get('message', '').lower() or - "sign in to confirm" in info_data.get('message', '').lower() or - "sign in to confirm" in 
info_data.get('error', '').lower() or - "unusual traffic" in info_data.get('message', '').lower() or - "captcha" in info_data.get('message', '').lower() or - info_data.get('requires_auth') == True): - - logger.error("Bot detection error detected in info.json") - # Raise PBServiceException for bot detection - raise PBServiceException( - message=f"Bot detection triggered: {info_data.get('message', 'Authentication required')}", - errorCode="BOT_DETECTED", - context={ - "video_id": video_id, - "url": url, - "requires_auth": True, - "info_data": info_data, - "suggestions": info_data.get('suggestions', [ - "Use --cookies-from-browser to pass authentication cookies", - "Export cookies from a logged-in browser session", - "Try a different client type (ios, android, mweb)", - "Use a different proxy or IP address" - ]) - } - ) - else: - # Raise PBServiceException for other errors - raise PBServiceException( - message=f"Error extracting video info: {info_data.get('error', 'Unknown error')}", - errorCode=info_data.get('errorCode', "EXTRACTION_FAILED"), - context={"video_id": video_id, "url": url, "info_data": info_data} - ) - - # If it's a valid response, process it - if 'title' in info_data or 'id' in info_data: - print(f"Video info retrieved: {info_data.get('title', 'Unknown title')}") - saved_path = save_info_json(info_json, video_id, args.context_dir) - if saved_path: - print(f"info.json saved to: {saved_path}") - - # Create simpler base command using only the saved info.json and proxy - base_cmd = f"yt-dlp --load-info-json \"{saved_path}\"" # Quote the path - if hasattr(token_data, 'socks') and token_data.socks: - if token_data.socks.startswith(('socks5://', 'ss://')): - # Quote the proxy URL as well - base_cmd += f" --proxy \"{token_data.socks}\"" - - # Show format listing command - print("\nTo list available formats:") - format_cmd = f"{base_cmd} -F" - print(format_cmd) - - # Show download command (format is usually embedded in info.json or determined by yt-dlp) - simplified_cmd = f"{base_cmd} --simulate" # Removed format codes - - print("\nTo download (with --simulate to preview):") - print(simplified_cmd) - print("\nRemove --simulate to actually download") - else: - logger.error("Failed to save info.json file") - print("Failed to save info.json file") - else: - logger.warning("info.json appears to be valid JSON but missing expected video fields") - print("Error: Received incomplete or invalid video data") - print("This usually indicates an authentication or access issue") - sys.exit(1) - except Exception as e: # Catch errors during saving or command generation - logger.error(f"Error processing valid info.json: {str(e)}") - # Re-raise the exception to be handled by the main error handler - raise - finally: - if transport: - transport.close() - -if __name__ == "__main__": - main() diff --git a/airflow/ytdlp-ops-auth/ytdlp_utils.py b/airflow/ytdlp-ops-auth/ytdlp_utils.py deleted file mode 100644 index 292c6b7..0000000 --- a/airflow/ytdlp-ops-auth/ytdlp_utils.py +++ /dev/null @@ -1,60 +0,0 @@ -import json -import logging -import re - -logger = logging.getLogger(__name__) - -def get_info_json(token_data): - """Get infoJson from token_data""" - if hasattr(token_data, 'infoJson') and token_data.infoJson: - return token_data.infoJson - - # Log the issue for debugging - logger.warning("infoJson attribute missing or empty in token_data") - logger.info(f"Available attributes: {[attr for attr in dir(token_data) if not attr.startswith('__') and not callable(getattr(token_data, attr))]}") - - return "{}" - 
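The deleted `ytdlp_utils.py` helpers (`get_info_json` above, `is_valid_json` and `extract_video_id` below) were consumed together by callers. A minimal, self-contained sketch of that call chain, kept here for reference since the module is removed in this patch; `save_valid_info_json` and its output path are hypothetical names, not part of the codebase:

```python
import json
import re

# Hypothetical caller chaining the removed helpers' logic:
# read infoJson off the Thrift response, reject empty payloads,
# and derive a filename from the extracted video ID.
def save_valid_info_json(token_data, url, context_dir="."):
    raw = getattr(token_data, "infoJson", None) or "{}"
    try:
        data = json.loads(raw)
    except json.JSONDecodeError:
        return None
    if not isinstance(data, dict) or not data:
        return None  # "{}" responses are treated as missing, as in is_valid_json
    match = re.search(r"(?:v=|youtu\.be/|embed/|shorts/)([a-zA-Z0-9_-]{11})", url)
    video_id = match.group(1) if match else "unknown"
    path = f"{context_dir}/{video_id}.info.json"
    with open(path, "w", encoding="utf-8") as fh:
        json.dump(data, fh)
    return path
```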
-def is_valid_json(json_str): - """Check if a string is valid JSON and not empty""" - if not json_str or json_str == "{}" or json_str == "": - return False - - try: - data = json.loads(json_str) - # Check if it's an empty object - if isinstance(data, dict) and not data: - return False - # Check if it has at least some basic fields - if isinstance(data, dict) and ('id' in data or 'title' in data): - return True - # Check if it has token_data which is important - if isinstance(data, dict) and 'token_data' in data and data['token_data']: - return True - return True - except Exception as e: - logger.warning(f"Invalid JSON: {e}") - return False - -def extract_video_id(url): - """Extract video ID from a YouTube URL""" - # If it's already a video ID - if re.match(r'^[a-zA-Z0-9_-]{11}$', url): - return url - - # Handle youtu.be URLs - youtu_be_match = re.search(r'youtu\.be/([a-zA-Z0-9_-]{11})', url) - if youtu_be_match: - return youtu_be_match.group(1) - - # Handle youtube.com URLs - youtube_match = re.search(r'(?:youtube\.com/(?:watch\?v=|embed/|v/)|youtube\.com/.*[?&]v=)([a-zA-Z0-9_-]{11})', url) - if youtube_match: - return youtube_match.group(1) - - # Handle shorts URLs - shorts_match = re.search(r'youtube\.com/shorts/([a-zA-Z0-9_-]{11})', url) - if shorts_match: - return shorts_match.group(1) - - return None diff --git a/ansible/.gitignore b/ansible/.gitignore new file mode 100644 index 0000000..b0ac3ed --- /dev/null +++ b/ansible/.gitignore @@ -0,0 +1 @@ +.aider* diff --git a/ansible/.vault_pass b/ansible/.vault_pass new file mode 100644 index 0000000..89a0cd2 --- /dev/null +++ b/ansible/.vault_pass @@ -0,0 +1 @@ +ytdlp-ops diff --git a/ansible/MIGRATION.md b/ansible/MIGRATION.md new file mode 100644 index 0000000..78f995b --- /dev/null +++ b/ansible/MIGRATION.md @@ -0,0 +1,9 @@ +# Migration Notes + +This document tracks the process of migrating the Ansible deployment. + +## Guiding Principles + +- No changes to business logic or core functionality are permitted during this phase. +- The focus is solely on resolving file path issues, dependency errors, and structural inconsistencies resulting from the migration of a subset of files. +- All changes should be aimed at making the existing playbooks runnable in the new environment. diff --git a/ansible/README-yt.md b/ansible/README-yt.md index 1ed4ea0..0695db7 100644 --- a/ansible/README-yt.md +++ b/ansible/README-yt.md @@ -94,7 +94,7 @@ For faster development cycles, you can deploy changes to specific parts of the c #### Updating Only the Master Node (Fast Deploy) -To sync configuration, code, and restart services on the master node *without* rebuilding the Airflow image or pulling the `ytdlp-ops-service` image, use the `fast_deploy` flag with the master playbook. This is ideal for pushing changes to DAGs, Python code, or config files. +To sync configuration and code, and restart services on the master node *without* rebuilding the Airflow image or pulling the `ytdlp-ops-server` image, use the `fast_deploy` flag with the master playbook. This is ideal for pushing changes to DAGs, Python code, or config files.
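A hedged convenience wrapper for the fast-deploy flow described above, ahead of the exact shell command below. It assumes `fast_deploy` is passed as an extra var (matching the `when: fast_deploy | default(false)` guards in the playbooks) and that it runs from the repository root:

```python
import subprocess

# Sketch only: invoke the master playbook in fast-deploy mode from Python.
# The fast_deploy extra var matches the playbook guards; the host limit
# "af-test" matches the generated inventory in this patch.
def fast_deploy_master(limit: str = "af-test") -> None:
    subprocess.run(
        [
            "ansible-playbook", "playbook-master.yml",
            "--limit", limit,
            "-e", "fast_deploy=true",  # skips image builds and pulls
        ],
        check=True,
        cwd="ansible",  # playbooks live in the ansible/ directory
    )
```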
```bash # Run from inside the ansible/ directory diff --git a/ansible/ansible.cfg b/ansible/ansible.cfg index 120a5da..4521bb2 100644 --- a/ansible/ansible.cfg +++ b/ansible/ansible.cfg @@ -1,5 +1,6 @@ [defaults] inventory = inventory.ini +remote_user = alex_p roles_path = ./roles retry_files_enabled = False host_key_checking = False diff --git a/ansible/configs/etc/ssh/sshd_config b/ansible/configs/etc/ssh/sshd_config new file mode 100644 index 0000000..8fafa28 --- /dev/null +++ b/ansible/configs/etc/ssh/sshd_config @@ -0,0 +1,42 @@ +# This file is managed by Ansible. Do not edit manually. + +# SSH port configuration - listen on both standard and custom ports +Port 22 +Port 22822 + +# Protocol settings +Protocol 2 + +# Host keys +HostKey /etc/ssh/ssh_host_rsa_key +HostKey /etc/ssh/ssh_host_ecdsa_key +HostKey /etc/ssh/ssh_host_ed25519_key + +# Authentication settings +PermitRootLogin yes +PasswordAuthentication no +PubkeyAuthentication yes +AuthorizedKeysFile .ssh/authorized_keys + +# Security settings +PermitEmptyPasswords no +ChallengeResponseAuthentication no +UsePAM yes + +# Connection settings +X11Forwarding yes +PrintMotd no +AcceptEnv LANG LC_* + +# Performance settings +TCPKeepAlive yes +ClientAliveInterval 60 +ClientAliveCountMax 3 + +# Login settings +LoginGraceTime 1m +MaxStartups 10:30:60 + +# Logging +SyslogFacility AUTH +LogLevel INFO diff --git a/ansible/configs/etc/sysctl.d/99-system-limits.conf b/ansible/configs/etc/sysctl.d/99-system-limits.conf new file mode 100644 index 0000000..759dfe5 --- /dev/null +++ b/ansible/configs/etc/sysctl.d/99-system-limits.conf @@ -0,0 +1,18 @@ +# System limits configuration for better performance + +# Enable memory overcommit for Redis to prevent background save failures +vm.overcommit_memory = 1 + +# Increase file handle limits +fs.file-max = 1000000 + +# Network tuning +net.core.somaxconn = 65535 +net.core.netdev_max_backlog = 5000 +net.core.rmem_max = 16777216 +net.core.wmem_max = 16777216 +net.ipv4.tcp_wmem = 4096 65536 16777216 +net.ipv4.tcp_rmem = 4096 65536 16777216 +net.ipv4.tcp_max_syn_backlog = 8192 +net.ipv4.tcp_slow_start_after_idle = 0 +net.ipv4.tcp_tw_reuse = 1 diff --git a/ansible/group_vars/all.yml b/ansible/group_vars/all.yml deleted file mode 100644 index e6bcca2..0000000 --- a/ansible/group_vars/all.yml +++ /dev/null @@ -1,52 +0,0 @@ ---- -# Global variables shared across all hosts - -# Docker image versions -ytdlp_ops_image: "pangramia/ytdlp-ops-airflow:latest" -airflow_image_name: "pangramia/ytdlp-ops-airflow:latest" - -# Default ports -redis_port: 52909 -postgres_port: 5432 -ytdlp_base_port: 9090 -envoy_port: 9080 -envoy_admin_port: 9901 -management_service_port: 9091 -camoufox_base_vnc_port: 5901 - -# Default UID -airflow_uid: 1003 - -# Default directories -airflow_master_dir: "/srv/airflow_master" -airflow_worker_dir: "/srv/airflow_dl_worker" - -# Docker network name -docker_network_name: "airflow_proxynet" - -# Default usernames -ssh_user: "alex_p" -ansible_user: "alex_p" - -# Default group -deploy_group: "ytdl" - -# Default file permissions -dir_permissions: "0755" -file_permissions: "0644" - -# Default rsync options -rsync_default_opts: - - "--no-owner" - - "--no-group" - - "--no-times" - - "--copy-links" - - "--copy-unsafe-links" - - "--exclude=.git*" - - "--exclude=__pycache__" - - "--exclude=*.pyc" - - "--exclude=*.log" - - "--exclude=.DS_Store" - -# Docker-Hub credentials -dockerhub_user: "pangramia" diff --git a/ansible/group_vars/all/generated_vars.yml b/ansible/group_vars/all/generated_vars.yml index 
995825c..79a77f2 100644 --- a/ansible/group_vars/all/generated_vars.yml +++ b/ansible/group_vars/all/generated_vars.yml @@ -1,7 +1,42 @@ --- # This file is auto-generated by tools/generate-inventory.py # Do not edit – your changes will be overwritten. -master_host_ip: 89.253.221.173 +airflow_image_name: pangramia/ytdlp-ops-airflow:latest +airflow_master_dir: /srv/airflow_master +airflow_uid: 1003 +airflow_worker_dir: /srv/airflow_dl_worker +ansible_user: alex_p +camoufox_base_vnc_port: 5901 +deploy_group: ytdl +dir_permissions: '0755' +docker_network_name: airflow_proxynet +dockerhub_user: pangramia +envoy_admin_port: 9901 +envoy_port: 9080 +external_access_ips: [] +file_permissions: '0644' +host_timezone: Europe/Moscow +management_service_port: 9091 +master_host_ip: 89.253.223.97 +postgres_port: 5432 redis_port: 52909 -external_access_ips: - [] +rsync_default_opts: +- --no-owner +- --no-group +- --no-times +- --copy-links +- --copy-unsafe-links +- --exclude=.git* +- --exclude=__pycache__ +- --exclude=*.pyc +- --exclude=*.log +- --exclude=.DS_Store +shadowsocks_cipher_method: aes-256-gcm +shadowsocks_fast_open: true +shadowsocks_image: ghcr.io/shadowsocks/sslocal-rust:v1.22.0 +shadowsocks_local_address: 0.0.0.0 +shadowsocks_mode: tcp_and_udp +shadowsocks_timeout: 20 +ssh_user: alex_p +ytdlp_base_port: 9090 +ytdlp_ops_image: pangramia/ytdlp-ops-server:latest diff --git a/ansible/group_vars/all/vault.yml b/ansible/group_vars/all/vault.yml index f0343a9..3893d1b 100644 --- a/ansible/group_vars/all/vault.yml +++ b/ansible/group_vars/all/vault.yml @@ -1,4 +1,8 @@ vault_redis_password: "rOhTAIlTFFylXsjhqwxnYxDChFc" vault_postgres_password: "pgdb_pwd_A7bC2xY9zE1wV5uP" -vault_airflow_admin_password: "admin_pwd_X9yZ3aB1cE5dF7gH" +vault_airflow_admin_password: "2r234sdfrt3q454arq45q355" +vault_flower_password: "dO4eXm7UkF81OdMvT8E2tIKFtPYPCzyzwlcZ4RyOmCsmG4qzrNFqM5sNTOT9" vault_vnc_password: "vnc_pwd_Z5xW8cV2bN4mP7lK" +vault_ss_password_1: "UCUAR7vRO/u9Zo71nfA13c+/b1MCiJpfZJo+EmEBCfA=" +vault_ss_password_2: "tgtQcfjJp/A3F01g4woO0bEQoxij3CAOK/iR1OTPuF4=" +vault_dockerhub_password: "dckr_pat_DmFFqwFEdXFvZlgngGY9ooBaq6o" diff --git a/ansible/host_vars/af-green.yml b/ansible/host_vars/af-green.yml deleted file mode 100644 index 35be338..0000000 --- a/ansible/host_vars/af-green.yml +++ /dev/null @@ -1,4 +0,0 @@ ---- -# Variables for af-green -master_host_ip: 89.253.221.173 -redis_port: 52909 diff --git a/ansible/host_vars/af-test.yml b/ansible/host_vars/af-test.yml new file mode 100644 index 0000000..7f00d45 --- /dev/null +++ b/ansible/host_vars/af-test.yml @@ -0,0 +1,23 @@ +--- +# Variables for af-test +master_host_ip: 89.253.223.97 +redis_port: 52909 +shadowsocks_proxies: + sslocal-rust-1087: + server: "91.103.252.51" + server_port: 8388 + local_port: 1087 + vault_password_key: "vault_ss_password_1" + sslocal-rust-1086: + server: "62.60.178.45" + server_port: 8388 + local_port: 1086 + vault_password_key: "vault_ss_password_2" + sslocal-rust-1081: + server: "79.137.207.43" + server_port: 8388 + local_port: 1081 + vault_password_key: "vault_ss_password_2" +worker_proxies: + - "socks5://sslocal-rust-1086:1086" + - "socks5://sslocal-rust-1081:1081" diff --git a/ansible/host_vars/dl002.yml b/ansible/host_vars/dl002.yml new file mode 100644 index 0000000..9ee82b3 --- /dev/null +++ b/ansible/host_vars/dl002.yml @@ -0,0 +1,23 @@ +--- +# Variables for dl002 +master_host_ip: 89.253.223.97 +redis_port: 52909 +shadowsocks_proxies: + sslocal-rust-1087: + server: "91.103.252.51" + server_port: 8388 + local_port: 
1087 + vault_password_key: "vault_ss_password_1" + sslocal-rust-1086: + server: "62.60.178.45" + server_port: 8388 + local_port: 1086 + vault_password_key: "vault_ss_password_2" + sslocal-rust-1081: + server: "79.137.207.43" + server_port: 8388 + local_port: 1081 + vault_password_key: "vault_ss_password_2" +worker_proxies: + - "socks5://sslocal-rust-1081:1081" + - "socks5://sslocal-rust-1086:1086" diff --git a/ansible/host_vars/dl003.yml b/ansible/host_vars/dl003.yml deleted file mode 100644 index e8ab03f..0000000 --- a/ansible/host_vars/dl003.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -# Variables for dl003 -master_host_ip: 89.253.221.173 -redis_port: 52909 -worker_proxies: - - "socks5://sslocal-rust-1087:1087" diff --git a/ansible/inventory.ini b/ansible/inventory.ini index bbe9ca8..1be33c6 100644 --- a/ansible/inventory.ini +++ b/ansible/inventory.ini @@ -3,7 +3,7 @@ # Edit cluster.yml and re-run the generator instead. [airflow_master] -af-green ansible_host=89.253.221.173 +af-test ansible_host=89.253.223.97 ansible_port=22 [airflow_workers] -dl003 ansible_host=62.60.245.103 +dl002 ansible_host=62.60.178.54 diff --git a/ansible/playbook-dags.yml b/ansible/playbook-dags.yml index a703cbe..c9beb54 100644 --- a/ansible/playbook-dags.yml +++ b/ansible/playbook-dags.yml @@ -4,7 +4,6 @@ gather_facts: no vars_files: - group_vars/all.yml - remote_user: "{{ ansible_user }}" tasks: - name: Sync DAGs to MASTER server ansible.posix.synchronize: @@ -35,7 +34,6 @@ gather_facts: no vars_files: - group_vars/all.yml - remote_user: "{{ ansible_user }}" tasks: - name: Sync DAGs to WORKER server ansible.posix.synchronize: diff --git a/ansible/playbook-depricated.dl.yml b/ansible/playbook-depricated.dl.yml new file mode 100644 index 0000000..c2035a2 --- /dev/null +++ b/ansible/playbook-depricated.dl.yml @@ -0,0 +1,73 @@ +--- +- name: Deploy Airflow DL Worker Stack + hosts: airflow_workers + vars_files: + - group_vars/all.yml + - group_vars/all/vault.yml + pre_tasks: + - name: Announce fast deploy mode if enabled + debug: + msg: "🚀 FAST DEPLOY MODE ENABLED: Skipping Docker image builds and pulls. 
🚀" + when: fast_deploy | default(false) + run_once: true + tasks: + + - name: Ensure worker directory exists + file: + path: "{{ airflow_worker_dir }}" + state: directory + owner: "{{ ansible_user }}" + group: "{{ ansible_user }}" + + - name: Template .env.worker + template: + src: templates/.env.worker.j2 + dest: "{{ airflow_worker_dir }}/.env" + mode: '0600' + + - name: Template docker-compose file for Airflow worker + template: + src: ../airflow/configs/docker-compose-dl.yaml.j2 + dest: "{{ airflow_worker_dir }}/configs/docker-compose-dl.yaml" + mode: '0644' + + - name: Build Airflow worker image from local Dockerfile + community.docker.docker_image: + name: "{{ airflow_image_name }}" + build: + path: "{{ airflow_worker_dir }}" + dockerfile: "Dockerfile" + source: build + force_source: true + when: not fast_deploy | default(false) + + - name: Build Camoufox image from local Dockerfile + community.docker.docker_image: + name: "camoufox:latest" + build: + path: "{{ airflow_worker_dir }}/camoufox" + source: build + force_source: true + when: not fast_deploy | default(false) + + - name: Pull ytdlp-ops-server image only + community.docker.docker_image: + name: "{{ ytdlp_ops_image }}" + source: pull + when: not fast_deploy | default(false) + + - name: Generate dynamic configs (camoufox + envoy) + shell: + cmd: "docker compose -f configs/docker-compose.config-generate.yaml run --rm config-generator" + chdir: "{{ airflow_worker_dir }}" + + - name: Start worker services + community.docker.docker_compose_v2: + project_src: "{{ airflow_worker_dir }}" + files: + - configs/docker-compose-dl.yaml + - configs/docker-compose-ytdlp-ops.yaml + - configs/docker-compose.camoufox.yaml + state: present + remove_orphans: true + pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" diff --git a/ansible/playbook-dl.yml b/ansible/playbook-dl.yml index 2822bab..e69de29 100644 --- a/ansible/playbook-dl.yml +++ b/ansible/playbook-dl.yml @@ -1,73 +0,0 @@ ---- -- name: Deploy Airflow DL Worker Stack - hosts: airflow_workers - vars_files: - - group_vars/all.yml - - group_vars/all/vault.yml - pre_tasks: - - name: Announce fast deploy mode if enabled - debug: - msg: "🚀 FAST DEPLOY MODE ENABLED: Skipping Docker image builds and pulls. 
🚀" - when: fast_deploy | default(false) - run_once: true - tasks: - - - name: Ensure worker directory exists - file: - path: "{{ airflow_worker_dir }}" - state: directory - owner: "{{ ansible_user }}" - group: "{{ ansible_user }}" - - - name: Template .env.worker - template: - src: templates/.env.worker.j2 - dest: "{{ airflow_worker_dir }}/.env" - mode: '0600' - - - name: Template docker-compose file for Airflow worker - template: - src: ../airflow/configs/docker-compose-dl.yaml.j2 - dest: "{{ airflow_worker_dir }}/configs/docker-compose-dl.yaml" - mode: '0644' - - - name: Build Airflow worker image from local Dockerfile - community.docker.docker_image: - name: "{{ airflow_image_name }}" - build: - path: "{{ airflow_worker_dir }}" - dockerfile: "Dockerfile" - source: build - force_source: true - when: not fast_deploy | default(false) - - - name: Build Camoufox image from local Dockerfile - community.docker.docker_image: - name: "camoufox:latest" - build: - path: "{{ airflow_worker_dir }}/camoufox" - source: build - force_source: true - when: not fast_deploy | default(false) - - - name: Pull ytdlp-ops-service image only - community.docker.docker_image: - name: "{{ ytdlp_ops_image }}" - source: pull - when: not fast_deploy | default(false) - - - name: Generate dynamic configs (camoufox + envoy) - shell: - cmd: "docker compose -f configs/docker-compose.config-generate.yaml run --rm config-generator" - chdir: "{{ airflow_worker_dir }}" - - - name: Start worker services - community.docker.docker_compose_v2: - project_src: "{{ airflow_worker_dir }}" - files: - - configs/docker-compose-dl.yaml - - configs/docker-compose-ytdlp-ops.yaml - - configs/docker-compose.camoufox.yaml - state: present - remove_orphans: true - pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" diff --git a/ansible/playbook-full-with-proxies.yml b/ansible/playbook-full-with-proxies.yml new file mode 100644 index 0000000..de6555a --- /dev/null +++ b/ansible/playbook-full-with-proxies.yml @@ -0,0 +1,6 @@ +--- +- name: Deploy entire cluster with proxies + import_playbook: playbook-proxies.yml + +- name: Deploy application stack + import_playbook: playbook-full.yml diff --git a/ansible/playbook-full.yml b/ansible/playbook-full.yml index 639d46c..60b119c 100644 --- a/ansible/playbook-full.yml +++ b/ansible/playbook-full.yml @@ -11,13 +11,96 @@ msg: "🚀 FAST DEPLOY MODE ENABLED: Skipping Docker image builds and pulls. 
🚀" when: fast_deploy | default(false) run_once: true - tasks: - - name: Ensure python3-docker is installed - ansible.builtin.apt: - name: python3-docker - state: present - update_cache: yes + + - name: Check if Docker is already installed + ansible.builtin.stat: + path: /usr/bin/docker + register: docker_binary + + - name: Install Docker if not present + block: + - name: Add Docker's official GPG key + ansible.builtin.apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + state: present + + - name: Find and remove any existing Docker repository files to avoid conflicts + block: + - name: Find legacy docker repository files + ansible.builtin.find: + paths: /etc/apt/sources.list.d/ + patterns: '*.list' + contains: 'deb .*download.docker.com' + register: legacy_docker_repo_files + + - name: Remove legacy docker repository files + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ legacy_docker_repo_files.files }}" + + - name: Set up the Docker repository + ansible.builtin.apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename }} stable" + state: present + + - name: Install prerequisites for Docker + ansible.builtin.apt: + name: + - apt-transport-https + - ca-certificates + - curl + - software-properties-common + - vim + - python3-pip + - iputils-ping + state: present + update_cache: yes + + - name: Install Docker Engine and Docker Compose + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + - python3-docker + state: present + update_cache: yes + when: not docker_binary.stat.exists become: yes + tasks: + + - name: Install pipx + ansible.builtin.apt: + name: pipx + state: present + become: yes + + - name: Install Glances for system monitoring + ansible.builtin.command: pipx install glances[all] + args: + creates: "{{ ansible_env.HOME }}/.local/bin/glances" + become: yes + become_user: "{{ ansible_user }}" + + - name: Ensure Docker service is started and enabled + ansible.builtin.service: + name: docker + state: started + enabled: yes + become: yes + + - name: Add deploy user to the docker group + ansible.builtin.user: + name: "{{ ansible_user }}" + groups: docker + append: yes + become: yes + + - name: Reset SSH connection to apply group changes + ansible.builtin.meta: reset_connection + - name: Ensure shared Docker network exists community.docker.docker_network: diff --git a/ansible/playbook-master.yml b/ansible/playbook-master.yml index 5409633..cae26f9 100644 --- a/ansible/playbook-master.yml +++ b/ansible/playbook-master.yml @@ -10,6 +10,55 @@ debug: msg: "Starting deployment for Airflow Master: {{ inventory_hostname }} ({{ ansible_host }})" + - name: Configure Redis memory overcommit setting + copy: + src: "configs/etc/sysctl.d/99-redis-overcommit.conf" + dest: "/etc/sysctl.d/99-redis-overcommit.conf" + owner: root + group: root + mode: '0644' + become: yes + register: redis_sysctl_config_copy + + - name: Configure system limits + copy: + src: "configs/etc/sysctl.d/99-system-limits.conf" + dest: "/etc/sysctl.d/99-system-limits.conf" + owner: root + group: root + mode: '0644' + become: yes + register: limits_sysctl_config_copy + + - name: Apply sysctl settings for Redis + command: sysctl --system + become: yes + when: redis_sysctl_config_copy.changed + + - name: Apply sysctl settings for system limits + command: sysctl --system + become: yes + when: limits_sysctl_config_copy.changed + + - name: Configure system timezone + # Ensures all services and 
logs on this node use a consistent timezone. + community.general.timezone: + name: "{{ host_timezone }}" + become: yes + + - name: Install NTP for time synchronization + ansible.builtin.apt: + name: ntp + state: present + become: yes + + - name: Ensure NTP service is started and enabled + ansible.builtin.service: + name: ntp + state: started + enabled: yes + become: yes + - name: Set deploy_group to a valid single group name set_fact: deploy_group: "ytdl" @@ -22,7 +71,7 @@ - name: Ensure deploy user exists user: - name: "{{ ssh_user }}" + name: "{{ ansible_user }}" group: "{{ deploy_group }}" state: present become: yes @@ -95,6 +144,29 @@ deploy_group_gid: "0" when: deploy_group_gid is not defined or deploy_group_gid == "" + tasks: + - name: Install pipx + ansible.builtin.apt: + name: pipx + state: present + become: yes + + - name: Install Glances for system monitoring + ansible.builtin.command: pipx install glances[all] + args: + creates: "{{ ansible_env.HOME }}/.local/bin/glances" + become: yes + become_user: "{{ ansible_user }}" + + # Include Docker health check + - name: Include Docker health check tasks + include_tasks: tasks/docker_health_check.yml + roles: - ytdlp-master - airflow-master + + post_tasks: + - name: Include camoufox verification tasks + include_tasks: tasks/verify_camoufox.yml + when: not fast_deploy | default(false) diff --git a/ansible/playbook-proxies.yml b/ansible/playbook-proxies.yml new file mode 100644 index 0000000..794fcf0 --- /dev/null +++ b/ansible/playbook-proxies.yml @@ -0,0 +1,151 @@ +--- +- name: Deploy Shadowsocks-Rust Proxy Configurations + hosts: all + gather_facts: yes + pre_tasks: + - name: Check if Docker is already installed + ansible.builtin.stat: + path: /usr/bin/docker + register: docker_binary + + - name: Install Docker if not present + block: + - name: Add Docker's official GPG key + ansible.builtin.apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + state: present + + - name: Find and remove any existing Docker repository files to avoid conflicts + block: + - name: Find legacy docker repository files + ansible.builtin.find: + paths: /etc/apt/sources.list.d/ + patterns: '*.list' + contains: 'deb .*download.docker.com' + register: legacy_docker_repo_files + + - name: Remove legacy docker repository files + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ legacy_docker_repo_files.files }}" + + - name: Set up the Docker repository + ansible.builtin.apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename }} stable" + state: present + + - name: Install prerequisites for Docker + ansible.builtin.apt: + name: + - apt-transport-https + - ca-certificates + - curl + - software-properties-common + - vim + - python3-pip + state: present + update_cache: yes + + - name: Install Docker Engine and Docker Compose + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + - python3-docker + state: present + update_cache: yes + when: not docker_binary.stat.exists + become: yes + + - name: Ensure Docker service is started and enabled + ansible.builtin.service: + name: docker + state: started + enabled: yes + become: yes + + - name: Add deploy user to the docker group + ansible.builtin.user: + name: "{{ ansible_user }}" + groups: docker + append: yes + become: yes + + - name: Reset SSH connection to apply group changes + ansible.builtin.meta: reset_connection + + - name: Ensure shared Docker network exists + 
community.docker.docker_network: + name: "{{ docker_network_name }}" + driver: bridge + become: yes + tasks: + - name: Deploy Shadowsocks-Rust proxy services + block: + - name: Ensure base directory for shadowsocks exists + ansible.builtin.file: + path: "/srv/shadowsocks-rust" + state: directory + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0755' + + - name: Create individual proxy config directories + ansible.builtin.file: + path: "/srv/shadowsocks-rust/config_ssp_{{ item.value.local_port }}" + state: directory + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: "{{ shadowsocks_proxies | dict2items }}" + + - name: Create Shadowsocks-Rust proxy configuration files + ansible.builtin.copy: + content: | + { + "server": "{{ item.value.server }}", + "server_port": {{ item.value.server_port }}, + "password": "{{ lookup('vars', item.value.vault_password_key) }}", + "local_address": "{{ shadowsocks_local_address }}", + "local_port": {{ item.value.local_port }}, + "timeout": {{ shadowsocks_timeout }}, + "method": "{{ shadowsocks_cipher_method }}", + "fast_open": {{ shadowsocks_fast_open | to_json }}, + "mode": "{{ shadowsocks_mode }}" + } + dest: "/srv/shadowsocks-rust/config_ssp_{{ item.value.local_port }}/config.json" + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0644' + loop: "{{ shadowsocks_proxies | dict2items }}" + + - name: Create docker-compose.yml for Shadowsocks-Rust proxies + ansible.builtin.template: + src: templates/shadowsocks-compose.yml.j2 + dest: /srv/shadowsocks-rust/docker-compose.yml + owner: "{{ ansible_user }}" + group: "{{ deploy_group }}" + mode: '0644' + + - name: Ensure old docker-compose.yaml file is removed to avoid conflicts + ansible.builtin.file: + path: /srv/shadowsocks-rust/docker-compose.yaml + state: absent + + - name: Stop and remove any existing Shadowsocks-Rust proxy services + community.docker.docker_compose_v2: + project_src: "/srv/shadowsocks-rust" + state: absent + + - name: Start Shadowsocks-Rust proxy services + community.docker.docker_compose_v2: + project_src: "/srv/shadowsocks-rust" + state: present + remove_orphans: true + recreate: always + pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" + when: shadowsocks_proxies is defined and shadowsocks_proxies | length > 0 + become: yes diff --git a/ansible/playbook-worker.yml b/ansible/playbook-worker.yml index f940a16..0dc3092 100644 --- a/ansible/playbook-worker.yml +++ b/ansible/playbook-worker.yml @@ -10,6 +10,25 @@ debug: msg: "Starting deployment for Airflow Worker: {{ inventory_hostname }} ({{ ansible_host }})" + - name: Configure system timezone + # Ensures all services and logs on this node use a consistent timezone. 
+ community.general.timezone: + name: "{{ host_timezone }}" + become: yes + + - name: Install NTP for time synchronization + ansible.builtin.apt: + name: ntp + state: present + become: yes + + - name: Ensure NTP service is started and enabled + ansible.builtin.service: + name: ntp + state: started + enabled: yes + become: yes + - name: Set deploy_group to a valid single group name set_fact: deploy_group: "ytdl" @@ -22,7 +41,7 @@ - name: Ensure deploy user exists user: - name: "{{ ssh_user }}" + name: "{{ ansible_user }}" group: "{{ deploy_group }}" state: present become: yes @@ -95,6 +114,44 @@ deploy_group_gid: "0" when: deploy_group_gid is not defined or deploy_group_gid == "" + - name: Configure system limits + copy: + src: "configs/etc/sysctl.d/99-system-limits.conf" + dest: "/etc/sysctl.d/99-system-limits.conf" + owner: root + group: root + mode: '0644' + become: yes + register: limits_sysctl_config_copy + + - name: Apply sysctl settings for system limits + command: sysctl --system + become: yes + when: limits_sysctl_config_copy.changed + + tasks: + - name: Install pipx + ansible.builtin.apt: + name: pipx + state: present + become: yes + + - name: Install Glances for system monitoring + ansible.builtin.command: pipx install glances[all] + args: + creates: "{{ ansible_env.HOME }}/.local/bin/glances" + become: yes + become_user: "{{ ansible_user }}" + + # Include Docker health check + - name: Include Docker health check tasks + include_tasks: tasks/docker_health_check.yml + roles: - - airflow-worker - ytdlp-worker + - airflow-worker + + post_tasks: + - name: Include camoufox verification tasks + include_tasks: tasks/verify_camoufox.yml + when: not fast_deploy | default(false) diff --git a/ansible/roles/airflow-master/tasks/main.yml b/ansible/roles/airflow-master/tasks/main.yml index 71193b7..324456c 100644 --- a/ansible/roles/airflow-master/tasks/main.yml +++ b/ansible/roles/airflow-master/tasks/main.yml @@ -32,6 +32,22 @@ mode: '0755' become: yes +- name: Ensure Airflow operational directories exist with correct permissions + file: + path: "{{ airflow_master_dir }}/{{ item }}" + state: directory + owner: "{{ airflow_uid }}" + group: "{{ deploy_group }}" + mode: '0775' + become: yes + loop: + - "dags" + - "logs" + - "plugins" + - "downloadfiles" + - "addfiles" + - "inputfiles" + - name: Check if source directories exist stat: path: "../{{ item }}" @@ -51,6 +67,7 @@ dest: "{{ airflow_master_dir }}/" archive: yes recursive: yes + delete: yes rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" loop: @@ -66,7 +83,6 @@ - "airflow/update-yt-dlp.sh" - "get_info_json_client.py" - "proxy_manager_client.py" - - "token_generator" - "utils" - name: Copy custom Python config files to master @@ -81,6 +97,13 @@ - "custom_task_hooks.py" - "airflow_local_settings.py" +- name: Ensure any existing airflow.cfg directory is removed + file: + path: "{{ airflow_master_dir }}/config/airflow.cfg" + state: absent + become: yes + ignore_errors: yes + - name: Copy airflow.cfg to master copy: src: "../airflow/airflow.cfg" @@ -188,26 +211,18 @@ recurse: yes become: yes -- name: Ensure logs directory exists on master - file: - path: "{{ airflow_master_dir }}/logs" - state: directory - owner: "{{ airflow_uid }}" - group: "{{ deploy_group }}" - mode: '0775' - become: yes - - name: Ensure postgres-data directory exists on master and has correct permissions file: path: "{{ airflow_master_dir }}/postgres-data" state: directory - owner: "{{ airflow_uid }}" - group: "{{ deploy_group }}" - mode: '0775' + owner: 
"999" # UID for the 'postgres' user in the official postgres image + group: "999" # GID for the 'postgres' group in the official postgres image + mode: '0700' become: yes -- name: Set group-writable and setgid permissions on master logs directory contents +- name: Set proper ownership and permissions on master logs directory contents shell: | + chown -R {{ airflow_uid }}:{{ deploy_group }} {{ airflow_master_dir }}/logs find {{ airflow_master_dir }}/logs -type d -exec chmod g+rws {} + find {{ airflow_master_dir }}/logs -type f -exec chmod g+rw {} + become: yes @@ -236,6 +251,59 @@ force_source: true when: not fast_deploy | default(false) +- name: "Log: Preparing assets for Caddy image" + debug: + msg: "Extracting static assets from the Airflow image to build the Caddy reverse proxy." + when: not fast_deploy | default(false) + +- name: Prepare Caddy asset extraction directory + file: + path: "{{ airflow_master_dir }}/caddy_build_assets" + state: "{{ item }}" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: + - absent + - directory + become: yes + when: not fast_deploy | default(false) + +- name: Ensure subdirectories exist with correct permissions + file: + path: "{{ airflow_master_dir }}/caddy_build_assets/{{ item }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: + - "appbuilder" + - "dist" + become: yes + when: not fast_deploy | default(false) + +- name: Extract static assets from Airflow image for Caddy build + shell: | + set -e + CONTAINER_ID=$(docker create {{ airflow_image_name }}) + # Dynamically find paths inside the container + APPBUILDER_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static", "appbuilder"))') + AIRFLOW_DIST_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))') + # Copy assets from container to host + docker cp "${CONTAINER_ID}:${APPBUILDER_PATH}/." "./caddy_build_assets/appbuilder" + docker cp "${CONTAINER_ID}:${AIRFLOW_DIST_PATH}/." "./caddy_build_assets/dist" + docker rm -f $CONTAINER_ID + # Pre-compress assets + find ./caddy_build_assets/appbuilder -type f -print0 | xargs -0 gzip -k -9 + find ./caddy_build_assets/dist -type f -print0 | xargs -0 gzip -k -9 + args: + chdir: "{{ airflow_master_dir }}" + executable: /bin/bash + become: yes + register: asset_extraction + changed_when: asset_extraction.rc == 0 + when: not fast_deploy | default(false) + - name: "Log: Building Caddy reverse proxy image" debug: msg: "Building the Caddy image (pangramia/ytdlp-ops-caddy:latest) to serve static assets." diff --git a/ansible/roles/airflow-worker/tasks/main.yml b/ansible/roles/airflow-worker/tasks/main.yml index 2b8474a..020596a 100644 --- a/ansible/roles/airflow-worker/tasks/main.yml +++ b/ansible/roles/airflow-worker/tasks/main.yml @@ -32,6 +32,22 @@ mode: '0755' become: yes +- name: Ensure Airflow operational directories exist with correct permissions + file: + path: "{{ airflow_worker_dir }}/{{ item }}" + state: directory + owner: "{{ airflow_uid }}" + group: "{{ deploy_group }}" + mode: '0775' + become: yes + loop: + - "dags" + - "logs" + - "plugins" + - "downloadfiles" + - "addfiles" + - "inputfiles" + - name: "Log: Syncing Airflow core files" debug: msg: "Syncing DAGs, configs, and Python source code to the worker node." 
@@ -42,6 +58,7 @@ dest: "{{ airflow_worker_dir }}/" archive: yes recursive: yes + delete: yes rsync_path: "sudo rsync" rsync_opts: "{{ rsync_default_opts }}" loop: @@ -56,7 +73,6 @@ - "airflow/update-yt-dlp.sh" - "get_info_json_client.py" - "proxy_manager_client.py" - - "token_generator" - "utils" - name: Copy custom Python config files to worker @@ -184,17 +200,9 @@ recurse: yes become: yes -- name: Ensure logs directory exists on worker - file: - path: "{{ airflow_worker_dir }}/logs" - state: directory - owner: "{{ airflow_uid }}" - group: "{{ deploy_group }}" - mode: '0775' - become: yes - -- name: Set group-writable and setgid permissions on worker logs directory contents +- name: Set proper ownership and permissions on worker logs directory contents shell: | + chown -R {{ airflow_uid }}:{{ deploy_group }} {{ airflow_worker_dir }}/logs find {{ airflow_worker_dir }}/logs -type d -exec chmod g+rws {} + find {{ airflow_worker_dir }}/logs -type f -exec chmod g+rw {} + become: yes diff --git a/ansible/roles/fail2ban/handlers/main.yml b/ansible/roles/fail2ban/handlers/main.yml new file mode 100644 index 0000000..6bd3b61 --- /dev/null +++ b/ansible/roles/fail2ban/handlers/main.yml @@ -0,0 +1,6 @@ +--- +- name: Restart fail2ban + ansible.builtin.service: + name: fail2ban + state: restarted + become: yes diff --git a/ansible/roles/fail2ban/tasks/main.yml b/ansible/roles/fail2ban/tasks/main.yml new file mode 100644 index 0000000..fd2b4fc --- /dev/null +++ b/ansible/roles/fail2ban/tasks/main.yml @@ -0,0 +1,24 @@ +--- +- name: Install fail2ban + ansible.builtin.apt: + name: fail2ban + state: present + update_cache: yes + become: yes + +- name: Template fail2ban jail.local configuration + ansible.builtin.template: + src: jail.local.j2 + dest: /etc/fail2ban/jail.local + owner: root + group: root + mode: '0644' + become: yes + notify: Restart fail2ban + +- name: Ensure fail2ban service is started and enabled + ansible.builtin.service: + name: fail2ban + state: started + enabled: yes + become: yes diff --git a/ansible/roles/fail2ban/templates/jail.local.j2 b/ansible/roles/fail2ban/templates/jail.local.j2 new file mode 100644 index 0000000..feee86e --- /dev/null +++ b/ansible/roles/fail2ban/templates/jail.local.j2 @@ -0,0 +1,16 @@ +# This file is managed by Ansible. Do not edit manually. 
+ +[DEFAULT] +ignoreip = 127.0.0.1/8 ::1 +bantime = 86400 +findtime = 600 +maxretry = 3 +banaction = iptables-multiport +backend = systemd + +[sshd] +enabled = true +port = ssh +logpath = /var/log/auth.log +maxretry = 3 +bantime = 86400 diff --git a/ansible/roles/shadowsocks-deploy/tasks/main.yml b/ansible/roles/shadowsocks-deploy/tasks/main.yml new file mode 100644 index 0000000..180ebb2 --- /dev/null +++ b/ansible/roles/shadowsocks-deploy/tasks/main.yml @@ -0,0 +1,60 @@ +--- +- name: Set shadowsocks base directory fact + set_fact: + shadowsocks_dir: "/srv/shadowsocks-rust" + +- name: Ensure shadowsocks base directory exists + file: + path: "{{ shadowsocks_dir }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + become: yes + +- name: Ensure proxy configuration directories exist + file: + path: "{{ shadowsocks_dir }}/config_ssp_{{ item.value.local_port }}" + state: directory + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0755' + loop: "{{ shadowsocks_proxies | default({}) | dict2items }}" + loop_control: + label: "{{ item.key }}" + become: yes + when: shadowsocks_proxies is defined + +- name: Template proxy configuration files + template: + src: "config.json.j2" + dest: "{{ shadowsocks_dir }}/config_ssp_{{ item.value.local_port }}/config.json" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0644' + loop: "{{ shadowsocks_proxies | default({}) | dict2items }}" + loop_control: + label: "{{ item.key }}" + become: yes + when: shadowsocks_proxies is defined + +- name: Template docker-compose file for proxies + template: + src: "docker-compose.proxies.yaml.j2" + dest: "{{ shadowsocks_dir }}/docker-compose.proxies.yaml" + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0644' + become: yes + when: shadowsocks_proxies is defined + +- name: Create symlink for docker-compose.yaml + file: + src: "{{ shadowsocks_dir }}/docker-compose.proxies.yaml" + dest: "{{ shadowsocks_dir }}/docker-compose.yaml" + state: link + force: yes + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + when: shadowsocks_proxies is defined diff --git a/ansible/roles/shadowsocks-deploy/templates/config.json.j2 b/ansible/roles/shadowsocks-deploy/templates/config.json.j2 new file mode 100644 index 0000000..846b84d --- /dev/null +++ b/ansible/roles/shadowsocks-deploy/templates/config.json.j2 @@ -0,0 +1,11 @@ +{ + "server": "{{ item.value.server }}", + "server_port": {{ item.value.server_port }}, + "password": "{{ lookup('vars', item.value.vault_password_key) }}", + "local_address": "0.0.0.0", + "local_port": {{ item.value.local_port }}, + "timeout": 20, + "method": "aes-256-gcm", + "fast_open": true, + "mode": "tcp_and_udp" +} diff --git a/ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2 b/ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2 new file mode 100644 index 0000000..ca51296 --- /dev/null +++ b/ansible/roles/shadowsocks-deploy/templates/docker-compose.proxies.yaml.j2 @@ -0,0 +1,22 @@ +# This file is managed by Ansible. 
+name: "shadowsocks-proxies" +services: +{% for name, config in shadowsocks_proxies.items() %} + {{ name }}: + image: ghcr.io/shadowsocks/sslocal-rust:v1.22.0 + container_name: {{ name }} + restart: always + ports: + - "127.0.0.1:{{ config.local_port }}:{{ config.local_port }}/tcp" + - "127.0.0.1:{{ config.local_port }}:{{ config.local_port }}/udp" + volumes: + - /srv/shadowsocks-rust/config_ssp_{{ config.local_port }}/config.json:/etc/shadowsocks-rust/config.json:ro + networks: + - default + - airflow_proxynet +{% endfor %} + +networks: + airflow_proxynet: + name: airflow_proxynet + external: true diff --git a/ansible/roles/ytdlp-master/tasks/main.yml b/ansible/roles/ytdlp-master/tasks/main.yml index c93d07b..04d8d8b 100644 --- a/ansible/roles/ytdlp-master/tasks/main.yml +++ b/ansible/roles/ytdlp-master/tasks/main.yml @@ -60,6 +60,16 @@ service_role: "management" server_identity: "ytdlp-ops-service-mgmt" +- name: Create symlink for .env in configs directory for manual docker-compose commands + file: + src: "../.env" + dest: "{{ airflow_master_dir }}/configs/.env" + state: link + force: yes + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + - name: Template docker-compose file for YT-DLP master service template: src: "../airflow/configs/docker-compose-ytdlp-ops.yaml.j2" @@ -81,19 +91,22 @@ state: absent become: yes +- name: Create placeholder envoy.yaml to prevent Docker from creating a directory + file: + path: "{{ airflow_master_dir }}/envoy.yaml" + state: touch + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0664' + become: yes + - name: Generate YT-DLP service configurations shell: - cmd: "docker compose --project-directory . --env-file .env -f configs/docker-compose.config-generate.yaml run --rm config-generator" + cmd: "docker compose --project-directory {{ airflow_master_dir }} -f configs/docker-compose.config-generate.yaml run --rm config-generator" chdir: "{{ airflow_master_dir }}" become: yes become_user: "{{ ssh_user }}" -- name: Pull YT-DLP service image - community.docker.docker_image: - name: "{{ ytdlp_ops_image }}" - source: pull - when: not fast_deploy | default(false) - - name: Ensure correct permissions for build context after generation file: path: "{{ airflow_master_dir }}" @@ -117,10 +130,21 @@ group: "{{ deploy_group }}" become: yes +- name: Check for shadowsocks-rust proxy compose file + stat: + path: "/srv/shadowsocks-rust/docker-compose.proxies.yaml" + register: proxy_compose_file + - name: "Log: Starting YT-DLP management service" debug: msg: "Starting the YT-DLP management service on the master node. This service handles account and proxy management." 
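A hedged liveness probe for the management service that the tasks below start. It reuses the Thrift client pattern from the removed CLI earlier in this patch (framed transport, binary protocol, `ping()`); the import path for the generated `YTTokenOpService` stubs is a placeholder, and the port assumes `management_service_port: 9091` from `generated_vars.yml`:

```python
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol

# Placeholder import: adjust to wherever the generated Thrift stubs live.
from ytdlp_ops_api import YTTokenOpService

def ping_management(host: str = "127.0.0.1", port: int = 9091) -> bool:
    """Return True if the management service answers ping() over framed Thrift."""
    sock = TSocket.TSocket(host, port)
    sock.setTimeout(5000)  # milliseconds, as in the removed CLI
    transport = TTransport.TFramedTransport(sock)  # server requires framed transport
    client = YTTokenOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))
    try:
        transport.open()
        client.ping()
        return True
    except Exception:
        return False
    finally:
        if transport.isOpen():
            transport.close()
```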
+- name: Log in to Docker Hub to pull private images + community.docker.docker_login: + username: "{{ dockerhub_user }}" + password: "{{ vault_dockerhub_password }}" + when: vault_dockerhub_password is defined and vault_dockerhub_password | length > 0 + - name: Start YT-DLP master service community.docker.docker_compose_v2: project_src: "{{ airflow_master_dir }}" diff --git a/ansible/roles/ytdlp-worker/tasks/main.yml b/ansible/roles/ytdlp-worker/tasks/main.yml index fb64f63..9a0cfaa 100644 --- a/ansible/roles/ytdlp-worker/tasks/main.yml +++ b/ansible/roles/ytdlp-worker/tasks/main.yml @@ -72,24 +72,52 @@ service_role: "worker" server_identity: "ytdlp-ops-service-worker-{{ inventory_hostname }}" +- name: Create symlink for .env in configs directory for manual docker-compose commands + file: + src: "../.env" + dest: "{{ airflow_worker_dir }}/configs/.env" + state: link + force: yes + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + +- name: Log in to Docker Hub to pull private images + community.docker.docker_login: + username: "{{ dockerhub_user }}" + password: "{{ vault_dockerhub_password }}" + when: vault_dockerhub_password is defined and vault_dockerhub_password | length > 0 - name: "Log: Generating YT-DLP service configurations" debug: msg: "Running the configuration generator script inside a temporary Docker container. This creates docker-compose, envoy, and camoufox files based on .env variables." +- name: Ensure previously generated config files are removed before generation + file: + path: "{{ item }}" + state: absent + loop: + - "{{ airflow_worker_dir }}/envoy.yaml" + - "{{ airflow_worker_dir }}/configs/docker-compose.camoufox.yaml" + - "{{ airflow_worker_dir }}/configs/camoufox_endpoints.json" + become: yes + +- name: Create placeholder envoy.yaml to prevent Docker from creating a directory + file: + path: "{{ airflow_worker_dir }}/envoy.yaml" + state: touch + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + mode: '0664' + become: yes + - name: Generate YT-DLP service configurations shell: - cmd: "docker compose --project-directory . --env-file .env -f configs/docker-compose.config-generate.yaml run --rm config-generator" + cmd: "docker compose --project-directory {{ airflow_worker_dir }} -f configs/docker-compose.config-generate.yaml run --rm config-generator" chdir: "{{ airflow_worker_dir }}" become: yes become_user: "{{ ssh_user }}" -- name: Pull YT-DLP service image - community.docker.docker_image: - name: "{{ ytdlp_ops_image }}" - source: pull - when: not fast_deploy | default(false) - - name: "Log: Building Camoufox (remote browser) image" debug: msg: "Building the Camoufox image locally. This image provides remote-controlled Firefox browsers for token generation." @@ -112,6 +140,11 @@ recurse: yes become: yes +- name: Check for shadowsocks-rust proxy compose file + stat: + path: "/srv/shadowsocks-rust/docker-compose.proxies.yaml" + register: proxy_compose_file + - name: "Log: Starting YT-DLP worker services" debug: msg: "Starting the core YT-DLP worker services: ytdlp-ops-service (Thrift API), envoy (load balancer), and camoufox (remote browsers)." 
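A hedged post-deploy smoke check for the worker stack started below. The ports come from `generated_vars.yml` (`envoy_port: 9080`, `envoy_admin_port: 9901`); `/ready` is the standard Envoy admin endpoint, assuming the admin port is reachable from wherever this runs:

```python
import socket
import urllib.request

def tcp_open(host: str, port: int, timeout: float = 3.0) -> bool:
    """True if something accepts TCP connections on host:port."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

def envoy_ready(host: str = "127.0.0.1", admin_port: int = 9901) -> bool:
    """Query Envoy's admin /ready endpoint (HTTP 200 when the proxy is live)."""
    try:
        with urllib.request.urlopen(f"http://{host}:{admin_port}/ready", timeout=3) as resp:
            return resp.status == 200
    except OSError:
        return False

if __name__ == "__main__":
    print("envoy listener 9080 open:", tcp_open("127.0.0.1", 9080))
    print("envoy admin /ready:", envoy_ready())
```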
@@ -121,7 +154,6 @@ project_src: "{{ airflow_worker_dir }}" files: - "configs/docker-compose-ytdlp-ops.yaml" - - "configs/docker-compose.camoufox.yaml" state: present remove_orphans: true pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}" diff --git a/ansible/scripts/verify_camoufox_services.py b/ansible/scripts/verify_camoufox_services.py new file mode 100644 index 0000000..4eade42 --- /dev/null +++ b/ansible/scripts/verify_camoufox_services.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python3 +""" +Script to verify that all camoufox services are running and accessible. +This script should be run after deployment to ensure the cluster is healthy. +""" + +import subprocess +import sys +import json +import time +import logging +from typing import List, Dict, Tuple + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + +def run_docker_command(cmd: List[str]) -> Tuple[int, str, str]: + """Run a docker command and return (returncode, stdout, stderr)""" + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=30) + return result.returncode, result.stdout.strip(), result.stderr.strip() + except subprocess.TimeoutExpired: + logger.error(f"Command timed out: {' '.join(cmd)}") + return 1, "", "Command timed out" + except Exception as e: + logger.error(f"Error running command: {' '.join(cmd)} - {e}") + return 1, "", str(e) + +def get_docker_compose_services(project_path: str) -> List[Dict]: + """Get list of services from docker-compose""" + # Try different ways to get services since the project naming might vary + possible_commands = [ + ["docker", "compose", "-p", "ytdlp-ops", "ps", "--format", "json"], + ["docker", "compose", "-p", "ytdlp-ops-camoufox", "ps", "--format", "json"], + ["docker", "compose", "--project-directory", project_path, "ps", "--format", "json"], + ["docker", "compose", "ps", "--format", "json"] + ] + + for cmd in possible_commands: + returncode, stdout, stderr = run_docker_command(cmd) + if returncode == 0 and stdout: + try: + # Handle both single JSON object and JSON array + if stdout.startswith('['): + services = json.loads(stdout) + else: + # Multiple JSON objects, one per line + services = [] + for line in stdout.split('\n'): + if line.strip(): + services.append(json.loads(line)) + if services: + return services + except json.JSONDecodeError as e: + logger.debug(f"Failed to parse docker-compose output with command {' '.join(cmd)}: {e}") + continue + + # If all commands failed, try to get all containers and filter for camoufox + logger.info("Falling back to direct container inspection") + returncode, stdout, stderr = run_docker_command(["docker", "ps", "--format", "json"]) + if returncode == 0 and stdout: + try: + containers = [] + for line in stdout.split('\n'): + if line.strip(): + containers.append(json.loads(line)) + + # Filter for camoufox containers + camoufox_containers = [c for c in containers if 'camoufox' in c.get('Names', '')] + return camoufox_containers + except json.JSONDecodeError: + pass + + logger.error("Failed to get docker-compose services with all methods") + return [] + +def check_service_health(service_name: str, port: int = 12345) -> bool: + """Check if a service is responding on its expected port""" + # For camoufox services, we can check if they're running and have network connectivity + # Since they're WebSocket services, we'll just verify they're running for now + cmd = ["docker", "inspect", service_name] + 
returncode, stdout, stderr = run_docker_command(cmd) + + if returncode != 0: + logger.error(f"Failed to inspect service {service_name}: {stderr}") + return False + + try: + service_info = json.loads(stdout) + if service_info and len(service_info) > 0: + state = service_info[0].get('State', {}) + running = state.get('Running', False) + health = state.get('Health', {}).get('Status', 'unknown') + + if running: + logger.info(f"Service {service_name} is running (health: {health})") + return True + else: + logger.error(f"Service {service_name} is not running") + return False + except json.JSONDecodeError as e: + logger.error(f"Failed to parse docker inspect output for {service_name}: {e}") + return False + +def verify_camoufox_services(project_path: str = "/srv/airflow_dl_worker") -> bool: + """Main function to verify all camoufox services""" + logger.info("Starting camoufox service verification...") + + # Get all services + services = get_docker_compose_services(project_path) + + if not services: + logger.warning("No services found through docker-compose. Checking for running camoufox containers directly...") + # Try to find camoufox containers directly + cmd = ["docker", "ps", "--filter", "name=camoufox", "--format", "json"] + returncode, stdout, stderr = run_docker_command(cmd) + + if returncode == 0 and stdout: + try: + camoufox_containers = [] + for line in stdout.split('\n'): + if line.strip(): + camoufox_containers.append(json.loads(line)) + services = camoufox_containers + except json.JSONDecodeError: + services = [] + + if not services: + logger.error("No camoufox services or containers found.") + # Check if we're on a worker node by looking for camoufox config + import os + if os.path.exists(f"{project_path}/configs/docker-compose.camoufox.yaml"): + logger.info("Camoufox config exists but no services running. This might indicate a startup issue.") + return False + else: + logger.info("No camoufox config found. 
This might be a master node.") + return True + + logger.info(f"Found {len(services)} camoufox service(s) or container(s)") + + # Check each service + all_healthy = True + camoufox_services_found = 0 + + for service in services: + # Different docker output formats have different field names + service_name = (service.get('Name') or + service.get('Names') or + service.get('name') or + service.get('Service', 'unknown')) + + # If we're dealing with container output, Names might be a string + if isinstance(service_name, str): + service_names = [service_name] + else: + service_names = service_name if isinstance(service_name, list) else [str(service_name)] + + # Check if any of the service names contain 'camoufox' + is_camoufox_service = any('camoufox' in name.lower() for name in service_names) + + if not is_camoufox_service: + continue + + camoufox_services_found += 1 + logger.info(f"Checking service: {service_names[0] if service_names else 'unknown'}") + + # Use the first service name for health check + name_to_check = service_names[0] if service_names else 'unknown' + + # Check if service is running + if not check_service_health(name_to_check): + all_healthy = False + continue + + # Check service status from docker output + service_status = (service.get('State') or + service.get('status') or + service.get('Status') or + 'unknown') + service_health = (service.get('Health') or + service.get('health') or + 'unknown') + + logger.info(f"Service {name_to_check} - Status: {service_status}, Health: {service_health}") + + if service_status not in ['running', 'Running']: + logger.error(f"Service {name_to_check} is not running (status: {service_status})") + all_healthy = False + elif service_health not in ['healthy', 'unknown', '']: # unknown or empty is OK for services without healthcheck + logger.warning(f"Service {name_to_check} health is {service_health}") + + if camoufox_services_found == 0: + logger.warning("No camoufox services found in the service list") + return False + + logger.info(f"Successfully verified {camoufox_services_found} camoufox service(s)") + return all_healthy + +def main(): + """Main entry point""" + logger.info("Camoufox Service Verification Script") + logger.info("=" * 40) + + # Try to detect project path + import os + project_paths = [ + "/srv/airflow_dl_worker", # Worker node + "/srv/airflow_master", # Master node + "/app", # Container path + "." 
# Current directory + ] + + project_path = None + for path in project_paths: + if os.path.exists(path): + project_path = path + break + + if not project_path: + logger.error("Could not determine project path") + return 1 + + logger.info(f"Using project path: {project_path}") + + try: + success = verify_camoufox_services(project_path) + if success: + logger.info("✅ All camoufox services verification PASSED") + return 0 + else: + logger.error("❌ Camoufox services verification FAILED") + return 1 + except Exception as e: + logger.error(f"Unexpected error during verification: {e}", exc_info=True) + return 1 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/ansible/tasks/docker_health_check.yml b/ansible/tasks/docker_health_check.yml new file mode 100644 index 0000000..077eddd --- /dev/null +++ b/ansible/tasks/docker_health_check.yml @@ -0,0 +1,35 @@ +--- +- name: Check if Docker daemon is running + systemd: + name: docker + state: started + enabled: yes + become: yes + register: docker_service + +- name: Restart Docker if it was not running + systemd: + name: docker + state: restarted + become: yes + when: docker_service.changed + +- name: Wait for Docker to be ready + command: docker info + register: docker_info + until: docker_info.rc == 0 + retries: 10 + delay: 3 + become: yes + ignore_errors: yes + +- name: Check Docker networks + command: docker network ls + register: docker_networks + become: yes + +- name: Ensure airflow_proxynet network exists + docker_network: + name: airflow_proxynet + state: present + become: yes diff --git a/ansible/tasks/verify_camoufox.yml b/ansible/tasks/verify_camoufox.yml new file mode 100644 index 0000000..b574cf9 --- /dev/null +++ b/ansible/tasks/verify_camoufox.yml @@ -0,0 +1,38 @@ +--- +- name: Copy camoufox verification script to worker + copy: + src: scripts/verify_camoufox_services.py + dest: "{{ airflow_worker_dir }}/verify_camoufox_services.py" + mode: '0755' + owner: "{{ ssh_user }}" + group: "{{ deploy_group }}" + become: yes + when: inventory_hostname in groups['airflow_workers'] + +- name: Run camoufox service verification + command: python3 verify_camoufox_services.py + args: + chdir: "{{ airflow_worker_dir }}" + environment: + PATH: "{{ ansible_env.PATH }}:/usr/local/bin" + register: verification_result + become: yes + become_user: "{{ ssh_user }}" + when: inventory_hostname in groups['airflow_workers'] + ignore_errors: yes + +- name: Check verification results + debug: + msg: | + Camoufox verification {{ 'PASSED' if verification_result.rc == 0 else 'FAILED' }} + Output: {{ verification_result.stdout }} + Errors: {{ verification_result.stderr }} + when: inventory_hostname in groups['airflow_workers'] and verification_result is defined + +- name: Fail deployment if camoufox verification failed + fail: + msg: "Camoufox service verification failed. Check service status and network connectivity." + when: > + inventory_hostname in groups['airflow_workers'] and + verification_result is defined and + verification_result.rc != 0 diff --git a/ansible/templates/.env.j2 b/ansible/templates/.env.j2 index 768e3e7..91e117a 100644 --- a/ansible/templates/.env.j2 +++ b/ansible/templates/.env.j2 @@ -1,4 +1,7 @@ # This file is managed by Ansible. +# Set the timezone for all services to ensure consistency in logs. 
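+# Containers otherwise default to UTC, which makes log timestamps hard to correlate across services.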
+TZ=Europe/Moscow + HOSTNAME="{{ inventory_hostname }}" SERVICE_ROLE={{ service_role }} {% if server_identity is defined %} @@ -18,8 +21,11 @@ REDIS_PORT={{ redis_port }} # Master-specific settings {% if 'master' in service_role or 'management' in service_role %} AIRFLOW_ADMIN_PASSWORD="{{ vault_airflow_admin_password }}" +FLOWER_PASSWORD="{{ vault_flower_password }}" AIRFLOW_VAR_MASTER_HOST_IP={{ hostvars[groups['airflow_master'][0]].ansible_host }} -MASTER_HOST_IP={{ hostvars[groups['airflow_master'][0]].ansible_host }} +# MASTER_HOST_IP is not needed on the master node itself for ytdlp-ops, +# as it connects to Redis via the internal Docker service name 'redis'. +# It is defined for workers to connect back to the master. # Camoufox is not used on master, but the config generator expects the variable. CAMOUFOX_PROXIES= @@ -27,6 +33,7 @@ CAMOUFOX_PROXIES= # Worker-specific settings {% if 'worker' in service_role %} +AIRFLOW_PROJ_DIR={{ airflow_worker_dir }} MASTER_HOST_IP={{ hostvars[groups['airflow_master'][0]].ansible_host }} # --- Envoy & Worker Configuration --- diff --git a/ansible/templates/shadowsocks-compose.yml.j2 b/ansible/templates/shadowsocks-compose.yml.j2 new file mode 100644 index 0000000..6ae640a --- /dev/null +++ b/ansible/templates/shadowsocks-compose.yml.j2 @@ -0,0 +1,21 @@ +name: "shadowsocks-proxies" +services: +{% for proxy_name, proxy_config in shadowsocks_proxies.items() %} + {{ proxy_name }}: + image: {{ shadowsocks_image }} + container_name: {{ proxy_name }} + restart: always + ports: + - "127.0.0.1:{{ proxy_config.local_port }}:{{ proxy_config.local_port }}/tcp" + - "127.0.0.1:{{ proxy_config.local_port }}:{{ proxy_config.local_port }}/udp" + volumes: + - ./config_ssp_{{ proxy_config.local_port }}/:/etc/shadowsocks-rust/:ro + networks: + - default + - {{ docker_network_name }} +{% endfor %} + +networks: + {{ docker_network_name }}: + name: {{ docker_network_name }} + external: true diff --git a/cluster.green.yml b/cluster.green.yml new file mode 100644 index 0000000..f9818c6 --- /dev/null +++ b/cluster.green.yml @@ -0,0 +1,105 @@ +global_vars: + # Docker image versions + ytdlp_ops_image: "pangramia/ytdlp-ops-server:latest" + airflow_image_name: "pangramia/ytdlp-ops-airflow:latest" + + # Default ports + postgres_port: 5432 + ytdlp_base_port: 9090 + envoy_port: 9080 + envoy_admin_port: 9901 + management_service_port: 9091 + camoufox_base_vnc_port: 5901 + + # Default UID + airflow_uid: 1003 + + # Default directories + airflow_master_dir: "/srv/airflow_master" + airflow_worker_dir: "/srv/airflow_dl_worker" + + # Docker network name + docker_network_name: "airflow_proxynet" + + # Default usernames + ssh_user: "alex_p" + ansible_user: "alex_p" + + # Default group + deploy_group: "ytdl" + + # Default file permissions + dir_permissions: "0755" + file_permissions: "0644" + + # Default rsync options + rsync_default_opts: + - "--no-owner" + - "--no-group" + - "--no-times" + - "--copy-links" + - "--copy-unsafe-links" + - "--exclude=.git*" + - "--exclude=__pycache__" + - "--exclude=*.pyc" + - "--exclude=*.log" + - "--exclude=.DS_Store" + + # Docker-Hub credentials + dockerhub_user: "pangramia" + + # Host timezone + host_timezone: "Europe/Moscow" + + # Shadowsocks cipher method + shadowsocks_cipher_method: "aes-256-gcm" + + # Shadowsocks image + shadowsocks_image: "ghcr.io/shadowsocks/sslocal-rust:v1.22.0" + + # Shadowsocks config options + shadowsocks_local_address: "0.0.0.0" + shadowsocks_timeout: 20 + shadowsocks_fast_open: true + shadowsocks_mode: "tcp_and_udp" + +# 
Global list of all available proxies to be deployed everywhere. +# The key (e.g., 'sslocal-rust-1087') becomes the service name. +shadowsocks_proxies: + + sslocal-rust-1087: + server: 91.103.252.51 + server_port: 8388 + local_port: 1087 + vault_password_key: vault_ss_password_1 + + sslocal-rust-1086: + server: 62.60.178.45 + server_port: 8388 + local_port: 1086 + vault_password_key: vault_ss_password_2 + + + sslocal-rust-1081: + server: 79.137.207.43 + server_port: 8388 + local_port: 1081 + vault_password_key: vault_ss_password_2 + +master: + af-green: + ip: 89.253.223.97 + port: 22 + proxies: + - "socks5://sslocal-rust-1087:1087" + +workers: + dl003: + ip: 62.60.245.103 + proxies: + - "socks5://sslocal-rust-1087:1087" + + dl001: + ip: 109.107.189.106 + proxies: + - "socks5://sslocal-rust-1087:1087" diff --git a/cluster.test.yml b/cluster.test.yml new file mode 100644 index 0000000..69db96a --- /dev/null +++ b/cluster.test.yml @@ -0,0 +1,101 @@ +global_vars: + # Docker image versions + ytdlp_ops_image: "pangramia/ytdlp-ops-server:latest" + airflow_image_name: "pangramia/ytdlp-ops-airflow:latest" + + # Default ports + postgres_port: 5432 + ytdlp_base_port: 9090 + envoy_port: 9080 + envoy_admin_port: 9901 + management_service_port: 9091 + camoufox_base_vnc_port: 5901 + + # Default UID + airflow_uid: 1003 + + # Default directories + airflow_master_dir: "/srv/airflow_master" + airflow_worker_dir: "/srv/airflow_dl_worker" + + # Docker network name + docker_network_name: "airflow_proxynet" + + # Default usernames + ssh_user: "alex_p" + ansible_user: "alex_p" + + # Default group + deploy_group: "ytdl" + + # Default file permissions + dir_permissions: "0755" + file_permissions: "0644" + + # Default rsync options + rsync_default_opts: + - "--no-owner" + - "--no-group" + - "--no-times" + - "--copy-links" + - "--copy-unsafe-links" + - "--exclude=.git*" + - "--exclude=__pycache__" + - "--exclude=*.pyc" + - "--exclude=*.log" + - "--exclude=.DS_Store" + + # Docker-Hub credentials + dockerhub_user: "pangramia" + + # Host timezone + host_timezone: "Europe/Moscow" + + # Shadowsocks cipher method + shadowsocks_cipher_method: "aes-256-gcm" + + # Shadowsocks image + shadowsocks_image: "ghcr.io/shadowsocks/sslocal-rust:v1.22.0" + + # Shadowsocks config options + shadowsocks_local_address: "0.0.0.0" + shadowsocks_timeout: 20 + shadowsocks_fast_open: true + shadowsocks_mode: "tcp_and_udp" + +# Global list of all available proxies to be deployed everywhere. +# The key (e.g., 'sslocal-rust-1087') becomes the service name. 
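+# Note: entries may share a vault_password_key when their upstream servers use the same secret.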
+shadowsocks_proxies:
+
+  sslocal-rust-1087:
+    server: 91.103.252.51
+    server_port: 8388
+    local_port: 1087
+    vault_password_key: vault_ss_password_1
+
+  sslocal-rust-1086:
+    server: 62.60.178.45
+    server_port: 8388
+    local_port: 1086
+    vault_password_key: vault_ss_password_2
+
+  sslocal-rust-1081:
+    server: 79.137.207.43
+    server_port: 8388
+    local_port: 1081
+    vault_password_key: vault_ss_password_2
+
+master:
+  af-test:
+    ip: 89.253.223.97
+    port: 22
+    proxies:
+      - "socks5://sslocal-rust-1086:1086"
+      - "socks5://sslocal-rust-1081:1081"
+
+workers:
+  dl002:
+    ip: 62.60.178.54
+    proxies:
+      - "socks5://sslocal-rust-1081:1081"
+      - "socks5://sslocal-rust-1086:1086"
diff --git a/cluster.yml b/cluster.yml
deleted file mode 100644
index b5bd126..0000000
--- a/cluster.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-master:
-  af-green: 89.253.221.173
-
-workers:
-  dl003:
-    ip: 62.60.245.103
-    proxies:
-      - "socks5://sslocal-rust-1087:1087"
diff --git a/get_info_json_client.py b/get_info_json_client.py
new file mode 100644
index 0000000..ba4393c
--- /dev/null
+++ b/get_info_json_client.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+"""
+Client script to get info.json from the Thrift service.
+
+Usage:
+    python get_info_json_client.py URL [--host HOST] [--port PORT] [options]
+
+Options:
+    --host HOST        Thrift server host
+    --port PORT        Thrift server port
+    --profile NAME     Profile name (accountId) to use for the request
+    --client NAME      Specific player client to use (e.g., web, ios, android)
+    --machine-id ID    Identifier for the client machine
+    --output FILE      Output file path
+    --verbose          Enable verbose output
+"""
+
+import argparse
+import json
+import os
+import sys
+import logging
+from typing import Dict, Any, Optional
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+)
+logger = logging.getLogger('info_json_client')
+
+# Import Thrift modules
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from thrift.transport import TTransport
+from pangramia.yt.common.ttypes import TokenUpdateMode
+from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
+from yt_ops_services.client_utils import get_thrift_client
+
+def parse_args():
+    """Parse command line arguments"""
+    parser = argparse.ArgumentParser(description='Get info.json from Thrift service')
+    parser.add_argument('url', help='YouTube URL or video ID')
+    parser.add_argument('--host', default='127.0.0.1', help="Thrift server host. Using 127.0.0.1 avoids harmless connection errors when the local Envoy proxy only listens on IPv4.")
+    parser.add_argument('--port', type=int, default=9080, help='Thrift server port')
+    parser.add_argument('--profile', default='default_profile', help='The profile name (accountId) to use for the request.')
+    parser.add_argument('--client', help='Specific client to use (e.g., web, ios, android). Overrides server default.')
+    parser.add_argument('--output', help='Output file path for the info.json. If not provided, prints to stdout.')
+    parser.add_argument('--machine-id', help='Identifier for the client machine. 
Defaults to hostname.') + parser.add_argument('--verbose', action='store_true', help='Enable verbose output') + return parser.parse_args() + +def main(): + """Main entry point""" + args = parse_args() + + # Set log level + if args.verbose: + logger.setLevel(logging.DEBUG) + + transport = None + try: + # Create Thrift client + client, transport = get_thrift_client(args.host, args.port) + + # Get token data, which includes the info.json + logger.info(f"Requesting info.json for URL '{args.url}' using profile '{args.profile}'") + + # Prepare arguments for the Thrift call + machine_id = args.machine_id + if not machine_id: + import socket + machine_id = socket.gethostname() + logger.info(f"No machine ID provided, using hostname: {machine_id}") + + thrift_args = { + 'accountId': args.profile, + 'updateType': TokenUpdateMode.AUTO, + 'url': args.url, + 'clients': args.client, + 'machineId': machine_id + } + if args.client: + logger.info(f"Requesting to use specific client: {args.client}") + else: + logger.info("No specific client requested, server will use its default.") + + token_data = client.getOrRefreshToken(**thrift_args) + + if not token_data or not hasattr(token_data, 'infoJson') or not token_data.infoJson: + logger.error("Server did not return valid info.json data.") + print("Error: Server did not return valid info.json data.", file=sys.stderr) + return 1 + + info_json_str = token_data.infoJson + + # Check if the returned info.json is an error report + try: + info_data = json.loads(info_json_str) + if isinstance(info_data, dict) and 'error' in info_data: + error_code = info_data.get('errorCode', 'N/A') + error_message = info_data.get('message', info_data.get('error', 'Unknown error')) + logger.error(f"Server returned an error in info.json (Code: {error_code}): {error_message}") + print(f"Error from server (Code: {error_code}): {error_message}", file=sys.stderr) + # Optionally print the full error JSON + if args.verbose: + print(json.dumps(info_data, indent=2), file=sys.stderr) + return 1 + except json.JSONDecodeError: + logger.error(f"Failed to parse info.json from server: {info_json_str[:200]}...") + print("Error: Failed to parse the info.json response from the server.", file=sys.stderr) + return 1 + + logger.info(f"Successfully retrieved info.json ({len(info_json_str)} bytes)") + + # Write to output file if specified, otherwise print to stdout + if args.output: + try: + with open(args.output, 'w', encoding='utf-8') as f: + # Pretty-print the JSON to the file + json.dump(info_data, f, indent=2) + logger.info(f"Wrote info.json to {args.output}") + print(f"Successfully saved info.json to {args.output}") + except IOError as e: + logger.error(f"Failed to write to output file {args.output}: {e}") + print(f"Error: Failed to write to output file {args.output}: {e}", file=sys.stderr) + return 1 + else: + # Pretty-print the JSON to stdout + print(json.dumps(info_data, indent=2)) + + return 0 + except (PBServiceException, PBUserException) as e: + logger.error(f"A Thrift error occurred: {e.message}", exc_info=args.verbose) + print(f"Error: {e.message}", file=sys.stderr) + if hasattr(e, 'context') and e.context: + print(f"Context: {e.context}", file=sys.stderr) + return 1 + except TTransport.TTransportException as e: + logger.error(f"Connection to server failed: {e}", exc_info=args.verbose) + print(f"Error: Connection to server at {args.host}:{args.port} failed.", file=sys.stderr) + return 1 + except Exception as e: + logger.exception(f"An unexpected error occurred: {e}") + print(f"An unexpected 
error occurred: {e}", file=sys.stderr) + return 1 + finally: + if transport and transport.isOpen(): + transport.close() + logger.info("Thrift connection closed.") + +if __name__ == "__main__": + sys.exit(main()) diff --git a/proxy_manager_client.py b/proxy_manager_client.py new file mode 100644 index 0000000..19b57e0 --- /dev/null +++ b/proxy_manager_client.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python3 +""" +Client script to manage proxies in the YTTokenOpService. + +This script allows you to list, ban, unban, and reset proxies that are managed +by a ytdlp-ops-server instance via Redis. +""" + +import argparse +import sys +import os +import logging +from pathlib import Path +import datetime + +# Configure logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + + + +try: + from thrift.transport import TTransport + from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException + from pangramia.yt.common.constants import ErrorCode + from tabulate import tabulate + from yt_ops_services.client_utils import get_thrift_client, format_timestamp +except ImportError as e: + print(f"Error importing required modules: {e}") + print("Please ensure you have installed dependencies by running: pip install -e .") + sys.exit(1) + + + + +def main(): + parser = argparse.ArgumentParser( + description="Manage proxies for the YTDLP Operations Server.\n\n" + "This script allows you to list, ban, unban, and reset proxies that are managed\n" + "by a ytdlp-ops-server instance via Redis. It provides a command-line interface\n" + "to interact with the proxy management features of the server.", + epilog="Usage examples:\n" + " # List statuses for a specific server identity\n" + " python proxy_manager_client.py list --server-identity ytdlp-ops-airflow-service\n\n" + " # Ban a proxy for a specific server\n" + " python proxy_manager_client.py ban --server-identity ytdlp-ops-airflow-service --proxy-url socks5://proxy.example.com:1080\n\n" + " # Unban a proxy\n" + " python proxy_manager_client.py unban --server-identity ytdlp-ops-airflow-service --proxy-url socks5://proxy.example.com:1080\n\n" + " # Reset all proxies for a server to ACTIVE\n" + " python proxy_manager_client.py reset --server-identity ytdlp-ops-airflow-service", + formatter_class=argparse.RawTextHelpFormatter + ) + parser.add_argument('--host', default=os.getenv('YTDLP_HOST', '127.0.0.1'), help="Server host (default: 127.0.0.1 or YTDLP_HOST env). 
Using 127.0.0.1 avoids harmless connection errors when the local Envoy proxy only listens on IPv4.") + parser.add_argument('--port', type=int, default=int(os.getenv('YTDLP_PORT', '9080')), help='Server port (default: 9080 or YTDLP_PORT env)') + + subparsers = parser.add_subparsers(dest='command', required=True, help='Available commands') + + # List command + list_parser = subparsers.add_parser( + 'list', + help='List proxy statuses for a given server identity.', + description="List the status of all proxies associated with a specific server identity.\n" + "The status includes:\n" + "- Server: The server identity.\n" + "- Proxy URL: The URL of the proxy.\n" + "- Status: ACTIVE or BANNED.\n" + "- Success: Count of successful uses.\n" + "- Failures: Count of failed uses.\n" + "- Last Success: Timestamp of the last successful use.\n" + "- Last Failure: Timestamp of the last failed use.", + formatter_class=argparse.RawTextHelpFormatter + ) + list_parser.add_argument('--server-identity', type=str, help='The identity of the server to query. If not provided, shows status for the connected server instance.') + + # Ban command + ban_parser = subparsers.add_parser( + 'ban', + help='Ban a specific proxy for a server.', + description="Manually set a proxy's status to BANNED for a specific server identity.\n" + "A banned proxy will not be used for future requests by that server instance\n" + "until it is unbanned or reset.", + formatter_class=argparse.RawTextHelpFormatter + ) + ban_parser.add_argument('--server-identity', type=str, required=True, help='The identity of the server where the proxy should be banned.') + ban_parser.add_argument('--proxy-url', type=str, required=True, help="The full URL of the proxy to ban (e.g., 'socks5://host:port').") + + # Unban command + unban_parser = subparsers.add_parser( + 'unban', + help='Unban a specific proxy for a server.', + description="Manually set a proxy's status to ACTIVE for a specific server identity.\n" + "This will allow the server instance to use the proxy for future requests.", + formatter_class=argparse.RawTextHelpFormatter + ) + unban_parser.add_argument('--server-identity', type=str, required=True, help='The identity of the server where the proxy should be unbanned.') + unban_parser.add_argument('--proxy-url', type=str, required=True, help="The full URL of the proxy to unban (e.g., 'socks5://host:port').") + + # Reset command + reset_parser = subparsers.add_parser( + 'reset', + help='Reset all proxy statuses for a server to ACTIVE.', + description="Reset the status of all proxies associated with a specific server identity to ACTIVE.\n" + "This is useful for clearing all bans and making all configured proxies available again.", + formatter_class=argparse.RawTextHelpFormatter + ) + reset_parser.add_argument('--server-identity', type=str, required=True, help='The identity of the server whose proxies should be reset.') + + args = parser.parse_args() + + client, transport = None, None + try: + client, transport = get_thrift_client(args.host, args.port) + + if args.command == 'list': + logger.info(f"Getting proxy statuses for server: {args.server_identity or 'local server'}") + statuses = client.getProxyStatus(args.server_identity) + if not statuses: + print("\nThe server reported no proxy statuses.") + print("This can happen if no proxies are configured, or if all configured proxies failed their initial health check on server startup.\n") + else: + # Determine which proxy is next in rotation for each server identity + next_proxies = {s.serverIdentity: 
s.proxyUrl for s in statuses if '(next)' in s.status} + + status_list = [] + for s in statuses: + is_next = next_proxies.get(s.serverIdentity) == s.proxyUrl + status_list.append({ + "Server": s.serverIdentity, + "Proxy URL": f"{s.proxyUrl} ->" if is_next else s.proxyUrl, + "Status": s.status.replace(" (next)", ""), + "Success": s.successCount, + "Failures": s.failureCount, + "Last Success": format_timestamp(s.lastSuccessTimestamp), + "Last Failure": format_timestamp(s.lastFailureTimestamp), + }) + print("\n--- Proxy Statuses ---") + print(tabulate(status_list, headers="keys", tablefmt="grid")) + print("----------------------\n") + + elif args.command == 'ban': + logger.info(f"Banning proxy '{args.proxy_url}' for server '{args.server_identity}'...") + success = client.banProxy(args.proxy_url, args.server_identity) + if success: + print(f"Successfully banned proxy '{args.proxy_url}' for server '{args.server_identity}'.") + else: + print("Failed to ban proxy. Check server logs for details.") + sys.exit(1) + + elif args.command == 'unban': + logger.info(f"Unbanning proxy '{args.proxy_url}' for server '{args.server_identity}'...") + success = client.unbanProxy(args.proxy_url, args.server_identity) + if success: + print(f"Successfully unbanned proxy '{args.proxy_url}' for server '{args.server_identity}'.") + else: + print("Failed to unban proxy. Check server logs for details.") + sys.exit(1) + + elif args.command == 'reset': + logger.info(f"Resetting all proxy statuses for server '{args.server_identity}'...") + success = client.resetAllProxyStatuses(args.server_identity) + if success: + print(f"Successfully reset all proxy statuses for server '{args.server_identity}'.") + else: + print("Failed to reset all proxy statuses. Check server logs for details.") + sys.exit(1) + + except (PBServiceException, PBUserException) as e: + if hasattr(e, 'errorCode') and e.errorCode == ErrorCode.NOT_IMPLEMENTED: + logger.error(f"Action '{args.command}' is not implemented by the server. It may be running in the wrong service mode.") + print(f"Error: The server does not support the action '{args.command}'.") + print("Please check that the server is running in 'all-in-one' or 'management' mode.") + else: + logger.error(f"Thrift error performing action '{args.command}': {e.message}", exc_info=True) + print(f"Error: {e.message}") + sys.exit(1) + except TTransport.TTransportException as e: + # The logger.error is not needed here because TSocket already logs connection errors. + print(f"Error: Connection to server at {args.host}:{args.port} failed. Is the server running?") + print(f"Details: {e}") + sys.exit(1) + except Exception as e: + logger.error(f"An unexpected error occurred: {e}", exc_info=True) + print(f"An unexpected error occurred: {e}") + sys.exit(1) + finally: + if transport and transport.isOpen(): + transport.close() + logger.info("Thrift connection closed.") + + +if __name__ == "__main__": + main() diff --git a/thrift_exceptions_patch.py b/thrift_exceptions_patch.py new file mode 100644 index 0000000..46e3ead --- /dev/null +++ b/thrift_exceptions_patch.py @@ -0,0 +1,58 @@ +""" +Patch for Thrift-generated exception classes to make them compatible with Airflow's secret masking. 
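+Airflow's masker assigns to exception attributes in place, which the generated classes otherwise reject; the patch swaps their __setattr__ for plain object.__setattr__.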
+""" + +import logging +import sys +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple, Union + +# --- Python Path Setup --- +project_root = Path(__file__).parent.absolute() +# Add project root to sys.path (needed for the 'pangramia' symlink) +if str(project_root) not in sys.path: sys.path.insert(0, str(project_root)) +# --- End Python Path Setup --- + +logger = logging.getLogger(__name__) + +def patch_thrift_exceptions(): + """ + Patch Thrift-generated exception classes to make them compatible with Airflow's secret masking. + """ + try: + from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException + + # Save original __setattr__ methods + original_service_setattr = PBServiceException.__setattr__ + original_user_setattr = PBUserException.__setattr__ + + # Define a new __setattr__ method that allows modifying any attribute + def new_service_setattr(self, name, value): + logger.debug(f"Setting attribute {name} on PBServiceException") + object.__setattr__(self, name, value) + + def new_user_setattr(self, name, value): + logger.debug(f"Setting attribute {name} on PBUserException") + object.__setattr__(self, name, value) + + # Apply the patch to both exception classes + PBServiceException.__setattr__ = new_service_setattr + PBUserException.__setattr__ = new_user_setattr + + logger.info("Successfully patched Thrift exception classes for Airflow compatibility") + + # Verify the patch + try: + test_exception = PBServiceException(message="Test") + test_exception.args = ("Test",) # Try to modify an attribute + logger.info("Verified Thrift exception patch is working correctly") + except Exception as e: + logger.error(f"Thrift exception patch verification failed: {e}") + except ImportError as e: + logger.warning(f"Could not import Thrift exception classes: {e}") + logger.warning("Airflow error handling may not work properly with Thrift exceptions") + except Exception as e: + logger.error(f"Error patching Thrift exception classes: {e}") + +# Apply the patch when this module is imported +patch_thrift_exceptions() diff --git a/thrift_model/data/common.thrift b/thrift_model/data/common.thrift deleted file mode 100644 index 27babc1..0000000 --- a/thrift_model/data/common.thrift +++ /dev/null @@ -1,131 +0,0 @@ -namespace py pangramia.yt.common -namespace java com.pangramia.yt.common - -typedef string JobID -typedef string Timestamp - - -/** - * Standard error codes for service exceptions. 
- */ -enum ErrorCode { - UNKNOWN = 0, - NOT_IMPLEMENTED = 1, - INTERNAL_ERROR = 2, - INVALID_REQUEST = 3, - PROXY_UNAVAILABLE = 4, - ACCOUNT_UNAVAILABLE = 5, - BOT_DETECTED = 6, - BOT_DETECTION_SIGN_IN_REQUIRED = 7 -} - - -enum JobState { - SUCCESS, - FAIL, - BOT_FORBIDDEN_ON_URL_ACCESS, - BOT_FORBIDDEN_ON_FILE_DOWNLOAD, - BOT_CAPTCHA, - BOT_AUTH_RELOGIN_REQUIRED, - BOT_AUTH_SMS_REQUIRED, - BOT_AUTH_DEVICE_QR_REQUIRED, - BOT_ACCOUNT_BANNED, - BOT_IP_BANNED -} - -struct JobTokenData { - 1: optional string infoJson, - 2: optional string ytdlpCommand, - 3: optional string socks, - 4: optional JobID jobId, - 5: optional string url, - 6: optional string cookiesBlob, -} - - -enum TokenUpdateMode { - AUTOREFRESH_AND_REMAIN_ANONYMOUS, - AUTOREFRESH_AND_ALLOW_AUTH, - AUTOREFRESH_AND_ONLY_AUTH, - CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH, - CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS, - CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH, - AUTO,// AUTOREFRESH_AND_ONLY_AUTH, -} - - -struct AccountData { - 1: required string username, - 2: required string password, - 3: optional string countryCode -} - -struct ProxyData { - 1: required string proxyUrl, - 2: optional string countryCode -} - - -enum AccountPairState { - ACTIVE, - PAUSED, - REMOVED, - IN_PROGRESS, - ALL -} - - -struct AccountPairWithState { - 1: required string accountId, - 2: required string proxyId, - 3: optional AccountPairState accountPairState - 4: optional string machineId, -} - -struct JobData { - 1: required string jobId, - 2: required string url, - 3: required string cookiesBlob, - 4: required string potoken, - 5: required string visitorId, - 6: required string ytdlpCommand, - 7: required string createdTime, - 8: required map telemetry, - 9: required JobState state, - 10: optional string errorMessage, - 11: optional string socks5Id -} - -struct RichCollectionPagination { - 1: required bool hasNext, - 2: required i32 totalCount, - 3: required i32 page, - 4: required i32 pageSize -} - -struct RichCollectionJobData { - 1: required list items, - 2: required RichCollectionPagination pagination -} - -struct ProxyStatus { - 1: string proxyUrl, - 2: string status, - 3: i64 successCount, - 4: i64 failureCount, - 5: optional string lastFailureTimestamp, - 6: optional string lastSuccessTimestamp, - 7: optional string serverIdentity -} - -struct AccountStatus { - 1: string accountId, - 2: string status, - 3: i64 successCount, - 4: i64 failureCount, - 5: optional string lastFailureTimestamp, - 6: optional string lastSuccessTimestamp, - 7: optional string lastUsedProxy, - 8: optional string lastUsedMachine -} - diff --git a/thrift_model/data/exceptions.thrift b/thrift_model/data/exceptions.thrift deleted file mode 100644 index 2e0370e..0000000 --- a/thrift_model/data/exceptions.thrift +++ /dev/null @@ -1,14 +0,0 @@ -namespace py pangramia.yt.exceptions -namespace java com.pangramia.yt.exceptions - -exception PBServiceException { - 1: required string message, - 2: optional string errorCode, - 3: optional map context -} - -exception PBUserException { - 1: required string message, - 2: optional string errorCode, - 3: optional map context -} diff --git a/thrift_model/services/base_service.thrift b/thrift_model/services/base_service.thrift deleted file mode 100644 index bce4461..0000000 --- a/thrift_model/services/base_service.thrift +++ /dev/null @@ -1,19 +0,0 @@ -namespace py pangramia.base_service -namespace java com.pangramia.base_service - -include "../data/common.thrift" -include "../data/exceptions.thrift" - -service BaseService { - // Common health 
check method - bool ping() throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - // Common error reporting - bool reportError(1: string message, - 2: map details) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp) - - // Add this to fix AsyncProcessor issues - oneway void shutdown() -} diff --git a/thrift_model/services/yt_admin_ops.thrift b/thrift_model/services/yt_admin_ops.thrift deleted file mode 100644 index 5b2b71a..0000000 --- a/thrift_model/services/yt_admin_ops.thrift +++ /dev/null @@ -1,63 +0,0 @@ -namespace py pangramia.yt.admin_ops -namespace java com.pangramia.yt.admin_ops - -include "../data/common.thrift" -include "../data/exceptions.thrift" -include "base_service.thrift" - -// Proxy and Account management -service YTAccountsOpService extends base_service.BaseService { - - // AccountPairs - bool addAccountPair(1: string accountId, 2: string proxyId, 3: string machineId, 4: common.ProxyData proxyData, 5: optional common.AccountData accountData) - throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - common.AccountPairWithState getPair(1: string machineId) - throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool pair(1: string accountId, 2: string proxyId, 3:string machineId) - throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool unpair(1: string accountId, 2: string proxyId, 3:string machineId) - throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - list listAccountPairs(1: optional common.AccountPairState filter) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - // ManageAccounts - bool addAccount(1: string accountId, 2: optional common.AccountData accountData) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - - bool suspendAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool resumeAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool removeAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - list listActiveAccounts() throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - // ManageProxy - bool addProxy(1: string proxyId, 2: common.ProxyData proxyData) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool suspendProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool resumeProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - bool removeProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), - - list listActiveProxies() throws (1: exceptions.PBServiceException serviceExp, - 2: exceptions.PBUserException userExp), -} diff --git a/thrift_model/services/yt_management.thrift b/thrift_model/services/yt_management.thrift deleted file mode 100644 index 728c9db..0000000 --- a/thrift_model/services/yt_management.thrift +++ /dev/null @@ -1,27 +0,0 @@ -namespace py pangramia.yt.management 
-namespace java com.pangramia.yt.management - -include "../data/common.thrift" -include "../data/exceptions.thrift" -include "base_service.thrift" - -// Service for managing the state of shared resources like proxies and accounts. -// This service is intended to be run as a single, authoritative instance. -service YTManagementService extends base_service.BaseService { - - // --- Proxy Management Methods --- - list getProxyStatus(1: optional string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool banProxy(1: string proxyUrl, 2: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool unbanProxy(1: string proxyUrl, 2: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool resetAllProxyStatuses(1: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool banAllProxies(1: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool deleteProxyFromRedis(1: string proxyUrl, 2: string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - i32 deleteAllProxiesFromRedis(1: optional string serverIdentity) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - - // --- Account Management Methods --- - list getAccountStatus(1: optional string accountId, 2: optional string accountPrefix) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool banAccount(1: string accountId, 2: optional string reason) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool unbanAccount(1: string accountId, 2: optional string reason) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - bool deleteAccountFromRedis(1: string accountId) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp), - i32 deleteAllAccountsFromRedis(1: optional string accountPrefix) throws (1: exceptions.PBServiceException serviceExp, 2: exceptions.PBUserException userExp) -} diff --git a/thrift_model/services/yt_tokens_ops.thrift b/thrift_model/services/yt_tokens_ops.thrift deleted file mode 100644 index 82e3ed9..0000000 --- a/thrift_model/services/yt_tokens_ops.thrift +++ /dev/null @@ -1,40 +0,0 @@ -namespace py pangramia.yt.tokens_ops -namespace java com.pangramia.yt.tokens_ops - -include "../data/common.thrift" -include "../data/exceptions.thrift" -include "yt_management.thrift" - -// The unified service that combines token operations and management functions. -// The server implementation will decide which functions are active based on its role. 
-service YTTokenOpService extends yt_management.YTManagementService {
-
-    common.JobTokenData getOrRefreshTokenWithReport (1: string accountId,
-                                           2: string oldUrl,
-                                           3: common.JobState status,
-                                           4: optional string details,
-                                           5: optional string jobId,
-                                           6: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
-                                           7: optional string url,
-                                           8: optional string clients) throws (1: exceptions.PBServiceException serviceExp,
-                                           2: exceptions.PBUserException userExp)
-
-    common.JobTokenData getOrRefreshToken (1: string accountId,
-                                           2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
-                                           3: optional string url,
-                                           4: optional string clients,
-                                           5: optional string machineId) throws (1: exceptions.PBServiceException serviceExp,
-                                           2: exceptions.PBUserException userExp)
-
-    common.JobTokenData getLatestToken (1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
-                                           2: exceptions.PBUserException userExp),
-
-    common.JobTokenData refreshToken (1: string accountId,
-                                           2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
-                                           3: optional string url) throws (1: exceptions.PBServiceException serviceExp,
-                                           2: exceptions.PBUserException userExp)
-
-    bool reportState (1: string url,
-                      2: common.JobState status,
-                      3: optional string details,
-                      4: optional string jobId) throws (1: exceptions.PBServiceException serviceExp,
-                                           2: exceptions.PBUserException userExp)
-}
diff --git a/tools/create-deployment-bundle.sh b/tools/create-deployment-bundle.sh
new file mode 100755
index 0000000..2a21263
--- /dev/null
+++ b/tools/create-deployment-bundle.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+#
+# Creates a clean deployment bundle of the project in a specified local directory.
+#
+# This script is designed to be run from the root of the project directory.
+# It uses rsync with a "whitelist" of files and directories to ensure only
+# artifacts required for deployment are included in the bundle.
+#
+# Usage:
+#   ./tools/create-deployment-bundle.sh [DESTINATION_PATH]
+#
+# If DESTINATION_PATH is not provided, it defaults to /opt/yt-ops-deploys/yt-ops-services.
+
+set -e  # Exit immediately if a command exits with a non-zero status.
+set -u  # Treat unset variables as an error.
+
+# --- Configuration ---
+# The root directory of the project on the local machine.
+SOURCE_DIR="."
+# Default destination for the deployment bundle. Can be overridden by the first argument.
+DEFAULT_DEST_DIR="/opt/yt-ops-deploys/yt-ops-services"
+DEST_DIR="${1:-$DEFAULT_DEST_DIR}"
+
+# --- rsync command ---
+echo ">>> Creating deployment bundle from '$SOURCE_DIR' to '$DEST_DIR'..."
+
+# Ensure the parent directory of the destination exists.
+# This requires sudo if the user doesn't have permissions for the parent path.
+if [ ! -d "$(dirname "$DEST_DIR")" ]; then
+    echo "Parent directory of destination does not exist. Attempting to create with sudo..."
+    sudo mkdir -p "$(dirname "$DEST_DIR")"
+    sudo chown "$USER" "$(dirname "$DEST_DIR")"
+fi
+
+# Create temporary files listing what to include and exclude.
+# This is a "whitelist" approach, ensuring only necessary files are bundled.
+# This list is generated by analyzing the Ansible playbooks to determine
+# exactly which files and directories are required for deployment.
+INCLUDE_FILE=$(mktemp)
+EXCLUDE_FILE=$(mktemp)
+trap 'rm -f -- "$INCLUDE_FILE" "$EXCLUDE_FILE"' EXIT
+
+# Define files and directories to exclude from the bundle.
+cat > "$EXCLUDE_FILE" <<EOF
+# (exclude patterns elided)
+EOF
+
+# Define the whitelist of files and directories to include in the bundle.
+cat > "$INCLUDE_FILE" <<EOF
+# (include patterns elided)
+EOF
+
+# Build the bundle, applying the whitelist first and the exclusions on top.
+rsync -a --delete \
+    --files-from="$INCLUDE_FILE" \
+    --exclude-from="$EXCLUDE_FILE" \
+    "$SOURCE_DIR/" \
+    "$DEST_DIR/"
+
+echo ">>> Deployment bundle created successfully at '$DEST_DIR'."
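For illustration, the same whitelist idea sketched in Python (the patterns are hypothetical placeholders, not the script's actual lists):

```python
import fnmatch
import shutil
from pathlib import Path

# Hypothetical patterns standing in for the script's include/exclude lists.
INCLUDE = ["ansible/*", "airflow/*", "tools/*", "cluster.*.yml"]
EXCLUDE = ["*.pyc", "*.log", ".git*", "__pycache__"]

def bundle(src: Path, dest: Path) -> None:
    """Copy only whitelisted files, skipping any excluded path component."""
    for path in (p for p in src.rglob("*") if p.is_file()):
        rel = path.relative_to(src).as_posix()
        if not any(fnmatch.fnmatch(rel, pat) for pat in INCLUDE):
            continue
        if any(fnmatch.fnmatch(part, pat) for part in path.parts for pat in EXCLUDE):
            continue
        target = dest / rel
        target.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(path, target)

bundle(Path("."), Path("/tmp/yt-ops-bundle"))
```
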
diff --git a/tools/generate-inventory.py b/tools/generate-inventory.py index e805121..5ebf313 100755 --- a/tools/generate-inventory.py +++ b/tools/generate-inventory.py @@ -20,15 +20,21 @@ def generate_inventory(cluster_config, inventory_path): # Master group f.write("[airflow_master]\n") - for hostname, ip in cluster_config['master'].items(): - f.write(f"{hostname} ansible_host={ip}\n") + for hostname, config in cluster_config['master'].items(): + line = f"{hostname} ansible_host={config['ip']}" + if 'port' in config: + line += f" ansible_port={config['port']}" + f.write(line + "\n") f.write("\n") # Workers group f.write("[airflow_workers]\n") for hostname, config in cluster_config['workers'].items(): - f.write(f"{hostname} ansible_host={config['ip']}\n") + line = f"{hostname} ansible_host={config['ip']}" + if 'port' in config: + line += f" ansible_port={config['port']}" + f.write(line + "\n") def generate_host_vars(cluster_config, host_vars_dir): """Generate host-specific variables""" @@ -46,21 +52,19 @@ def generate_host_vars(cluster_config, host_vars_dir): except Exception as e: print(f'Failed to delete {file_path}. Reason: {e}') - # Get master IP for Redis configuration - master_ip = list(cluster_config['master'].values())[0] + # Get master IP for Redis configuration from the new structure + master_ip = list(cluster_config['master'].values())[0]['ip'] - # Generate master host variables - for hostname, ip in cluster_config['master'].items(): - host_vars_file = os.path.join(host_vars_dir, f"{hostname}.yml") - with open(host_vars_file, 'w') as f: - f.write("---\n") - f.write(f"# Variables for {hostname}\n") - f.write(f"master_host_ip: {ip}\n") - f.write(f"redis_port: 52909\n") + # Get global proxy definitions + shadowsocks_proxies = cluster_config.get('shadowsocks_proxies', {}) - # Generate worker proxy variables - for hostname, config in cluster_config['workers'].items(): + # Combine master and worker nodes for processing + all_nodes = {**cluster_config['master'], **cluster_config['workers']} + + for hostname, config in all_nodes.items(): host_vars_file = os.path.join(host_vars_dir, f"{hostname}.yml") + + # Per-node list of proxies to USE worker_proxies = config.get('proxies', []) with open(host_vars_file, 'w') as f: @@ -68,6 +72,18 @@ def generate_host_vars(cluster_config, host_vars_dir): f.write(f"# Variables for {hostname}\n") f.write(f"master_host_ip: {master_ip}\n") f.write("redis_port: 52909\n") + + # Write the global proxy definitions for deployment + if shadowsocks_proxies: + f.write("shadowsocks_proxies:\n") + for name, proxy_config in shadowsocks_proxies.items(): + f.write(f" {name}:\n") + f.write(f" server: \"{proxy_config['server']}\"\n") + f.write(f" server_port: {proxy_config['server_port']}\n") + f.write(f" local_port: {proxy_config['local_port']}\n") + f.write(f" vault_password_key: \"{proxy_config['vault_password_key']}\"\n") + + # Write the per-node list of proxies to USE if worker_proxies: f.write("worker_proxies:\n") for proxy in worker_proxies: @@ -87,23 +103,26 @@ def generate_group_vars(cluster_config, group_vars_dir): all_vars_file = os.path.join(all_vars_dir, "generated_vars.yml") if os.path.exists(all_vars_file): os.remove(all_vars_file) + + global_vars = cluster_config.get('global_vars', {}) external_ips = cluster_config.get('external_access_ips', []) # Get master IP for Redis configuration - master_ip = list(cluster_config['master'].values())[0] + master_ip = list(cluster_config['master'].values())[0]['ip'] + + # Prepare data for YAML dump + generated_data = 
{ + 'master_host_ip': master_ip, + 'redis_port': 52909, + 'external_access_ips': external_ips if external_ips else [] + } + generated_data.update(global_vars) with open(all_vars_file, 'w') as f: f.write("---\n") f.write("# This file is auto-generated by tools/generate-inventory.py\n") f.write("# Do not edit – your changes will be overwritten.\n") - f.write(f"master_host_ip: {master_ip}\n") - f.write("redis_port: 52909\n") - f.write("external_access_ips:\n") - if external_ips: - for ip in external_ips: - f.write(f" - \"{ip}\"\n") - else: - f.write(" []\n") + yaml.dump(generated_data, f, default_flow_style=False) def main(): if len(sys.argv) != 2: diff --git a/tools/host_vars/dl-master.yml b/tools/host_vars/dl-master.yml deleted file mode 100644 index f553382..0000000 --- a/tools/host_vars/dl-master.yml +++ /dev/null @@ -1,19 +0,0 @@ -# Master server specific variables -hostname: "dl-master" -service_role: "management" - -# Ports -ytdlp_base_port: 9090 -management_service_port: 9091 - -# Redis / Postgres run on the master itself -redis_host: "localhost" - -# All secrets live in vault – only the master needs them -# These will be replaced with encrypted values -redis_password: "rOhTAIlTFFylXsjhqwxnYxDChFc" -postgres_password: "pgdb_pwd_A7bC2xY9zE1wV5uP" -airflow_admin_password: "2r234sdfrt3q454arq45q355" - -server_identity: "ytdlp-ops-service-mgmt" -ansible_user: "alex_p" diff --git a/tools/host_vars/dl-worker-001.yml b/tools/host_vars/dl-worker-001.yml deleted file mode 100644 index a3410de..0000000 --- a/tools/host_vars/dl-worker-001.yml +++ /dev/null @@ -1,26 +0,0 @@ -# Worker server specific variables -hostname: "dl-worker-001" -service_role: "worker" - -# Master server connection -master_host_ip: "89.253.223.97" -redis_host: "89.253.223.97" - -# Ports -ytdlp_base_port: 9090 -envoy_port: 9080 -envoy_admin_port: 9901 -management_service_port: 9091 - -# Camoufox configuration -camoufox_proxies: "socks5://172.17.0.1:1087" -camoufox_base_vnc_port: 5901 -vnc_password: "vnc_pwd_Z5xW8cV2bN4mP7lK" - -# Account management -account_active_duration_min: 7 -account_cooldown_duration_min: 30 - -# Server identity -server_identity: "ytdlp-ops-service-worker" -ansible_user: "alex_p" diff --git a/tools/sync-to-tower.sh b/tools/sync-to-tower.sh new file mode 100755 index 0000000..06a8e83 --- /dev/null +++ b/tools/sync-to-tower.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# +# Syncs the project directory to a remote "tower" host for deployment orchestration. +# +# This script is designed to be run from the root of the project directory. +# It excludes generated files, local data, logs, and other non-essential files +# to ensure a clean copy of the source code and configuration templates is synced. + +set -e # Exit immediately if a command exits with a non-zero status. +set -u # Treat unset variables as an error. + +# --- Configuration --- +# IMPORTANT: Update these variables to match your environment. +# +# The remote host to sync to (e.g., user@hostname) +REMOTE_HOST="user@your-tower-host.com" +# The destination path on the remote host +REMOTE_PATH="/path/to/your/project" +# The root directory of the project on the local machine. +SOURCE_DIR="." + +# --- rsync command --- +echo ">>> Syncing project from '$SOURCE_DIR' to '$REMOTE_HOST:$REMOTE_PATH'..." + +# Use an array for exclude options for clarity and to handle spaces correctly. +# This list is based on an analysis of the project structure and generated artifacts. 
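+# Keep the patterns unquoted inside the array; embedded quotes would be passed to rsync verbatim and never match.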
+EXCLUDE_OPTS=(
+    "--exclude=.git"
+    "--exclude=__pycache__"
+    "--exclude=*.pyc"
+    "--exclude=*.log"
+    "--exclude=.DS_Store"
+    "--exclude=.vault_pass"
+    "--exclude=.env"
+    "--exclude=ansible/inventory.ini"
+    "--exclude=ansible/host_vars/"
+    "--exclude=ansible/group_vars/all/generated_vars.yml"
+    "--exclude=postgres-data/"
+    "--exclude=redis-data/"
+    "--exclude=minio-data/"
+    "--exclude=logs/"
+    "--exclude=downloadfiles/"
+    "--exclude=addfiles/"
+    "--exclude=token_generator/node_modules/"
+    # Exclude files generated on remote hosts by Ansible/config-generator
+    "--exclude=airflow/configs/envoy.yaml"
+    "--exclude=airflow/configs/docker-compose.camoufox.yaml"
+    "--exclude=airflow/configs/camoufox_endpoints.json"
+    # Exclude local development notes
+    "--exclude=TODO-*.md"
+)
+
+# The rsync command:
+# -a: archive mode (recursive, preserves permissions, etc.)
+# -v: verbose
+# -z: compress file data during the transfer
+# --delete: delete extraneous files from the destination directory
+rsync -avz --delete \
+    "${EXCLUDE_OPTS[@]}" \
+    "$SOURCE_DIR/" \
+    "$REMOTE_HOST:$REMOTE_PATH/"
+
+echo ">>> Sync complete."
diff --git a/yt_ops_services/__init__.py b/yt_ops_services/__init__.py
new file mode 100644
index 0000000..22bf027
--- /dev/null
+++ b/yt_ops_services/__init__.py
@@ -0,0 +1,3 @@
+from .version import VERSION
+# Package initialization
+__version__ = VERSION
diff --git a/yt_ops_services/client_utils.py b/yt_ops_services/client_utils.py
new file mode 100644
index 0000000..089de29
--- /dev/null
+++ b/yt_ops_services/client_utils.py
@@ -0,0 +1,36 @@
+import logging
+import datetime
+from thrift.transport import TSocket, TTransport
+from thrift.protocol import TBinaryProtocol
+from pangramia.yt.tokens_ops import YTTokenOpService
+
+logger = logging.getLogger(__name__)
+
+def get_thrift_client(host: str, port: int, timeout_ms: int = 30000):
+    """
+    Helper function to create and connect a Thrift client.
+    Returns a tuple of (client, transport).
+    """
+    logger.info(f"Connecting to Thrift server at {host}:{port}...")
+    transport = TSocket.TSocket(host, port)
+    transport.setTimeout(timeout_ms)
+    transport = TTransport.TFramedTransport(transport)
+    protocol = TBinaryProtocol.TBinaryProtocol(transport)
+    client = YTTokenOpService.Client(protocol)
+    transport.open()
+    logger.info("Connection successful.")
+    return client, transport
+
+def format_timestamp(ts_str: str) -> str:
+    """Formats a string timestamp into a human-readable date string."""
+    if not ts_str:
+        return ""
+    try:
+        ts_float = float(ts_str)
+        # Handle cases where timestamp might be 0 or negative
+        if ts_float <= 0:
+            return ""
+        dt_obj = datetime.datetime.fromtimestamp(ts_float)
+        return dt_obj.strftime('%Y-%m-%d %H:%M:%S')
+    except (ValueError, TypeError):
+        return ts_str  # Return original string if conversion fails
diff --git a/yt_ops_services/version.py b/yt_ops_services/version.py
new file mode 100644
index 0000000..b4c109c
--- /dev/null
+++ b/yt_ops_services/version.py
@@ -0,0 +1,9 @@
+import os
+
+def get_version():
+    """Reads the version from the VERSION file in the project root."""
+    version_path = os.path.join(os.path.dirname(__file__), '..', 'VERSION')
+    with open(version_path, 'r') as f:
+        return f.read().strip()
+
+VERSION = get_version()
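A minimal usage sketch for `yt_ops_services.client_utils` (host and port are assumptions matching the Envoy listener defaults used elsewhere in this deployment):

```python
from yt_ops_services.client_utils import get_thrift_client, format_timestamp

# 127.0.0.1:9080 is the Envoy listener assumed from the deployment defaults.
client, transport = get_thrift_client("127.0.0.1", 9080)
try:
    # getProxyStatus takes an optional server identity; passing None queries
    # the connected instance (see proxy_manager_client.py above).
    for status in client.getProxyStatus(None):
        print(status.proxyUrl, status.status,
              format_timestamp(status.lastSuccessTimestamp))
finally:
    if transport.isOpen():
        transport.close()
```
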