Upload client service dags

This commit is contained in: commit 8b689fc5c8

.airflowignore (new file, 14 lines)
@@ -0,0 +1,14 @@
# Ignore test files
*_test.py
# Ignore development files
dev_*.py
# Ignore temporary files
*.tmp
*.temp
# Ignore version control
.git/
.gitignore
# Ignore Python cache files
__pycache__/
*.py[cod]
*$py.class
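Note: Airflow evaluates `.airflowignore` entries as regular expressions by default (glob syntax only applies when `[core] dag_ignore_file_syntax = glob` is set, available since Airflow 2.3), so glob-style entries such as `*_test.py` may not behave as intended under the default syntax; the regexp form would be `.*_test\.py`.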

.dockerignore (new file, 64 lines)
@@ -0,0 +1,64 @@
# Git files
.git
.gitignore
.gitattributes
.github/

# Node.js
node_modules
npm-debug.log

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
.env
.venv
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# Media and temporary files
*.mp4
*.part
*.info.json
*.webm
*.m4a
*.mp3

# Specific files to exclude
generate_tokens_parallel.mjs
generate_tokens_playwright.mjs

# OS specific files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

# IDE files
.idea/
.vscode/
*.swp
*.swo

# Build artifacts
target/

.gitignore (new file, vendored, 180 lines)
@@ -0,0 +1,180 @@
.DS_Store
*.csv
results/
tmp/
context_data/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so
node_modules/
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib64/
lib/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc
.aider*

Dockerfile (new file, 61 lines)
@@ -0,0 +1,61 @@
FROM apache/airflow:2.10.5
ENV AIRFLOW_VERSION=2.10.5

WORKDIR /app

# Copy necessary files from the ytdlp-ops-auth subdirectory (present in the build context) into /app
# setup.py is removed as we are not using 'pip install -e' anymore
COPY ytdlp-ops-auth/generate-thrift.py ytdlp-ops-auth/requirements.txt /app/
COPY ytdlp-ops-auth/thrift_model/ /app/thrift_model/
COPY ytdlp-ops-auth/ytdlp_utils.py /app/
COPY ytdlp-ops-auth/thrift_exceptions_patch.py /app/
COPY ytdlp-ops-auth/ytdlp_ops_client.py /app/

# Set Python path relative to the WORKDIR /app
ENV PYTHONPATH=/app:${PYTHONPATH}

# Install system dependencies
USER root
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        vim \
        mc \
        jq \
        build-essential \
        python3-dev && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /usr/share/man /usr/share/doc /usr/share/doc-base

# Ensure proper permissions, aligning GID with docker-compose.yaml (1001)
RUN groupadd -g 1001 airflow && \
    usermod -a -G airflow airflow && \
    chown -R airflow:1001 /app

# Switch to the airflow user for package installation
USER airflow

# Install Python dependencies and ensure ffprobe3 is installed correctly
RUN pip install --no-cache-dir \
    "apache-airflow==${AIRFLOW_VERSION}" && \
    pip install --no-cache-dir -r /app/requirements.txt && \
    pip install --no-cache-dir ffprobe3 python-ffmpeg

# Only generate Thrift files if the gen_py directory doesn't exist
RUN if [ ! -d "/app/thrift_model/gen_py" ]; then \
        python3 /app/generate-thrift.py; \
    else \
        echo "Skipping Thrift generation - gen_py directory already exists"; \
    fi

# Create proper Python package structure
RUN mkdir -p /app/pangramia && \
    ln -s /app/thrift_model/gen_py/pangramia /app/pangramia && \
    echo "Created symlink: /app/pangramia -> /app/thrift_model/gen_py/pangramia"

# Ensure base_service is accessible
RUN mkdir -p /app/pangramia/base_service && \
    ln -s /app/thrift_model/gen_py/pangramia/base_service /app/pangramia/base_service && \
    echo "Created symlink: /app/pangramia/base_service -> /app/thrift_model/gen_py/pangramia/base_service"

# Add to Python path
ENV PYTHONPATH=/app:/app/thrift_model/gen_py:${PYTHONPATH}
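
Since the image wires the generated Thrift code in twice (the `/app/pangramia` symlinks and the `gen_py` entry on `PYTHONPATH`), a quick way to confirm the package layout is to attempt the exact imports the client DAG relies on. A minimal sketch (the script path is hypothetical; the module names are the ones imported by `dags/ytdlp_client_dag_v2.1.py` below):

```python
# check_thrift_imports.py - sanity-check the generated Thrift package layout.
# Run inside the image, e.g.:
#   docker compose exec airflow-worker python /app/check_thrift_imports.py
import importlib

# Modules the client DAG imports; if any fail to resolve, the gen_py
# symlink/PYTHONPATH wiring in the Dockerfile needs fixing.
for mod in (
    "pangramia.yt.tokens_ops",         # YTTokenOpService client
    "pangramia.yt.common.ttypes",      # TokenUpdateMode enum
    "pangramia.yt.exceptions.ttypes",  # PBServiceException
):
    importlib.import_module(mod)
    print(f"OK: {mod}")
```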

README-ytdlp-ops-auth.md (new file, 95 lines)
@@ -0,0 +1,95 @@
# YTDLP Client Side Integration

This document describes how to integrate and use the YTDLP client with the token service. The client DAG is `ytdlp_client_dag`.

## Building & Testing on the Remote Worker Host

1. **(Optional) Start the server if it is not already running:**
   ```bash
   cd /srv/airflow_worker/
   docker login pangramia  # if not already authenticated
   docker compose -f docker-compose-ytdlp-ops.yaml up -d
   docker compose -f docker-compose-ytdlp-ops.yaml logs -f
   ```
   The server is bound to a specific proxy, e.g. "socks5://sslocal-rust-1084:1084".

2. **Rebuild the worker with the custom base image:**
   ```bash
   cd /srv/airflow_worker/
   docker compose build airflow-worker
   docker compose down airflow-worker
   docker compose up -d --no-deps airflow-worker
   ```

3. **Test the client inside a worker container:**
   ```bash
   # Show client help
   docker compose exec airflow-worker python /app/ytdlp_ops_client.py --help

   # Get token and info.json
   docker compose exec airflow-worker python /app/ytdlp_ops_client.py --host 85.192.30.55 --port 9090 getToken --url 'https://www.youtube.com/watch?v=vKTVLpmvznI'

   # List formats using saved info.json
   docker compose exec airflow-worker yt-dlp --load-info-json "latest.json" -F

   # Simulate download using saved info.json
   docker compose exec airflow-worker yt-dlp --load-info-json "latest.json" --proxy "socks5://sslocal-rust-1084:1084" --simulate --verbose

   # Extract metadata and download URLs using jq
   docker compose exec airflow-worker jq -r '"Title: \(.title)", "Date: \(.upload_date | strptime("%Y%m%d") | strftime("%Y-%m-%d"))", "Author: \(.uploader)", "Length: \(.duration_string)", "", "Download URLs:", (.formats[] | select(.vcodec != "none" or .acodec != "none") | .url)' latest.json
   ```

4. **Instructions to run DAGs:**

   To run the `ytdlp_client_dag` DAG, first set up the required Airflow variables and connections:
   ```bash
   docker compose exec airflow-worker airflow variables set DOWNLOAD_OPTIONS '{"formats": ["bestvideo[height<=1080]+bestaudio/best[height<=1080]"]}'
   docker compose exec airflow-worker airflow variables set DOWNLOADS_TEMP '/opt/airflow/downloadfiles'
   docker compose exec airflow-worker airflow variables set DOWNLOADS_PATH '/opt/airflow/downloadfiles'
   docker compose exec airflow-worker airflow variables set TOKEN_TIMEOUT '300'
   docker compose exec airflow-worker airflow variables list

   docker compose exec airflow-worker airflow connections import /opt/airflow/config/docker_hub_repo.json
   docker compose exec airflow-worker airflow connections delete redis_default
   docker compose exec airflow-worker airflow connections import /opt/airflow/config/redis_default_conn.json
   ```

   **Using a direct connection:**
   ```bash
   docker compose exec airflow-worker airflow db reset
   docker compose exec airflow-worker airflow dags reserialize

   docker compose exec airflow-worker airflow dags list
   docker compose exec airflow-worker airflow dags list-import-errors
   docker compose exec airflow-worker airflow tasks test ytdlp_client_dag_v2.1 get_token $(date -u +"%Y-%m-%dT%H:%M:%S+00:00") --task-params '{"url": "https://www.youtube.com/watch?v=sOlTX9uxUtM", "redis_enabled": false, "service_ip": "85.192.30.55", "service_port": 9090}'
   docker compose exec airflow-worker yt-dlp --load-info-json /opt/airflow/downloadfiles/latest.json --proxy "socks5://sslocal-rust-1084:1084" --verbose --simulate

   docker compose exec airflow-worker airflow dags list-runs -d ytdlp_client_dag
   ```

   Or deploy by triggering the DAG:
   ```bash
   docker compose exec airflow-worker airflow dags list
   docker compose exec airflow-worker airflow dags unpause ytdlp_client_dag_v2.1

   # Check that it works when triggered from the server deploy
   docker compose exec airflow-worker airflow dags trigger ytdlp_client_dag_v2.1 -c '{"url": "https://www.youtube.com/watch?v=sOlTX9uxUtM", "account_id": "test_direct", "redis_enabled": false, "service_ip": "85.192.30.55", "service_port": 9090}'
   ```

   Check Redis for the stored data:
   ```bash
   docker compose exec redis redis-cli -a XXXXXX -h 89.253.221.173 -p 52909 HGETALL "token_info:sOlTX9uxUtM" | jq -R -s 'split("\n") | del(.[] | select(. == "")) | [.[range(0;length;2)]]'
   ```
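The same check can be done without `redis-cli`/`jq`; a minimal Python sketch, using the connection details from `config/redis_default_conn.json` and the hash fields written by `store_token_info` in the DAG below (the password placeholder is provided outside the repo):

```python
# Inspect the token_info hash written by the store_token_info task.
import redis

client = redis.Redis(host="89.253.221.173", port=52909,
                     password="XXXXXX",  # real value provided outside the repo
                     db=0, decode_responses=True)

data = client.hgetall("token_info:sOlTX9uxUtM")
# Fields stored by the DAG: url, ytdlp_command, proxy, info_json_path,
# info_json, video_id, timestamp, ytdlp_command_tokens
for field in ("url", "proxy", "info_json_path", "video_id", "timestamp"):
    print(f"{field}: {data.get(field)}")
print(f"info_json present: {bool(data.get('info_json'))}")
```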

config/docker_hub_repo.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "docker_hub":
  {
    "conn_type": "docker",
    "host": "https://index.docker.io/v1/",
    "login": "pangramia",
    "password": "PROVIDE_OUTSIDE_REPO"
  }
}

config/redis_default_conn.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "redis_default":
  {
    "conn_type": "redis",
    "host": "89.253.221.173",
    "password": "PROVIDE_OUTSIDE_REPO",
    "port": 52909,
    "extra":
    {
      "db": 0
    }
  }
}
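After `airflow connections import`, this is the connection the DAG resolves at runtime. A short sketch of how `redis_default` is consumed, mirroring `YtdlpOpsOperator.execute` and the `RedisHook` usage in `store_token_info` below:

```python
# How the DAG code reads the imported "redis_default" connection.
from airflow.hooks.base import BaseHook
from airflow.providers.redis.hooks.redis import RedisHook
import redis

# Direct client construction, as in YtdlpOpsOperator.execute():
conn = BaseHook.get_connection("redis_default")
client = redis.Redis(host=conn.host, port=conn.port, password=conn.password,
                     db=0, decode_responses=True)
assert client.ping()

# Hook-based access, as in store_token_info():
hook = RedisHook(redis_conn_id="redis_default")
with hook.get_conn() as redis_client:
    redis_client.ping()
```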

dags/ytdlp_client_dag_v2.1.py (new file, 940 lines)
@@ -0,0 +1,940 @@
from airflow import DAG
from airflow.models import BaseOperator, Variable
from airflow.utils.decorators import apply_defaults
from airflow.hooks.base import BaseHook
from airflow.exceptions import AirflowException
from airflow.utils.dates import days_ago
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from thrift.transport.TTransport import TTransportException
from datetime import datetime, timedelta
from pangramia.yt.exceptions.ttypes import PBServiceException
import redis
import logging
import time
import socket
import json
import os
from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.common.ttypes import TokenUpdateMode
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.operators.python import PythonOperator
from airflow.models.param import Param
# Assuming ytdlp_utils exists in the same directory or on PYTHONPATH
# from ytdlp_utils import get_info_json, is_valid_json, extract_video_id

# Configure logging
logger = logging.getLogger(__name__)

# Default settings (similar to ytdlp_client_dag.py)
MAX_RETRIES = 1
RETRY_DELAY = timedelta(seconds=10)
DEFAULT_TIMEOUT = 30

class YtdlpOpsOperator(BaseOperator):
    """
    Custom Airflow operator to interact with the YTDLP Thrift service. Handles direct
    connections and Redis-based discovery, retrieves tokens, saves info.json, and
    manages errors.
    """
    template_fields = ('url', 'service_ip', 'service_port', 'account_id', 'timeout', 'info_json_dir')

    @apply_defaults
    def __init__(self, url, redis_conn_id='redis_default', max_retries=3, retry_delay=10,
                 service_ip=None, service_port=None, redis_enabled=False, account_id=None,
                 save_info_json=True, info_json_dir=None, get_socks_proxy=True,
                 store_socks_proxy=False, timeout=DEFAULT_TIMEOUT, *args, **kwargs):
        super().__init__(*args, **kwargs)

        logger.info(f"Initializing YtdlpOpsOperator with parameters: url={url}, "
                    f"redis_conn_id={redis_conn_id}, max_retries={max_retries}, retry_delay={retry_delay}, "
                    f"service_ip={service_ip}, service_port={service_port}, redis_enabled={redis_enabled}, "
                    f"account_id={account_id}, save_info_json={save_info_json}, info_json_dir={info_json_dir}, "
                    f"get_socks_proxy={get_socks_proxy}, store_socks_proxy={store_socks_proxy}, timeout={timeout}")

        # Validate required parameters
        if not url:
            raise ValueError("url is required")

        # Validate parameters based on connection mode
        if redis_enabled:
            if not account_id:
                raise ValueError("account_id is required when redis_enabled=True")
            # Use the default Redis connection if not specified
            if not redis_conn_id:
                redis_conn_id = 'redis_default'
                logger.info(f"Using default Redis connection ID: {redis_conn_id}")
        else:
            if not service_ip or not service_port:
                raise ValueError("Both service_ip and service_port must be specified when redis_enabled=False")
            if not account_id:
                logger.warning("No account_id provided for direct connection mode. Using 'default'.")
                account_id = 'default'  # Assign a default if missing in direct mode

        self.url = url
        self.redis_conn_id = redis_conn_id
        self.max_retries = max_retries
        self.retry_delay = int(retry_delay.total_seconds() if isinstance(retry_delay, timedelta) else retry_delay)
        self.service_ip = service_ip
        self.service_port = service_port
        self.redis_enabled = redis_enabled
        self.account_id = account_id
        self.save_info_json = save_info_json
        self.info_json_dir = info_json_dir
        self.get_socks_proxy = get_socks_proxy
        self.store_socks_proxy = store_socks_proxy
        self.timeout = timeout

    def execute(self, context):
        logger.info("Executing YtdlpOpsOperator")
        transport = None
        try:
            logger.info("Getting task parameters")
            params = context.get('params', {})
            redis_enabled = params.get('redis_enabled', self.redis_enabled)
            logger.info(f"Using redis_enabled={redis_enabled} (from {'task params' if 'redis_enabled' in params else 'operator init'})")

            # Determine the account_id to use (from params or the operator default)
            account_id = params.get('account_id', self.account_id)
            logger.info(f"Using account_id='{account_id}' (from {'task params' if 'account_id' in params else 'operator init'})")

            if redis_enabled:
                # Get the Redis connection with proper authentication and error handling
                redis_conn = BaseHook.get_connection(self.redis_conn_id)
                redis_client = redis.Redis(
                    host=redis_conn.host,
                    port=redis_conn.port,
                    password=redis_conn.password,
                    db=0,
                    decode_responses=True  # Important for consistent key handling
                )

                # Test the Redis connection
                try:
                    if not redis_client.ping():
                        raise redis.exceptions.ConnectionError("Redis ping failed")
                    logger.info(f"Successfully connected to Redis at {redis_conn.host}:{redis_conn.port}")
                except redis.exceptions.AuthenticationError:
                    logger.error(f"Redis authentication failed for connection '{self.redis_conn_id}'. Check the password.")
                    raise AirflowException("Redis authentication failed.")
                except redis.exceptions.ConnectionError as e:
                    logger.error(f"Could not connect to Redis at {redis_conn.host}:{redis_conn.port}. Error: {e}")
                    raise AirflowException(f"Redis connection failed: {e}")
                except Exception as e:
                    logger.error(f"Unexpected Redis error: {str(e)}")
                    raise AirflowException(f"Unexpected Redis error: {e}")

                # Get service details from Redis with retries and proper key handling
                service_key = f"ytdlp:{account_id}"
                legacy_key = account_id  # For backward compatibility

                host = None
                port = None
                for attempt in range(self.max_retries):
                    try:
                        logger.info(f"Attempt {attempt + 1}/{self.max_retries}: Fetching service details from Redis for keys: '{service_key}', '{legacy_key}'")
                        service_details = redis_client.hgetall(service_key)
                        if not service_details:
                            logger.warning(f"Key '{service_key}' not found, trying legacy key '{legacy_key}'")
                            service_details = redis_client.hgetall(legacy_key)

                        if not service_details:
                            raise ValueError(f"No service details found in Redis for keys: {service_key} or {legacy_key}")

                        # Find IP and port, handling potential case differences
                        ip_key = next((k for k in service_details if k.lower() == 'ip'), None)
                        port_key = next((k for k in service_details if k.lower() == 'port'), None)

                        if not ip_key:
                            raise ValueError(f"'ip' key not found in Redis hash for {service_key}/{legacy_key}")
                        if not port_key:
                            raise ValueError(f"'port' key not found in Redis hash for {service_key}/{legacy_key}")

                        host = service_details[ip_key]  # Already decoded due to decode_responses=True
                        port_str = service_details[port_key]

                        try:
                            port = int(port_str)
                        except ValueError:
                            raise ValueError(f"Invalid port value '{port_str}' found in Redis for {service_key}/{legacy_key}")

                        logger.info(f"Extracted from Redis - Service IP: {host}, Service Port: {port}")
                        break  # Success

                    except Exception as e:
                        logger.warning(f"Attempt {attempt + 1} failed to get Redis details: {str(e)}")
                        if attempt == self.max_retries - 1:
                            logger.error("Max retries reached for fetching Redis details.")
                            raise AirflowException(f"Failed to get service details from Redis after {self.max_retries} attempts: {e}")
                        logger.info(f"Retrying in {self.retry_delay} seconds...")
                        time.sleep(self.retry_delay)
            else:
                # Direct connection: log parameter sources
                host = params.get('service_ip', self.service_ip)
                host_source = 'task params' if 'service_ip' in params else 'operator init'
                port_str = params.get('service_port', self.service_port)
                port_source = 'task params' if 'service_port' in params else 'operator init'
                url = params.get('url', self.url)
                url_source = 'task params' if 'url' in params else 'operator init'

                logger.info(f"Using service_ip={host} (from {host_source})")
                logger.info(f"Using service_port={port_str} (from {port_source})")
                logger.info(f"Using url={url} (from {url_source})")

                if not host or not port_str:
                    raise ValueError("Direct connection requires service_ip and service_port")
                try:
                    port = int(port_str)
                except ValueError:
                    raise ValueError(f"Invalid service_port value: {port_str}")

                logger.info(f"Connecting directly to Thrift service at {host}:{port} (Redis bypassed)")

            # Render and validate the timeout
            timeout_param = params.get('timeout', self.timeout)
            if isinstance(timeout_param, str) and '{{' in timeout_param:
                timeout_rendered = self.render_template(timeout_param, context)
                logger.info(f"Rendered timeout template: '{timeout_param}' -> '{timeout_rendered}'")
                timeout_param = timeout_rendered
            try:
                timeout = int(timeout_param)
                if timeout <= 0:
                    raise ValueError("Timeout must be positive")
                logger.info(f"Using timeout: {timeout} seconds")
            except (ValueError, TypeError):
                logger.warning(f"Invalid timeout value: '{timeout_param}'. Using default: {DEFAULT_TIMEOUT}")
                timeout = DEFAULT_TIMEOUT

            # Create Thrift connection objects
            socket_conn = TSocket.TSocket(host, port, socket_family=socket.AF_INET)  # Explicitly use AF_INET (IPv4)
            socket_conn.setTimeout(timeout * 1000)  # Thrift timeout is in milliseconds
            transport = TTransport.TFramedTransport(socket_conn)
            protocol = TBinaryProtocol.TBinaryProtocol(transport)
            client = YTTokenOpService.Client(protocol)

            logger.info(f"Attempting to connect to Thrift server at {host}:{port}...")
            try:
                transport.open()
                logger.info("Successfully connected to Thrift server.")

                # Test the connection with a ping
                try:
                    client.ping()
                    logger.info("Server ping successful.")
                except Exception as e:
                    logger.error(f"Server ping failed: {e}")
                    raise AirflowException(f"Server connection test (ping) failed: {e}")

                # Get a token from the service with specific error handling
                try:
                    url_param = params.get('url', self.url)
                    logger.info(f"Requesting token for accountId='{account_id}', url='{url_param}'")
                    token_data = client.getOrRefreshToken(
                        accountId=account_id,
                        updateType=TokenUpdateMode.AUTO,
                        url=url_param
                    )
                    logger.info("Successfully retrieved token data from service.")
                except PBServiceException as e:
                    logger.error(f"PBServiceException occurred: Code={getattr(e, 'errorCode', 'N/A')}, Message={getattr(e, 'message', 'N/A')}")
                    error_code = getattr(e, 'errorCode', None)
                    error_msg = f"YTDLP service error: {getattr(e, 'message', str(e))}"
                    # Handle specific known error codes
                    if error_code in [
                        "SOCKS5_CONNECTION_FAILED", "SOCKS5_TIMEOUT",
                        "SOCKS5_CONNECTION_REFUSED", "SOCKS5_CONNECTION_TIMEOUT",
                        "SOCKS5_HOST_NOT_FOUND", "SOCKS5_NETWORK_UNREACHABLE"
                    ]:
                        error_msg = f"SOCKS5 proxy error ({error_code}): {e.message}. Check proxy settings."
                    elif error_code == "BOT_DETECTION":
                        error_msg = f"Bot detection triggered ({error_code}): {e.message}."
                        suggestions = getattr(e, 'context', {}).get('suggestions', [])
                        if suggestions:
                            error_msg += "\nSuggestions:\n" + "\n".join(f"- {s}" for s in suggestions)
                    elif error_code == "NODEJS_SCRIPT_ERROR":
                        error_msg = f"Node.js script error ({error_code}): {e.message}."
                    elif error_code == "NODEJS_TIMEOUT":
                        error_msg = f"Node.js timeout ({error_code}): {e.message}."
                    # Add more specific error handling as needed
                    raise AirflowException(error_msg)
                except TTransportException as e:
                    logger.error(f"Thrift transport error during getOrRefreshToken: {e}")
                    raise AirflowException(f"Transport error during API call: {e}")
                except Exception as e:
                    logger.error(f"Unexpected error during getOrRefreshToken: {e}")
                    raise AirflowException(f"Unexpected error during API call: {e}")

            except TTransportException as e:
                # Handle connection-specific transport errors
                if "read 0 bytes" in str(e) or "Could not connect to" in str(e) or "Connection refused" in str(e):
                    logger.error(f"Connection failed to {host}:{port}. Details: {e}")
                    logger.error("Possible causes: server down, firewall block, incorrect IP/port.")
                    raise AirflowException(f"Failed to connect to YTDLP service at {host}:{port}: {e}")
                else:
                    logger.error(f"Thrift transport error during connection: {str(e)}")
                    raise AirflowException(f"Transport error connecting to YTDLP service: {str(e)}")
            except Exception as e:
                logger.error(f"Unexpected error during connection or ping: {str(e)}")
                raise  # Re-raise other unexpected errors

            # Log received token data attributes for debugging
            logger.debug(f"Token data received. Attributes: {dir(token_data)}")
            for attr in dir(token_data):
                if not attr.startswith('__') and not callable(getattr(token_data, attr)):  # Log non-callable attributes
                    value = getattr(token_data, attr)
                    if attr == 'infoJson' and value:
                        logger.debug(f"infoJson: {value[:50]}...")
                    else:
                        logger.debug(f"{attr}: {value}")

            info_json_path = None  # Initialize info_json_path

            save_info_json_param = params.get('save_info_json', self.save_info_json)
            # Render if it is a string template
            if isinstance(save_info_json_param, str):
                save_info_json_rendered = self.render_template(save_info_json_param, context)
                # Convert common string representations to a boolean
                save_info_json = str(save_info_json_rendered).lower() in ['true', '1', 't', 'y', 'yes']
            else:
                save_info_json = bool(save_info_json_param)

            # Save info.json if requested and valid
            if save_info_json:
                info_json = self._get_info_json(token_data)
                if info_json and self._is_valid_json(info_json):
                    try:
                        # Use the internal _save_info_json method, which handles rendering, dir creation, and logging
                        info_json_path = self._save_info_json(context, info_json)
                        if info_json_path:  # Check whether saving was successful
                            context['task_instance'].xcom_push(key='info_json_path', value=info_json_path)
                            logger.info(f"Successfully saved info.json and pushed path to XCom: {info_json_path}")
                        else:
                            # _save_info_json logs errors; push None to indicate failure
                            context['task_instance'].xcom_push(key='info_json_path', value=None)
                            logger.warning("info.json saving failed (check logs from _save_info_json); pushing None to XCom for info_json_path.")
                    except Exception as e:
                        logger.error(f"Unexpected error during info.json saving process: {e}", exc_info=True)
                        context['task_instance'].xcom_push(key='info_json_path', value=None)  # Push None on error
                elif info_json:
                    logger.warning("Retrieved infoJson is not valid JSON. Skipping save.")
                    context['task_instance'].xcom_push(key='info_json_path', value=None)
                else:
                    logger.info("No infoJson found in token data. Skipping save.")
                    context['task_instance'].xcom_push(key='info_json_path', value=None)
            else:
                logger.info("save_info_json is False. Skipping info.json save.")
                context['task_instance'].xcom_push(key='info_json_path', value=None)

            # Extract and potentially store the SOCKS proxy
            socks_proxy = None
            if self.get_socks_proxy:
                # Check common attribute names for the proxy
                proxy_attr = next((attr for attr in ['socks5Proxy', 'socksProxy', 'socks'] if hasattr(token_data, attr)), None)
                if proxy_attr:
                    socks_proxy = getattr(token_data, proxy_attr)
                    if socks_proxy:  # Ensure the proxy value is not empty
                        logger.info(f"Extracted SOCKS proxy ({proxy_attr}): {socks_proxy}")
                        if self.store_socks_proxy:
                            context['task_instance'].xcom_push(key='socks_proxy', value=socks_proxy)
                            logger.info(f"Pushed key 'socks_proxy' to XCom with value: {socks_proxy}")
                        else:
                            logger.info("SOCKS proxy extracted but not pushed to XCom (store_socks_proxy=False).")
                    else:
                        logger.info(f"Found proxy attribute '{proxy_attr}' but its value is empty. No proxy extracted.")
                        # Push None even if found but empty, if storing is enabled
                        if self.store_socks_proxy:
                            context['task_instance'].xcom_push(key='socks_proxy', value=None)
                            logger.info("Pushed None to XCom for 'socks_proxy' as the extracted value was empty.")
                else:
                    logger.info("get_socks_proxy is True, but no SOCKS proxy attribute was found in token data.")
                    # Push None if storing is enabled but the attribute was not found
                    if self.store_socks_proxy:
                        context['task_instance'].xcom_push(key='socks_proxy', value=None)
                        logger.info("Pushed None to XCom for 'socks_proxy' as the attribute was not found.")
            else:
                logger.info("get_socks_proxy is False. Skipping proxy extraction.")
                # Push None if storing is enabled but extraction was skipped
                if self.store_socks_proxy:
                    context['task_instance'].xcom_push(key='socks_proxy', value=None)
                    logger.info("Pushed None to XCom for 'socks_proxy' as get_socks_proxy=False.")

            # Get the original command from the server
            ytdlp_cmd = getattr(token_data, 'ytdlpCommand', None)
            if not ytdlp_cmd:
                logger.error("No 'ytdlpCommand' attribute found in token data.")
                raise AirflowException("Required 'ytdlpCommand' not received from service.")

            logger.info(f"Original command received from server: {ytdlp_cmd}")

            # Log an example usage command (do NOT modify the original command here)
            if info_json_path:
                # Use double quotes around paths/proxy in the example for robustness
                example_cmd = f"yt-dlp --load-info-json \"{info_json_path}\""
                if socks_proxy:
                    example_cmd += f" --proxy \"{socks_proxy}\""
                example_cmd += " --verbose --simulate"  # Add useful flags for testing
                logger.info("\n--- Example usage with saved info.json ---")
                logger.info(example_cmd)
                logger.info("(Note: the actual command with tokens/cookies is pushed to XCom as 'ytdlp_command')")
                latest_json_path = os.path.join(os.path.dirname(info_json_path), 'latest.json')
                logger.info(f"(You can also use 'latest.json': {latest_json_path})")
                logger.info("-------------------------------------------\n")
            else:
                logger.info("\n--- Original command pushed to XCom ('ytdlp_command') ---")
                if socks_proxy:
                    logger.info(f"Use the extracted proxy '{socks_proxy}' (pushed to XCom if store_socks_proxy=True) with the --proxy flag.")
                logger.info("Add the --verbose and --simulate flags for testing the command.")
                logger.info("-------------------------------------------------------\n")

            # Push the *original* command to XCom
            context['task_instance'].xcom_push(key='ytdlp_command', value=ytdlp_cmd)
            logger.info("Pushed original command to XCom key 'ytdlp_command'.")

            # Note: returning ytdlp_cmd below implicitly pushes the same value
            # to XCom under the key 'return_value'. Downstream tasks should
            # preferably use the explicitly pushed 'ytdlp_command' key for clarity.
            return ytdlp_cmd  # Return the original command

        except AirflowException as e:  # Catch AirflowExceptions raised earlier
            logger.error(f"Operation failed due to AirflowException: {e}")
            raise  # Re-raise AirflowExceptions
        except (TTransportException, PBServiceException) as e:  # Catch specific Thrift/service errors not already wrapped
            logger.error(f"Unhandled Thrift/service error: {e}")
            raise AirflowException(f"Unhandled YTDLP service error: {e}")
        except Exception as e:  # General catch-all
            # Log with traceback for unexpected errors
            logger.error(f"Unexpected error in YtdlpOpsOperator: {e}", exc_info=True)
            raise AirflowException(f"Unexpected error in YtdlpOpsOperator: {e}")
        finally:
            if transport and transport.isOpen():  # Check that the transport exists and is open before closing
                logger.info("Closing Thrift transport.")
                transport.close()

    # --- Helper Methods ---

    def _get_info_json(self, token_data):
        """Safely extracts infoJson from token data."""
        info_json = getattr(token_data, 'infoJson', None)
        if info_json:
            logger.debug("Extracted infoJson from token data.")
        else:
            logger.debug("No infoJson attribute found in token data.")
        return info_json

    def _is_valid_json(self, json_str):
        """Checks whether a string is valid JSON."""
        if not json_str or not isinstance(json_str, str):
            logger.debug("Input is not a non-empty string; considered invalid JSON.")
            return False
        try:
            json.loads(json_str)
            logger.debug("JSON string validation successful.")
            return True
        except json.JSONDecodeError as e:
            logger.warning(f"JSON validation failed: {e}")
            return False

    def _save_info_json(self, context, info_json):
        """Saves info_json to a file, handling directory creation and logging.
        Returns the path on success, None on failure."""
        try:
            # Get the URL from params/context for video ID extraction
            url_param = context.get('params', {}).get('url', self.url)
            video_id = self._extract_video_id(url_param)  # Use the internal helper

            # Render the info_json_dir template
            save_dir_template = self.info_json_dir or "."  # Default to the current dir if the template is None or empty
            save_dir = self.render_template(save_dir_template, context)
            if not save_dir:  # Handle the case where the template renders to an empty string
                logger.warning(f"Rendered info_json_dir template '{save_dir_template}' resulted in an empty path. Defaulting to '.'")
                save_dir = "."
            logger.info(f"Target directory for info.json (rendered): {save_dir}")

            # Ensure the directory exists
            try:
                os.makedirs(save_dir, exist_ok=True)
                logger.info(f"Ensured directory exists: {save_dir}")
            except OSError as e:
                logger.error(f"Could not create directory {save_dir}: {e}. Cannot save info.json.")
                return None  # Indicate failure

            # Construct the filename (using the potentially overridden account_id)
            account_id_param = context.get('params', {}).get('account_id', self.account_id)
            timestamp = int(time.time())
            base_filename = f"info_{video_id}_{account_id_param}_{timestamp}.json" if video_id else f"info_{account_id_param}_{timestamp}.json"
            info_json_path = os.path.join(save_dir, base_filename)
            latest_json_path = os.path.join(save_dir, "latest.json")  # Path for the latest copy

            # Write to the timestamped file
            try:
                with open(info_json_path, 'w', encoding='utf-8') as f:
                    f.write(info_json)
                logger.info(f"Saved info.json to timestamped file: {info_json_path}")
            except IOError as e:
                logger.error(f"Failed to write info.json to {info_json_path}: {e}")
                return None  # Indicate failure

            # Write to latest.json (overwrite) - best effort
            try:
                with open(latest_json_path, 'w', encoding='utf-8') as f:
                    f.write(info_json)
                logger.info(f"Updated latest.json file: {latest_json_path}")
            except IOError as e:
                # Log a warning but don't fail the whole save if only latest.json fails
                logger.warning(f"Failed to update latest.json at {latest_json_path}: {e}")

            return info_json_path  # Return the path on success (even if latest.json failed)

        except Exception as e:
            logger.error(f"Unexpected error in _save_info_json: {e}", exc_info=True)
            return None  # Indicate failure

    def _extract_video_id(self, url):
        """Extracts the YouTube video ID from a URL (internal helper)."""
        if not url or not isinstance(url, str):
            logger.debug("URL is empty or not a string; cannot extract video ID.")
            return None
        try:
            # Basic extraction logic (can be enhanced for more URL types)
            video_id = None
            if 'youtube.com/watch?v=' in url:
                video_id = url.split('v=')[1].split('&')[0]
            elif 'youtu.be/' in url:
                video_id = url.split('youtu.be/')[1].split('?')[0]

            # Ensure it looks like a video ID (typically 11 chars, but it can vary)
            if video_id and len(video_id) >= 11:
                video_id = video_id[:11]  # Take the first 11 chars as the standard ID length
                logger.debug(f"Extracted video ID '{video_id}' from URL: {url}")
                return video_id
            else:
                logger.debug(f"Could not extract a standard video ID pattern from URL: {url}")
                return None
        except Exception as e:
            logger.error(f"Failed to extract video ID from URL '{url}'. Error: {e}")
            return None

# =============================================================================
# Python Callables for Tasks
# =============================================================================

def display_token_info(**context):
    """Displays token info from XCom, parses info.json, and logs example commands."""
    ti = context['task_instance']
    logger.info("Starting display_token_info task.")

    # Pull data from XCom (with default values)
    info_json_path = ti.xcom_pull(task_ids='get_token', key='info_json_path')
    socks_proxy = ti.xcom_pull(task_ids='get_token', key='socks_proxy')
    ytdlp_command = ti.xcom_pull(task_ids='get_token', key='ytdlp_command')

    logger.info("\n=== Pulled Token Information from XCom ===")
    logger.info(f"Info.json path: {info_json_path or 'Not found/Not saved'}")
    logger.info(f"SOCKS Proxy: {socks_proxy or 'Not found/Not extracted'}")
    logger.info(f"Original yt-dlp command (with tokens): {ytdlp_command or 'Not found'}")

    result = {
        'info_path': info_json_path,
        'proxy': socks_proxy,
        'ytdlp_command': ytdlp_command,
        'video_info': None,
        'commands': {},
        'error': None
    }

    if info_json_path and os.path.exists(info_json_path):
        logger.info(f"\n=== Processing Video Information from: {info_json_path} ===")
        try:
            with open(info_json_path, 'r', encoding='utf-8') as f:
                info = json.load(f)

            # Extract and log basic video info safely
            title = info.get('title', 'Unknown Title')
            uploader = info.get('uploader', 'Unknown Author')
            duration = info.get('duration_string', 'Unknown Length')
            upload_date_str = info.get('upload_date')  # Format: YYYYMMDD
            upload_date_formatted = 'Unknown Date'
            if upload_date_str:
                try:
                    # Validate the format before parsing
                    if len(upload_date_str) == 8 and upload_date_str.isdigit():
                        upload_date_formatted = datetime.strptime(upload_date_str, '%Y%m%d').strftime('%Y-%m-%d')
                    else:
                        logger.warning(f"Upload date '{upload_date_str}' is not in YYYYMMDD format.")
                except ValueError:
                    logger.warning(f"Could not parse upload_date '{upload_date_str}'")

            result['video_info'] = {
                'title': title,
                'uploader': uploader,
                'upload_date': upload_date_formatted,  # Store the formatted date
                'duration': duration
            }

            logger.info(f"Title: {title}")
            logger.info(f"Author: {uploader}")
            logger.info(f"Date: {upload_date_formatted}")
            logger.info(f"Length: {duration}")

            logger.info("\n=== Example yt-dlp Commands (using saved info.json) ===")
            base_cmd = f"yt-dlp --load-info-json \"{info_json_path}\""
            if socks_proxy:
                base_cmd += f" --proxy \"{socks_proxy}\""

            # Command to list formats
            format_cmd = f"{base_cmd} -F"
            result['commands']['format'] = format_cmd
            logger.info(f"List formats command: {format_cmd}")

            # Execute and log the format listing command
            logger.info("\n--- Executing Format List Command ---")
            try:
                # Use os.popen for simplicity; capture the output
                logger.info(f"Running: {format_cmd}")
                format_output = os.popen(format_cmd).read()
                logger.info("--- Format List Output ---")
                logger.info(format_output)
                logger.info("--------------------------")
            except Exception as e:
                logger.error(f"Error executing format command: {e}")

            # Command to simulate a download
            simulate_cmd = f"{base_cmd} --simulate --verbose"  # Add verbose for more info
            result['commands']['simulate'] = simulate_cmd
            logger.info(f"Simulate download command: {simulate_cmd}")

            # Execute and log the simulation command
            logger.info("\n--- Executing Simulation Command ---")
            try:
                logger.info(f"Running: {simulate_cmd}")
                simulate_output = os.popen(simulate_cmd).read()
                logger.info("--- Simulation Output ---")
                logger.info(simulate_output)
                logger.info("-------------------------")
            except Exception as e:
                logger.error(f"Error executing simulation command: {e}")

            # Basic download command
            download_cmd = base_cmd
            result['commands']['download_base'] = download_cmd
            logger.info(f"Base download command (add format selection, output path): {download_cmd}")

            # Push generated example commands to XCom for potential downstream use
            # ti.xcom_push(key='format_cmd', value=format_cmd)  # Removed as requested
            # ti.xcom_push(key='simulate_cmd', value=simulate_cmd)  # Removed as requested
            ti.xcom_push(key='download_cmd', value=download_cmd)
            logger.info(f"Pushed key 'download_cmd' to XCom with value: {download_cmd}")

        except json.JSONDecodeError as e:
            error_msg = f"Failed to parse info.json file '{info_json_path}': {e}"
            logger.error(error_msg)
            result['error'] = error_msg
        except FileNotFoundError:
            error_msg = f"Info.json file not found at path: {info_json_path}"
            logger.error(error_msg)
            result['error'] = error_msg
        except Exception as e:
            error_msg = f"Error processing info.json file '{info_json_path}': {str(e)}"
            logger.error(error_msg, exc_info=True)
            result['error'] = error_msg
    elif info_json_path:
        error_msg = f"Info.json path provided ('{info_json_path}') but the file does not exist."
        logger.warning(error_msg)
        result['error'] = error_msg
    else:
        logger.warning("No info.json path found in XCom. Cannot display video details or generate example commands.")
        result['error'] = "Info.json path not available."

    logger.info("Finished display_token_info task.")
    # Return the collected information (useful as a PythonOperator return value)
    return json.dumps(result)  # Return as a JSON string for XCom compatibility

def store_token_info(**context):
    """Stores the retrieved token information (command, proxy, info.json) in Redis."""
    ti = context['task_instance']
    # Use the redis_conn_id defined in the operator/DAG params if possible, else the default
    redis_conn_id = context['params'].get('redis_conn_id', 'redis_default')
    redis_hook = RedisHook(redis_conn_id=redis_conn_id)
    logger.info(f"Starting store_token_info task using Redis connection '{redis_conn_id}'.")

    try:
        # Pull the necessary data from XCom and context
        url = context['params'].get('url')
        if not url:
            # Fall back to the DAG run conf
            dag_run = context.get('dag_run')
            url = dag_run.conf.get('url') if dag_run and dag_run.conf else None
            if not url:
                raise ValueError("URL parameter is missing in context['params'] and dag_run.conf")
            logger.warning("URL parameter missing in context['params']; using URL from dag_run.conf.")

        ytdlp_command = ti.xcom_pull(task_ids='get_token', key='ytdlp_command')
        socks_proxy = ti.xcom_pull(task_ids='get_token', key='socks_proxy') or ''  # Default to an empty string if None
        info_json_path = ti.xcom_pull(task_ids='get_token', key='info_json_path')

        if not ytdlp_command:
            logger.warning("ytdlp_command not found in XCom. Storing an empty value.")
            ytdlp_command = ''

        # Construct the base command using info.json
        ytdlp_command_base = ''
        if info_json_path and os.path.exists(info_json_path):
            ytdlp_command_base = f"yt-dlp --load-info-json \"{info_json_path}\""
            logger.info(f"Constructed base command: {ytdlp_command_base}")
        else:
            logger.warning("Cannot construct base command: info_json_path not valid.")

        # Construct the command with tokens and proxy
        ytdlp_command_tokens = ytdlp_command  # Start with the original command from the server
        if socks_proxy:
            ytdlp_command_tokens += f" --proxy \"{socks_proxy}\""
            logger.info("Appended proxy to token command.")

        data_to_store = {
            'url': url,
            'ytdlp_command': ytdlp_command_base,  # Store the base command
            'proxy': socks_proxy,
            'info_json_path': info_json_path or ''  # Store the path even if None/empty
            # 'info_json' is added below
        }

        # Read the info.json content if the path exists
        if info_json_path and os.path.exists(info_json_path):
            try:
                with open(info_json_path, 'r', encoding='utf-8') as f:
                    # Read and immediately validate the JSON structure before storing
                    info_json_content = json.load(f)
                # Store the validated JSON as a string
                data_to_store['info_json'] = json.dumps(info_json_content)
                logger.info(f"Read and validated info.json content from: {info_json_path}")
            except json.JSONDecodeError as e:
                logger.error(f"Failed to parse info.json file '{info_json_path}' as JSON: {e}. Storing empty content.")
                data_to_store['info_json'] = ''  # Store an empty string on parse error
            except Exception as e:
                logger.error(f"Failed to read info.json file '{info_json_path}': {e}. Storing empty content.")
                data_to_store['info_json'] = ''  # Store an empty string on other read errors
        else:
            logger.warning(f"info_json_path ('{info_json_path}') not found or invalid. Storing without info_json content.")
            data_to_store['info_json'] = ''  # Store an empty string if there is no path

        # Determine the Redis key from the video ID (the same basic logic as the
        # operator's _extract_video_id helper, re-implemented here for simplicity)
        video_id = None
        try:
            if 'youtube.com/watch?v=' in url:
                video_id = url.split('v=')[1].split('&')[0][:11]
            elif 'youtu.be/' in url:
                video_id = url.split('youtu.be/')[1].split('?')[0][:11]
        except Exception:
            pass  # Ignore errors in ID extraction for key generation
        redis_key = f"token_info:{video_id or 'unknown'}"
        logger.info(f"Determined Redis key: {redis_key}")

        with redis_hook.get_conn() as redis_client:
            # Add video_id, timestamp, and the constructed ytdlp_command_tokens
            data_to_store['video_id'] = video_id or 'unknown'
            data_to_store['timestamp'] = int(time.time())
            data_to_store['ytdlp_command_tokens'] = ytdlp_command_tokens  # Store the original token command

            # Log the fields being stored (summarize large string values)
            log_data = {k: (f"<{len(v)} bytes>" if isinstance(v, str) and len(v) > 100 else v) for k, v in data_to_store.items()}
            logger.info(f"Storing in Redis key '{redis_key}': {log_data}")

            redis_client.hset(redis_key, mapping=data_to_store)
            # Set expiration (24 hours = 86400 seconds)
            redis_client.expire(redis_key, 86400)
            logger.info(f"Successfully stored token info in Redis key '{redis_key}' with 24h expiration.")

    except Exception as e:
        logger.error(f"Failed to store token info in Redis: {e}", exc_info=True)
        # Re-raise as AirflowException to fail the task
        raise AirflowException(f"Failed to store token info in Redis: {e}")

    logger.info("Finished store_token_info task.")

# =============================================================================
|
||||||
|
# DAG Definition
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Update default_args to match ytdlp_client_dag.py structure
|
||||||
|
default_args = {
|
||||||
|
'owner': 'airflow',
|
||||||
|
'depends_on_past': False,
|
||||||
|
'email_on_failure': False, # Match reference DAG
|
||||||
|
'email_on_retry': False, # Match reference DAG
|
||||||
|
'retries': 1, # Default task retries
|
||||||
|
'retry_delay': timedelta(minutes=5), # Standard task retry delay
|
||||||
|
'start_date': days_ago(1) # Best practice start date
|
||||||
|
}
|
||||||
|
|
||||||
|
# Update DAG definition
|
||||||
|
with DAG(
|
||||||
|
dag_id='ytdlp_client_dag_v2.1',
|
||||||
|
default_args=default_args,
|
||||||
|
schedule_interval=None, # Manually triggered DAG
|
||||||
|
catchup=False, # Don't run for past missed schedules
|
||||||
|
description='DAG for YTDLP operations using Thrift client (V2 - Refactored)', # Updated description
|
||||||
|
tags=['ytdlp', 'thrift', 'client', 'v2'], # Updated tags for better filtering
|
||||||
|
params={
|
||||||
|
# Define DAG parameters with defaults and types for UI clarity
|
||||||
|
'url': Param('https://www.youtube.com/watch?v=sOlTX9uxUtM', type=["null", "string"], description="Required: The video URL to process."), # Default URL
|
||||||
|
'redis_enabled': Param(False, type="boolean", description="Use Redis for service discovery? If False, uses service_ip/port."), # Default to direct connection
|
||||||
|
'service_ip': Param('85.192.30.55', type="string", description="Service IP if redis_enabled=False."), # Default service IP
|
||||||
|
'service_port': Param(9090, type="integer", description="Service port if redis_enabled=False."), # Default service port
|
||||||
|
'account_id': Param('accoutns_fr_2025-04-03T1220_anonomyous_2ssdfsf2342afga09', type="string", description="Account ID for Redis lookup or direct call."), # Updated default account_id
|
||||||
|
'timeout': Param(DEFAULT_TIMEOUT, type="integer", description="Timeout in seconds for the Thrift connection."),
|
||||||
|
# Use Airflow Variable for downloads directory, matching reference DAG structure
|
||||||
|
'info_json_dir': Param("{{ var.value.get('DOWNLOADS_TEMP', '/opt/airflow/downloadfiles') }}", type="string", description="Directory to save info.json. Uses Airflow Variable 'DOWNLOADS_TEMP' or default.")
|
||||||
|
}
|
||||||
|
) as dag:
|
||||||
|
|
||||||
|
# Define Tasks
|
||||||
|
|
||||||
|
get_token = YtdlpOpsOperator(
|
||||||
|
task_id='get_token',
|
||||||
|
# Pass templated parameters from DAG run config
|
||||||
|
url="{{ params.url }}",
|
||||||
|
redis_enabled="{{ params.redis_enabled }}",
|
||||||
|
service_ip="{{ params.service_ip }}",
|
||||||
|
service_port="{{ params.service_port }}",
|
||||||
|
account_id="{{ params.account_id }}",
|
||||||
|
save_info_json=True,
|
||||||
|
info_json_dir="{{ params.info_json_dir }}",
|
||||||
|
get_socks_proxy=True,
|
||||||
|
store_socks_proxy=True,
|
||||||
|
timeout="{{ params.timeout }}",
|
||||||
|
retries=MAX_RETRIES, # Operator-specific retries if needed, else use DAG default
|
||||||
|
retry_delay=RETRY_DELAY, # Operator-specific delay if needed
|
||||||
|
# Add callbacks for logging success/failure, similar to reference DAG
|
||||||
|
on_failure_callback=lambda context: logger.error(f"Task {context['task_instance_key_str']} failed."),
|
||||||
|
on_success_callback=lambda context: logger.info(f"Task {context['task_instance_key_str']} succeeded.")
|
||||||
|
)
|
||||||
|
# Add task documentation (visible in Airflow UI)
|
||||||
|
get_token.doc_md = """
|
||||||
|
### Get Token Task
|
||||||
|
Connects to the YTDLP Thrift service (either directly or via Redis discovery)
|
||||||
|
to retrieve an authentication token and video metadata (info.json).
|
||||||
|
|
||||||
|
**Pushes to XCom:**
|
||||||
|
- `info_json_path`: Path to the saved info.json file (or None if not saved/failed).
|
||||||
|
- `socks_proxy`: The extracted SOCKS proxy string (or None if not requested/found).
|
||||||
|
- `ytdlp_command`: The original command string received from the server (contains tokens/cookies).
|
||||||
|
|
||||||
|
- Uses parameters defined in the DAG run configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Optional: Add a task to explicitly check XComs for debugging (like in reference DAG)
|
||||||
|
def _check_xcom_callable(**context):
|
||||||
|
"""Logs XCom values pushed by the get_token task."""
|
||||||
|
ti = context['task_instance']
|
||||||
|
logger.info("--- Checking XCom values pushed by get_token ---")
|
||||||
|
keys_to_check = ['info_json_path', 'socks_proxy', 'ytdlp_command']
|
||||||
|
xcom_values = {}
|
||||||
|
for key in keys_to_check:
|
||||||
|
value = ti.xcom_pull(task_ids='get_token', key=key)
|
||||||
|
xcom_values[key] = value
|
||||||
|
# Avoid logging potentially sensitive command details fully in production
|
||||||
|
if key == 'ytdlp_command' and value:
|
||||||
|
log_value = f"{value[:50]}..." # Log truncated command
|
||||||
|
else:
|
||||||
|
log_value = value
|
||||||
|
logger.info(f"XCom key='{key}': {log_value}")
|
||||||
|
logger.info("----------------------------------------------")
|
||||||
|
return xcom_values # Return values for potential future use
|
||||||
|
|
||||||
|
check_xcom_task = PythonOperator(
|
||||||
|
task_id='check_xcom_after_get_token',
|
||||||
|
python_callable=_check_xcom_callable,
|
||||||
|
)
|
||||||
|
check_xcom_task.doc_md = "Logs the values pushed to XCom by the 'get_token' task for debugging purposes."
|
||||||
|
|
||||||
|
display_info = PythonOperator(
|
||||||
|
task_id='display_token_info',
|
||||||
|
python_callable=display_token_info,
|
||||||
|
trigger_rule='all_success'
|
||||||
|
)
|
||||||
|
display_info.doc_md = """
|
||||||
|
### Display Token Info Task
|
||||||
|
Pulls information from XCom, parses the `info.json` file (if available),
|
||||||
|
logs video details, and generates example `yt-dlp` commands.
|
||||||
|
|
||||||
|
**Pulls from XCom (task_id='get_token'):**
|
||||||
|
- `info_json_path`
|
||||||
|
- `socks_proxy`
|
||||||
|
- `ytdlp_command`
|
||||||
|
|
||||||
|
**Pushes to XCom:**
|
||||||
|
- `download_cmd`: Base command using `--load-info-json` (user needs to add format/output).
|
||||||
|
"""
|
||||||
|
|
||||||
|
store_info = PythonOperator(
|
||||||
|
task_id='store_token_info', # Use consistent task ID naming
|
||||||
|
python_callable=store_token_info,
|
||||||
|
)
|
||||||
|
store_info.doc_md = """
|
||||||
|
### Store Token Info Task
|
||||||
|
Pulls information from XCom and DAG parameters, reads the `info.json` content,
|
||||||
|
and stores relevant data in a Redis hash.
|
||||||
|
|
||||||
|
**Pulls from XCom (task_id='get_token'):**
|
||||||
|
- `ytdlp_command`
|
||||||
|
- `socks_proxy`
|
||||||
|
- `info_json_path`
|
||||||
|
|
||||||
|
**Pulls from DAG context:**
|
||||||
|
- `params['url']` (or `dag_run.conf['url']`)
|
||||||
|
|
||||||
|
**Stores in Redis Hash (key: `token_info:<video_id>`):**
|
||||||
|
- `url`: The video URL.
|
||||||
|
- `ytdlp_command`: Base command using `--load-info-json`.
|
||||||
|
- `proxy`: The SOCKS proxy string.
|
||||||
|
- `info_json_path`: Path to the saved info.json file.
|
||||||
|
- `info_json`: The full content of the info.json file (as a JSON string).
|
||||||
|
- `video_id`: Extracted video ID.
|
||||||
|
- `timestamp`: Unix timestamp of storage.
|
||||||
|
- `ytdlp_command_tokens`: The original command string from the server (contains tokens/cookies).
|
||||||
|
|
||||||
|
Sets a 24-hour expiration on the Redis key.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Define task dependencies matching the reference DAG structure
|
||||||
|
get_token >> check_xcom_task >> display_info >> store_info
|
||||||
33
docker-compose-ytdlp-ops.yaml
Normal file
33
docker-compose-ytdlp-ops.yaml
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
version: '3.8'
|
||||||
|
|
||||||
|
services:
|
||||||
|
ytdlp-ops:
|
||||||
|
image: pangramia/ytdlp-ops-server:latest
|
||||||
|
ports:
|
||||||
|
- "9090:9090"
|
||||||
|
- "9091:9091"
|
||||||
|
volumes:
|
||||||
|
- context-data:/app/context-data
|
||||||
|
networks:
|
||||||
|
- airflow_worker_proxynet
|
||||||
|
command:
|
||||||
|
- "--script-dir"
|
||||||
|
- "/app/scripts"
|
||||||
|
- "--context-dir"
|
||||||
|
- "/app/context-data"
|
||||||
|
- "--port"
|
||||||
|
- "9090"
|
||||||
|
- "--clients"
|
||||||
|
- "ios,android,mweb"
|
||||||
|
- "--proxy"
|
||||||
|
- "socks5://sslocal-rust-1084:1084"
|
||||||
|
restart: unless-stopped
|
||||||
|
pull_policy: always
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
context-data:
|
||||||
|
name: context-data
|
||||||
|
|
||||||
|
networks:
|
||||||
|
airflow_worker_proxynet:
|
||||||
|
external: true
|
||||||
194
ytdlp-ops-auth/generate-thrift.py
Executable file
194
ytdlp-ops-auth/generate-thrift.py
Executable file
@ -0,0 +1,194 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
# Update paths to match actual project structure
|
||||||
|
THRIFT_MODEL_DIR = Path("thrift_model")
|
||||||
|
SERVICES_DIR = THRIFT_MODEL_DIR / "services"
|
||||||
|
DATA_DIR = THRIFT_MODEL_DIR / "data"
|
||||||
|
GEN_PY_DIR = THRIFT_MODEL_DIR / "gen_py"
|
||||||
|
|
||||||
|
def get_version_from_pom():
|
||||||
|
"""Parse version from pom.xml"""
|
||||||
|
pom_path = THRIFT_MODEL_DIR / "pom.xml"
|
||||||
|
tree = ET.parse(pom_path)
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
# XML namespaces
|
||||||
|
ns = {'mvn': 'http://maven.apache.org/POM/4.0.0'}
|
||||||
|
|
||||||
|
version = root.find('mvn:version', ns).text
|
||||||
|
if version.endswith('-SNAPSHOT'):
|
||||||
|
version = version.replace('-SNAPSHOT', '.dev0')
|
||||||
|
return version
|
||||||
|
|
||||||
|
def find_thrift_files():
|
||||||
|
"""Find all .thrift files in the thrift_model directory"""
|
||||||
|
data_files = list(DATA_DIR.glob("*.thrift"))
|
||||||
|
service_files = list(SERVICES_DIR.glob("*.thrift"))
|
||||||
|
# Process data files first (for dependencies), then service files
|
||||||
|
return data_files + service_files
|
||||||
|
|
||||||
|
def generate_python_code(thrift_files):
|
||||||
|
"""Generate Python code from Thrift files"""
|
||||||
|
# First process data files (for dependencies)
|
||||||
|
data_files = [f for f in thrift_files if f.parent == DATA_DIR]
|
||||||
|
service_files = [f for f in thrift_files if f.parent == SERVICES_DIR]
|
||||||
|
|
||||||
|
# Process in the right order: first data files, then service files
|
||||||
|
ordered_files = data_files + service_files
|
||||||
|
|
||||||
|
for thrift_file in ordered_files:
|
||||||
|
print(f"Generating code for {thrift_file}...")
|
||||||
|
try:
|
||||||
|
subprocess.run([
|
||||||
|
"thrift",
|
||||||
|
"--gen", "py",
|
||||||
|
"-out", str(GEN_PY_DIR),
|
||||||
|
str(thrift_file)
|
||||||
|
], check=True)
|
||||||
|
print(f"Successfully generated code for {thrift_file}")
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
print(f"Error generating code for {thrift_file}: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def create_init_files():
|
||||||
|
"""Create __init__.py files in all generated directories"""
|
||||||
|
for root, dirs, files in os.walk(GEN_PY_DIR):
|
||||||
|
path = Path(root)
|
||||||
|
init_file = path / "__init__.py"
|
||||||
|
if not init_file.exists():
|
||||||
|
print(f"Creating __init__.py in {path}")
|
||||||
|
with open(init_file, 'w') as f:
|
||||||
|
# For the top-level pangramia directory, we don't need special content
|
||||||
|
if path.name == "pangramia":
|
||||||
|
pass
|
||||||
|
# For module directories, add the standard __all__ pattern if there are modules
|
||||||
|
elif any(f.endswith('.py') and f != '__init__.py' for f in files):
|
||||||
|
modules = [f[:-3] for f in files if f.endswith('.py') and f != '__init__.py']
|
||||||
|
if modules:
|
||||||
|
f.write(f"__all__ = {repr(modules)}\n")
|
||||||
|
|
||||||
|
# Ensure we have an __init__.py in the thrift_model directory
|
||||||
|
thrift_model_init = THRIFT_MODEL_DIR / "__init__.py"
|
||||||
|
if not thrift_model_init.exists():
|
||||||
|
print(f"Creating {thrift_model_init}")
|
||||||
|
thrift_model_init.touch()
|
||||||
|
|
||||||
|
def clean_gen_py():
|
||||||
|
"""Clean the gen_py directory before generation"""
|
||||||
|
if GEN_PY_DIR.exists():
|
||||||
|
print(f"Cleaning {GEN_PY_DIR}...")
|
||||||
|
shutil.rmtree(GEN_PY_DIR)
|
||||||
|
print(f"Cleaned {GEN_PY_DIR}")
|
||||||
|
|
||||||
|
# Recreate the directory
|
||||||
|
GEN_PY_DIR.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
def update_version_file():
|
||||||
|
"""Update the version in __init__.py"""
|
||||||
|
version = get_version_from_pom()
|
||||||
|
print(f"Detected version from pom.xml: {version}")
|
||||||
|
|
||||||
|
# Update the version in __init__.py
|
||||||
|
init_path = Path("__init__.py")
|
||||||
|
if init_path.exists():
|
||||||
|
with open(init_path, 'r') as f:
|
||||||
|
content = f.read()
|
||||||
|
|
||||||
|
# Replace the VERSION assignment if it exists
|
||||||
|
if "VERSION = " in content:
|
||||||
|
new_content = []
|
||||||
|
for line in content.splitlines():
|
||||||
|
if line.startswith("VERSION = "):
|
||||||
|
new_content.append(f'VERSION = "{version}"')
|
||||||
|
else:
|
||||||
|
new_content.append(line)
|
||||||
|
|
||||||
|
with open(init_path, 'w') as f:
|
||||||
|
f.write('\n'.join(new_content))
|
||||||
|
|
||||||
|
print(f"Updated version in __init__.py to {version}")
|
||||||
|
|
||||||
|
def main():
|
||||||
|
# Ensure directories exist
|
||||||
|
SERVICES_DIR.mkdir(parents=True, exist_ok=True)
|
||||||
|
DATA_DIR.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Clean existing generated code
|
||||||
|
clean_gen_py()
|
||||||
|
|
||||||
|
# Find all Thrift files
|
||||||
|
thrift_files = find_thrift_files()
|
||||||
|
if not thrift_files:
|
||||||
|
print("No .thrift files found in thrift_model directory")
|
||||||
|
return
|
||||||
|
|
||||||
|
print(f"Found {len(thrift_files)} Thrift files to process")
|
||||||
|
|
||||||
|
# Generate Python code
|
||||||
|
generate_python_code(thrift_files)
|
||||||
|
|
||||||
|
# Create __init__.py files
|
||||||
|
create_init_files()
|
||||||
|
|
||||||
|
# Update version file
|
||||||
|
update_version_file()
|
||||||
|
|
||||||
|
# Create a symbolic link to make the modules importable
|
||||||
|
try:
|
||||||
|
# Check if we're in the project root
|
||||||
|
if not (Path.cwd() / "thrift_model").exists():
|
||||||
|
print("Warning: Not running from project root, symbolic link may not work correctly")
|
||||||
|
|
||||||
|
# Create pangramia directory if it doesn't exist
|
||||||
|
pangramia_dir = Path("pangramia")
|
||||||
|
if not pangramia_dir.exists():
|
||||||
|
pangramia_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
(pangramia_dir / "__init__.py").touch()
|
||||||
|
print(f"Created {pangramia_dir} directory with __init__.py")
|
||||||
|
|
||||||
|
# Create symbolic link from pangramia -> thrift_model/gen_py/pangramia
|
||||||
|
link_path = Path("pangramia") # Link in the project root
|
||||||
|
target_path = GEN_PY_DIR / "pangramia"
|
||||||
|
|
||||||
|
# Ensure the target directory exists before creating the link
|
||||||
|
if not target_path.exists():
|
||||||
|
print(f"Warning: Target directory {target_path} does not exist, cannot create symbolic link")
|
||||||
|
else:
|
||||||
|
# Remove existing link or directory at the destination
|
||||||
|
if link_path.is_symlink():
|
||||||
|
print(f"Removing existing symbolic link: {link_path}")
|
||||||
|
link_path.unlink()
|
||||||
|
elif link_path.is_dir():
|
||||||
|
print(f"Removing existing directory: {link_path}")
|
||||||
|
shutil.rmtree(link_path)
|
||||||
|
elif link_path.exists(): # Handle case where it might be a file
|
||||||
|
print(f"Removing existing file: {link_path}")
|
||||||
|
link_path.unlink()
|
||||||
|
|
||||||
|
# Create the new symbolic link
|
||||||
|
try:
|
||||||
|
# Use relative path for the link source for better portability
|
||||||
|
relative_target = os.path.relpath(target_path, start=link_path.parent)
|
||||||
|
os.symlink(relative_target, link_path, target_is_directory=True)
|
||||||
|
print(f"Created symbolic link: {link_path} -> {relative_target}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error creating symbolic link: {e}")
|
||||||
|
print("You may need to manually add the generated code to your Python path")
|
||||||
|
# This else block corresponds to the `if not target_path.exists():` check further up
|
||||||
|
# else:
|
||||||
|
# print(f"Warning: Target directory {yt_target} does not exist, cannot create symbolic link")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"An unexpected error occurred during symlink setup: {e}")
|
||||||
|
# Optionally re-raise or handle more specifically
|
||||||
|
|
||||||
|
print("\nThrift code generation completed successfully!")
|
||||||
|
print(f"Generated Python code in {GEN_PY_DIR}")
|
||||||
|
print(f"Current version: {get_version_from_pom()}")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
1
ytdlp-ops-auth/info_json_vKTVLpmvznI.json
Symbolic link
1
ytdlp-ops-auth/info_json_vKTVLpmvznI.json
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
info_json_vKTVLpmvznI_1743507631.json
|
||||||
1474
ytdlp-ops-auth/info_json_vKTVLpmvznI_1743507631.json
Normal file
1474
ytdlp-ops-auth/info_json_vKTVLpmvznI_1743507631.json
Normal file
File diff suppressed because one or more lines are too long
1474
ytdlp-ops-auth/latest.json
Normal file
1474
ytdlp-ops-auth/latest.json
Normal file
File diff suppressed because one or more lines are too long
1
ytdlp-ops-auth/pangramia
Symbolic link
1
ytdlp-ops-auth/pangramia
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
thrift_model/gen_py/pangramia
|
||||||
8
ytdlp-ops-auth/requirements.txt
Normal file
8
ytdlp-ops-auth/requirements.txt
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
thrift>=0.16.0,<=0.20.0
|
||||||
|
python-dotenv==1.0.1
|
||||||
|
psutil
|
||||||
|
flask
|
||||||
|
waitress
|
||||||
|
yt_dlp>=2025.3.27
|
||||||
|
yt-dlp-get-pot==0.3.0
|
||||||
|
requests>=2.31.0
|
||||||
38
ytdlp-ops-auth/setup.py
Normal file
38
ytdlp-ops-auth/setup.py
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
from setuptools import setup, find_packages
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
import os
|
||||||
|
|
||||||
|
def get_version_from_pom():
|
||||||
|
"""Parse version from pom.xml"""
|
||||||
|
pom_path = os.path.join(os.path.dirname(__file__), 'thrift_model/pom.xml')
|
||||||
|
tree = ET.parse(pom_path)
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
# XML namespaces
|
||||||
|
ns = {'mvn': 'http://maven.apache.org/POM/4.0.0'}
|
||||||
|
|
||||||
|
version = root.find('mvn:version', ns).text
|
||||||
|
if version.endswith('-SNAPSHOT'):
|
||||||
|
version = version.replace('-SNAPSHOT', '.dev0')
|
||||||
|
return version
|
||||||
|
|
||||||
|
VERSION = get_version_from_pom()
|
||||||
|
|
||||||
|
setup(
|
||||||
|
name='yt_ops_services',
|
||||||
|
version=VERSION,
|
||||||
|
package_data={
|
||||||
|
'yt_ops_services': ['thrift_model/pom.xml'],
|
||||||
|
},
|
||||||
|
packages=find_packages(where='.', exclude=['tests*']),
|
||||||
|
package_dir={
|
||||||
|
'': '.', # Look for packages in the root directory
|
||||||
|
},
|
||||||
|
include_package_data=True,
|
||||||
|
install_requires=[
|
||||||
|
'thrift>=0.16.0,<=0.20.0',
|
||||||
|
'python-dotenv>=1.0.0',
|
||||||
|
'psutil',
|
||||||
|
],
|
||||||
|
python_requires='>=3.9',
|
||||||
|
)
|
||||||
58
ytdlp-ops-auth/thrift_exceptions_patch.py
Normal file
58
ytdlp-ops-auth/thrift_exceptions_patch.py
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
"""
|
||||||
|
Patch for Thrift-generated exception classes to make them compatible with Airflow's secret masking.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||||
|
|
||||||
|
# --- Python Path Setup ---
|
||||||
|
project_root = Path(__file__).parent.absolute()
|
||||||
|
# Add project root to sys.path (needed for the 'pangramia' symlink)
|
||||||
|
if str(project_root) not in sys.path: sys.path.insert(0, str(project_root))
|
||||||
|
# --- End Python Path Setup ---
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
def patch_thrift_exceptions():
|
||||||
|
"""
|
||||||
|
Patch Thrift-generated exception classes to make them compatible with Airflow's secret masking.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
|
||||||
|
|
||||||
|
# Save original __setattr__ methods
|
||||||
|
original_service_setattr = PBServiceException.__setattr__
|
||||||
|
original_user_setattr = PBUserException.__setattr__
|
||||||
|
|
||||||
|
# Define a new __setattr__ method that allows modifying any attribute
|
||||||
|
def new_service_setattr(self, name, value):
|
||||||
|
logger.debug(f"Setting attribute {name} on PBServiceException")
|
||||||
|
object.__setattr__(self, name, value)
|
||||||
|
|
||||||
|
def new_user_setattr(self, name, value):
|
||||||
|
logger.debug(f"Setting attribute {name} on PBUserException")
|
||||||
|
object.__setattr__(self, name, value)
|
||||||
|
|
||||||
|
# Apply the patch to both exception classes
|
||||||
|
PBServiceException.__setattr__ = new_service_setattr
|
||||||
|
PBUserException.__setattr__ = new_user_setattr
|
||||||
|
|
||||||
|
logger.info("Successfully patched Thrift exception classes for Airflow compatibility")
|
||||||
|
|
||||||
|
# Verify the patch
|
||||||
|
try:
|
||||||
|
test_exception = PBServiceException(message="Test")
|
||||||
|
test_exception.args = ("Test",) # Try to modify an attribute
|
||||||
|
logger.info("Verified Thrift exception patch is working correctly")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Thrift exception patch verification failed: {e}")
|
||||||
|
except ImportError as e:
|
||||||
|
logger.warning(f"Could not import Thrift exception classes: {e}")
|
||||||
|
logger.warning("Airflow error handling may not work properly with Thrift exceptions")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error patching Thrift exception classes: {e}")
|
||||||
|
|
||||||
|
# Apply the patch when this module is imported
|
||||||
|
patch_thrift_exceptions()
|
||||||
1
ytdlp-ops-auth/thrift_model/.gitignore
vendored
Normal file
1
ytdlp-ops-auth/thrift_model/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
target/
|
||||||
0
ytdlp-ops-auth/thrift_model/__init__.py
Normal file
0
ytdlp-ops-auth/thrift_model/__init__.py
Normal file
95
ytdlp-ops-auth/thrift_model/data/common.thrift
Normal file
95
ytdlp-ops-auth/thrift_model/data/common.thrift
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
namespace py pangramia.yt.common
|
||||||
|
namespace java com.pangramia.yt.common
|
||||||
|
|
||||||
|
typedef string JobID
|
||||||
|
typedef string Timestamp
|
||||||
|
|
||||||
|
|
||||||
|
enum JobState {
|
||||||
|
SUCCESS,
|
||||||
|
FAIL,
|
||||||
|
BOT_FORBIDDEN_ON_URL_ACCESS,
|
||||||
|
BOT_FORBIDDEN_ON_FILE_DOWNLOAD,
|
||||||
|
BOT_CAPTCHA,
|
||||||
|
BOT_AUTH_RELOGIN_REQUIRED,
|
||||||
|
BOT_AUTH_SMS_REQUIRED,
|
||||||
|
BOT_AUTH_DEVICE_QR_REQUIRED,
|
||||||
|
BOT_ACCOUNT_BANNED,
|
||||||
|
BOT_IP_BANNED
|
||||||
|
}
|
||||||
|
|
||||||
|
struct JobTokenData {
|
||||||
|
1: optional string infoJson,
|
||||||
|
2: optional string ytdlpCommand,
|
||||||
|
3: optional string socks,
|
||||||
|
4: optional JobID jobId,
|
||||||
|
5: optional string url,
|
||||||
|
6: optional string cookiesBlob,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
enum TokenUpdateMode {
|
||||||
|
AUTOREFRESH_AND_REMAIN_ANONYMOUS,
|
||||||
|
AUTOREFRESH_AND_ALLOW_AUTH,
|
||||||
|
AUTOREFRESH_AND_ONLY_AUTH,
|
||||||
|
CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH,
|
||||||
|
CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS,
|
||||||
|
CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH,
|
||||||
|
AUTO,// AUTOREFRESH_AND_ONLY_AUTH,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
struct AccountData {
|
||||||
|
1: required string username,
|
||||||
|
2: required string password,
|
||||||
|
3: optional string countryCode
|
||||||
|
}
|
||||||
|
|
||||||
|
struct ProxyData {
|
||||||
|
1: required string proxyUrl,
|
||||||
|
2: optional string countryCode
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
enum AccountPairState {
|
||||||
|
ACTIVE,
|
||||||
|
PAUSED,
|
||||||
|
REMOVED,
|
||||||
|
IN_PROGRESS,
|
||||||
|
ALL
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
struct AccountPairWithState {
|
||||||
|
1: required string accountId,
|
||||||
|
2: required string proxyId,
|
||||||
|
3: optional AccountPairState accountPairState
|
||||||
|
4: optional string machineId,
|
||||||
|
}
|
||||||
|
|
||||||
|
struct JobData {
|
||||||
|
1: required string jobId,
|
||||||
|
2: required string url,
|
||||||
|
3: required string cookiesBlob,
|
||||||
|
4: required string potoken,
|
||||||
|
5: required string visitorId,
|
||||||
|
6: required string ytdlpCommand,
|
||||||
|
7: required string createdTime,
|
||||||
|
8: required map<string,string> telemetry,
|
||||||
|
9: required JobState state,
|
||||||
|
10: optional string errorMessage,
|
||||||
|
11: optional string socks5Id
|
||||||
|
}
|
||||||
|
|
||||||
|
struct RichCollectionPagination {
|
||||||
|
1: required bool hasNext,
|
||||||
|
2: required i32 totalCount,
|
||||||
|
3: required i32 page,
|
||||||
|
4: required i32 pageSize
|
||||||
|
}
|
||||||
|
|
||||||
|
struct RichCollectionJobData {
|
||||||
|
1: required list<JobData> items,
|
||||||
|
2: required RichCollectionPagination pagination
|
||||||
|
}
|
||||||
|
|
||||||
14
ytdlp-ops-auth/thrift_model/data/exceptions.thrift
Normal file
14
ytdlp-ops-auth/thrift_model/data/exceptions.thrift
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
namespace py pangramia.yt.exceptions
|
||||||
|
namespace java com.pangramia.yt.exceptions
|
||||||
|
|
||||||
|
exception PBServiceException {
|
||||||
|
1: required string message,
|
||||||
|
2: optional string errorCode,
|
||||||
|
3: optional map<string, string> context
|
||||||
|
}
|
||||||
|
|
||||||
|
exception PBUserException {
|
||||||
|
1: required string message,
|
||||||
|
2: optional string errorCode,
|
||||||
|
3: optional map<string, string> context
|
||||||
|
}
|
||||||
0
ytdlp-ops-auth/thrift_model/gen_py/__init__.py
Normal file
0
ytdlp-ops-auth/thrift_model/gen_py/__init__.py
Normal file
131
ytdlp-ops-auth/thrift_model/gen_py/pangramia/base_service/BaseService-remote
Executable file
131
ytdlp-ops-auth/thrift_model/gen_py/pangramia/base_service/BaseService-remote
Executable file
@ -0,0 +1,131 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# Autogenerated by Thrift Compiler (0.20.0)
|
||||||
|
#
|
||||||
|
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||||
|
#
|
||||||
|
# options string: py
|
||||||
|
#
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import pprint
|
||||||
|
if sys.version_info[0] > 2:
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
else:
|
||||||
|
from urlparse import urlparse
|
||||||
|
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
|
||||||
|
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
|
||||||
|
|
||||||
|
from pangramia.base_service import BaseService
|
||||||
|
from pangramia.base_service.ttypes import *
|
||||||
|
|
||||||
|
if len(sys.argv) <= 1 or sys.argv[1] == '--help':
|
||||||
|
print('')
|
||||||
|
print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
|
||||||
|
print('')
|
||||||
|
print('Functions:')
|
||||||
|
print(' bool ping()')
|
||||||
|
print(' bool reportError(string message, details)')
|
||||||
|
print(' void shutdown()')
|
||||||
|
print('')
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
pp = pprint.PrettyPrinter(indent=2)
|
||||||
|
host = 'localhost'
|
||||||
|
port = 9090
|
||||||
|
uri = ''
|
||||||
|
framed = False
|
||||||
|
ssl = False
|
||||||
|
validate = True
|
||||||
|
ca_certs = None
|
||||||
|
keyfile = None
|
||||||
|
certfile = None
|
||||||
|
http = False
|
||||||
|
argi = 1
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-h':
|
||||||
|
parts = sys.argv[argi + 1].split(':')
|
||||||
|
host = parts[0]
|
||||||
|
if len(parts) > 1:
|
||||||
|
port = int(parts[1])
|
||||||
|
argi += 2
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-u':
|
||||||
|
url = urlparse(sys.argv[argi + 1])
|
||||||
|
parts = url[1].split(':')
|
||||||
|
host = parts[0]
|
||||||
|
if len(parts) > 1:
|
||||||
|
port = int(parts[1])
|
||||||
|
else:
|
||||||
|
port = 80
|
||||||
|
uri = url[2]
|
||||||
|
if url[4]:
|
||||||
|
uri += '?%s' % url[4]
|
||||||
|
http = True
|
||||||
|
argi += 2
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
|
||||||
|
framed = True
|
||||||
|
argi += 1
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
|
||||||
|
ssl = True
|
||||||
|
argi += 1
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-novalidate':
|
||||||
|
validate = False
|
||||||
|
argi += 1
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-ca_certs':
|
||||||
|
ca_certs = sys.argv[argi+1]
|
||||||
|
argi += 2
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-keyfile':
|
||||||
|
keyfile = sys.argv[argi+1]
|
||||||
|
argi += 2
|
||||||
|
|
||||||
|
if sys.argv[argi] == '-certfile':
|
||||||
|
certfile = sys.argv[argi+1]
|
||||||
|
argi += 2
|
||||||
|
|
||||||
|
cmd = sys.argv[argi]
|
||||||
|
args = sys.argv[argi + 1:]
|
||||||
|
|
||||||
|
if http:
|
||||||
|
transport = THttpClient.THttpClient(host, port, uri)
|
||||||
|
else:
|
||||||
|
if ssl:
|
||||||
|
socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
|
||||||
|
else:
|
||||||
|
socket = TSocket.TSocket(host, port)
|
||||||
|
if framed:
|
||||||
|
transport = TTransport.TFramedTransport(socket)
|
||||||
|
else:
|
||||||
|
transport = TTransport.TBufferedTransport(socket)
|
||||||
|
protocol = TBinaryProtocol(transport)
|
||||||
|
client = BaseService.Client(protocol)
|
||||||
|
transport.open()
|
||||||
|
|
||||||
|
if cmd == 'ping':
|
||||||
|
if len(args) != 0:
|
||||||
|
print('ping requires 0 args')
|
||||||
|
sys.exit(1)
|
||||||
|
pp.pprint(client.ping())
|
||||||
|
|
||||||
|
elif cmd == 'reportError':
|
||||||
|
if len(args) != 2:
|
||||||
|
print('reportError requires 2 args')
|
||||||
|
sys.exit(1)
|
||||||
|
pp.pprint(client.reportError(args[0], eval(args[1]),))
|
||||||
|
|
||||||
|
elif cmd == 'shutdown':
|
||||||
|
if len(args) != 0:
|
||||||
|
print('shutdown requires 0 args')
|
||||||
|
sys.exit(1)
|
||||||
|
pp.pprint(client.shutdown())
|
||||||
|
|
||||||
|
else:
|
||||||
|
print('Unrecognized method %s' % cmd)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
transport.close()
|
||||||
@ -0,0 +1,564 @@
|
|||||||
|
#
|
||||||
|
# Autogenerated by Thrift Compiler (0.20.0)
|
||||||
|
#
|
||||||
|
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||||
|
#
|
||||||
|
# options string: py
|
||||||
|
#
|
||||||
|
|
||||||
|
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
|
||||||
|
from thrift.protocol.TProtocol import TProtocolException
|
||||||
|
from thrift.TRecursive import fix_spec
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
from .ttypes import *
|
||||||
|
from thrift.Thrift import TProcessor
|
||||||
|
from thrift.transport import TTransport
|
||||||
|
all_structs = []
|
||||||
|
|
||||||
|
|
||||||
|
class Iface(object):
|
||||||
|
def ping(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def reportError(self, message, details):
|
||||||
|
"""
|
||||||
|
Parameters:
|
||||||
|
- message
|
||||||
|
- details
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Client(Iface):
|
||||||
|
def __init__(self, iprot, oprot=None):
|
||||||
|
self._iprot = self._oprot = iprot
|
||||||
|
if oprot is not None:
|
||||||
|
self._oprot = oprot
|
||||||
|
self._seqid = 0
|
||||||
|
|
||||||
|
def ping(self):
|
||||||
|
self.send_ping()
|
||||||
|
return self.recv_ping()
|
||||||
|
|
||||||
|
def send_ping(self):
|
||||||
|
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
|
||||||
|
args = ping_args()
|
||||||
|
args.write(self._oprot)
|
||||||
|
self._oprot.writeMessageEnd()
|
||||||
|
self._oprot.trans.flush()
|
||||||
|
|
||||||
|
def recv_ping(self):
|
||||||
|
iprot = self._iprot
|
||||||
|
(fname, mtype, rseqid) = iprot.readMessageBegin()
|
||||||
|
if mtype == TMessageType.EXCEPTION:
|
||||||
|
x = TApplicationException()
|
||||||
|
x.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
raise x
|
||||||
|
result = ping_result()
|
||||||
|
result.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
if result.success is not None:
|
||||||
|
return result.success
|
||||||
|
if result.serviceExp is not None:
|
||||||
|
raise result.serviceExp
|
||||||
|
if result.userExp is not None:
|
||||||
|
raise result.userExp
|
||||||
|
raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result")
|
||||||
|
|
||||||
|
def reportError(self, message, details):
|
||||||
|
"""
|
||||||
|
Parameters:
|
||||||
|
- message
|
||||||
|
- details
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.send_reportError(message, details)
|
||||||
|
return self.recv_reportError()
|
||||||
|
|
||||||
|
def send_reportError(self, message, details):
|
||||||
|
self._oprot.writeMessageBegin('reportError', TMessageType.CALL, self._seqid)
|
||||||
|
args = reportError_args()
|
||||||
|
args.message = message
|
||||||
|
args.details = details
|
||||||
|
args.write(self._oprot)
|
||||||
|
self._oprot.writeMessageEnd()
|
||||||
|
self._oprot.trans.flush()
|
||||||
|
|
||||||
|
def recv_reportError(self):
|
||||||
|
iprot = self._iprot
|
||||||
|
(fname, mtype, rseqid) = iprot.readMessageBegin()
|
||||||
|
if mtype == TMessageType.EXCEPTION:
|
||||||
|
x = TApplicationException()
|
||||||
|
x.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
raise x
|
||||||
|
result = reportError_result()
|
||||||
|
result.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
if result.success is not None:
|
||||||
|
return result.success
|
||||||
|
if result.serviceExp is not None:
|
||||||
|
raise result.serviceExp
|
||||||
|
if result.userExp is not None:
|
||||||
|
raise result.userExp
|
||||||
|
raise TApplicationException(TApplicationException.MISSING_RESULT, "reportError failed: unknown result")
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
self.send_shutdown()
|
||||||
|
|
||||||
|
def send_shutdown(self):
|
||||||
|
self._oprot.writeMessageBegin('shutdown', TMessageType.ONEWAY, self._seqid)
|
||||||
|
args = shutdown_args()
|
||||||
|
args.write(self._oprot)
|
||||||
|
self._oprot.writeMessageEnd()
|
||||||
|
self._oprot.trans.flush()
|
||||||
|
|
||||||
|
|
||||||
|
class Processor(Iface, TProcessor):
|
||||||
|
def __init__(self, handler):
|
||||||
|
self._handler = handler
|
||||||
|
self._processMap = {}
|
||||||
|
self._processMap["ping"] = Processor.process_ping
|
||||||
|
self._processMap["reportError"] = Processor.process_reportError
|
||||||
|
self._processMap["shutdown"] = Processor.process_shutdown
|
||||||
|
self._on_message_begin = None
|
||||||
|
|
||||||
|
def on_message_begin(self, func):
|
||||||
|
self._on_message_begin = func
|
||||||
|
|
||||||
|
def process(self, iprot, oprot):
|
||||||
|
(name, type, seqid) = iprot.readMessageBegin()
|
||||||
|
if self._on_message_begin:
|
||||||
|
self._on_message_begin(name, type, seqid)
|
||||||
|
if name not in self._processMap:
|
||||||
|
iprot.skip(TType.STRUCT)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
|
||||||
|
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
|
||||||
|
x.write(oprot)
|
||||||
|
oprot.writeMessageEnd()
|
||||||
|
oprot.trans.flush()
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
self._processMap[name](self, seqid, iprot, oprot)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def process_ping(self, seqid, iprot, oprot):
|
||||||
|
args = ping_args()
|
||||||
|
args.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
result = ping_result()
|
||||||
|
try:
|
||||||
|
result.success = self._handler.ping()
|
||||||
|
msg_type = TMessageType.REPLY
|
||||||
|
except TTransport.TTransportException:
|
||||||
|
raise
|
||||||
|
except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
|
||||||
|
msg_type = TMessageType.REPLY
|
||||||
|
result.serviceExp = serviceExp
|
||||||
|
except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
|
||||||
|
msg_type = TMessageType.REPLY
|
||||||
|
result.userExp = userExp
|
||||||
|
except TApplicationException as ex:
|
||||||
|
logging.exception('TApplication exception in handler')
|
||||||
|
msg_type = TMessageType.EXCEPTION
|
||||||
|
result = ex
|
||||||
|
except Exception:
|
||||||
|
logging.exception('Unexpected exception in handler')
|
||||||
|
msg_type = TMessageType.EXCEPTION
|
||||||
|
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
|
||||||
|
oprot.writeMessageBegin("ping", msg_type, seqid)
|
||||||
|
result.write(oprot)
|
||||||
|
oprot.writeMessageEnd()
|
||||||
|
oprot.trans.flush()
|
||||||
|
|
||||||
|
def process_reportError(self, seqid, iprot, oprot):
|
||||||
|
args = reportError_args()
|
||||||
|
args.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
result = reportError_result()
|
||||||
|
try:
|
||||||
|
result.success = self._handler.reportError(args.message, args.details)
|
||||||
|
msg_type = TMessageType.REPLY
|
||||||
|
except TTransport.TTransportException:
|
||||||
|
raise
|
||||||
|
except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
|
||||||
|
msg_type = TMessageType.REPLY
|
||||||
|
result.serviceExp = serviceExp
|
||||||
|
except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
|
||||||
|
msg_type = TMessageType.REPLY
|
||||||
|
result.userExp = userExp
|
||||||
|
except TApplicationException as ex:
|
||||||
|
logging.exception('TApplication exception in handler')
|
||||||
|
msg_type = TMessageType.EXCEPTION
|
||||||
|
result = ex
|
||||||
|
except Exception:
|
||||||
|
logging.exception('Unexpected exception in handler')
|
||||||
|
msg_type = TMessageType.EXCEPTION
|
||||||
|
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
|
||||||
|
oprot.writeMessageBegin("reportError", msg_type, seqid)
|
||||||
|
result.write(oprot)
|
||||||
|
oprot.writeMessageEnd()
|
||||||
|
oprot.trans.flush()
|
||||||
|
|
||||||
|
def process_shutdown(self, seqid, iprot, oprot):
|
||||||
|
args = shutdown_args()
|
||||||
|
args.read(iprot)
|
||||||
|
iprot.readMessageEnd()
|
||||||
|
try:
|
||||||
|
self._handler.shutdown()
|
||||||
|
except TTransport.TTransportException:
|
||||||
|
raise
|
||||||
|
except Exception:
|
||||||
|
logging.exception('Exception in oneway handler')
|
||||||
|
|
||||||
|
# HELPER FUNCTIONS AND STRUCTURES
|
||||||
|
|
||||||
|
|
||||||
|
class ping_args(object):
|
||||||
|
|
||||||
|
|
||||||
|
def read(self, iprot):
|
||||||
|
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
|
||||||
|
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
|
||||||
|
return
|
||||||
|
iprot.readStructBegin()
|
||||||
|
while True:
|
||||||
|
(fname, ftype, fid) = iprot.readFieldBegin()
|
||||||
|
if ftype == TType.STOP:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
iprot.readFieldEnd()
|
||||||
|
iprot.readStructEnd()
|
||||||
|
|
||||||
|
def write(self, oprot):
|
||||||
|
if oprot._fast_encode is not None and self.thrift_spec is not None:
|
||||||
|
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
|
||||||
|
return
|
||||||
|
oprot.writeStructBegin('ping_args')
|
||||||
|
oprot.writeFieldStop()
|
||||||
|
oprot.writeStructEnd()
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
return
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
L = ['%s=%r' % (key, value)
|
||||||
|
for key, value in self.__dict__.items()]
|
||||||
|
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
all_structs.append(ping_args)
|
||||||
|
ping_args.thrift_spec = (
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ping_result(object):
|
||||||
|
"""
|
||||||
|
Attributes:
|
||||||
|
- success
|
||||||
|
- serviceExp
|
||||||
|
- userExp
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, success=None, serviceExp=None, userExp=None,):
|
||||||
|
self.success = success
|
||||||
|
self.serviceExp = serviceExp
|
||||||
|
self.userExp = userExp
|
||||||
|
|
||||||
|
def read(self, iprot):
|
||||||
|
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
|
||||||
|
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
|
||||||
|
return
|
||||||
|
iprot.readStructBegin()
|
||||||
|
while True:
|
||||||
|
(fname, ftype, fid) = iprot.readFieldBegin()
|
||||||
|
if ftype == TType.STOP:
|
||||||
|
break
|
||||||
|
if fid == 0:
|
||||||
|
if ftype == TType.BOOL:
|
||||||
|
self.success = iprot.readBool()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 1:
|
||||||
|
if ftype == TType.STRUCT:
|
||||||
|
self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 2:
|
||||||
|
if ftype == TType.STRUCT:
|
||||||
|
self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
iprot.readFieldEnd()
|
||||||
|
iprot.readStructEnd()
|
||||||
|
|
||||||
|
def write(self, oprot):
|
||||||
|
if oprot._fast_encode is not None and self.thrift_spec is not None:
|
||||||
|
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
|
||||||
|
return
|
||||||
|
oprot.writeStructBegin('ping_result')
|
||||||
|
if self.success is not None:
|
||||||
|
oprot.writeFieldBegin('success', TType.BOOL, 0)
|
||||||
|
oprot.writeBool(self.success)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.serviceExp is not None:
|
||||||
|
oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
|
||||||
|
self.serviceExp.write(oprot)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.userExp is not None:
|
||||||
|
oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
|
||||||
|
self.userExp.write(oprot)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
oprot.writeFieldStop()
|
||||||
|
oprot.writeStructEnd()
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
return
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
L = ['%s=%r' % (key, value)
|
||||||
|
for key, value in self.__dict__.items()]
|
||||||
|
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
all_structs.append(ping_result)
|
||||||
|
ping_result.thrift_spec = (
|
||||||
|
(0, TType.BOOL, 'success', None, None, ), # 0
|
||||||
|
(1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1
|
||||||
|
(2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class reportError_args(object):
|
||||||
|
"""
|
||||||
|
Attributes:
|
||||||
|
- message
|
||||||
|
- details
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, message=None, details=None,):
|
||||||
|
self.message = message
|
||||||
|
self.details = details
|
||||||
|
|
||||||
|
def read(self, iprot):
|
||||||
|
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
|
||||||
|
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
|
||||||
|
return
|
||||||
|
iprot.readStructBegin()
|
||||||
|
while True:
|
||||||
|
(fname, ftype, fid) = iprot.readFieldBegin()
|
||||||
|
if ftype == TType.STOP:
|
||||||
|
break
|
||||||
|
if fid == 1:
|
||||||
|
if ftype == TType.STRING:
|
||||||
|
self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 2:
|
||||||
|
if ftype == TType.MAP:
|
||||||
|
self.details = {}
|
||||||
|
(_ktype1, _vtype2, _size0) = iprot.readMapBegin()
|
||||||
|
for _i4 in range(_size0):
|
||||||
|
_key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
_val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
self.details[_key5] = _val6
|
||||||
|
iprot.readMapEnd()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
iprot.readFieldEnd()
|
||||||
|
iprot.readStructEnd()
|
||||||
|
|
||||||
|
def write(self, oprot):
|
||||||
|
if oprot._fast_encode is not None and self.thrift_spec is not None:
|
||||||
|
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
|
||||||
|
return
|
||||||
|
oprot.writeStructBegin('reportError_args')
|
||||||
|
if self.message is not None:
|
||||||
|
oprot.writeFieldBegin('message', TType.STRING, 1)
|
||||||
|
oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.details is not None:
|
||||||
|
oprot.writeFieldBegin('details', TType.MAP, 2)
|
||||||
|
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.details))
|
||||||
|
for kiter7, viter8 in self.details.items():
|
||||||
|
oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
|
||||||
|
oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
|
||||||
|
oprot.writeMapEnd()
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
oprot.writeFieldStop()
|
||||||
|
oprot.writeStructEnd()
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
return
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
L = ['%s=%r' % (key, value)
|
||||||
|
for key, value in self.__dict__.items()]
|
||||||
|
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
all_structs.append(reportError_args)
|
||||||
|
reportError_args.thrift_spec = (
|
||||||
|
None, # 0
|
||||||
|
(1, TType.STRING, 'message', 'UTF8', None, ), # 1
|
||||||
|
(2, TType.MAP, 'details', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class reportError_result(object):
|
||||||
|
"""
|
||||||
|
Attributes:
|
||||||
|
- success
|
||||||
|
- serviceExp
|
||||||
|
- userExp
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, success=None, serviceExp=None, userExp=None,):
|
||||||
|
self.success = success
|
||||||
|
self.serviceExp = serviceExp
|
||||||
|
self.userExp = userExp
|
||||||
|
|
||||||
|
def read(self, iprot):
|
||||||
|
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
|
||||||
|
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
|
||||||
|
return
|
||||||
|
iprot.readStructBegin()
|
||||||
|
while True:
|
||||||
|
(fname, ftype, fid) = iprot.readFieldBegin()
|
||||||
|
if ftype == TType.STOP:
|
||||||
|
break
|
||||||
|
if fid == 0:
|
||||||
|
if ftype == TType.BOOL:
|
||||||
|
self.success = iprot.readBool()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 1:
|
||||||
|
if ftype == TType.STRUCT:
|
||||||
|
self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 2:
|
||||||
|
if ftype == TType.STRUCT:
|
||||||
|
self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
iprot.readFieldEnd()
|
||||||
|
iprot.readStructEnd()
|
||||||
|
|
||||||
|
def write(self, oprot):
|
||||||
|
if oprot._fast_encode is not None and self.thrift_spec is not None:
|
||||||
|
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
|
||||||
|
return
|
||||||
|
oprot.writeStructBegin('reportError_result')
|
||||||
|
if self.success is not None:
|
||||||
|
oprot.writeFieldBegin('success', TType.BOOL, 0)
|
||||||
|
oprot.writeBool(self.success)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.serviceExp is not None:
|
||||||
|
oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
|
||||||
|
self.serviceExp.write(oprot)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.userExp is not None:
|
||||||
|
oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
|
||||||
|
self.userExp.write(oprot)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
oprot.writeFieldStop()
|
||||||
|
oprot.writeStructEnd()
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
return
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
L = ['%s=%r' % (key, value)
|
||||||
|
for key, value in self.__dict__.items()]
|
||||||
|
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
all_structs.append(reportError_result)
|
||||||
|
reportError_result.thrift_spec = (
|
||||||
|
(0, TType.BOOL, 'success', None, None, ), # 0
|
||||||
|
(1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ), # 1
|
||||||
|
(2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ), # 2
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class shutdown_args(object):
|
||||||
|
|
||||||
|
|
||||||
|
def read(self, iprot):
|
||||||
|
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
|
||||||
|
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
|
||||||
|
return
|
||||||
|
iprot.readStructBegin()
|
||||||
|
while True:
|
||||||
|
(fname, ftype, fid) = iprot.readFieldBegin()
|
||||||
|
if ftype == TType.STOP:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
iprot.readFieldEnd()
|
||||||
|
iprot.readStructEnd()
|
||||||
|
|
||||||
|
def write(self, oprot):
|
||||||
|
if oprot._fast_encode is not None and self.thrift_spec is not None:
|
||||||
|
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
|
||||||
|
return
|
||||||
|
oprot.writeStructBegin('shutdown_args')
|
||||||
|
oprot.writeFieldStop()
|
||||||
|
oprot.writeStructEnd()
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
return
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
L = ['%s=%r' % (key, value)
|
||||||
|
for key, value in self.__dict__.items()]
|
||||||
|
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
all_structs.append(shutdown_args)
|
||||||
|
shutdown_args.thrift_spec = (
|
||||||
|
)
|
||||||
|
fix_spec(all_structs)
|
||||||
|
del all_structs
|
||||||
@ -0,0 +1 @@
|
|||||||
|
__all__ = ['ttypes', 'constants', 'BaseService']
|
||||||
@ -0,0 +1,14 @@
|
|||||||
|
#
|
||||||
|
# Autogenerated by Thrift Compiler (0.20.0)
|
||||||
|
#
|
||||||
|
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||||
|
#
|
||||||
|
# options string: py
|
||||||
|
#
|
||||||
|
|
||||||
|
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
|
||||||
|
from thrift.protocol.TProtocol import TProtocolException
|
||||||
|
from thrift.TRecursive import fix_spec
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from .ttypes import *
|
||||||
@ -0,0 +1,20 @@
|
|||||||
|
#
|
||||||
|
# Autogenerated by Thrift Compiler (0.20.0)
|
||||||
|
#
|
||||||
|
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||||
|
#
|
||||||
|
# options string: py
|
||||||
|
#
|
||||||
|
|
||||||
|
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
|
||||||
|
from thrift.protocol.TProtocol import TProtocolException
|
||||||
|
from thrift.TRecursive import fix_spec
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import pangramia.yt.common.ttypes
|
||||||
|
import pangramia.yt.exceptions.ttypes
|
||||||
|
|
||||||
|
from thrift.transport import TTransport
|
||||||
|
all_structs = []
|
||||||
|
fix_spec(all_structs)
|
||||||
|
del all_structs
|
||||||
@ -0,0 +1,236 @@
#!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

import sys
import pprint
if sys.version_info[0] > 2:
    from urllib.parse import urlparse
else:
    from urlparse import urlparse
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

from pangramia.yt.admin_ops import YTAccountsOpService
from pangramia.yt.admin_ops.ttypes import *

if len(sys.argv) <= 1 or sys.argv[1] == '--help':
    print('')
    print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
    print('')
    print('Functions:')
    print(' bool addAccountPair(string accountId, string proxyId, string machineId, ProxyData proxyData, AccountData accountData)')
    print(' AccountPairWithState getPair(string machineId)')
    print(' bool pair(string accountId, string proxyId, string machineId)')
    print(' bool unpair(string accountId, string proxyId, string machineId)')
    print(' listAccountPairs(AccountPairState filter)')
    print(' bool addAccount(string accountId, AccountData accountData)')
    print(' bool suspendAccount(string accountId)')
    print(' bool resumeAccount(string accountId)')
    print(' bool removeAccount(string accountId)')
    print(' listActiveAccounts()')
    print(' bool addProxy(string proxyId, ProxyData proxyData)')
    print(' bool suspendProxy(string proxyId)')
    print(' bool resumeProxy(string proxyId)')
    print(' bool removeProxy(string proxyId)')
    print(' listActiveProxies()')
    print(' bool ping()')
    print(' bool reportError(string message, details)')
    print(' void shutdown()')
    print('')
    sys.exit(0)

pp = pprint.PrettyPrinter(indent=2)
host = 'localhost'
port = 9090
uri = ''
framed = False
ssl = False
validate = True
ca_certs = None
keyfile = None
certfile = None
http = False
argi = 1

if sys.argv[argi] == '-h':
    parts = sys.argv[argi + 1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    argi += 2

if sys.argv[argi] == '-u':
    url = urlparse(sys.argv[argi + 1])
    parts = url[1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    else:
        port = 80
    uri = url[2]
    if url[4]:
        uri += '?%s' % url[4]
    http = True
    argi += 2

if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
    framed = True
    argi += 1

if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
    ssl = True
    argi += 1

if sys.argv[argi] == '-novalidate':
    validate = False
    argi += 1

if sys.argv[argi] == '-ca_certs':
    ca_certs = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-keyfile':
    keyfile = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-certfile':
    certfile = sys.argv[argi+1]
    argi += 2

cmd = sys.argv[argi]
args = sys.argv[argi + 1:]

if http:
    transport = THttpClient.THttpClient(host, port, uri)
else:
    if ssl:
        socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
    else:
        socket = TSocket.TSocket(host, port)
    if framed:
        transport = TTransport.TFramedTransport(socket)
    else:
        transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol(transport)
client = YTAccountsOpService.Client(protocol)
transport.open()

if cmd == 'addAccountPair':
    if len(args) != 5:
        print('addAccountPair requires 5 args')
        sys.exit(1)
    pp.pprint(client.addAccountPair(args[0], args[1], args[2], eval(args[3]), eval(args[4]),))

elif cmd == 'getPair':
    if len(args) != 1:
        print('getPair requires 1 args')
        sys.exit(1)
    pp.pprint(client.getPair(args[0],))

elif cmd == 'pair':
    if len(args) != 3:
        print('pair requires 3 args')
        sys.exit(1)
    pp.pprint(client.pair(args[0], args[1], args[2],))

elif cmd == 'unpair':
    if len(args) != 3:
        print('unpair requires 3 args')
        sys.exit(1)
    pp.pprint(client.unpair(args[0], args[1], args[2],))

elif cmd == 'listAccountPairs':
    if len(args) != 1:
        print('listAccountPairs requires 1 args')
        sys.exit(1)
    pp.pprint(client.listAccountPairs(eval(args[0]),))

elif cmd == 'addAccount':
    if len(args) != 2:
        print('addAccount requires 2 args')
        sys.exit(1)
    pp.pprint(client.addAccount(args[0], eval(args[1]),))

elif cmd == 'suspendAccount':
    if len(args) != 1:
        print('suspendAccount requires 1 args')
        sys.exit(1)
    pp.pprint(client.suspendAccount(args[0],))

elif cmd == 'resumeAccount':
    if len(args) != 1:
        print('resumeAccount requires 1 args')
        sys.exit(1)
    pp.pprint(client.resumeAccount(args[0],))

elif cmd == 'removeAccount':
    if len(args) != 1:
        print('removeAccount requires 1 args')
        sys.exit(1)
    pp.pprint(client.removeAccount(args[0],))

elif cmd == 'listActiveAccounts':
    if len(args) != 0:
        print('listActiveAccounts requires 0 args')
        sys.exit(1)
    pp.pprint(client.listActiveAccounts())

elif cmd == 'addProxy':
    if len(args) != 2:
        print('addProxy requires 2 args')
        sys.exit(1)
    pp.pprint(client.addProxy(args[0], eval(args[1]),))

elif cmd == 'suspendProxy':
    if len(args) != 1:
        print('suspendProxy requires 1 args')
        sys.exit(1)
    pp.pprint(client.suspendProxy(args[0],))

elif cmd == 'resumeProxy':
    if len(args) != 1:
        print('resumeProxy requires 1 args')
        sys.exit(1)
    pp.pprint(client.resumeProxy(args[0],))

elif cmd == 'removeProxy':
    if len(args) != 1:
        print('removeProxy requires 1 args')
        sys.exit(1)
    pp.pprint(client.removeProxy(args[0],))

elif cmd == 'listActiveProxies':
    if len(args) != 0:
        print('listActiveProxies requires 0 args')
        sys.exit(1)
    pp.pprint(client.listActiveProxies())

elif cmd == 'ping':
    if len(args) != 0:
        print('ping requires 0 args')
        sys.exit(1)
    pp.pprint(client.ping())

elif cmd == 'reportError':
    if len(args) != 2:
        print('reportError requires 2 args')
        sys.exit(1)
    pp.pprint(client.reportError(args[0], eval(args[1]),))

elif cmd == 'shutdown':
    if len(args) != 0:
        print('shutdown requires 0 args')
        sys.exit(1)
    pp.pprint(client.shutdown())

else:
    print('Unrecognized method %s' % cmd)
    sys.exit(1)

transport.close()
File diff suppressed because it is too large
@ -0,0 +1 @@
__all__ = ['ttypes', 'constants', 'YTAccountsOpService']
@ -0,0 +1,14 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -0,0 +1,21 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
import pangramia.yt.common.ttypes
import pangramia.yt.exceptions.ttypes
import pangramia.base_service.ttypes

from thrift.transport import TTransport
all_structs = []
fix_spec(all_structs)
del all_structs
@ -0,0 +1 @@
__all__ = ['ttypes', 'constants']
@ -0,0 +1,14 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
905
ytdlp-ops-auth/thrift_model/gen_py/pangramia/yt/common/ttypes.py
Normal file
@ -0,0 +1,905 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys

from thrift.transport import TTransport
all_structs = []


class JobState(object):
    SUCCESS = 0
    FAIL = 1
    BOT_FORBIDDEN_ON_URL_ACCESS = 2
    BOT_FORBIDDEN_ON_FILE_DOWNLOAD = 3
    BOT_CAPTCHA = 4
    BOT_AUTH_RELOGIN_REQUIRED = 5
    BOT_AUTH_SMS_REQUIRED = 6
    BOT_AUTH_DEVICE_QR_REQUIRED = 7
    BOT_ACCOUNT_BANNED = 8
    BOT_IP_BANNED = 9

    _VALUES_TO_NAMES = {
        0: "SUCCESS",
        1: "FAIL",
        2: "BOT_FORBIDDEN_ON_URL_ACCESS",
        3: "BOT_FORBIDDEN_ON_FILE_DOWNLOAD",
        4: "BOT_CAPTCHA",
        5: "BOT_AUTH_RELOGIN_REQUIRED",
        6: "BOT_AUTH_SMS_REQUIRED",
        7: "BOT_AUTH_DEVICE_QR_REQUIRED",
        8: "BOT_ACCOUNT_BANNED",
        9: "BOT_IP_BANNED",
    }

    _NAMES_TO_VALUES = {
        "SUCCESS": 0,
        "FAIL": 1,
        "BOT_FORBIDDEN_ON_URL_ACCESS": 2,
        "BOT_FORBIDDEN_ON_FILE_DOWNLOAD": 3,
        "BOT_CAPTCHA": 4,
        "BOT_AUTH_RELOGIN_REQUIRED": 5,
        "BOT_AUTH_SMS_REQUIRED": 6,
        "BOT_AUTH_DEVICE_QR_REQUIRED": 7,
        "BOT_ACCOUNT_BANNED": 8,
        "BOT_IP_BANNED": 9,
    }


class TokenUpdateMode(object):
    AUTOREFRESH_AND_REMAIN_ANONYMOUS = 0
    AUTOREFRESH_AND_ALLOW_AUTH = 1
    AUTOREFRESH_AND_ONLY_AUTH = 2
    CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH = 3
    CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS = 4
    CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH = 5
    AUTO = 6

    _VALUES_TO_NAMES = {
        0: "AUTOREFRESH_AND_REMAIN_ANONYMOUS",
        1: "AUTOREFRESH_AND_ALLOW_AUTH",
        2: "AUTOREFRESH_AND_ONLY_AUTH",
        3: "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH",
        4: "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS",
        5: "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH",
        6: "AUTO",
    }

    _NAMES_TO_VALUES = {
        "AUTOREFRESH_AND_REMAIN_ANONYMOUS": 0,
        "AUTOREFRESH_AND_ALLOW_AUTH": 1,
        "AUTOREFRESH_AND_ONLY_AUTH": 2,
        "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH": 3,
        "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS": 4,
        "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH": 5,
        "AUTO": 6,
    }


class AccountPairState(object):
    ACTIVE = 0
    PAUSED = 1
    REMOVED = 2
    IN_PROGRESS = 3
    ALL = 4

    _VALUES_TO_NAMES = {
        0: "ACTIVE",
        1: "PAUSED",
        2: "REMOVED",
        3: "IN_PROGRESS",
        4: "ALL",
    }

    _NAMES_TO_VALUES = {
        "ACTIVE": 0,
        "PAUSED": 1,
        "REMOVED": 2,
        "IN_PROGRESS": 3,
        "ALL": 4,
    }


class JobTokenData(object):
    """
    Attributes:
     - infoJson
     - ytdlpCommand
     - socks
     - jobId
     - url
     - cookiesBlob

    """


    def __init__(self, infoJson=None, ytdlpCommand=None, socks=None, jobId=None, url=None, cookiesBlob=None,):
        self.infoJson = infoJson
        self.ytdlpCommand = ytdlpCommand
        self.socks = socks
        self.jobId = jobId
        self.url = url
        self.cookiesBlob = cookiesBlob

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.infoJson = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.socks = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('JobTokenData')
        if self.infoJson is not None:
            oprot.writeFieldBegin('infoJson', TType.STRING, 1)
            oprot.writeString(self.infoJson.encode('utf-8') if sys.version_info[0] == 2 else self.infoJson)
            oprot.writeFieldEnd()
        if self.ytdlpCommand is not None:
            oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 2)
            oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand)
            oprot.writeFieldEnd()
        if self.socks is not None:
            oprot.writeFieldBegin('socks', TType.STRING, 3)
            oprot.writeString(self.socks.encode('utf-8') if sys.version_info[0] == 2 else self.socks)
            oprot.writeFieldEnd()
        if self.jobId is not None:
            oprot.writeFieldBegin('jobId', TType.STRING, 4)
            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
            oprot.writeFieldEnd()
        if self.url is not None:
            oprot.writeFieldBegin('url', TType.STRING, 5)
            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
            oprot.writeFieldEnd()
        if self.cookiesBlob is not None:
            oprot.writeFieldBegin('cookiesBlob', TType.STRING, 6)
            oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class AccountData(object):
    """
    Attributes:
     - username
     - password
     - countryCode

    """


    def __init__(self, username=None, password=None, countryCode=None,):
        self.username = username
        self.password = password
        self.countryCode = countryCode

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AccountData')
        if self.username is not None:
            oprot.writeFieldBegin('username', TType.STRING, 1)
            oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 2)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        if self.countryCode is not None:
            oprot.writeFieldBegin('countryCode', TType.STRING, 3)
            oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.username is None:
            raise TProtocolException(message='Required field username is unset!')
        if self.password is None:
            raise TProtocolException(message='Required field password is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class ProxyData(object):
    """
    Attributes:
     - proxyUrl
     - countryCode

    """


    def __init__(self, proxyUrl=None, countryCode=None,):
        self.proxyUrl = proxyUrl
        self.countryCode = countryCode

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.proxyUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ProxyData')
        if self.proxyUrl is not None:
            oprot.writeFieldBegin('proxyUrl', TType.STRING, 1)
            oprot.writeString(self.proxyUrl.encode('utf-8') if sys.version_info[0] == 2 else self.proxyUrl)
            oprot.writeFieldEnd()
        if self.countryCode is not None:
            oprot.writeFieldBegin('countryCode', TType.STRING, 2)
            oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.proxyUrl is None:
            raise TProtocolException(message='Required field proxyUrl is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class AccountPairWithState(object):
    """
    Attributes:
     - accountId
     - proxyId
     - accountPairState
     - machineId

    """


    def __init__(self, accountId=None, proxyId=None, accountPairState=None, machineId=None,):
        self.accountId = accountId
        self.proxyId = proxyId
        self.accountPairState = accountPairState
        self.machineId = machineId

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.accountPairState = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AccountPairWithState')
        if self.accountId is not None:
            oprot.writeFieldBegin('accountId', TType.STRING, 1)
            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
            oprot.writeFieldEnd()
        if self.proxyId is not None:
            oprot.writeFieldBegin('proxyId', TType.STRING, 2)
            oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId)
            oprot.writeFieldEnd()
        if self.accountPairState is not None:
            oprot.writeFieldBegin('accountPairState', TType.I32, 3)
            oprot.writeI32(self.accountPairState)
            oprot.writeFieldEnd()
        if self.machineId is not None:
            oprot.writeFieldBegin('machineId', TType.STRING, 4)
            oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.accountId is None:
            raise TProtocolException(message='Required field accountId is unset!')
        if self.proxyId is None:
            raise TProtocolException(message='Required field proxyId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class JobData(object):
    """
    Attributes:
     - jobId
     - url
     - cookiesBlob
     - potoken
     - visitorId
     - ytdlpCommand
     - createdTime
     - telemetry
     - state
     - errorMessage
     - socks5Id

    """


    def __init__(self, jobId=None, url=None, cookiesBlob=None, potoken=None, visitorId=None, ytdlpCommand=None, createdTime=None, telemetry=None, state=None, errorMessage=None, socks5Id=None,):
        self.jobId = jobId
        self.url = url
        self.cookiesBlob = cookiesBlob
        self.potoken = potoken
        self.visitorId = visitorId
        self.ytdlpCommand = ytdlpCommand
        self.createdTime = createdTime
        self.telemetry = telemetry
        self.state = state
        self.errorMessage = errorMessage
        self.socks5Id = socks5Id

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.potoken = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.visitorId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.createdTime = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.MAP:
                    self.telemetry = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.telemetry[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.STRING:
                    self.errorMessage = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.STRING:
                    self.socks5Id = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('JobData')
        if self.jobId is not None:
            oprot.writeFieldBegin('jobId', TType.STRING, 1)
            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
            oprot.writeFieldEnd()
        if self.url is not None:
            oprot.writeFieldBegin('url', TType.STRING, 2)
            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
            oprot.writeFieldEnd()
        if self.cookiesBlob is not None:
            oprot.writeFieldBegin('cookiesBlob', TType.STRING, 3)
            oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob)
            oprot.writeFieldEnd()
        if self.potoken is not None:
            oprot.writeFieldBegin('potoken', TType.STRING, 4)
            oprot.writeString(self.potoken.encode('utf-8') if sys.version_info[0] == 2 else self.potoken)
            oprot.writeFieldEnd()
        if self.visitorId is not None:
            oprot.writeFieldBegin('visitorId', TType.STRING, 5)
            oprot.writeString(self.visitorId.encode('utf-8') if sys.version_info[0] == 2 else self.visitorId)
            oprot.writeFieldEnd()
        if self.ytdlpCommand is not None:
            oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 6)
            oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand)
            oprot.writeFieldEnd()
        if self.createdTime is not None:
            oprot.writeFieldBegin('createdTime', TType.STRING, 7)
            oprot.writeString(self.createdTime.encode('utf-8') if sys.version_info[0] == 2 else self.createdTime)
            oprot.writeFieldEnd()
        if self.telemetry is not None:
            oprot.writeFieldBegin('telemetry', TType.MAP, 8)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.telemetry))
            for kiter7, viter8 in self.telemetry.items():
                oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
                oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 9)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.errorMessage is not None:
            oprot.writeFieldBegin('errorMessage', TType.STRING, 10)
            oprot.writeString(self.errorMessage.encode('utf-8') if sys.version_info[0] == 2 else self.errorMessage)
            oprot.writeFieldEnd()
        if self.socks5Id is not None:
            oprot.writeFieldBegin('socks5Id', TType.STRING, 11)
            oprot.writeString(self.socks5Id.encode('utf-8') if sys.version_info[0] == 2 else self.socks5Id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.jobId is None:
            raise TProtocolException(message='Required field jobId is unset!')
        if self.url is None:
            raise TProtocolException(message='Required field url is unset!')
        if self.cookiesBlob is None:
            raise TProtocolException(message='Required field cookiesBlob is unset!')
        if self.potoken is None:
            raise TProtocolException(message='Required field potoken is unset!')
        if self.visitorId is None:
            raise TProtocolException(message='Required field visitorId is unset!')
        if self.ytdlpCommand is None:
            raise TProtocolException(message='Required field ytdlpCommand is unset!')
        if self.createdTime is None:
            raise TProtocolException(message='Required field createdTime is unset!')
        if self.telemetry is None:
            raise TProtocolException(message='Required field telemetry is unset!')
        if self.state is None:
            raise TProtocolException(message='Required field state is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class RichCollectionPagination(object):
    """
    Attributes:
     - hasNext
     - totalCount
     - page
     - pageSize

    """


    def __init__(self, hasNext=None, totalCount=None, page=None, pageSize=None,):
        self.hasNext = hasNext
        self.totalCount = totalCount
        self.page = page
        self.pageSize = pageSize

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BOOL:
                    self.hasNext = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.totalCount = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.page = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.pageSize = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RichCollectionPagination')
        if self.hasNext is not None:
            oprot.writeFieldBegin('hasNext', TType.BOOL, 1)
            oprot.writeBool(self.hasNext)
            oprot.writeFieldEnd()
        if self.totalCount is not None:
            oprot.writeFieldBegin('totalCount', TType.I32, 2)
            oprot.writeI32(self.totalCount)
            oprot.writeFieldEnd()
        if self.page is not None:
            oprot.writeFieldBegin('page', TType.I32, 3)
            oprot.writeI32(self.page)
            oprot.writeFieldEnd()
        if self.pageSize is not None:
            oprot.writeFieldBegin('pageSize', TType.I32, 4)
            oprot.writeI32(self.pageSize)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.hasNext is None:
            raise TProtocolException(message='Required field hasNext is unset!')
        if self.totalCount is None:
            raise TProtocolException(message='Required field totalCount is unset!')
        if self.page is None:
            raise TProtocolException(message='Required field page is unset!')
        if self.pageSize is None:
            raise TProtocolException(message='Required field pageSize is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class RichCollectionJobData(object):
    """
    Attributes:
     - items
     - pagination

    """


    def __init__(self, items=None, pagination=None,):
        self.items = items
        self.pagination = pagination

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.items = []
                    (_etype12, _size9) = iprot.readListBegin()
                    for _i13 in range(_size9):
                        _elem14 = JobData()
                        _elem14.read(iprot)
                        self.items.append(_elem14)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.pagination = RichCollectionPagination()
                    self.pagination.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RichCollectionJobData')
        if self.items is not None:
            oprot.writeFieldBegin('items', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.items))
            for iter15 in self.items:
                iter15.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.pagination is not None:
            oprot.writeFieldBegin('pagination', TType.STRUCT, 2)
            self.pagination.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.items is None:
            raise TProtocolException(message='Required field items is unset!')
        if self.pagination is None:
            raise TProtocolException(message='Required field pagination is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(JobTokenData)
JobTokenData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'infoJson', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'ytdlpCommand', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'socks', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'jobId', 'UTF8', None, ),  # 4
    (5, TType.STRING, 'url', 'UTF8', None, ),  # 5
    (6, TType.STRING, 'cookiesBlob', 'UTF8', None, ),  # 6
)
all_structs.append(AccountData)
AccountData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'username', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'password', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'countryCode', 'UTF8', None, ),  # 3
)
all_structs.append(ProxyData)
ProxyData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'proxyUrl', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'countryCode', 'UTF8', None, ),  # 2
)
all_structs.append(AccountPairWithState)
AccountPairWithState.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'proxyId', 'UTF8', None, ),  # 2
    (3, TType.I32, 'accountPairState', None, None, ),  # 3
    (4, TType.STRING, 'machineId', 'UTF8', None, ),  # 4
)
all_structs.append(JobData)
JobData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'jobId', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'url', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'cookiesBlob', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'potoken', 'UTF8', None, ),  # 4
    (5, TType.STRING, 'visitorId', 'UTF8', None, ),  # 5
    (6, TType.STRING, 'ytdlpCommand', 'UTF8', None, ),  # 6
    (7, TType.STRING, 'createdTime', 'UTF8', None, ),  # 7
    (8, TType.MAP, 'telemetry', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 8
    (9, TType.I32, 'state', None, None, ),  # 9
    (10, TType.STRING, 'errorMessage', 'UTF8', None, ),  # 10
    (11, TType.STRING, 'socks5Id', 'UTF8', None, ),  # 11
)
all_structs.append(RichCollectionPagination)
RichCollectionPagination.thrift_spec = (
    None,  # 0
    (1, TType.BOOL, 'hasNext', None, None, ),  # 1
    (2, TType.I32, 'totalCount', None, None, ),  # 2
    (3, TType.I32, 'page', None, None, ),  # 3
    (4, TType.I32, 'pageSize', None, None, ),  # 4
)
all_structs.append(RichCollectionJobData)
RichCollectionJobData.thrift_spec = (
    None,  # 0
    (1, TType.LIST, 'items', (TType.STRUCT, [JobData, None], False), None, ),  # 1
    (2, TType.STRUCT, 'pagination', [RichCollectionPagination, None], None, ),  # 2
)
fix_spec(all_structs)
del all_structs
@ -0,0 +1 @@
__all__ = ['ttypes', 'constants']
@ -0,0 +1,14 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -0,0 +1,254 @@
|
|||||||
|
#
|
||||||
|
# Autogenerated by Thrift Compiler (0.20.0)
|
||||||
|
#
|
||||||
|
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||||
|
#
|
||||||
|
# options string: py
|
||||||
|
#
|
||||||
|
|
||||||
|
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
|
||||||
|
from thrift.protocol.TProtocol import TProtocolException
|
||||||
|
from thrift.TRecursive import fix_spec
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from thrift.transport import TTransport
|
||||||
|
all_structs = []
|
||||||
|
|
||||||
|
|
||||||
|
class PBServiceException(TException):
|
||||||
|
"""
|
||||||
|
Attributes:
|
||||||
|
- message
|
||||||
|
- errorCode
|
||||||
|
- context
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, message=None, errorCode=None, context=None,):
|
||||||
|
super(PBServiceException, self).__setattr__('message', message)
|
||||||
|
super(PBServiceException, self).__setattr__('errorCode', errorCode)
|
||||||
|
super(PBServiceException, self).__setattr__('context', context)
|
||||||
|
|
||||||
|
def __setattr__(self, *args):
|
||||||
|
raise TypeError("can't modify immutable instance")
|
||||||
|
|
||||||
|
def __delattr__(self, *args):
|
||||||
|
raise TypeError("can't modify immutable instance")
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, ))
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def read(cls, iprot):
|
||||||
|
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None:
|
||||||
|
return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec])
|
||||||
|
iprot.readStructBegin()
|
||||||
|
message = None
|
||||||
|
errorCode = None
|
||||||
|
context = None
|
||||||
|
while True:
|
||||||
|
(fname, ftype, fid) = iprot.readFieldBegin()
|
||||||
|
if ftype == TType.STOP:
|
||||||
|
break
|
||||||
|
if fid == 1:
|
||||||
|
if ftype == TType.STRING:
|
||||||
|
message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 2:
|
||||||
|
if ftype == TType.STRING:
|
||||||
|
errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
elif fid == 3:
|
||||||
|
if ftype == TType.MAP:
|
||||||
|
context = {}
|
||||||
|
(_ktype1, _vtype2, _size0) = iprot.readMapBegin()
|
||||||
|
for _i4 in range(_size0):
|
||||||
|
_key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
_val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
|
||||||
|
context[_key5] = _val6
|
||||||
|
iprot.readMapEnd()
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
else:
|
||||||
|
iprot.skip(ftype)
|
||||||
|
iprot.readFieldEnd()
|
||||||
|
iprot.readStructEnd()
|
||||||
|
return cls(
|
||||||
|
message=message,
|
||||||
|
errorCode=errorCode,
|
||||||
|
context=context,
|
||||||
|
)
|
||||||
|
|
||||||
|
def write(self, oprot):
|
||||||
|
if oprot._fast_encode is not None and self.thrift_spec is not None:
|
||||||
|
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
|
||||||
|
return
|
||||||
|
oprot.writeStructBegin('PBServiceException')
|
||||||
|
if self.message is not None:
|
||||||
|
oprot.writeFieldBegin('message', TType.STRING, 1)
|
||||||
|
oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.errorCode is not None:
|
||||||
|
oprot.writeFieldBegin('errorCode', TType.STRING, 2)
|
||||||
|
oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode)
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
if self.context is not None:
|
||||||
|
oprot.writeFieldBegin('context', TType.MAP, 3)
|
||||||
|
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context))
|
||||||
|
for kiter7, viter8 in self.context.items():
|
||||||
|
oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
|
||||||
|
oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
|
||||||
|
oprot.writeMapEnd()
|
||||||
|
oprot.writeFieldEnd()
|
||||||
|
oprot.writeFieldStop()
|
||||||
|
oprot.writeStructEnd()
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
if self.message is None:
|
||||||
|
raise TProtocolException(message='Required field message is unset!')
|
||||||
|
return
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return repr(self)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
L = ['%s=%r' % (key, value)
|
||||||
|
for key, value in self.__dict__.items()]
|
||||||
|
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
|
||||||
|
|
||||||
|
class PBUserException(TException):
    """
    Attributes:
     - message
     - errorCode
     - context

    """


    def __init__(self, message=None, errorCode=None, context=None,):
        super(PBUserException, self).__setattr__('message', message)
        super(PBUserException, self).__setattr__('errorCode', errorCode)
        super(PBUserException, self).__setattr__('context', context)

    def __setattr__(self, *args):
        raise TypeError("can't modify immutable instance")

    def __delattr__(self, *args):
        raise TypeError("can't modify immutable instance")

    def __hash__(self):
        return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, ))

    @classmethod
    def read(cls, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None:
            return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec])
        iprot.readStructBegin()
        message = None
        errorCode = None
        context = None
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    context = {}
                    (_ktype10, _vtype11, _size9) = iprot.readMapBegin()
                    for _i13 in range(_size9):
                        _key14 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val15 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        context[_key14] = _val15
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        return cls(
            message=message,
            errorCode=errorCode,
            context=context,
        )

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('PBUserException')
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
            oprot.writeFieldEnd()
        if self.errorCode is not None:
            oprot.writeFieldBegin('errorCode', TType.STRING, 2)
            oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode)
            oprot.writeFieldEnd()
        if self.context is not None:
            oprot.writeFieldBegin('context', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context))
            for kiter16, viter17 in self.context.items():
                oprot.writeString(kiter16.encode('utf-8') if sys.version_info[0] == 2 else kiter16)
                oprot.writeString(viter17.encode('utf-8') if sys.version_info[0] == 2 else viter17)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.message is None:
            raise TProtocolException(message='Required field message is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(PBServiceException)
PBServiceException.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'message', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'errorCode', 'UTF8', None, ),  # 2
    (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
all_structs.append(PBUserException)
PBUserException.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'message', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'errorCode', 'UTF8', None, ),  # 2
    (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
fix_spec(all_structs)
del all_structs
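
A note on these generated exception types: `__setattr__` raises `TypeError`, so instances are immutable and every field must be supplied at construction time. This is the behavior the client script below patches around when it mentions "immutable instance" errors. A minimal sketch of raising and catching one, assuming the gen_py import layout used elsewhere in this commit:

    from pangramia.yt.exceptions.ttypes import PBUserException

    exc = PBUserException(
        message="Unknown account",
        errorCode="ACCOUNT_NOT_FOUND",
        context={"accountId": "default"},
    )
    try:
        exc.errorCode = "OTHER"   # rejected: generated exceptions are frozen
    except TypeError:
        pass                      # can't modify immutable instance
    raise exc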
@ -0,0 +1,166 @@
#!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

import sys
import pprint
if sys.version_info[0] > 2:
    from urllib.parse import urlparse
else:
    from urlparse import urlparse
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.tokens_ops.ttypes import *

if len(sys.argv) <= 1 or sys.argv[1] == '--help':
    print('')
    print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
    print('')
    print('Functions:')
    print('  JobTokenData getOrRefreshTokenWithReport(string accountId, string oldUrl, JobState status, string details, string jobId, TokenUpdateMode updateType, string url)')
    print('  JobTokenData getOrRefreshToken(string accountId, TokenUpdateMode updateType, string url)')
    print('  JobTokenData getLatestToken(string accountId)')
    print('  JobTokenData refreshToken(string accountId, TokenUpdateMode updateType, string url)')
    print('  bool reportState(string url, JobState status, string details, string jobId)')
    print('  bool ping()')
    print('  bool reportError(string message, details)')
    print('  void shutdown()')
    print('')
    sys.exit(0)

pp = pprint.PrettyPrinter(indent=2)
host = 'localhost'
port = 9090
uri = ''
framed = False
ssl = False
validate = True
ca_certs = None
keyfile = None
certfile = None
http = False
argi = 1

if sys.argv[argi] == '-h':
    parts = sys.argv[argi + 1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    argi += 2

if sys.argv[argi] == '-u':
    url = urlparse(sys.argv[argi + 1])
    parts = url[1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    else:
        port = 80
    uri = url[2]
    if url[4]:
        uri += '?%s' % url[4]
    http = True
    argi += 2

if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
    framed = True
    argi += 1

if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
    ssl = True
    argi += 1

if sys.argv[argi] == '-novalidate':
    validate = False
    argi += 1

if sys.argv[argi] == '-ca_certs':
    ca_certs = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-keyfile':
    keyfile = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-certfile':
    certfile = sys.argv[argi+1]
    argi += 2

cmd = sys.argv[argi]
args = sys.argv[argi + 1:]

if http:
    transport = THttpClient.THttpClient(host, port, uri)
else:
    if ssl:
        socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
    else:
        socket = TSocket.TSocket(host, port)
    if framed:
        transport = TTransport.TFramedTransport(socket)
    else:
        transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol(transport)
client = YTTokenOpService.Client(protocol)
transport.open()

if cmd == 'getOrRefreshTokenWithReport':
    if len(args) != 7:
        print('getOrRefreshTokenWithReport requires 7 args')
        sys.exit(1)
    pp.pprint(client.getOrRefreshTokenWithReport(args[0], args[1], eval(args[2]), args[3], args[4], eval(args[5]), args[6],))

elif cmd == 'getOrRefreshToken':
    if len(args) != 3:
        print('getOrRefreshToken requires 3 args')
        sys.exit(1)
    pp.pprint(client.getOrRefreshToken(args[0], eval(args[1]), args[2],))

elif cmd == 'getLatestToken':
    if len(args) != 1:
        print('getLatestToken requires 1 args')
        sys.exit(1)
    pp.pprint(client.getLatestToken(args[0],))

elif cmd == 'refreshToken':
    if len(args) != 3:
        print('refreshToken requires 3 args')
        sys.exit(1)
    pp.pprint(client.refreshToken(args[0], eval(args[1]), args[2],))

elif cmd == 'reportState':
    if len(args) != 4:
        print('reportState requires 4 args')
        sys.exit(1)
    pp.pprint(client.reportState(args[0], eval(args[1]), args[2], args[3],))

elif cmd == 'ping':
    if len(args) != 0:
        print('ping requires 0 args')
        sys.exit(1)
    pp.pprint(client.ping())

elif cmd == 'reportError':
    if len(args) != 2:
        print('reportError requires 2 args')
        sys.exit(1)
    pp.pprint(client.reportError(args[0], eval(args[1]),))

elif cmd == 'shutdown':
    if len(args) != 0:
        print('shutdown requires 0 args')
        sys.exit(1)
    pp.pprint(client.shutdown())

else:
    print('Unrecognized method %s' % cmd)
    sys.exit(1)

transport.close()
File diff suppressed because it is too large
@ -0,0 +1 @@
__all__ = ['ttypes', 'constants', 'YTTokenOpService']
@ -0,0 +1,14 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -0,0 +1,21 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
import pangramia.yt.common.ttypes
import pangramia.yt.exceptions.ttypes
import pangramia.base_service.ttypes

from thrift.transport import TTransport
all_structs = []
fix_spec(all_structs)
del all_structs
94 ytdlp-ops-auth/thrift_model/pom.xml Normal file
@ -0,0 +1,94 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.pangramia.yt</groupId>
    <artifactId>thrift-services</artifactId>
    <version>1.6.2-SNAPSHOT</version>

    <properties>
        <thrift.version>0.16.0</thrift.version>
        <java.version>11</java.version>
    </properties>

    <dependencies>
        <!-- Thrift library -->
        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
            <version>${thrift.version}</version>
        </dependency>

        <!-- SLF4J for logging -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.36</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Thrift compiler plugin -->
            <plugin>
                <groupId>org.apache.thrift.tools</groupId>
                <artifactId>maven-thrift-plugin</artifactId>
                <version>0.1.11</version>

                <configuration>
                    <thriftExecutable>/usr/local/bin/thrift</thriftExecutable>
                    <thriftSourceRoot>${project.basedir}</thriftSourceRoot>
                    <generator>java</generator>
                    <outputDirectory>${project.build.directory}/generated-sources/thrift</outputDirectory>
                </configuration>
                <executions>
                    <execution>
                        <id>thrift-sources-java</id>
                        <phase>generate-sources</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>thrift-sources-py</id>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                        <configuration>
                            <generator>py</generator>
                            <outputDirectory>${project.basedir}/gen_py</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <!-- Java compiler plugin -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>${java.version}</source>
                    <target>${java.version}</target>
                </configuration>
            </plugin>

            <!-- Clean plugin to remove gen_py directory -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-clean-plugin</artifactId>
                <version>3.2.0</version>
                <configuration>
                    <filesets>
                        <fileset>
                            <directory>${project.basedir}/gen_py</directory>
                        </fileset>
                    </filesets>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
19 ytdlp-ops-auth/thrift_model/services/base_service.thrift Normal file
@ -0,0 +1,19 @@
namespace py pangramia.base_service
namespace java com.pangramia.base_service

include "../data/common.thrift"
include "../data/exceptions.thrift"

service BaseService {
    // Common health check method
    bool ping() throws (1: exceptions.PBServiceException serviceExp,
                        2: exceptions.PBUserException userExp),

    // Common error reporting
    bool reportError(1: string message,
                     2: map<string, string> details) throws (1: exceptions.PBServiceException serviceExp,
                                                             2: exceptions.PBUserException userExp)

    // Add this to fix AsyncProcessor issues
    oneway void shutdown()
}
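
Every service in this commit extends BaseService, so ping() is the quickest end-to-end check of a deployment. A minimal sketch, assuming a server on localhost:9090 and the framed transport the Python client below defaults to:

    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from pangramia.yt.tokens_ops import YTTokenOpService  # any service extending BaseService

    transport = TTransport.TFramedTransport(TSocket.TSocket('localhost', 9090))
    transport.open()
    client = YTTokenOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))
    assert client.ping()  # raises PBServiceException/PBUserException on failure
    transport.close()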
63 ytdlp-ops-auth/thrift_model/services/yt_admin_ops.thrift Normal file
@ -0,0 +1,63 @@
namespace py pangramia.yt.admin_ops
namespace java com.pangramia.yt.admin_ops

include "../data/common.thrift"
include "../data/exceptions.thrift"
include "base_service.thrift"

// Proxy and Account management
service YTAccountsOpService extends base_service.BaseService {

    // AccountPairs
    bool addAccountPair(1: string accountId, 2: string proxyId, 3: string machineId, 4: common.ProxyData proxyData, 5: optional common.AccountData accountData)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    common.AccountPairWithState getPair(1: string machineId)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    bool pair(1: string accountId, 2: string proxyId, 3: string machineId)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    bool unpair(1: string accountId, 2: string proxyId, 3: string machineId)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    list<common.AccountPairWithState> listAccountPairs(1: optional common.AccountPairState filter) throws (1: exceptions.PBServiceException serviceExp,
                                                                                                           2: exceptions.PBUserException userExp),

    // ManageAccounts
    bool addAccount(1: string accountId, 2: optional common.AccountData accountData) throws (1: exceptions.PBServiceException serviceExp,
                                                                                             2: exceptions.PBUserException userExp),

    bool suspendAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                     2: exceptions.PBUserException userExp),

    bool resumeAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                    2: exceptions.PBUserException userExp),

    bool removeAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                    2: exceptions.PBUserException userExp),

    list<string> listActiveAccounts() throws (1: exceptions.PBServiceException serviceExp,
                                              2: exceptions.PBUserException userExp),

    // ManageProxy
    bool addProxy(1: string proxyId, 2: common.ProxyData proxyData) throws (1: exceptions.PBServiceException serviceExp,
                                                                            2: exceptions.PBUserException userExp),

    bool suspendProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp,
                                                 2: exceptions.PBUserException userExp),

    bool resumeProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp,
                                                2: exceptions.PBUserException userExp),

    bool removeProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp,
                                                2: exceptions.PBUserException userExp),

    list<string> listActiveProxies() throws (1: exceptions.PBServiceException serviceExp,
                                             2: exceptions.PBUserException userExp),
}
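
A hedged sketch of driving the admin service from the generated Python bindings. Host, port, and the identifiers are illustrative assumptions; the ProxyData/AccountData struct fields live in data/common.thrift, which is not shown in this diff:

    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from pangramia.yt.admin_ops import YTAccountsOpService

    transport = TTransport.TFramedTransport(TSocket.TSocket('localhost', 9090))
    transport.open()
    admin = YTAccountsOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))
    admin.pair('acct-1', 'proxy-1', 'machine-1')  # bind an existing account/proxy pair to a machine
    print(admin.listActiveAccounts())
    transport.close()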
36 ytdlp-ops-auth/thrift_model/services/yt_tokens_ops.thrift Normal file
@ -0,0 +1,36 @@
namespace py pangramia.yt.tokens_ops
namespace java com.pangramia.yt.tokens_ops

include "../data/common.thrift"
include "../data/exceptions.thrift"
include "base_service.thrift"

service YTTokenOpService extends base_service.BaseService {

    common.JobTokenData getOrRefreshTokenWithReport ( 1: string accountId,
                                                      2: string oldUrl,
                                                      3: common.JobState status,
                                                      4: optional string details,
                                                      5: optional string jobId,
                                                      6: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
                                                      7: optional string url ) throws (1: exceptions.PBServiceException serviceExp,
                                                                                       2: exceptions.PBUserException userExp)

    common.JobTokenData getOrRefreshToken ( 1: string accountId,
                                            2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
                                            3: optional string url ) throws (1: exceptions.PBServiceException serviceExp,
                                                                             2: exceptions.PBUserException userExp)

    common.JobTokenData getLatestToken (1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                                     2: exceptions.PBUserException userExp),

    common.JobTokenData refreshToken ( 1: string accountId,
                                       2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
                                       3: optional string url ) throws (1: exceptions.PBServiceException serviceExp,
                                                                        2: exceptions.PBUserException userExp)

    bool reportState( 1: string url,
                      2: common.JobState status,
                      3: optional string details,
                      4: optional string jobId) throws (1: exceptions.PBServiceException serviceExp,
                                                        2: exceptions.PBUserException userExp)

}
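
The updateType parameter defaults to TokenUpdateMode.AUTO in the IDL, so a caller only has to supply the account and URL. A minimal sketch of invoking getOrRefreshToken from the generated Python bindings (host, port, and the URL are assumptions; the full client below adds timeouts and error handling):

    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from pangramia.yt.tokens_ops import YTTokenOpService
    from pangramia.yt.common.ttypes import TokenUpdateMode

    transport = TTransport.TFramedTransport(TSocket.TSocket('localhost', 9090))
    transport.open()
    client = YTTokenOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))
    token = client.getOrRefreshToken(
        accountId='default',
        updateType=TokenUpdateMode.AUTO,
        url='https://www.youtube.com/watch?v=dQw4w9WgXcQ',
    )
    print(token.ytdlpCommand)  # JobTokenData carries the ready-to-run yt-dlp command
    transport.close()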
815 ytdlp-ops-auth/ytdlp_ops_client.py Normal file
@ -0,0 +1,815 @@
#!/usr/bin/env python3

from typing import Dict, List, Optional, Any
import argparse
import csv
import datetime
import json
import os
import re
import subprocess
import sys
import time
import uuid
import traceback
import logging
import signal
from pathlib import Path
from tabulate import tabulate
import yt_dlp

def signal_handler(sig: int, frame) -> None:
    """Handle shutdown signals gracefully."""
    logger.info(f"Received signal {sig}, shutting down...")
    # Clean up any resources here
    sys.exit(0)

# Register signal handlers
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

# Import the patch for Thrift exceptions
try:
    import os
    from thrift_exceptions_patch import patch_thrift_exceptions
    # Explicitly call the patch function to ensure it's applied
    patch_thrift_exceptions()
    print("Applied Thrift exceptions patch for compatibility")
    if 'AIRFLOW_HOME' in os.environ:
        print("Running in Airflow environment - patch is essential")
    else:
        print("Not running in Airflow environment, but patch applied anyway for consistency")
except ImportError:
    print("Could not import thrift_exceptions_patch, compatibility may be affected")
    print("If running in Airflow, this may cause 'immutable instance' errors")
except Exception as e:
    print(f"Error applying Thrift exceptions patch: {e}")

# --- Python Path Setup ---
# Ensure the script can find necessary modules, especially Thrift-generated code.
# Assumes the script is run from the project root or the path is adjusted accordingly.
project_root = Path(__file__).parent.absolute()
gen_py_dir = project_root / "thrift_model" / "gen_py"

# Add project root to sys.path (needed for the 'pangramia' symlink)
if str(project_root) not in sys.path:
    sys.path.insert(0, str(project_root))

# Verify paths for debugging
# print("Project Root:", project_root)
# print("Gen Py Dir:", gen_py_dir)
# print("Sys Path:", sys.path)
# --- End Python Path Setup ---

from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol

try:
    from pangramia.yt.tokens_ops import YTTokenOpService
    from pangramia.yt.common.ttypes import JobTokenData, TokenUpdateMode, JobState
    from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
except ImportError as e:
    print(f"Error importing Thrift-generated modules: {e}")
    print("Please ensure you have run './generate-thrift.py' successfully from the project root.")
    print(f"Current sys.path includes: {gen_py_dir}")
    sys.exit(1)

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler('ytdlp_ops_client.log')
    ]
)
logger = logging.getLogger(__name__)

def get_info_json(token_data, video_id=None):
    """Get infoJson from token_data, falling back to info.json files saved on disk.

    Note: the disk fallback needs the video ID, so it is an explicit (optional)
    parameter here; the original draft referenced video_id without defining it,
    and the fallback code sat unreachably after an early return.
    """
    if hasattr(token_data, 'infoJson') and token_data.infoJson and token_data.infoJson != "{}":
        logger.info(f"Using infoJson from server response ({len(token_data.infoJson)} bytes)")
        return token_data.infoJson

    logger.warning("infoJson attribute missing or empty in token_data; falling back to disk lookup")

    import glob

    possible_paths = []
    if video_id:
        # Try multiple possible file paths
        possible_paths = [
            os.path.join('context-data', f"info_json_{video_id}.json"),
            os.path.join('context-data', f"info_{video_id}.json"),
            os.path.join('.', f"info_json_{video_id}.json"),
            os.path.join('.', 'context-data', f"info_json_{video_id}.json")
        ]

        # Add current directory to possible paths
        for pattern in [f"info_json_{video_id}.json", f"info_json_{video_id}_*.json"]:
            possible_paths.extend(glob.glob(pattern))
            possible_paths.extend(glob.glob(os.path.join('context-data', pattern)))

    # Remove duplicates while preserving order
    seen = set()
    possible_paths = [p for p in possible_paths if not (p in seen or seen.add(p))]

    # Try each path
    for info_json_path in possible_paths:
        if os.path.exists(info_json_path):
            logger.info(f"Found info.json file: {info_json_path}")
            try:
                with open(info_json_path, 'r', encoding='utf-8') as f:
                    content = f.read()
                if content and len(content) > 10:  # Basic validation
                    logger.info(f"Successfully read info.json from {info_json_path} ({len(content)} bytes)")
                    # Add the infoJson attribute to the token_data object for future use
                    try:
                        setattr(token_data, 'infoJson', content)
                        logger.info(f"Added infoJson attribute to token_data with length: {len(content)}")
                    except Exception as e:
                        logger.error(f"Error adding infoJson attribute to token_data: {e}")
                    return content
            except Exception as e:
                logger.error(f"Error reading info.json file {info_json_path}: {e}")

    # If we get here, try to find any info_json file in the context directory
    try:
        context_dir = 'context-data'
        info_json_files = glob.glob(os.path.join(context_dir, "info_json_*.json"))
        if info_json_files:
            # Sort by modification time, newest first
            info_json_files.sort(key=os.path.getmtime, reverse=True)
            newest_file = info_json_files[0]
            logger.info(f"Found newest info.json file: {newest_file}")
            with open(newest_file, 'r', encoding='utf-8') as f:
                content = f.read()
            if content and len(content) > 10:  # Basic validation
                logger.info(f"Successfully read info.json from {newest_file} ({len(content)} bytes)")
                # Add the infoJson attribute to the token_data object for future use
                try:
                    setattr(token_data, 'infoJson', content)
                    logger.info(f"Added infoJson attribute to token_data with length: {len(content)}")
                except Exception as e:
                    logger.error(f"Error adding infoJson attribute to token_data: {e}")
                return content
    except Exception as e:
        logger.error(f"Error searching for info.json files: {e}")

    # Last resort: make sure token_data at least carries an empty infoJson attribute
    if not hasattr(token_data, 'infoJson'):
        try:
            # Try using __dict__ first; this also works on patched/frozen instances
            if hasattr(token_data, '__dict__'):
                token_data.__dict__['infoJson'] = "{}"
                logger.info("Added infoJson attribute to token_data.__dict__")
            else:
                setattr(token_data, 'infoJson', "{}")
                logger.info("Added empty infoJson attribute to token_data using setattr")
        except Exception as e:
            logger.error(f"Error adding infoJson attribute to token_data: {e}")
            # Create a new object with the same attributes plus infoJson
            try:
                from pangramia.yt.common.ttypes import JobTokenData
                new_token = JobTokenData()
                # Copy all attributes
                for attr in dir(token_data):
                    if not attr.startswith('__') and not callable(getattr(token_data, attr)):
                        try:
                            setattr(new_token, attr, getattr(token_data, attr))
                        except Exception:
                            pass
                # Add infoJson
                new_token.infoJson = "{}"
                logger.info("Created new token object with infoJson attribute")
                token_data = new_token  # note: rebinds the local name only
            except Exception as e2:
                logger.error(f"Error creating new token object: {e2}")

    logger.warning("Could not find valid info.json data")
    return "{}"

def is_valid_json(json_str):
    """Check if a string is valid JSON and not empty"""
    if not json_str or json_str == "{}" or json_str == "":
        logger.warning("Empty JSON string received")
        return False

    try:
        data = json.loads(json_str)

        # Check if it's an empty object
        if isinstance(data, dict) and not data:
            logger.warning("Empty JSON object received")
            return False

        # Check if it has an error field
        if isinstance(data, dict) and ('error' in data or 'errorCode' in data):
            # It's valid JSON but contains an error
            logger.warning(f"JSON contains error: {data.get('error', 'Unknown error')} (code: {data.get('errorCode', 'none')})")
            return True

        # Check if it has at least some basic fields
        if isinstance(data, dict) and ('id' in data or 'title' in data):
            logger.info(f"Valid JSON with video data: {data.get('title', 'Unknown title')}")
            return True

        # Check if it has token_data which is important
        if isinstance(data, dict) and 'token_data' in data and data['token_data']:
            logger.info("Valid JSON with token_data")
            return True

        logger.warning("JSON is valid but missing expected fields")
        return True
    except json.JSONDecodeError as e:
        logger.warning(f"Invalid JSON: {e}")
        return False
    except Exception as e:
        logger.warning(f"Unexpected error validating JSON: {e}")
        return False

def extract_video_id(url: str) -> Optional[str]:
    """Extract video ID from a YouTube URL."""
    # If it's already a video ID
    if re.match(r'^[a-zA-Z0-9_-]{11}$', url):
        return url

    # Handle youtu.be URLs
    youtu_be_match = re.search(r'youtu\.be/([a-zA-Z0-9_-]{11})', url)
    if youtu_be_match:
        return youtu_be_match.group(1)

    # Handle youtube.com URLs
    youtube_match = re.search(r'(?:youtube\.com/(?:watch\?v=|embed/|v/)|youtube\.com/.*[?&]v=)([a-zA-Z0-9_-]{11})', url)
    if youtube_match:
        return youtube_match.group(1)

    # Handle shorts URLs
    shorts_match = re.search(r'youtube\.com/shorts/([a-zA-Z0-9_-]{11})', url)
    if shorts_match:
        return shorts_match.group(1)

    return None

def list_available_formats(url: str, args: argparse.Namespace) -> Optional[List[Dict[str, Any]]]:
    """List available formats for a YouTube video."""
    ydl_opts = {
        'quiet': not args.no_quiet if hasattr(args, 'no_quiet') else True,
        'no_warnings': True,
        'skip_download': True,
        'extract_flat': True,
    }

    try:
        with yt_dlp.YoutubeDL(ydl_opts) as ydl:
            info = ydl.extract_info(url, download=False)

            if not info:
                logger.error("Could not retrieve video information")
                return None

            formats = info.get('formats', [])

            if not formats:
                logger.warning("No formats available for this video")
                return None

            # Create a table of available formats
            format_table = []
            for f in formats:
                format_table.append({
                    'format_id': f.get('format_id', 'unknown'),
                    'ext': f.get('ext', 'unknown'),
                    'resolution': f.get('resolution', 'unknown'),
                    'fps': f.get('fps', 'unknown'),
                    'vcodec': f.get('vcodec', 'unknown'),
                    'acodec': f.get('acodec', 'unknown'),
                    'filesize': f.get('filesize', 'unknown'),
                    'format_note': f.get('format_note', '')
                })

            return format_table

    except Exception as e:
        logger.error(f"Error listing formats: {e}")
        return None

def suggest_best_formats(formats: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Suggest best formats based on resolution and codec."""

    best = []
    seen_resolutions = set()

    # Prioritize higher resolutions and certain codecs
    preferred_codecs = ["vp9", "avc1", "av01"]  # In order of preference

    for f in sorted(formats, key=lambda x: (
        -int(x.get('height', 0) or 0),  # Higher resolution first
        preferred_codecs.index(x.get('vcodec', '').split('.')[0]) if x.get('vcodec', '').split('.')[0] in preferred_codecs else float('inf'),  # Preferred codecs
        x.get('filesize') if isinstance(x.get('filesize'), (int, float)) else 0  # Smaller filesize; non-numeric 'unknown' treated as 0 to keep the sort key comparable
    )):
        resolution = f.get('resolution')
        if resolution and resolution not in seen_resolutions:
            best.append(f)
            seen_resolutions.add(resolution)
            if len(best) >= 3:  # Suggest up to 3 formats
                break
    return best

def load_info_json(path: str) -> Optional[Dict[str, Any]]:
    """Load and validate info.json file."""
    try:
        path = Path(path).resolve()
        if not path.exists():
            logger.error(f"Info.json file not found: {path}")
            return None

        with open(path, 'r') as f:
            data = json.load(f)

        # Basic validation
        if not isinstance(data, dict):
            logger.error("Invalid info.json format: not a JSON object")
            return None

        if 'id' not in data:
            logger.warning("Info.json missing video ID")

        return data

    except Exception as e:
        logger.error(f"Error loading info.json: {e}")
        return None

def save_info_json(info_json: str, video_id: str, context_dir: str) -> Optional[str]:
    """Save info.json to disk and return the saved path, or None on failure."""
    try:
        # Ensure context directory exists
        Path(context_dir).mkdir(parents=True, exist_ok=True)

        # Create filename with video ID and timestamp
        timestamp = int(time.time())
        output_path = Path(context_dir) / f"info_json_{video_id}_{timestamp}.json"

        # Write the file
        with open(output_path, 'w') as f:
            f.write(info_json)

        # Also create a symlink or copy to the standard name for compatibility
        standard_path = Path(context_dir) / f"info_json_{video_id}.json"
        try:
            # Try to create a symlink first (more efficient)
            if os.path.exists(standard_path):
                os.remove(standard_path)
            os.symlink(output_path, standard_path)
        except (OSError, AttributeError):
            # If symlink fails (e.g., on Windows), make a copy
            with open(standard_path, 'w') as f:
                f.write(info_json)

        # Save latest.json
        latest_path = Path(context_dir) / "latest.json"
        with open(latest_path, 'w') as f:
            f.write(info_json)

        logger.info(f"Successfully saved info.json to {output_path} and latest.json to {latest_path}")
        return str(output_path)
    except Exception as e:
        logger.error(f"Failed to save info.json: {e}")
        logger.error(traceback.format_exc())
        return None  # was `return False`, which contradicted the Optional[str] signature

def main():
    # Create main parser
    parser = argparse.ArgumentParser(description='''YtdlpOpsService Client

This client connects to the YTDLP Operations Server to generate tokens for YouTube videos.
The server performs SOCKS5 proxy connection testing with a 9-second timeout for early detection
of proxy issues. If a proxy connection fails, the server will immediately stop token generation
and return an error instead of trying other clients.''')

    # Add global options
    parser.add_argument('--host', default=os.getenv('YTDLP_HOST', 'localhost'),
                        help='Server host (default: localhost or YTDLP_HOST env)')
    parser.add_argument('--port', type=int, default=int(os.getenv('YTDLP_PORT', '9090')),
                        help='Server port (default: 9090 or YTDLP_PORT env)')
    parser.add_argument('--timeout', type=int, default=30000,
                        help='Timeout in milliseconds (default: 30000)')
    parser.add_argument('--timeout-sec', type=int, default=30,
                        help='Timeout in seconds (default: 30, overrides --timeout if provided)')
    parser.add_argument('--context-dir', default='.', help='Context directory to save info.json (default: .)')
    parser.add_argument('--load-info-json', help='Path to existing info.json file to load')
    parser.add_argument('--framed-transport', action='store_true',
                        help='Use TFramedTransport instead of TBufferedTransport for handling very large messages')
    parser.add_argument('--force-framed-transport', action='store_true',
                        help='Force the use of TFramedTransport (recommended for large messages)')

    # Create subparsers for commands
    subparsers = parser.add_subparsers(dest='command', required=True, help='Commands')

    # getToken command
    get_token_parser = subparsers.add_parser('getToken', help='Get token for a YouTube URL',
                                             description='''Get token for a YouTube URL

This command connects to the server to generate tokens for a YouTube video.
The server will test any configured SOCKS5 proxy with a 9-second timeout.
If the proxy connection fails, token generation will stop immediately with an error.''')
    get_token_parser.add_argument('--url', required=True,
                                  help='YouTube URL to process')
    # --format removed, format/quality is determined by the server or embedded in the command
    get_token_parser.add_argument('--account_id', default='default',
                                  help='Account ID (default: default)')
    get_token_parser.add_argument('--list-formats', action='store_true',
                                  help='List available formats for the video')

    args = parser.parse_args()

    # Handle info.json loading
    if args.load_info_json:
        info_json = load_info_json(args.load_info_json)
        if info_json:
            print("Loaded info.json:")
            print(json.dumps(info_json, indent=2))
        return

    transport = None
    try:
        # Ensure context directory exists and is writable
        try:
            Path(args.context_dir).mkdir(parents=True, exist_ok=True)
            test_file = Path(args.context_dir) / "test.txt"
            test_file.touch()
            test_file.unlink()
        except Exception as e:
            logger.error(f"Could not access context directory {args.context_dir}: {e}")
            print(f"Error: Could not access context directory {args.context_dir}")
            sys.exit(1)

        try:
            # Check if we should use framed transport for very large messages
            use_framed_transport = args.framed_transport or args.force_framed_transport or os.environ.get('USE_FRAMED_TRANSPORT', '').lower() in ('1', 'true', 'yes')
            logger.debug(f"Using framed transport: {use_framed_transport}")

            # Create socket with configurable timeout, force IPv4
            socket = TSocket.TSocket(args.host, args.port, socket_family=2)  # AF_INET = 2 for IPv4

            # Use timeout-sec if provided, otherwise use timeout in milliseconds
            if args.timeout_sec is not None:
                socket.setTimeout(args.timeout_sec * 1000)  # Convert seconds to milliseconds
                logger.debug(f"Using timeout of {args.timeout_sec} seconds")
            else:
                socket.setTimeout(args.timeout)  # Use timeout from CLI in milliseconds
                logger.debug(f"Using timeout of {args.timeout} milliseconds")

            # Always use TFramedTransport to match the server
            transport = TTransport.TFramedTransport(socket)
            logger.debug("Using TFramedTransport for large messages")

            protocol = TBinaryProtocol.TBinaryProtocol(transport)
            client = YTTokenOpService.Client(protocol)

            logger.info(f"Attempting to connect to server at {args.host}:{args.port}...")
            try:
                transport.open()
                logger.info("Successfully connected to server")
            except TTransport.TTransportException as e:
                logger.error(f"Connection failed: {str(e)}")
                print(f"Error: Could not connect to server at {args.host}:{args.port}")
                print(f"Reason: {str(e)}")
                sys.exit(1)

            # Add connection test
            try:
                client.ping()
                logger.info("Server connection test successful")
            except Exception as e:
                logger.error(f"Server connection test failed: {e}")
                raise
        except TTransport.TTransportException as e:
            logger.error(f"Connection failed: {str(e)}")
            logger.error(f"Could not connect to {args.host}:{args.port}")
            sys.exit(1)
        except Exception as e:
            logger.error(f"Connection failed: {str(e)}")
            logger.error(traceback.format_exc())
            sys.exit(1)

        if args.command == 'getToken':
            url = args.url
            # format_codes removed

            # Handle format listing
            if args.list_formats:
                formats = list_available_formats(url, args)
                if formats:
                    print("\nAvailable formats:")
                    print(tabulate(formats, headers="keys", showindex=True))  # Show index for format selection

                    # Suggest best formats based on resolution
                    best_formats = suggest_best_formats(formats)
                    if best_formats:
                        print("\nSuggested formats:")
                        print(tabulate(best_formats, headers="keys"))
                else:
                    print("No formats available or could not retrieve format information")
                return
        elif getattr(args, 'youtube_url', None):
            # Legacy invocation path; --youtube-url is no longer defined on the
            # parser, so guard with getattr to avoid an AttributeError.
            url = args.youtube_url
            print("Warning: --youtube-url is deprecated, use 'getToken --url' instead")
        else:
            print("Please provide a YouTube URL using 'getToken --url' command")
            return

        # Get token for URL
        try:
            logger.info(f"Requesting token for URL: {url}")
            token_data = client.getOrRefreshToken(
                accountId=args.account_id,
                updateType=TokenUpdateMode.AUTO,
                url=url
            )

            if not token_data:
                logger.error("Received empty token data from server")
                print("Error: Received empty token data from server")
                sys.exit(1)

            # Validate token data
            if not hasattr(token_data, 'ytdlpCommand') or not token_data.ytdlpCommand:
                logger.error("Token data missing required ytdlpCommand")
                print("Error: Token data missing required ytdlpCommand")
                sys.exit(1)

            logger.info("Successfully received token data from server")

            # Log all attributes of token_data for debugging
            token_attrs = [attr for attr in dir(token_data) if not attr.startswith('__') and not callable(getattr(token_data, attr))]
            logger.debug(f"Received token_data attributes: {token_attrs}")

            # Handle case where token_data is a dict-like object
            if hasattr(token_data, 'items'):
                # Convert to dict if needed
                token_dict = dict(token_data.items())
                logger.debug(f"Token data as dict: {token_dict}")

                # If we have JSON data directly in the response
                if isinstance(token_dict.get('infoJson', None), str):
                    received_info_json = token_dict['infoJson']
                elif isinstance(token_dict.get('data', None), (dict, str)):
                    # Try to use the data field if it exists
                    data = token_dict['data']
                    if isinstance(data, str):
                        received_info_json = data
                    else:
                        received_info_json = json.dumps(data)
                else:
                    # Create info.json from available fields
                    info_data = {
                        "id": token_dict.get('id', extract_video_id(url)),
                        "title": token_dict.get('title', ''),
                        "formats": token_dict.get('formats', []),
                        "timestamp": int(time.time()),
                        "ytdlp_command": token_dict.get('ytdlpCommand', '')
                    }
                    received_info_json = json.dumps(info_data)
            else:
                # Handle case where token_data is a regular object
                received_info_json = getattr(token_data, 'infoJson', None)

            if received_info_json:
                logger.debug(f"Received info.json data ({len(received_info_json)} bytes)")
                if len(received_info_json) > 100:
                    logger.debug(f"Preview: {received_info_json[:100]}...")
            else:
                logger.warning("No valid info.json data found in response")

        except PBServiceException as e:
            logger.error(f"Service exception: {e.message}")
            if hasattr(e, 'errorCode'):
                if e.errorCode == "BOT_DETECTED":
                    print(f"Error: {e.message}")
                    print("\nYouTube has detected bot activity. Authentication is required.")

                    # Print suggestions if available
                    if hasattr(e, 'context') and e.context and 'suggestions' in e.context:
                        print("\nSuggestions:")
                        for i, suggestion in enumerate(e.context['suggestions'], 1):
                            print(f"  {i}. {suggestion}")
                    else:
                        print("\nTry:")
                        print("  1. Use --cookies-from-browser to pass authentication cookies")
                        print("  2. Export cookies from a logged-in browser session")
                        print("  3. Try a different client type (ios, android, mweb)")
                        print("  4. Use a different proxy or IP address")
                        print("  5. Try again later")

                    sys.exit(1)
                elif e.errorCode in ["SOCKS5_CONNECTION_FAILED", "SOCKS5_TIMEOUT", "SOCKS5_CONNECTION_REFUSED",
                                     "SOCKS5_CONNECTION_TIMEOUT", "SOCKS5_HOST_NOT_FOUND", "SOCKS5_NETWORK_UNREACHABLE"]:
                    print(f"Error: {e.message}")
                    print("\nSOCKS5 proxy connection failed. Please check your proxy settings.")

                    # Provide more specific guidance based on error code
                    if e.errorCode == "SOCKS5_TIMEOUT" or e.errorCode == "SOCKS5_CONNECTION_TIMEOUT":
                        print("The proxy server did not respond within the timeout period (9 seconds).")
                        print("This could indicate network congestion or a proxy server that's overloaded.")
                    elif e.errorCode == "SOCKS5_CONNECTION_REFUSED":
                        print("The proxy server actively refused the connection.")
                        print("This usually means the proxy server is not running or is not accepting connections on the specified port.")
                    elif e.errorCode == "SOCKS5_HOST_NOT_FOUND":
                        print("The proxy host could not be resolved.")
                        print("Please check that the hostname is correct and your DNS is working properly.")
                    elif e.errorCode == "SOCKS5_NETWORK_UNREACHABLE":
                        print("The network containing the proxy server is unreachable.")
                        print("This could indicate network routing issues or firewall restrictions.")

                    print("\nPossible solutions:")
                    print("1. Try using a different proxy server")
                    print("2. Check if the proxy server is running and accessible")
                    print("3. Verify your network connection and firewall settings")
                    print("4. If using a remote proxy, check if it's accessible from your location")

                    # Exit with a specific error code for proxy failures
                    sys.exit(2)
                elif e.errorCode == "GLOBAL_TIMEOUT":
                    print(f"Error: {e.message}")
                    print("\nThe server timed out while processing your request.")
                    print("This could be due to:")
                    print("1. Slow network connection")
                    print("2. Server overload")
                    print("3. Complex video that takes too long to process")
                    print("\nTry again later or with a different video.")
                    sys.exit(3)
                elif e.errorCode == "CLIENT_TIMEOUT":
                    print(f"Error: {e.message}")
                    print("\nA client-specific timeout occurred while processing your request.")
                    print("The server has stopped processing to avoid wasting resources.")
                    print("\nPossible solutions:")
                    print("1. Try again later when network conditions improve")
                    print("2. Try a different video")
                    print("3. Check your internet connection")
                    sys.exit(3)
                else:
                    print(f"Error: {e.message}")
            else:
                print(f"Error: {e.message}")
            return
        except PBUserException as e:
            logger.error(f"User exception: {e.message}")
            print(f"Error: {e.message}")
            return
        except Exception as e:
            logger.error(f"Unexpected error: {str(e)}")
            logger.error(traceback.format_exc())
            print(f"Unexpected error: {str(e)}")
            sys.exit(1)

        # Log the entire token_data object for debugging AFTER potential exceptions
        logger.debug(f"Processing received token_data: {token_data}")

        # Check if valid infoJson was received from the server
        info_json = None
        if hasattr(token_data, 'infoJson') and token_data.infoJson and token_data.infoJson != "{}":
            if is_valid_json(token_data.infoJson):
                logger.debug("Valid info.json received from server.")
                info_json = token_data.infoJson
            else:
                logger.warning("Received infoJson from server, but it is not valid JSON or is empty.")
        else:
            logger.warning("Valid info.json was NOT received from the server.")

        # Proceed only if we have valid info_json
        if info_json:
            # Save info.json if present in the server response
            video_id = extract_video_id(url)
            if not video_id:
                logger.warning(f"Could not extract video ID from URL: {url}")
                video_id = f"unknown_{int(time.time())}"

            try:
                info_data = json.loads(info_json)
                # Check if it contains an error
                if isinstance(info_data, dict) and ('error' in info_data or 'errorCode' in info_data):
                    error_msg = info_data.get('error', 'Unknown error')
                    error_code = info_data.get('errorCode', 'UNKNOWN_ERROR')
                    logger.warning(f"infoJson contains error: {error_msg} (code: {error_code})")

                    # If it's a bot detection error, raise appropriate exception
                    if error_code == 'BOT_DETECTED' or 'bot' in error_msg.lower() or 'sign in' in error_msg.lower():
                        raise PBUserException(
                            message=f"Bot detection triggered: {error_msg}",
                            errorCode="BOT_DETECTION",
                            context={
                                "video_id": extract_video_id(url),
                                "url": url,
                                "suggestions": info_data.get('suggestions', ["Try different client", "Use proxy", "Wait and retry later"])
                            }
                        )
            except json.JSONDecodeError as e:
                # This case should ideally not happen due to is_valid_json check, but handle defensively
                logger.error(f"Invalid JSON received despite initial check: {e}")
                print("Error: Received invalid JSON data from server.")
                info_json = None  # Ensure we don't proceed

        # If info_json is still None after checks, handle the failure case
        if not info_json:
            logger.error("Failed to obtain valid info.json from the server.")
            print("Error: No valid video information (info.json) was received from the server.")
            # Optionally, print the raw ytdlp command if available
            if hasattr(token_data, 'ytdlpCommand') and token_data.ytdlpCommand:
                print("\nRaw command from server (may be incomplete or require info.json):")
                print(token_data.ytdlpCommand)
            sys.exit(1)  # Exit with error

        # --- We have valid info_json, proceed with saving and command generation ---
        try:
            info_data = json.loads(info_json)  # We know this is valid now

            # Check if it's an error response embedded in the JSON
            if isinstance(info_data, dict) and "error" in info_data:
                logger.error(f"Received error report from server: {info_json}")

                # Check if this is a bot detection error
                if (info_data.get('errorCode') == "BOT_DETECTED" or
                        "bot" in info_data.get('message', '').lower() or
                        "sign in to confirm" in info_data.get('message', '').lower() or
                        "sign in to confirm" in info_data.get('error', '').lower() or
                        "unusual traffic" in info_data.get('message', '').lower() or
                        "captcha" in info_data.get('message', '').lower() or
                        info_data.get('requires_auth') == True):

                    logger.error("Bot detection error detected in info.json")
                    # Raise PBServiceException for bot detection
                    raise PBServiceException(
                        message=f"Bot detection triggered: {info_data.get('message', 'Authentication required')}",
                        errorCode="BOT_DETECTED",
                        context={
                            "video_id": video_id,
                            "url": url,
                            "requires_auth": True,
                            "info_data": info_data,
                            "suggestions": info_data.get('suggestions', [
                                "Use --cookies-from-browser to pass authentication cookies",
                                "Export cookies from a logged-in browser session",
                                "Try a different client type (ios, android, mweb)",
                                "Use a different proxy or IP address"
                            ])
                        }
                    )
                else:
                    # Raise PBServiceException for other errors
                    raise PBServiceException(
                        message=f"Error extracting video info: {info_data.get('error', 'Unknown error')}",
                        errorCode=info_data.get('errorCode', "EXTRACTION_FAILED"),
                        context={"video_id": video_id, "url": url, "info_data": info_data}
                    )

            # If it's a valid response, process it
            if 'title' in info_data or 'id' in info_data:
                print(f"Video info retrieved: {info_data.get('title', 'Unknown title')}")
                saved_path = save_info_json(info_json, video_id, args.context_dir)
                if saved_path:
                    print(f"info.json saved to: {saved_path}")

                    # Create simpler base command using only the saved info.json and proxy
                    base_cmd = f"yt-dlp --load-info-json \"{saved_path}\""  # Quote the path
                    if hasattr(token_data, 'socks') and token_data.socks:
                        if token_data.socks.startswith(('socks5://', 'ss://')):
                            # Quote the proxy URL as well
                            base_cmd += f" --proxy \"{token_data.socks}\""

                    # Show format listing command
                    print("\nTo list available formats:")
                    format_cmd = f"{base_cmd} -F"
                    print(format_cmd)

                    # Show download command (format is usually embedded in info.json or determined by yt-dlp)
                    simplified_cmd = f"{base_cmd} --simulate"

                    print("\nTo download (with --simulate to preview):")
                    print(simplified_cmd)
                    print("\nRemove --simulate to actually download")
                else:
                    logger.error("Failed to save info.json file")
                    print("Failed to save info.json file")
            else:
                logger.warning("info.json appears to be valid JSON but missing expected video fields")
                print("Error: Received incomplete or invalid video data")
                print("This usually indicates an authentication or access issue")
                sys.exit(1)
        except Exception as e:  # Catch errors during saving or command generation
            logger.error(f"Error processing valid info.json: {str(e)}")
    finally:
        if transport:
            transport.close()

if __name__ == "__main__":
    main()
60 ytdlp-ops-auth/ytdlp_utils.py Normal file
@ -0,0 +1,60 @@
import json
import logging
import re

logger = logging.getLogger(__name__)

def get_info_json(token_data):
    """Get infoJson from token_data"""
    if hasattr(token_data, 'infoJson') and token_data.infoJson:
        return token_data.infoJson

    # Log the issue for debugging
    logger.warning("infoJson attribute missing or empty in token_data")
    logger.info(f"Available attributes: {[attr for attr in dir(token_data) if not attr.startswith('__') and not callable(getattr(token_data, attr))]}")

    return "{}"

def is_valid_json(json_str):
    """Check if a string is valid JSON and not empty"""
    if not json_str or json_str == "{}" or json_str == "":
        return False

    try:
        data = json.loads(json_str)
        # Check if it's an empty object
        if isinstance(data, dict) and not data:
            return False
        # Check if it has at least some basic fields
        if isinstance(data, dict) and ('id' in data or 'title' in data):
            return True
        # Check if it has token_data which is important
        if isinstance(data, dict) and 'token_data' in data and data['token_data']:
            return True
        return True
    except Exception as e:
        logger.warning(f"Invalid JSON: {e}")
        return False

def extract_video_id(url):
    """Extract video ID from a YouTube URL"""
    # If it's already a video ID
    if re.match(r'^[a-zA-Z0-9_-]{11}$', url):
        return url

    # Handle youtu.be URLs
    youtu_be_match = re.search(r'youtu\.be/([a-zA-Z0-9_-]{11})', url)
    if youtu_be_match:
        return youtu_be_match.group(1)

    # Handle youtube.com URLs
    youtube_match = re.search(r'(?:youtube\.com/(?:watch\?v=|embed/|v/)|youtube\.com/.*[?&]v=)([a-zA-Z0-9_-]{11})', url)
    if youtube_match:
        return youtube_match.group(1)

    # Handle shorts URLs
    shorts_match = re.search(r'youtube\.com/shorts/([a-zA-Z0-9_-]{11})', url)
    if shorts_match:
        return shorts_match.group(1)

    return None
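
These helpers duplicate logic from ytdlp_ops_client.py, presumably so DAGs can import them without pulling in the full client. A quick usage sketch (the URLs are illustrative):

    from ytdlp_utils import extract_video_id, is_valid_json

    assert extract_video_id('https://youtu.be/dQw4w9WgXcQ') == 'dQw4w9WgXcQ'
    assert extract_video_id('https://www.youtube.com/shorts/dQw4w9WgXcQ') == 'dQw4w9WgXcQ'
    assert extract_video_id('not a url') is None
    assert is_valid_json('{"id": "dQw4w9WgXcQ", "title": "demo"}')
    assert not is_valid_json('{}')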