Update dags for multi worker

This commit is contained in:
parent 4455a10726
commit 182deac14e
66 Dockerfile
@@ -3,17 +3,6 @@ ENV AIRFLOW_VERSION=2.10.5

WORKDIR /app

# Copy necessary files from the ytdlp-ops-auth subdirectory (present in the build context) into /app
# setup.py is removed as we are not using 'pip install -e' anymore
COPY ytdlp-ops-auth/generate-thrift.py ytdlp-ops-auth/requirements.txt /app/
COPY ytdlp-ops-auth/thrift_model/ /app/thrift_model/
COPY ytdlp-ops-auth/ytdlp_utils.py /app/
COPY ytdlp-ops-auth/thrift_exceptions_patch.py /app/
COPY ytdlp-ops-auth/ytdlp_ops_client.py /app/

# Set Python path relative to the WORKDIR /app
ENV PYTHONPATH=/app:${PYTHONPATH}

# Install system dependencies
USER root
RUN apt-get update && \
@@ -22,10 +11,24 @@ RUN apt-get update && \
    mc \
    jq \
    build-essential \
    python3-dev && \
    python3-dev \
    wget \
    tar \
    xz-utils && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /usr/share/man /usr/share/doc /usr/share/doc-base

# Download and install custom FFmpeg build from yt-dlp's recommended source
RUN FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linux64-gpl.tar.xz" && \
    echo "Downloading FFmpeg from $FFMPEG_URL" && \
    wget -qO /tmp/ffmpeg.tar.xz "$FFMPEG_URL" && \
    mkdir -p /opt/ffmpeg && \
    tar -xf /tmp/ffmpeg.tar.xz -C /opt/ffmpeg --strip-components=1 && \
    ln -sf /opt/ffmpeg/bin/ffmpeg /usr/local/bin/ffmpeg && \
    ln -sf /opt/ffmpeg/bin/ffprobe /usr/local/bin/ffprobe && \
    rm -rf /tmp/ffmpeg.tar.xz && \
    ffmpeg -version

# Ensure proper permissions, aligning GID with docker-compose.yaml (1001)
RUN groupadd -g 1001 airflow && \
    usermod -a -G airflow airflow && \
@@ -34,28 +37,25 @@ RUN groupadd -g 1001 airflow && \
# Switch to airflow user for package installation
USER airflow

# Install Python dependencies and ensure ffprobe3 is installed correctly
# Install base Airflow dependencies
RUN pip install --no-cache-dir \
    "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-docker apache-airflow-providers-http && \
    pip install --no-cache-dir -r /app/requirements.txt && \
    pip install --no-cache-dir ffprobe3 python-ffmpeg
    "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-docker apache-airflow-providers-http

# Only generate Thrift files if gen_py directory doesn't exist
RUN if [ ! -d "/app/thrift_model/gen_py" ]; then \
        python3 /app/generate-thrift.py; \
    else \
        echo "Skipping Thrift generation - gen_py directory already exists"; \
    fi
# --- Install the custom yt_ops_services package ---
# Copy all the necessary source code for the package.
# The deploy script ensures these files are in the build context.
COPY --chown=airflow:airflow setup.py ./
COPY --chown=airflow:airflow VERSION ./
COPY --chown=airflow:airflow yt_ops_services ./yt_ops_services/
COPY --chown=airflow:airflow server_fix ./server_fix/
COPY --chown=airflow:airflow thrift_model ./thrift_model/
COPY --chown=airflow:airflow pangramia ./pangramia/

# Create proper Python package structure
RUN mkdir -p /app/pangramia && \
    ln -s /app/thrift_model/gen_py/pangramia /app/pangramia && \
    echo "Created symlink: /app/pangramia -> /app/thrift_model/gen_py/pangramia"
# Install the package in editable mode. This runs setup.py and installs all dependencies
# listed in `install_requires`, making the `yt_ops_services` module available everywhere.
RUN pip install --no-cache-dir -e .

# Ensure base_service is accessible
RUN mkdir -p /app/pangramia/base_service && \
    ln -s /app/thrift_model/gen_py/pangramia/base_service /app/pangramia/base_service && \
    echo "Created symlink: /app/pangramia/base_service -> /app/thrift_model/gen_py/pangramia/base_service"

# Add to Python path
ENV PYTHONPATH=/app:/app/thrift_model/gen_py:${PYTHONPATH}
# Copy token generator scripts and utils with correct permissions
COPY --chown=airflow:airflow generate_tokens_direct.mjs ./
COPY --chown=airflow:airflow utils ./utils/
COPY --chown=airflow:airflow token_generator ./token_generator/
@@ -1,97 +0,0 @@
# YTDLP Client Side Integration

This document describes how to integrate and use the YTDLP client with the token service.

## Build

1. **Pull, configure and start the server if needed:**
   ```bash
   cd /srv/airflow_worker/
   docker login pangramia   # should be done beforehand; otherwise ask for the pull password
   docker compose -f docker-compose-ytdlp-ops.yaml up -d
   docker compose -f docker-compose-ytdlp-ops.yaml logs -f
   ```
   The server is bound to a specific proxy, e.g. "socks5://sslocal-rust-1084:1084".

   Also check that Redis is bound to 0.0.0.0 in its config.

2. **Build airflow-worker with custom dependencies:**
   ```bash
   cd /srv/airflow_worker/
   docker compose build airflow-worker
   docker compose down airflow-worker
   docker compose up -d --no-deps airflow-worker
   ```

3. **Test the built-in client:**
   ```bash
   # Show client help
   docker compose exec airflow-worker python /app/ytdlp_ops_client.py --help

   # Get token and info.json
   docker compose exec airflow-worker python /app/ytdlp_ops_client.py --host 85.192.30.55 --port 9090 getToken --url 'https://www.youtube.com/watch?v=vKTVLpmvznI'

   # List formats using saved info.json
   docker compose exec airflow-worker yt-dlp --load-info-json "latest.json" -F

   # Simulate download using saved info.json
   docker compose exec airflow-worker yt-dlp --load-info-json "latest.json" --proxy "socks5://sslocal-rust-1084:1084" --simulate --verbose

   # Extract metadata and download URLs using jq
   docker compose exec airflow-worker jq -r '"Title: \(.title)", "Date: \(.upload_date | strptime("%Y%m%d") | strftime("%Y-%m-%d"))", "Author: \(.uploader)", "Length: \(.duration_string)", "", "Download URLs:", (.formats[] | select(.vcodec != "none" or .acodec != "none") | .url)' latest.json
   ```

4. **Test the Airflow task:**

   To run the `ytdlp_client_dag_v2.1` DAG:

   Set up the required Airflow variables:
   ```bash
   docker compose exec airflow-worker airflow variables set DOWNLOAD_OPTIONS '{"formats": ["bestvideo[height<=1080]+bestaudio/best[height<=1080]"]}'
   docker compose exec airflow-worker airflow variables set DOWNLOADS_TEMP '/opt/airflow/downloadfiles'
   docker compose exec airflow-worker airflow variables set DOWNLOADS_PATH '/opt/airflow/downloadfiles'

   docker compose exec airflow-worker airflow variables list
   docker compose exec airflow-worker airflow variables set TOKEN_TIMEOUT '300'

   docker compose exec airflow-worker airflow connections import /opt/airflow/config/docker_hub_repo.json
   docker compose exec airflow-worker airflow connections delete redis_default
   docker compose exec airflow-worker airflow connections import /opt/airflow/config/redis_default_conn.json
   ```

   **Using a direct connection with `tasks test`:**
   ```bash
   docker compose exec airflow-worker airflow db reset
   docker compose exec airflow-worker airflow dags reserialize

   docker compose exec airflow-worker airflow dags list
   docker compose exec airflow-worker airflow dags list-import-errors
   docker compose exec airflow-worker airflow tasks test ytdlp_client_dag_v2.1 get_token $(date -u +"%Y-%m-%dT%H:%M:%S+00:00") --task-params '{"url": "https://www.youtube.com/watch?v=sOlTX9uxUtM", "redis_enabled": false, "service_ip": "85.192.30.55", "service_port": 9090}'
   docker compose exec airflow-worker yt-dlp --load-info-json /opt/airflow/downloadfiles/latest.json --proxy "socks5://sslocal-rust-1084:1084" --verbose --simulate

   docker compose exec airflow-worker airflow dags list-runs -d ytdlp_client_dag
   ```

   Or deploy using a trigger:
   ```bash
   docker compose exec airflow-worker airflow dags list
   docker compose exec airflow-worker airflow dags unpause ytdlp_client_dag_v2.1

   # Try the UI, or recheck that it works from a server deploy
   docker compose exec airflow-worker airflow dags trigger ytdlp_client_dag_v2.1 -c '{"url": "https://www.youtube.com/watch?v=sOlTX9uxUtM", "redis_enabled": false, "service_ip": "85.192.30.55", "service_port": 9090}'
   ```

   Check Redis for stored data by video ID:
   ```bash
   docker compose exec redis redis-cli -a XXXXXX -h 89.253.221.173 -p 52909 HGETALL "token_info:sOlTX9uxUtM" | jq -R -s 'split("\n") | del(.[] | select(. == "")) | [.[range(0;length;2)]]'
   ```
100 README.en.old.md
@@ -1,100 +0,0 @@
# YTDLP Airflow DAGs

This document describes the Airflow DAGs used for interacting with the YTDLP Ops service and managing processing queues.

## DAG Descriptions

### `ytdlp_client_dag_v2.1`

* **File:** `airflow/dags/ytdlp_client_dag_v2.1.py`
* **Purpose:** Provides a way to test the YTDLP Ops Thrift service interaction for a *single* video URL. Useful for debugging connection issues, testing specific account IDs, or verifying the service response for a particular URL independently of the queueing system.
* **Parameters (Defaults):**
  * `url` (`'https://www.youtube.com/watch?v=sOlTX9uxUtM'`): The video URL to process.
  * `redis_enabled` (`False`): Use Redis for service discovery?
  * `service_ip` (`'85.192.30.55'`): Service IP if `redis_enabled=False`.
  * `service_port` (`9090`): Service port if `redis_enabled=False`.
  * `account_id` (`'account_fr_2025-04-03T1220_anonomyous_2ssdfsf2342afga09'`): Account ID for lookup or call.
  * `timeout` (`30`): Timeout in seconds for the Thrift connection.
  * `info_json_dir` (`"{{ var.value.get('DOWNLOADS_TEMP', '/opt/airflow/downloadfiles') }}"`): Directory to save `info.json`.
* **Results:**
  * Connects to the YTDLP Ops service using the specified method (Redis or direct IP).
  * Retrieves token data for the given URL and account ID.
  * Saves the video's `info.json` metadata to the specified directory.
  * Extracts the SOCKS proxy (if available).
  * Pushes `info_json_path`, `socks_proxy`, and the original `ytdlp_command` (with tokens) to XCom.
  * Optionally stores detailed results in a Redis hash (`token_info:<video_id>`).
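For reference, a single run can be triggered from inside the worker container; the parameter values below are illustrative and mirror the defaults above:

```bash
docker compose exec airflow-worker airflow dags trigger ytdlp_client_dag_v2.1 \
  -c '{"url": "https://www.youtube.com/watch?v=sOlTX9uxUtM", "redis_enabled": false, "service_ip": "85.192.30.55", "service_port": 9090}'
```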
### `ytdlp_mgmt_queue_add_urls`

* **File:** `airflow/dags/ytdlp_mgmt_queue_add_urls.py`
* **Purpose:** Manually add video URLs to a specific YTDLP inbox queue (Redis List).
* **Parameters (Defaults):**
  * `redis_conn_id` (`'redis_default'`): Airflow Redis connection ID.
  * `queue_name` (`'video_queue_inbox_account_fr_2025-04-03T1220_anonomyous_2ssdfsf2342afga09'`): Target Redis list (inbox queue).
  * `urls` (`""`): Multiline string of video URLs to add.
* **Results:**
  * Parses the multiline `urls` parameter.
  * Adds each valid URL to the end of the Redis list specified by `queue_name`.
  * Logs the number of URLs added.
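A minimal trigger sketch (the queue name and URLs are examples; newlines separate URLs in the multiline parameter):

```bash
docker compose exec airflow-worker airflow dags trigger ytdlp_mgmt_queue_add_urls \
  -c '{"queue_name": "video_queue_inbox_example", "urls": "https://www.youtube.com/watch?v=sOlTX9uxUtM\nhttps://www.youtube.com/watch?v=vKTVLpmvznI"}'
```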
### `ytdlp_mgmt_queue_clear`

* **File:** `airflow/dags/ytdlp_mgmt_queue_clear.py`
* **Purpose:** Manually delete a specific Redis key used by the YTDLP queues.
* **Parameters (Defaults):**
  * `redis_conn_id` (`'redis_default'`): Airflow Redis connection ID.
  * `queue_to_clear` (`'PLEASE_SPECIFY_QUEUE_TO_CLEAR'`): Exact name of the Redis key to clear. **Must be changed by the user.**
* **Results:**
  * Deletes the Redis key specified by the `queue_to_clear` parameter.
  * **Warning:** This operation is destructive and irreversible. Use with extreme caution. Ensure you specify the correct key name (e.g., `video_queue_inbox_account_xyz`, `video_queue_progress`, `video_queue_result`, `video_queue_fail`).

### `ytdlp_mgmt_queue_check_status`

* **File:** `airflow/dags/ytdlp_mgmt_queue_check_status.py`
* **Purpose:** Manually check the type and size of a specific YTDLP Redis queue/key.
* **Parameters (Defaults):**
  * `redis_conn_id` (`'redis_default'`): Airflow Redis connection ID.
  * `queue_to_check` (`'video_queue_inbox_account_fr_2025-04-03T1220_anonomyous_2ssdfsf2342afga09'`): Exact name of the Redis key to check.
* **Results:**
  * Connects to Redis and determines the type of the key specified by `queue_to_check`.
  * Determines the size (length for lists, number of fields for hashes).
  * Logs the key type and size.
  * Pushes `queue_key_type` and `queue_size` to XCom.
### `ytdlp_mgmt_queue_list_contents`

* **File:** `airflow/dags/ytdlp_mgmt_queue_list_contents.py`
* **Purpose:** Manually list the contents of a specific YTDLP Redis queue/key (list or hash). Useful for inspecting queue state or results.
* **Parameters (Defaults):**
  * `redis_conn_id` (`'redis_default'`): Airflow Redis connection ID.
  * `queue_to_list` (`'video_queue_inbox_account_fr_2025-04-03T1220_anonomyous_2ssdfsf2342afga09'`): Exact name of the Redis key to list.
  * `max_items` (`100`): Maximum number of items/fields to list.
* **Results:**
  * Connects to Redis and identifies the type of the key specified by `queue_to_list`.
  * If it's a List, logs the first `max_items` elements.
  * If it's a Hash, logs up to `max_items` key-value pairs, attempting to pretty-print JSON values.
  * Logs warnings for very large hashes.
### `ytdlp_proc_sequential_processor`

* **File:** `airflow/dags/ytdlp_proc_sequential_processor.py`
* **Purpose:** Processes YouTube URLs sequentially from a Redis queue. Designed for batch processing. Pops a URL, gets token/metadata via the YTDLP Ops service, downloads the media using `yt-dlp`, and records the result.
* **Parameters (Defaults):**
  * `queue_name` (`'video_queue'`): Base name for Redis queues (e.g., `video_queue_inbox`, `video_queue_progress`).
  * `redis_conn_id` (`'redis_default'`): Airflow Redis connection ID.
  * `redis_enabled` (`False`): Use Redis for service discovery? If False, uses `service_ip`/`port`.
  * `service_ip` (`None`): Required service IP if `redis_enabled=False`.
  * `service_port` (`None`): Required service port if `redis_enabled=False`.
  * `account_id` (`'default_account'`): Account ID for the API call (used for Redis lookup if `redis_enabled=True`).
  * `timeout` (`30`): Timeout in seconds for the Thrift connection.
  * `download_format` (`'ba[ext=m4a]/bestaudio/best'`): yt-dlp format selection string.
  * `output_path_template` (`"{{ var.value.get('DOWNLOADS_TEMP', '/opt/airflow/downloads') }}/%(title)s [%(id)s].%(ext)s"`): yt-dlp output template. Uses the Airflow Variable `DOWNLOADS_TEMP`.
  * `info_json_dir` (`"{{ var.value.get('DOWNLOADS_TEMP', '/opt/airflow/downloadfiles') }}"`): Directory to save `info.json`. Uses the Airflow Variable `DOWNLOADS_TEMP`.
* **Results** (see the inspection sketch after this list):
  * Pops one URL from the `{{ params.queue_name }}_inbox` Redis list.
  * If a URL is popped, it's added to the `{{ params.queue_name }}_progress` Redis hash.
  * The `YtdlpOpsOperator` (`get_token` task) attempts to get token data (including `info.json`, proxy, command) for the URL using the specified connection method and account ID.
  * If token retrieval succeeds, the `download_video` task executes `yt-dlp` using the retrieved `info.json`, proxy, the `download_format` parameter, and the `output_path_template` parameter to download the actual media.
  * **On Successful Download:** The URL is removed from the progress hash and added to the `{{ params.queue_name }}_result` hash along with results (`info_json_path`, `socks_proxy`, `ytdlp_command`).
  * **On Failure (Token Retrieval or Download):** The URL is removed from the progress hash and added to the `{{ params.queue_name }}_fail` hash along with error details (message, traceback).
  * If the inbox queue is empty, the DAG run skips processing via `AirflowSkipException`.
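A quick way to inspect these queues after a few runs, reusing the `redis-cli` pattern shown earlier in this document (the password placeholder and the default `video_queue` base name are illustrative):

```bash
docker compose exec redis redis-cli -a XXXXXX LLEN "video_queue_inbox"
docker compose exec redis redis-cli -a XXXXXX HGETALL "video_queue_progress"
docker compose exec redis redis-cli -a XXXXXX HGETALL "video_queue_result"
docker compose exec redis redis-cli -a XXXXXX HGETALL "video_queue_fail"
```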
49 config/airflow_local_settings.py (new file)
@@ -0,0 +1,49 @@
import logging

logger = logging.getLogger(__name__)


def task_instance_mutation_hook(ti):
    if ti.dag_id == 'ytdlp_ops_worker_per_url':
        worker_queue = ti.dag_run.conf.get('worker_queue')
        if worker_queue:
            logger.info(f"Mutating queue for task {ti.task_id} to {worker_queue} based on dag_run.conf")
            ti.queue = worker_queue
        else:
            logger.warning(f"No worker_queue in conf for {ti.dag_id}. Falling back to 'queue-dl'")
            ti.queue = 'queue-dl'
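The hook reads `worker_queue` from the triggering run's configuration, so any run whose conf carries that key gets its tasks pinned to that Celery queue. A minimal sketch of such a trigger (the queue name is an example; the dispatcher DAG later in this commit builds it from the worker's hostname):

```bash
airflow dags trigger ytdlp_ops_worker_per_url \
  -c '{"url_to_process": "https://www.youtube.com/watch?v=sOlTX9uxUtM", "worker_queue": "queue-dl-dl-worker-1"}'
```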
3 config/camoufox_endpoints.json (new file)
@@ -0,0 +1,3 @@
{
  "endpoints": {}
}
@@ -4,6 +4,6 @@
    "conn_type": "docker",
    "host": "https://index.docker.io/v1/",
    "login": "pangramia",
    "password": "PROVIDE_OUTSIDE_REPO"
    "password": "dckr_pat_PEDco1yeURKYFY9cSXTCokQNb4A"
  }
}
}
52 config/envoy.yaml (new file)
@@ -0,0 +1,52 @@
# Jinja2 template for Envoy configuration
admin:
  address:
    socket_address:
      address: 0.0.0.0
      port_value: 9901

static_resources:
  listeners:
    # Listener for ytdlp-ops Thrift traffic
    - name: ytdlp_ops_listener
      address:
        socket_address:
          address: 0.0.0.0
          port_value: 9080
      filter_chains:
        - filters:
            - name: envoy.filters.network.thrift_proxy
              typed_config:
                "@type": type.googleapis.com/envoy.extensions.filters.network.thrift_proxy.v3.ThriftProxy
                stat_prefix: thrift_ingress
                transport: FRAMED
                protocol: BINARY
                route_config:
                  name: local_route
                  routes:
                    - match:
                        method_name: ""
                      route:
                        cluster: ytdlp_ops_cluster

  clusters:
    # Cluster for the ytdlp-ops workers
    - name: ytdlp_ops_cluster
      connect_timeout: 5s
      type: STRICT_DNS
      lb_policy: ROUND_ROBIN
      health_checks:
        - timeout: 1s
          interval: 5s
          unhealthy_threshold: 3
          healthy_threshold: 2
          tcp_health_check: {}
      load_assignment:
        cluster_name: ytdlp_ops_cluster
        endpoints:
          - lb_endpoints:
              - endpoint:
                  address:
                    socket_address:
                      address: ytdlp-ops-server
                      port_value: 9090
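With this load balancer in place, clients can point at Envoy's Thrift listener instead of a single backend. As a sketch, reusing the built-in client shown earlier (the host name assumes the container is on the same Docker network as `envoy-thrift-lb`):

```bash
docker compose exec airflow-worker python /app/ytdlp_ops_client.py --host envoy-thrift-lb --port 9080 getToken --url 'https://www.youtube.com/watch?v=vKTVLpmvznI'
```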
17 config/minio_default_conn.json (new file)
@@ -0,0 +1,17 @@
{
  "minio_default":
  {
    "conn_type": "s3",
    "host": "89.253.221.173",
    "login": "admin",
    "password": "0153093693-0009",
    "port": 9000,
    "extra":
    {
      "endpoint_url": "http://89.253.221.173:9000",
      "aws_access_key_id": "admin",
      "aws_secret_access_key": "0153093693-0009",
      "region_name": "us-east-1"
    }
  }
}
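Like the other connection files in `config/`, this one can presumably be imported the same way as shown earlier for the Docker Hub and Redis connections:

```bash
docker compose exec airflow-worker airflow connections import /opt/airflow/config/minio_default_conn.json
```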
@@ -3,11 +3,11 @@
  {
    "conn_type": "redis",
    "host": "89.253.221.173",
    "password": "PROVIDE_OUTSIDE_REPO",
    "password": "rOhTAIlTFFylXsjhqwxnYxDChFc",
    "port": 52909,
    "extra":
    {
      "db": 0
    }
  }
}
}
@@ -9,11 +9,15 @@ import socket

from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.dagbag import DagBag
from airflow.models.dagrun import DagRun
from airflow.models.param import Param
from airflow.models.taskinstance import TaskInstance
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.models.variable import Variable
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.session import create_session

# Configure logging
logger = logging.getLogger(__name__)
@@ -30,17 +34,15 @@ except Exception as e:

# Thrift imports
try:
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from pangramia.yt.tokens_ops import YTTokenOpService
    from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
    from yt_ops_services.client_utils import get_thrift_client, format_timestamp
except ImportError as e:
    logger.critical(f"Could not import Thrift modules: {e}. Ensure ytdlp-ops-auth package is installed.")
    logger.critical(f"Could not import Thrift modules: {e}. Ensure yt_ops_services package is installed correctly.")
    # Fail DAG parsing if thrift modules are not available
    raise

DEFAULT_YT_AUTH_SERVICE_IP = Variable.get("YT_AUTH_SERVICE_IP", default_var="16.162.82.212")
DEFAULT_YT_AUTH_SERVICE_PORT = Variable.get("YT_AUTH_SERVICE_PORT", default_var=9080)
DEFAULT_MANAGEMENT_SERVICE_IP = Variable.get("MANAGEMENT_SERVICE_HOST", default_var="envoy-thrift-lb")
DEFAULT_MANAGEMENT_SERVICE_PORT = Variable.get("MANAGEMENT_SERVICE_PORT", default_var=9080)
DEFAULT_REDIS_CONN_ID = "redis_default"

@@ -58,30 +60,6 @@ def _get_redis_client(redis_conn_id: str):
        raise AirflowException(f"Redis connection failed: {e}")


def format_timestamp(ts_str: str) -> str:
    """Formats a string timestamp into a human-readable date string."""
    if not ts_str:
        return ""
    try:
        ts_float = float(ts_str)
        if ts_float <= 0:
            return ""
        # Use datetime from the imported 'from datetime import datetime'
        dt_obj = datetime.fromtimestamp(ts_float)
        return dt_obj.strftime('%Y-%m-%d %H:%M:%S')
    except (ValueError, TypeError):
        return ts_str  # Return original string if conversion fails

def get_thrift_client(host: str, port: int):
    """Helper function to create and connect a Thrift client."""
    transport = TSocket.TSocket(host, port)
    transport.setTimeout(30 * 1000)  # 30s timeout
    transport = TTransport.TFramedTransport(transport)
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = YTTokenOpService.Client(protocol)
    transport.open()
    logger.info(f"Connected to Thrift server at {host}:{port}")
    return client, transport

def _list_proxy_statuses(client, server_identity):
    """Lists the status of proxies."""
@@ -215,17 +193,24 @@ def manage_system_callable(**context):
    params = context["params"]
    entity = params["entity"]
    action = params["action"]
    host = params["host"]
    port = params["port"]

    # For Thrift actions, use the new management host/port
    if entity not in ["airflow_meta"]:
        host = params["management_host"]
        port = params["management_port"]
    else:
        host, port = None, None  # Not needed for meta actions

    server_identity = params.get("server_identity")
    proxy_url = params.get("proxy_url")
    account_id = params.get("account_id")

    # --- Validate Action/Entity Combination and Parameters ---
    valid_actions = {
        "proxy": ["list_statuses", "ban", "unban", "unban_all", "delete_from_redis"],
        "account": ["list_statuses", "ban", "unban", "unban_all", "delete_from_redis"],
        "all": ["list_statuses"]
        "proxy": ["list_with_status", "ban", "unban", "unban_all", "delete_from_redis"],
        "account": ["list_with_status", "ban", "unban", "unban_all", "delete_from_redis"],
        "accounts_and_proxies": ["list_with_status", "ban", "unban", "unban_all", "delete_from_redis"],
        "airflow_meta": ["clear_dag_runs"],
    }

    if action not in valid_actions.get(entity, []):
@@ -245,11 +230,93 @@ def manage_system_callable(**context):
    if action in ["ban", "unban"] and not account_id:
        raise ValueError(f"An 'account_id' is required for account action '{action}'.")

    # --- Handle Airflow Meta actions separately as they don't use Thrift ---
    if entity == "airflow_meta":
        dag_id = params.get("dag_id_to_manage")
        if not dag_id:
            raise AirflowException("A 'dag_id_to_manage' is required for airflow_meta actions.")

        if action == "clear_dag_runs":
            clear_scope = params.get("clear_scope")
            logger.info(f"Attempting to delete DagRuns for DAG '{dag_id}' with scope '{clear_scope}'.")

            with create_session() as session:
                dag_run_query = session.query(DagRun).filter(DagRun.dag_id == dag_id)

                if clear_scope == "last_run":
                    last_run = dag_run_query.order_by(DagRun.execution_date.desc()).first()
                    if not last_run:
                        logger.info(f"No runs found for DAG '{dag_id}'. Nothing to delete.")
                        print(f"\nNo runs found for DAG '{dag_id}'.\n")
                        return

                    logger.warning(f"Deleting last DagRun for DAG '{dag_id}' (run_id: {last_run.run_id}, execution_date: {last_run.execution_date}). This will also delete its task instances.")
                    # Deleting the DagRun object should cascade and delete related TaskInstances.
                    session.delete(last_run)
                    deleted_count = 1
                else:  # all_runs
                    logger.warning(f"Deleting ALL DagRuns and associated TaskInstances for DAG '{dag_id}'. This will remove all history from the UI.")
                    # To ensure all related data is cleared, we explicitly delete TaskInstances first.
                    # This is safer than relying on DB-level cascades which may not be configured.
                    ti_deleted_count = session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).delete(synchronize_session=False)
                    logger.info(f"Deleted {ti_deleted_count} TaskInstance records for DAG '{dag_id}'.")

                    deleted_count = dag_run_query.delete(synchronize_session=False)

                # The session is committed automatically by the `with create_session()` context manager.
                logger.info(f"Successfully deleted {deleted_count} DagRun(s) for DAG '{dag_id}'.")
                print(f"\nSuccessfully deleted {deleted_count} DagRun(s) for DAG '{dag_id}'.\n")
            return  # End execution

    # Handle direct Redis actions separately to avoid creating an unnecessary Thrift connection.
    if action == "delete_from_redis":
        redis_conn_id = params["redis_conn_id"]
        redis_client = _get_redis_client(redis_conn_id)

        if entity == "accounts_and_proxies":
            # --- Delete Proxy ---
            proxy_url = params.get("proxy_url")
            server_identity = params.get("server_identity")
            if not proxy_url:
                raise ValueError("A 'proxy_url' is required for proxy action 'delete_from_redis'.")
            if not server_identity:
                raise ValueError("A 'server_identity' is required for proxy action 'delete_from_redis'.")

            proxy_state_key = f"proxies:{server_identity}"
            proxy_failure_key = f"proxy_failures:{proxy_url}"

            logger.warning(f"Deleting proxy '{proxy_url}' state from hash '{proxy_state_key}' and failure key '{proxy_failure_key}' from Redis.")

            with redis_client.pipeline() as pipe:
                pipe.hdel(proxy_state_key, proxy_url)
                pipe.delete(proxy_failure_key)
                results = pipe.execute()

            hdel_result = results[0]
            del_result = results[1]
            print(f"\nSuccessfully removed proxy '{proxy_url}' from state hash (result: {hdel_result}) and deleted failure key (result: {del_result}).")

            # --- Delete Account ---
            account_prefix = params.get("account_id")  # Repurpose account_id param as an optional prefix
            pattern = f"account_status:{account_prefix}*" if account_prefix else "account_status:*"
            logger.warning(f"Searching for account status keys in Redis with pattern: '{pattern}'")

            keys_to_delete = [key for key in redis_client.scan_iter(pattern)]

            if not keys_to_delete:
                print(f"\nNo accounts found matching pattern '{pattern}'.\n")
            else:
                print(f"\nWARNING: Found {len(keys_to_delete)} accounts to remove from Redis.")
                for key in keys_to_delete[:10]:
                    print(f"  - {key.decode('utf-8')}")
                if len(keys_to_delete) > 10:
                    print(f"  ... and {len(keys_to_delete) - 10} more.")

                deleted_count = redis_client.delete(*keys_to_delete)
                print(f"\nSuccessfully removed {deleted_count} accounts from Redis.\n")

            return  # End execution for this action

        if entity == "account":
            account_prefix = params.get("account_id")  # Repurpose account_id param as an optional prefix
            pattern = f"account_status:{account_prefix}*" if account_prefix else "account_status:*"
@@ -299,7 +366,7 @@ def manage_system_callable(**context):
    client, transport = get_thrift_client(host, port)

    if entity == "proxy":
        if action == "list_statuses":
        if action == "list_with_status":
            _list_proxy_statuses(client, server_identity)
        elif action == "ban":
            if not proxy_url: raise ValueError("A 'proxy_url' is required.")
@@ -317,7 +384,7 @@ def manage_system_callable(**context):
            print(f"Successfully sent request to unban all proxy statuses for '{server_identity}'.")

    elif entity == "account":
        if action == "list_statuses":
        if action == "list_with_status":
            _list_account_statuses(client, account_id, params["redis_conn_id"])
        elif action == "ban":
            if not account_id: raise ValueError("An 'account_id' is required.")
@@ -364,8 +431,86 @@ def manage_system_callable(**context):
            print("\n--- Listing statuses after unban_all ---")
            _list_account_statuses(client, account_prefix, params["redis_conn_id"])

    elif entity == "accounts_and_proxies":
        print(f"\n--- Performing action '{action}' on BOTH Proxies and Accounts ---")

        # --- Proxy Action ---
        try:
            print("\n-- Running Proxy Action --")
            if action == "list_with_status":
                _list_proxy_statuses(client, server_identity)
            elif action == "ban":
                if not proxy_url: raise ValueError("A 'proxy_url' is required.")
                logger.info(f"Banning proxy '{proxy_url}' for server '{server_identity}'...")
                client.banProxy(proxy_url, server_identity)
                print(f"Successfully sent request to ban proxy '{proxy_url}'.")
            elif action == "unban":
                if not proxy_url: raise ValueError("A 'proxy_url' is required.")
                logger.info(f"Unbanning proxy '{proxy_url}' for server '{server_identity}'...")
                client.unbanProxy(proxy_url, server_identity)
                print(f"Successfully sent request to unban proxy '{proxy_url}'.")
            elif action == "unban_all":
                logger.info(f"Unbanning all proxy statuses for server '{server_identity}'...")
                client.resetAllProxyStatuses(server_identity)
                print(f"Successfully sent request to unban all proxy statuses for '{server_identity}'.")
        except Exception as proxy_e:
            logger.error(f"Error during proxy action '{action}': {proxy_e}", exc_info=True)
            print(f"\nERROR during proxy action: {proxy_e}")

        # --- Account Action ---
        try:
            print("\n-- Running Account Action --")
            if action == "list_with_status":
                _list_account_statuses(client, account_id, params["redis_conn_id"])
            elif action == "ban":
                if not account_id: raise ValueError("An 'account_id' is required.")
                reason = f"Manual ban from Airflow mgmt DAG by {socket.gethostname()}"
                logger.info(f"Banning account '{account_id}'...")
                client.banAccount(accountId=account_id, reason=reason)
                print(f"Successfully sent request to ban account '{account_id}'.")
            elif action == "unban":
                if not account_id: raise ValueError("An 'account_id' is required.")
                reason = f"Manual un-ban from Airflow mgmt DAG by {socket.gethostname()}"
                logger.info(f"Unbanning account '{account_id}'...")
                client.unbanAccount(accountId=account_id, reason=reason)
                print(f"Successfully sent request to unban account '{account_id}'.")
            elif action == "unban_all":
                account_prefix = account_id  # Repurpose account_id param as an optional prefix
                logger.info(f"Unbanning all account statuses to ACTIVE (prefix: '{account_prefix or 'ALL'}')...")

                all_statuses = client.getAccountStatus(accountId=None, accountPrefix=account_prefix)
                if not all_statuses:
                    print(f"No accounts found with prefix '{account_prefix or 'ALL'}' to unban.")
                else:
                    accounts_to_unban = [s.accountId for s in all_statuses]
                    logger.info(f"Found {len(accounts_to_unban)} accounts to unban.")
                    print(f"Found {len(accounts_to_unban)} accounts. Sending unban request for each...")

                    unban_count = 0
                    fail_count = 0
                    for acc_id in accounts_to_unban:
                        try:
                            reason = f"Manual unban_all from Airflow mgmt DAG by {socket.gethostname()}"
                            client.unbanAccount(accountId=acc_id, reason=reason)
                            logger.info(f"  - Sent unban for '{acc_id}'.")
                            unban_count += 1
                        except Exception as e:
                            logger.error(f"  - Failed to unban account '{acc_id}': {e}")
                            fail_count += 1

                    print(f"\nSuccessfully sent unban requests for {unban_count} accounts.")
                    if fail_count > 0:
                        print(f"Failed to send unban requests for {fail_count} accounts. See logs for details.")

                    # Optionally, list statuses again to confirm
                    print("\n--- Listing statuses after unban_all ---")
                    _list_account_statuses(client, account_prefix, params["redis_conn_id"])
        except Exception as account_e:
            logger.error(f"Error during account action '{action}': {account_e}", exc_info=True)
            print(f"\nERROR during account action: {account_e}")

    elif entity == "all":
        if action == "list_statuses":
        if action == "list_with_status":
            print("\nListing all entities...")
            _list_proxy_statuses(client, server_identity)
            _list_account_statuses(client, account_id, params["redis_conn_id"])
@@ -392,50 +537,51 @@ with DAG(
    tags=["ytdlp", "mgmt", "master"],
    doc_md="""
### YT-DLP Proxy and Account Manager DAG

This DAG provides tools to manage the state of **proxies and accounts** used by the `ytdlp-ops-server`.
Select an `entity` and an `action` to perform. Note that not all actions are available for all entities.

---

#### Actions for `entity: proxy`
- `list_statuses`: View status of all proxies, optionally filtered by `server_identity`.
- `ban`: Ban a specific proxy for a given `server_identity`. Requires `proxy_url`.
- `unban`: Un-ban a specific proxy. Requires `proxy_url`.
- `unban_all`: Resets the status of all proxies for a given `server_identity` to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes a proxy's state from Redis for a specific `server_identity`. This removes its state (ACTIVE/BANNED) and its failure history. The server will re-create it with a default `ACTIVE` state on its next refresh if the proxy is still in the server's configuration. Use this to reset a single proxy's state completely. Requires `proxy_url` and `server_identity`.

#### Actions for `entity: account`
- `list_statuses`: View status of all accounts, optionally filtered by `account_id` (as a prefix).
- `ban`: Ban a specific account. Requires `account_id`.
- `unban`: Un-ban a specific account. Requires `account_id`.
- `unban_all`: Sets the status of all accounts (or those matching a prefix in `account_id`) to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes account status keys from Redis. This permanently removes the account from being tracked by the system. This is different from `unban`. Use with caution.

#### Actions for `entity: all`
- `list_statuses`: A convenience to view statuses for both proxies and accounts in one run.

---

**When to use `delete_from_redis`?**

- **For Accounts:** Account state is managed entirely within Redis. Deleting an account's key is a permanent removal from the system's tracking. This is different from `unban`, which just resets the status. Use this when you want to completely remove an account.
- **For Proxies:** Proxies are defined in the server's startup configuration. Redis only stores their *state* (e.g., `BANNED` or `ACTIVE`) and failure history. Deleting a proxy's state from Redis will cause the server to re-create it with a default `ACTIVE` state on its next refresh cycle. This action is useful for completely resetting a single proxy that may be stuck or has a long failure history, without having to reset all proxies for that server.
This DAG provides tools to manage the state of proxies and accounts used by the `ytdlp-ops-server`.
Select an `entity` and an `action` to perform.
""",
    params={
        "host": Param(DEFAULT_YT_AUTH_SERVICE_IP, type="string", description="The hostname of the ytdlp-ops-server service. Default is from Airflow variable YT_AUTH_SERVICE_IP or hardcoded."),
        "port": Param(DEFAULT_YT_AUTH_SERVICE_PORT, type="integer", description="The port of the ytdlp-ops-server service (Envoy load balancer). Default is from Airflow variable YT_AUTH_SERVICE_PORT or hardcoded."),
        "management_host": Param(DEFAULT_MANAGEMENT_SERVICE_IP, type="string", title="Management Service Host", description="The hostname or IP of the management service. Can be a Docker container name (e.g., 'envoy-thrift-lb') if on the same network."),
        "management_port": Param(DEFAULT_MANAGEMENT_SERVICE_PORT, type="integer", title="Management Service Port", description="The port of the dedicated management service."),
        "entity": Param(
            "account",
            "accounts_and_proxies",
            type="string",
            enum=["account", "proxy", "all"],
            enum=["account", "proxy", "accounts_and_proxies", "airflow_meta"],
            description="The type of entity to manage.",
        ),
        "action": Param(
            "list_statuses",
            "list_with_status",
            type="string",
            enum=["list_statuses", "ban", "unban", "unban_all", "delete_from_redis"],
            description="The management action to perform. See the DAG documentation for which actions are valid for each entity.",
            enum=["list_with_status", "ban", "unban", "unban_all", "delete_from_redis", "clear_dag_runs"],
            description="""The management action to perform.
---
#### Actions for `entity: proxy`
- `list_with_status`: View status of all proxies, optionally filtered by `server_identity`.
- `ban`: Ban a specific proxy for a given `server_identity`. Requires `proxy_url`.
- `unban`: Un-ban a specific proxy. Requires `proxy_url`.
- `unban_all`: Resets the status of all proxies for a given `server_identity` to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes a proxy's state from Redis for a specific `server_identity`. This removes its state (ACTIVE/BANNED) and its failure history. The server will re-create it with a default `ACTIVE` state on its next refresh if the proxy is still in the server's configuration. Use this to reset a single proxy's state completely. Requires `proxy_url` and `server_identity`.

#### Actions for `entity: account`
- `list_with_status`: View status of all accounts, optionally filtered by `account_id` (as a prefix).
- `ban`: Ban a specific account. Requires `account_id`.
- `unban`: Un-ban a specific account. Requires `account_id`.
- `unban_all`: Sets the status of all accounts (or those matching a prefix in `account_id`) to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes account status keys from Redis. This permanently removes the account from being tracked by the system. This is different from `unban`. Use with caution.

#### Actions for `entity: accounts_and_proxies`
- This entity performs the selected action on **both** proxies and accounts where applicable.
- `list_with_status`: View statuses for both proxies and accounts.
- `ban`: Ban a specific proxy AND a specific account. Requires `proxy_url`, `server_identity`, and `account_id`.
- `unban`: Un-ban a specific proxy AND a specific account. Requires `proxy_url`, `server_identity`, and `account_id`.
- `unban_all`: Un-ban all proxies for a `server_identity` AND all accounts (optionally filtered by `account_id` as a prefix).
- `delete_from_redis`: Deletes a specific proxy's state AND all accounts matching a prefix from Redis.

#### Actions for `entity: airflow_meta`
- `clear_dag_runs`: **(Destructive)** Deletes DAG run history and associated task instances from the database, removing them from the UI. This allows the runs to be re-created if backfilling is enabled.
  - `clear_scope: last_run`: Deletes only the most recent DAG run and its task instances.
  - `clear_scope: all_runs`: Deletes all historical DAG runs and task instances for the selected DAG.
""",
        ),
        "server_identity": Param(
            "ytdlp-ops-airflow-service",
@@ -458,6 +604,20 @@ with DAG(
            title="Redis Connection ID",
            description="The Airflow connection ID for the Redis server (used for 'delete_from_redis' and for fetching detailed account status).",
        ),
        "dag_id_to_manage": Param(
            "ytdlp_ops_worker_per_url",
            type="string",
            enum=["ytdlp_ops_worker_per_url", "ytdlp_ops_orchestrator"],
            title="[Airflow Meta] DAG ID",
            description="The DAG ID to perform the action on.",
        ),
        "clear_scope": Param(
            "last_run",
            type="string",
            enum=["last_run", "all_runs"],
            title="[Airflow Meta] Clear Scope",
            description="For 'clear_dag_runs' action, specifies the scope of runs to clear.",
        ),
    },
) as dag:
    system_management_task = PythonOperator(
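A sketch of exercising the new meta action from the CLI. The management DAG's id is not shown in this diff, so `<mgmt_dag_id>` is a placeholder; the other values come from the params above:

```bash
airflow dags trigger <mgmt_dag_id> \
  -c '{"entity": "airflow_meta", "action": "clear_dag_runs", "dag_id_to_manage": "ytdlp_ops_worker_per_url", "clear_scope": "last_run"}'
```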
@@ -18,6 +18,7 @@ from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.operators.python import PythonOperator, BranchPythonOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.bash import BashOperator
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.dates import days_ago
from airflow.models.variable import Variable
@@ -550,7 +551,7 @@ with DAG(
        "action": Param(
            "add_videos",
            type="string",
            enum=["add_videos", "clear_queue", "list_contents", "check_status", "requeue_failed"],
            enum=["add_videos", "clear_queue", "list_contents", "check_status", "requeue_failed", "inspect_celery_cluster"],
            title="Action",
            description="The management action to perform.",
        ),
@@ -690,6 +691,31 @@ with DAG(
        python_callable=requeue_failed_callable,
    )

    action_inspect_celery_cluster = BashOperator(
        task_id="action_inspect_celery_cluster",
        bash_command="""
            # Get the broker URL from Airflow config
            BROKER_URL=$(airflow config get-value celery broker_url)
            echo "--- Inspecting Celery Cluster (Broker: $BROKER_URL) ---"

            echo ""
            echo "--- Active Queues (shows queues with consumers) ---"
            celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect active_queues

            echo ""
            echo "--- Worker Stats (shows connected workers) ---"
            celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect stats

            echo ""
            echo "--- Active Tasks (tasks currently running) ---"
            celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect active

            echo ""
            echo "--- Reserved Tasks (tasks prefetched by workers) ---"
            celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect reserved
        """,
    )

    # --- Wire up tasks ---
    branch_on_action >> [
        action_add_videos,
@@ -697,4 +723,5 @@ with DAG(
        action_list_contents,
        action_check_status,
        action_requeue_failed,
        action_inspect_celery_cluster,
    ]
88 dags/ytdlp_ops_dispatcher.py (new file)
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
"""
DAG to dispatch work to ytdlp_ops_worker_per_url DAGs.
It pulls a URL from Redis and triggers a worker with a pinned queue.
"""

from __future__ import annotations
import logging
import socket
from datetime import timedelta

from airflow.decorators import task
from airflow.exceptions import AirflowSkipException
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.api.common.trigger_dag import trigger_dag
from airflow.utils.dates import days_ago

from utils.redis_utils import _get_redis_client

logger = logging.getLogger(__name__)

DEFAULT_QUEUE_NAME = 'video_queue'
DEFAULT_REDIS_CONN_ID = 'redis_default'

@task(queue='queue-dl')
def dispatch_url_to_worker(**context):
    """
    Pulls one URL from Redis, determines the current worker's dedicated queue,
    and triggers the main worker DAG to process the URL on that specific queue.
    """
    params = context['params']
    redis_conn_id = params['redis_conn_id']
    queue_name = params['queue_name']
    inbox_queue = f"{queue_name}_inbox"

    logger.info(f"Attempting to pull one URL from Redis queue '{inbox_queue}'...")
    client = _get_redis_client(redis_conn_id)
    url_bytes = client.lpop(inbox_queue)

    if not url_bytes:
        raise AirflowSkipException("Redis queue is empty. No work to dispatch.")

    url_to_process = url_bytes.decode('utf-8')
    logger.info(f"Pulled URL '{url_to_process}' from the queue.")

    # Determine the worker-specific queue for affinity
    hostname = socket.gethostname()
    worker_queue = f"queue-dl-{hostname}"
    logger.info(f"Running on worker '{hostname}'. Dispatching job to its dedicated queue '{worker_queue}'.")

    # The orchestrator passes all its params, which we will pass through to the worker.
    # We add the specific URL and the determined worker queue to the configuration.
    conf_to_pass = {**params, 'url_to_process': url_to_process, 'worker_queue': worker_queue}

    run_id = f"worker_run_{context['dag_run'].run_id}_{context['ts_nodash']}"

    logger.info(f"Triggering 'ytdlp_ops_worker_per_url' with run_id '{run_id}'")
    trigger_dag(
        dag_id='ytdlp_ops_worker_per_url',
        run_id=run_id,
        conf=conf_to_pass,
        replace_microseconds=False
    )

with DAG(
    dag_id='ytdlp_ops_dispatcher',
    default_args={'owner': 'airflow', 'retries': 0},
    schedule=None,  # This DAG is only triggered by the orchestrator.
    start_date=days_ago(1),
    catchup=False,
    tags=['ytdlp', 'worker', 'dispatcher'],
    doc_md="""
### YT-DLP URL Dispatcher

This DAG is responsible for dispatching a single URL to a worker with a pinned queue.
1. It pulls a single URL from the Redis `_inbox` queue.
2. It runs on the generic `queue-dl` to find any available worker.
3. It determines the worker's hostname and constructs a dedicated queue name (e.g., `queue-dl-dl-worker-1`).
4. It triggers the `ytdlp_ops_worker_per_url` DAG, passing the URL and the dedicated queue name in the configuration.

This dispatcher-led affinity, combined with the `task_instance_mutation_hook` cluster policy, ensures that all subsequent processing for that URL happens on the same machine.
The `ytdlp_ops_orchestrator` is used to trigger a batch of these dispatcher runs.
""",
    # All params are passed through from the orchestrator
    render_template_as_native_obj=True,
) as dag:
    dispatch_url_to_worker()
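For a manual smoke test, the dispatcher can presumably be triggered directly with the same keys the orchestrator would pass; the values below are the file's own defaults:

```bash
airflow dags trigger ytdlp_ops_dispatcher \
  -c '{"queue_name": "video_queue", "redis_conn_id": "redis_default"}'
```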
@@ -6,8 +6,9 @@
# Distributed under terms of the MIT license.

"""
DAG to orchestrate ytdlp_ops_worker_per_url DAG runs based on a defined policy.
It fetches URLs from a Redis queue and launches workers in controlled bunches.
DAG to orchestrate ytdlp_ops_dispatcher DAG runs based on a defined policy.
It fetches URLs from a Redis queue and launches dispatchers in controlled bunches,
which in turn trigger workers with affinity.
"""

from airflow import DAG
@@ -23,6 +24,7 @@ from datetime import timedelta
import logging
import random
import time
import json

# Import utility functions
from utils.redis_utils import _get_redis_client
@@ -43,29 +45,77 @@ DEFAULT_WORKERS_PER_BUNCH = 1
DEFAULT_WORKER_DELAY_S = 5
DEFAULT_BUNCH_DELAY_S = 20

DEFAULT_YT_AUTH_SERVICE_IP = Variable.get("YT_AUTH_SERVICE_IP", default_var="16.162.82.212")
DEFAULT_YT_AUTH_SERVICE_IP = Variable.get("YT_AUTH_SERVICE_IP", default_var="172.17.0.1")
DEFAULT_YT_AUTH_SERVICE_PORT = Variable.get("YT_AUTH_SERVICE_PORT", default_var=9080)

# --- Helper Functions ---

def _check_application_queue(redis_client, queue_base_name: str) -> int:
    """Checks and logs the length of the application's inbox queue."""
    inbox_queue_name = f"{queue_base_name}_inbox"
    logger.info(f"--- Checking Application Work Queue ---")
    try:
        q_len = redis_client.llen(inbox_queue_name)
        logger.info(f"Application work queue '{inbox_queue_name}' has {q_len} item(s).")
        return q_len
    except Exception as e:
        logger.error(f"Failed to check application queue '{inbox_queue_name}': {e}", exc_info=True)
        return -1  # Indicate an error

def _inspect_celery_queues(redis_client, queue_names: list):
    """Inspects Celery queues in Redis and logs their status."""
    logger.info("--- Inspecting Celery Queues in Redis ---")
    for queue_name in queue_names:
        try:
            q_len = redis_client.llen(queue_name)
            logger.info(f"Queue '{queue_name}': Length = {q_len}")

            if q_len > 0:
                logger.info(f"Showing up to 10 tasks in '{queue_name}':")
                # Fetch up to 10 items from the start of the list (queue)
                items_bytes = redis_client.lrange(queue_name, 0, 9)
                for i, item_bytes in enumerate(items_bytes):
                    try:
                        # Celery tasks are JSON-encoded strings
                        task_data = json.loads(item_bytes.decode('utf-8'))
                        # Pretty print for readability in logs
                        pretty_task_data = json.dumps(task_data, indent=2)
                        logger.info(f"  Task {i+1}:\n{pretty_task_data}")
                    except (json.JSONDecodeError, UnicodeDecodeError) as e:
                        logger.warning(f"  Task {i+1}: Could not decode/parse task data. Error: {e}. Raw: {item_bytes!r}")
        except Exception as e:
            logger.error(f"Failed to inspect queue '{queue_name}': {e}", exc_info=True)
    logger.info("--- End of Queue Inspection ---")


# --- Main Orchestration Callable ---

def orchestrate_workers_ignition_callable(**context):
    """
    Main orchestration logic. Triggers a specified number of worker DAGs
    Main orchestration logic. Triggers a specified number of dispatcher DAGs
    to initiate self-sustaining processing loops.
    """
    params = context['params']
    logger.info("Starting worker ignition sequence.")
    logger.info("Starting dispatcher ignition sequence.")

    worker_dag_id = 'ytdlp_ops_worker_per_url'
    dag_model = DagModel.get_dagmodel(worker_dag_id)
    dispatcher_dag_id = 'ytdlp_ops_dispatcher'
    dag_model = DagModel.get_dagmodel(dispatcher_dag_id)
    if dag_model and dag_model.is_paused:
        raise AirflowException(f"Worker DAG '{worker_dag_id}' is paused. Cannot start worker loops.")
        raise AirflowException(f"Dispatcher DAG '{dispatcher_dag_id}' is paused. Cannot start dispatcher loops.")

    total_workers = int(params['total_workers'])
    workers_per_bunch = int(params['workers_per_bunch'])

    # --- Input Validation ---
    if total_workers <= 0:
        logger.warning(f"'total_workers' is {total_workers}. No workers will be started. Skipping ignition.")
        raise AirflowSkipException(f"No workers to start (total_workers={total_workers}).")

    if workers_per_bunch <= 0:
        logger.error(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}. Aborting.")
        raise AirflowException(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}.")
    # --- End Input Validation ---

    worker_delay = int(params['delay_between_workers_s'])
    bunch_delay = int(params['delay_between_bunches_s'])
@ -73,55 +123,85 @@ def orchestrate_workers_ignition_callable(**context):
|
||||
worker_indices = list(range(total_workers))
bunches = [worker_indices[i:i + workers_per_bunch] for i in range(0, len(worker_indices), workers_per_bunch)]

# Get and parse worker hosts (which are used as queue names)
worker_hosts_str = params.get('worker_hosts', 'celery@dl002')
worker_hosts = [h.strip() for h in worker_hosts_str.split(',') if h.strip()]
if not worker_hosts:
    raise AirflowException("The 'worker_hosts' parameter cannot be empty.")
# --- Inspect Queues before starting ---
worker_queue = 'queue-dl'  # The static queue the worker DAG uses.
try:
    redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
    redis_client = _get_redis_client(redis_conn_id)

    # First, check the application queue for work
    app_queue_len = _check_application_queue(redis_client, params['queue_name'])

    if params.get('skip_if_queue_empty') and app_queue_len == 0:
        logger.info("'skip_if_queue_empty' is True and application queue is empty. Skipping worker ignition.")
        raise AirflowSkipException("Application work queue is empty.")

    logger.info(f"Plan: Starting {total_workers} total workers in {len(bunches)} bunches, distributing across hosts (queues): {worker_hosts}")
    # Then, inspect the target Celery queue for debugging
    _inspect_celery_queues(redis_client, [worker_queue])
except AirflowSkipException:
    raise  # Re-raise to let Airflow handle the skip
except Exception as e:
    logger.error(f"Could not inspect queues due to an error: {e}. Continuing with ignition sequence.")
# --- End of Inspection ---

logger.info(f"Plan: Triggering {total_workers} total dispatcher runs in {len(bunches)} bunches. Each run will attempt to process one URL.")

dag_run_id = context['dag_run'].run_id
total_triggered = 0

for i, bunch in enumerate(bunches):
    logger.info(f"--- Igniting Bunch {i+1}/{len(bunches)} (contains {len(bunch)} worker(s)) ---")
    logger.info(f"--- Triggering Bunch {i+1}/{len(bunches)} (contains {len(bunch)} dispatcher(s)) ---")
    for j, _ in enumerate(bunch):
        # Create a unique run_id for each worker loop starter
        run_id = f"ignited_{dag_run_id}_{total_triggered}"
        # Create a unique run_id for each dispatcher run
        run_id = f"dispatched_{dag_run_id}_{total_triggered}"

        # Pass all orchestrator params to the worker so it has the full context for its loop.
        # Pass all orchestrator params to the dispatcher, which will then pass them to the worker.
        conf_to_pass = {p: params[p] for p in params}
        # The worker pulls its own URL, so we don't pass one.
        if 'url' in conf_to_pass:
            del conf_to_pass['url']

        # Assign host/queue in a round-robin fashion
        queue_for_worker = worker_hosts[total_triggered % len(worker_hosts)]
        conf_to_pass['queue'] = queue_for_worker

        logger.info(f"Igniting worker {j+1}/{len(bunch)} in bunch {i+1} (loop {total_triggered + 1}/{total_workers}) on host (queue) '{queue_for_worker}' (Run ID: {run_id})")
        logger.debug(f"Full conf for worker loop {run_id}: {conf_to_pass}")
        logger.info(f"Triggering dispatcher {j+1}/{len(bunch)} in bunch {i+1} (run {total_triggered + 1}/{total_workers}) (Run ID: {run_id})")
        logger.debug(f"Full conf for dispatcher run {run_id}: {conf_to_pass}")

        trigger_dag(
            dag_id=worker_dag_id,
            dag_id=dispatcher_dag_id,
            run_id=run_id,
            conf=conf_to_pass,
            replace_microseconds=False
        )
        total_triggered += 1

        # Delay between workers in a bunch
        # Delay between dispatches in a bunch
        if j < len(bunch) - 1:
            logger.info(f"Waiting {worker_delay}s before next worker in bunch...")
            logger.info(f"Waiting {worker_delay}s before next dispatcher in bunch...")
            time.sleep(worker_delay)

    # Delay between bunches
    if i < len(bunches) - 1:
        logger.info(f"--- Bunch {i+1} ignited. Waiting {bunch_delay}s before next bunch... ---")
        logger.info(f"--- Bunch {i+1} triggered. Waiting {bunch_delay}s before next bunch... ---")
        time.sleep(bunch_delay)

logger.info(f"--- Ignition sequence complete. Total worker loops started: {total_triggered}. ---")
logger.info(f"--- Ignition sequence complete. Total dispatcher runs triggered: {total_triggered}. ---")

# --- Final Queue Inspection ---
final_check_delay = 30  # seconds
logger.info(f"Waiting {final_check_delay}s for a final queue status check to see if workers picked up tasks...")
time.sleep(final_check_delay)

try:
    redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
    redis_client = _get_redis_client(redis_conn_id)

    # Log connection details for debugging broker mismatch issues
    conn_kwargs = redis_client.connection_pool.connection_kwargs
    logger.info(f"Final check using Redis connection '{redis_conn_id}': "
                f"host={conn_kwargs.get('host')}, "
                f"port={conn_kwargs.get('port')}, "
                f"db={conn_kwargs.get('db')}")

    _inspect_celery_queues(redis_client, [worker_queue])
    logger.info("Final queue inspection complete. If queues are not empty, workers have not picked up tasks yet. "
                "If queues are empty, workers have started processing.")
except Exception as e:
    logger.error(f"Could not perform final queue inspection: {e}. This does not affect worker ignition.")
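For clarity, the bunching and round-robin assignment used above can be illustrated with a minimal standalone sketch (illustrative values only, not part of the DAG code):

# Standalone sketch of the bunching + round-robin logic above.
total_workers = 7
workers_per_bunch = 3
worker_hosts = ['celery@dl002', 'celery@dl003']  # example queue names

worker_indices = list(range(total_workers))
bunches = [worker_indices[i:i + workers_per_bunch]
           for i in range(0, len(worker_indices), workers_per_bunch)]
print(bunches)  # -> [[0, 1, 2], [3, 4, 5], [6]]

# Each triggered run is pinned to a host/queue in round-robin order.
for total_triggered in range(total_workers):
    print(total_triggered, worker_hosts[total_triggered % len(worker_hosts)])
    # -> 0 celery@dl002, 1 celery@dl003, 2 celery@dl002, ...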
@@ -146,39 +226,41 @@ with DAG(
    schedule_interval=None,  # This DAG runs only when triggered.
    max_active_runs=1,  # Only one ignition process should run at a time.
    catchup=False,
    description='Ignition system for ytdlp_ops_worker_per_url DAGs. Starts self-sustaining worker loops.',
    description='Ignition system for ytdlp_ops_dispatcher DAGs. Starts self-sustaining worker loops via dispatchers.',
    doc_md="""
### YT-DLP Worker Ignition System

This DAG acts as an "ignition system" to start one or more self-sustaining worker loops.
It does **not** process URLs itself. Its only job is to trigger a specified number of `ytdlp_ops_worker_per_url` DAGs.
It does **not** process URLs itself. Its only job is to trigger a specified number of `ytdlp_ops_dispatcher` DAGs,
which in turn pull URLs and trigger `ytdlp_ops_worker_per_url` with worker affinity.

#### How it Works:

1. **Manual Trigger:** You manually trigger this DAG with parameters defining how many worker loops to start (`total_workers`), in what configuration (`workers_per_bunch`, delays).
2. **Ignition:** The orchestrator triggers the initial set of worker DAGs in a "fire-and-forget" manner, passing all its configuration parameters to them.
3. **Completion:** Once all initial workers have been triggered, the orchestrator's job is complete.
1. **Manual Trigger:** You manually trigger this DAG with parameters defining how many dispatcher loops to start (`total_workers`), in what configuration (`workers_per_bunch`, delays).
2. **Ignition:** The orchestrator triggers the initial set of dispatcher DAGs in a "fire-and-forget" manner, passing all its configuration parameters to them.
3. **Completion:** Once all initial dispatchers have been triggered, the orchestrator's job is complete.

The workers then take over, each running its own continuous processing loop.
The dispatchers then take over, each pulling a URL, determining affinity, and triggering a worker DAG.
""",
    tags=['ytdlp', 'mgmt', 'master'],
    params={
        # --- Ignition Control Parameters ---
        'total_workers': Param(DEFAULT_TOTAL_WORKERS, type="integer", description="Total number of worker loops to start."),
        'workers_per_bunch': Param(DEFAULT_WORKERS_PER_BUNCH, type="integer", description="Number of workers to start in each bunch."),
        'delay_between_workers_s': Param(DEFAULT_WORKER_DELAY_S, type="integer", description="Delay in seconds between starting each worker within a bunch."),
        'total_workers': Param(DEFAULT_TOTAL_WORKERS, type="integer", description="Total number of dispatcher loops to start."),
        'workers_per_bunch': Param(DEFAULT_WORKERS_PER_BUNCH, type="integer", description="Number of dispatchers to start in each bunch."),
        'delay_between_workers_s': Param(DEFAULT_WORKER_DELAY_S, type="integer", description="Delay in seconds between starting each dispatcher within a bunch."),
        'delay_between_bunches_s': Param(DEFAULT_BUNCH_DELAY_S, type="integer", description="Delay in seconds between starting each bunch."),
        'skip_if_queue_empty': Param(False, type="boolean", title="[Ignition Control] Skip if Queue Empty", description="If True, the orchestrator will not start any dispatchers if the application's work queue is empty."),

        # --- Worker Passthrough Parameters ---
        'worker_hosts': Param('celery@dl002', type="string", title="[Worker Param] Worker Hosts", description="Comma-separated list of Celery worker hostnames (e.g., 'celery@dl002') to distribute workers to. These are used as queue names. Workers will be assigned to these queues in a round-robin fashion."),
        'on_bannable_failure': Param(
            'retry_with_new_account',
            type="string",
            enum=['stop_loop', 'retry_with_new_account'],
            enum=['stop_loop', 'retry_with_new_account', 'retry_without_ban', 'retry_and_ban_account_only', 'retry_on_connection_error'],
            title="[Worker Param] On Bannable Failure Policy",
description="Policy for a worker when a bannable error occurs. "
|
||||
"'stop_loop': Ban the account, mark URL as failed, and stop the worker's loop. "
|
||||
"'retry_with_new_account': Ban the failed account, retry ONCE with a new account. If retry fails, ban the second account and proxy, then stop."
|
||||
"'retry_on_connection_error': If a connection error (e.g. SOCKS timeout) occurs, retry with a new account but do NOT ban the first account/proxy. If retry fails, stop the loop without banning."
|
||||
        ),
        'queue_name': Param(DEFAULT_QUEUE_NAME, type="string", description="[Worker Param] Base name for Redis queues."),
        'redis_conn_id': Param(DEFAULT_REDIS_CONN_ID, type="string", description="[Worker Param] Airflow Redis connection ID."),
@@ -200,6 +282,6 @@ with DAG(
orchestrate_task.doc_md = """
### Start Worker Loops
This is the main task that executes the ignition policy.
- It triggers `ytdlp_ops_worker_per_url` DAGs according to the batch settings.
- It passes all its parameters down to the workers, which will use them to run their continuous loops.
- It triggers `ytdlp_ops_dispatcher` DAGs according to the batch settings.
- It passes all its parameters down to the dispatchers, which will use them to trigger workers.
"""
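A usage sketch: assuming this orchestrator DAG is registered under an id like `ytdlp_ops_orchestrator` (the DAG id itself is not visible in these hunks), it can be kicked off from the Airflow CLI with parameter overrides, e.g. `airflow dags trigger ytdlp_ops_orchestrator --conf '{"total_workers": 8, "workers_per_bunch": 2, "skip_if_queue_empty": true}'`; any parameter not overridden falls back to the `Param` defaults above.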
File diff suppressed because it is too large
@@ -1,127 +0,0 @@
services:
  config-generator:
    image: python:3.9-slim
    container_name: ytdlp-ops-config-generator
    working_dir: /app
    volumes:
      # Mount the current directory to access the template, .env, and script
      - .:/app
    env_file:
      - ./.env
    environment:
      ENVOY_CLUSTER_TYPE: STRICT_DNS
      # Pass worker count and base port to ensure Envoy config matches the workers
      YTDLP_WORKERS: ${YTDLP_WORKERS:-3}
      YTDLP_BASE_PORT: ${YTDLP_BASE_PORT:-9090}
    # This command cleans up old runs, installs jinja2, and generates the config.
    command: >
      sh -c "rm -rf ./envoy.yaml &&
             pip install --no-cache-dir -q jinja2 &&
             python3 ./generate_envoy_config.py"

  envoy:
    image: envoyproxy/envoy:v1.29-latest
    container_name: envoy-thrift-lb
    restart: unless-stopped
    volumes:
      # Mount the generated config file from the host
      - ./envoy.yaml:/etc/envoy/envoy.yaml:ro
    ports:
      # This is the single public port for all Thrift traffic
      - "${ENVOY_PORT:-9080}:${ENVOY_PORT:-9080}"
    networks:
      - airflow_prod_proxynet
    depends_on:
      config-generator:
        condition: service_completed_successfully
      ytdlp-ops:
        condition: service_started

  camoufox:
    build:
      context: ./camoufox # Path relative to the docker-compose file
      dockerfile: Dockerfile
      args:
        VNC_PASSWORD: ${VNC_PASSWORD:-supersecret} # Use environment variable or default
    ports:
      # Optionally expose the camoufox port to the host for debugging
      - "12345:12345"
      - "5900:5900" # Expose VNC port to the host
    networks:
      - airflow_prod_proxynet
    command: [
      "--ws-host", "0.0.0.0",
      "--port", "12345",
      "--ws-path", "mypath",
      "--proxy-url", "socks5://${SOCKS5_SOCK_SERVER_IP:-89.253.221.173}:1084",
      "--locale", "en-US",
      "--extensions", "/app/extensions/google_sign_in_popup_blocker-1.0.2.xpi,/app/extensions/spoof_timezone-0.3.4.xpi,/app/extensions/youtube_ad_auto_skipper-0.6.0.xpi"
    ]
    restart: unless-stopped
    # Add healthcheck if desired

  ytdlp-ops:
    image: pangramia/ytdlp-ops-server:latest # Don't comment out or remove, build is performed externally
    container_name: ytdlp-ops-workers # Renamed for clarity
    depends_on:
      - camoufox # Ensure camoufox starts first
    # Ports are no longer exposed directly. Envoy will connect to them on the internal network.
    env_file:
      - ./.env # Path is relative to the compose file
    volumes:
      - context-data:/app/context-data
      # Mount the plugin source code for live updates without rebuilding the image.
      # Assumes the plugin source is in a 'bgutil-ytdlp-pot-provider' directory
      # next to your docker-compose.yaml file.
      #- ./bgutil-ytdlp-pot-provider:/app/bgutil-ytdlp-pot-provider
    networks:
      - airflow_prod_proxynet
    command:
      - "--script-dir"
      - "/app"
      - "--context-dir"
      - "/app/context-data"
      # Use environment variables for port and worker count
      - "--port"
      - "${YTDLP_BASE_PORT:-9090}"
      - "--workers"
      - "${YTDLP_WORKERS:-3}"
      - "--clients"
      - "web,ios,android,mweb"
      - "--proxies"
      #- "socks5://sslocal-rust-1081:1081,socks5://sslocal-rust-1082:1082,socks5://sslocal-rust-1083:1083,socks5://sslocal-rust-1084:1084,socks5://sslocal-rust-1085:1085"
      - "socks5://${SOCKS5_SOCK_SERVER_IP:-89.253.221.173}:1084"
      #
      # Add the endpoint argument pointing to the camoufox service
      - "--endpoint"
      - "ws://camoufox:12345/mypath"
      - "--probe"
      # Add --camouflage-only if you don't want ytdlp-ops to manage the browser directly
      - "--camouflage-only"
      # Add flag to print full tokens in logs by default
      - "--print-tokens"
      # Add server identity and Redis connection details
      - "--server-identity"
      - "ytdlp-ops-airflow-service"
      - "--redis-host"
      - "${REDIS_HOST:-redis}"
      - "--redis-port"
      - "${REDIS_PORT:-6379}"
      - "--redis-password"
      - "${REDIS_PASSWORD}"
      # Add account cooldown parameters (values are in minutes)
      - "--account-active-duration-min"
      - "${ACCOUNT_ACTIVE_DURATION_MIN:-30}"
      - "--account-cooldown-duration-min"
      - "${ACCOUNT_COOLDOWN_DURATION_MIN:-60}"
      # Add flag to clean context directory on start
      - "--clean-context-dir"
    restart: unless-stopped
    pull_policy: always

volumes:
  context-data:
    name: context-data

networks:
  airflow_prod_proxynet: {}
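For reference, the variables this compose file reads from `./.env` look roughly like the sketch below; every name appears in the file above, but the values here are placeholders, not the real deployment settings:

# .env sketch (placeholder values, assumptions only)
YTDLP_WORKERS=3
YTDLP_BASE_PORT=9090
ENVOY_PORT=9080
VNC_PASSWORD=supersecret
SOCKS5_SOCK_SERVER_IP=89.253.221.173
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=change-me
ACCOUNT_ACTIVE_DURATION_MIN=30
ACCOUNT_COOLDOWN_DURATION_MIN=60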
@@ -1,194 +0,0 @@
#!/usr/bin/env python3
import os
import subprocess
import shutil
from pathlib import Path
import xml.etree.ElementTree as ET

# Update paths to match actual project structure
THRIFT_MODEL_DIR = Path("thrift_model")
SERVICES_DIR = THRIFT_MODEL_DIR / "services"
DATA_DIR = THRIFT_MODEL_DIR / "data"
GEN_PY_DIR = THRIFT_MODEL_DIR / "gen_py"

def get_version_from_pom():
    """Parse version from pom.xml"""
    pom_path = THRIFT_MODEL_DIR / "pom.xml"
    tree = ET.parse(pom_path)
    root = tree.getroot()

    # XML namespaces
    ns = {'mvn': 'http://maven.apache.org/POM/4.0.0'}

    version = root.find('mvn:version', ns).text
    if version.endswith('-SNAPSHOT'):
        version = version.replace('-SNAPSHOT', '.dev0')
    return version

def find_thrift_files():
    """Find all .thrift files in the thrift_model directory"""
    data_files = list(DATA_DIR.glob("*.thrift"))
    service_files = list(SERVICES_DIR.glob("*.thrift"))
    # Process data files first (for dependencies), then service files
    return data_files + service_files

def generate_python_code(thrift_files):
    """Generate Python code from Thrift files"""
    # First process data files (for dependencies)
    data_files = [f for f in thrift_files if f.parent == DATA_DIR]
    service_files = [f for f in thrift_files if f.parent == SERVICES_DIR]

    # Process in the right order: first data files, then service files
    ordered_files = data_files + service_files

    for thrift_file in ordered_files:
        print(f"Generating code for {thrift_file}...")
        try:
            subprocess.run([
                "thrift",
                "--gen", "py",
                "-out", str(GEN_PY_DIR),
                str(thrift_file)
            ], check=True)
            print(f"Successfully generated code for {thrift_file}")
        except subprocess.CalledProcessError as e:
            print(f"Error generating code for {thrift_file}: {e}")
            raise

def create_init_files():
    """Create __init__.py files in all generated directories"""
    for root, dirs, files in os.walk(GEN_PY_DIR):
        path = Path(root)
        init_file = path / "__init__.py"
        if not init_file.exists():
            print(f"Creating __init__.py in {path}")
            with open(init_file, 'w') as f:
                # For the top-level pangramia directory, we don't need special content
                if path.name == "pangramia":
                    pass
                # For module directories, add the standard __all__ pattern if there are modules
                elif any(f.endswith('.py') and f != '__init__.py' for f in files):
                    modules = [f[:-3] for f in files if f.endswith('.py') and f != '__init__.py']
                    if modules:
                        f.write(f"__all__ = {repr(modules)}\n")

    # Ensure we have an __init__.py in the thrift_model directory
    thrift_model_init = THRIFT_MODEL_DIR / "__init__.py"
    if not thrift_model_init.exists():
        print(f"Creating {thrift_model_init}")
        thrift_model_init.touch()

def clean_gen_py():
    """Clean the gen_py directory before generation"""
    if GEN_PY_DIR.exists():
        print(f"Cleaning {GEN_PY_DIR}...")
        shutil.rmtree(GEN_PY_DIR)
        print(f"Cleaned {GEN_PY_DIR}")

    # Recreate the directory
    GEN_PY_DIR.mkdir(parents=True, exist_ok=True)

def update_version_file():
    """Update the version in __init__.py"""
    version = get_version_from_pom()
    print(f"Detected version from pom.xml: {version}")

    # Update the version in __init__.py
    init_path = Path("__init__.py")
    if init_path.exists():
        with open(init_path, 'r') as f:
            content = f.read()

        # Replace the VERSION assignment if it exists
        if "VERSION = " in content:
            new_content = []
            for line in content.splitlines():
                if line.startswith("VERSION = "):
                    new_content.append(f'VERSION = "{version}"')
                else:
                    new_content.append(line)

            with open(init_path, 'w') as f:
                f.write('\n'.join(new_content))

            print(f"Updated version in __init__.py to {version}")

def main():
    # Ensure directories exist
    SERVICES_DIR.mkdir(parents=True, exist_ok=True)
    DATA_DIR.mkdir(parents=True, exist_ok=True)

    # Clean existing generated code
    clean_gen_py()

    # Find all Thrift files
    thrift_files = find_thrift_files()
    if not thrift_files:
        print("No .thrift files found in thrift_model directory")
        return

    print(f"Found {len(thrift_files)} Thrift files to process")

    # Generate Python code
    generate_python_code(thrift_files)

    # Create __init__.py files
    create_init_files()

    # Update version file
    update_version_file()

    # Create a symbolic link to make the modules importable
    try:
        # Check if we're in the project root
        if not (Path.cwd() / "thrift_model").exists():
            print("Warning: Not running from project root, symbolic link may not work correctly")

        # Create pangramia directory if it doesn't exist
        pangramia_dir = Path("pangramia")
        if not pangramia_dir.exists():
            pangramia_dir.mkdir(parents=True, exist_ok=True)
            (pangramia_dir / "__init__.py").touch()
            print(f"Created {pangramia_dir} directory with __init__.py")

        # Create symbolic link from pangramia -> thrift_model/gen_py/pangramia
        link_path = Path("pangramia")  # Link in the project root
        target_path = GEN_PY_DIR / "pangramia"

        # Ensure the target directory exists before creating the link
        if not target_path.exists():
            print(f"Warning: Target directory {target_path} does not exist, cannot create symbolic link")
        else:
            # Remove existing link or directory at the destination
            if link_path.is_symlink():
                print(f"Removing existing symbolic link: {link_path}")
                link_path.unlink()
            elif link_path.is_dir():
                print(f"Removing existing directory: {link_path}")
                shutil.rmtree(link_path)
            elif link_path.exists():  # Handle case where it might be a file
                print(f"Removing existing file: {link_path}")
                link_path.unlink()

            # Create the new symbolic link
            try:
                # Use relative path for the link source for better portability
                relative_target = os.path.relpath(target_path, start=link_path.parent)
                os.symlink(relative_target, link_path, target_is_directory=True)
                print(f"Created symbolic link: {link_path} -> {relative_target}")
            except Exception as e:
                print(f"Error creating symbolic link: {e}")
                print("You may need to manually add the generated code to your Python path")
        # This else block corresponds to the `if not target_path.exists():` check further up
        # else:
        #     print(f"Warning: Target directory {yt_target} does not exist, cannot create symbolic link")
    except Exception as e:
        print(f"An unexpected error occurred during symlink setup: {e}")
        # Optionally re-raise or handle more specifically

    print("\nThrift code generation completed successfully!")
    print(f"Generated Python code in {GEN_PY_DIR}")
    print(f"Current version: {get_version_from_pom()}")

if __name__ == "__main__":
    main()
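As removed, this generator assumed it was run from the project root, e.g. `python3 generate-thrift.py`: it compiled `thrift_model/data/*.thrift` before `thrift_model/services/*.thrift` (data types first, since the service definitions depend on them), wrote the output to `thrift_model/gen_py/`, and symlinked `./pangramia -> thrift_model/gen_py/pangramia` so the generated packages were importable from the root.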
@@ -1 +0,0 @@
thrift_model/gen_py/pangramia
@@ -1,38 +1,2 @@
from setuptools import setup, find_packages
import xml.etree.ElementTree as ET
import os

def get_version_from_pom():
    """Parse version from pom.xml"""
    pom_path = os.path.join(os.path.dirname(__file__), 'thrift_model/pom.xml')
    tree = ET.parse(pom_path)
    root = tree.getroot()

    # XML namespaces
    ns = {'mvn': 'http://maven.apache.org/POM/4.0.0'}

    version = root.find('mvn:version', ns).text
    if version.endswith('-SNAPSHOT'):
        version = version.replace('-SNAPSHOT', '.dev0')
    return version

VERSION = get_version_from_pom()

setup(
    name='yt_ops_services',
    version=VERSION,
    package_data={
        'yt_ops_services': ['thrift_model/pom.xml'],
    },
    packages=find_packages(where='.', exclude=['tests*']),
    package_dir={
        '': '.',  # Look for packages in the root directory
    },
    include_package_data=True,
    install_requires=[
        'thrift>=0.16.0,<=0.20.0',
        'python-dotenv>=1.0.0',
        'psutil',
    ],
    python_requires='>=3.9',
)
# This file is no longer needed and will be removed.
# The packaging logic has been consolidated into the root setup.py file.
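After this consolidation, installation happens from the repository root, e.g. `pip install .` (the consolidated root setup.py itself is not shown in this diff), which builds the `yt_ops_services` package in place of this per-directory setup.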
ytdlp-ops-auth/thrift_model/.gitignore (vendored)
@@ -1 +0,0 @@
target/
@@ -1,95 +0,0 @@
namespace py pangramia.yt.common
namespace java com.pangramia.yt.common

typedef string JobID
typedef string Timestamp


enum JobState {
    SUCCESS,
    FAIL,
    BOT_FORBIDDEN_ON_URL_ACCESS,
    BOT_FORBIDDEN_ON_FILE_DOWNLOAD,
    BOT_CAPTCHA,
    BOT_AUTH_RELOGIN_REQUIRED,
    BOT_AUTH_SMS_REQUIRED,
    BOT_AUTH_DEVICE_QR_REQUIRED,
    BOT_ACCOUNT_BANNED,
    BOT_IP_BANNED
}

struct JobTokenData {
    1: optional string infoJson,
    2: optional string ytdlpCommand,
    3: optional string socks,
    4: optional JobID jobId,
    5: optional string url,
    6: optional string cookiesBlob,
}


enum TokenUpdateMode {
    AUTOREFRESH_AND_REMAIN_ANONYMOUS,
    AUTOREFRESH_AND_ALLOW_AUTH,
    AUTOREFRESH_AND_ONLY_AUTH,
    CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH,
    CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS,
    CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH,
    AUTO, // AUTOREFRESH_AND_ONLY_AUTH
}


struct AccountData {
    1: required string username,
    2: required string password,
    3: optional string countryCode
}

struct ProxyData {
    1: required string proxyUrl,
    2: optional string countryCode
}


enum AccountPairState {
    ACTIVE,
    PAUSED,
    REMOVED,
    IN_PROGRESS,
    ALL
}


struct AccountPairWithState {
    1: required string accountId,
    2: required string proxyId,
    3: optional AccountPairState accountPairState
    4: optional string machineId,
}

struct JobData {
    1: required string jobId,
    2: required string url,
    3: required string cookiesBlob,
    4: required string potoken,
    5: required string visitorId,
    6: required string ytdlpCommand,
    7: required string createdTime,
    8: required map<string,string> telemetry,
    9: required JobState state,
    10: optional string errorMessage,
    11: optional string socks5Id
}

struct RichCollectionPagination {
    1: required bool hasNext,
    2: required i32 totalCount,
    3: required i32 page,
    4: required i32 pageSize
}

struct RichCollectionJobData {
    1: required list<JobData> items,
    2: required RichCollectionPagination pagination
}
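A minimal sketch of how these definitions are consumed from Python, assuming the bindings have been generated with `thrift --gen py` (as in the removed generator script) and that `gen_py` is on the import path:

# Sketch: using the generated bindings for common.thrift (assumptions noted above).
from pangramia.yt.common.ttypes import JobState, JobTokenData

token = JobTokenData(jobId="job-0001", url="https://www.youtube.com/watch?v=example")
print(token.jobId, token.url)
print(JobState.SUCCESS)  # Thrift enums compile to plain ints in Python (0 here)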
@@ -1,14 +0,0 @@
namespace py pangramia.yt.exceptions
namespace java com.pangramia.yt.exceptions

exception PBServiceException {
    1: required string message,
    2: optional string errorCode,
    3: optional map<string, string> context
}

exception PBUserException {
    1: required string message,
    2: optional string errorCode,
    3: optional map<string, string> context
}
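On the wire these surface as typed service errors; a client-side handling sketch (the `client` object here is a hypothetical generated service client, not something defined in this diff):

# Sketch: handling the generated exception types on the client side.
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException

try:
    client.ping()  # `client` is a hypothetical generated Thrift client
except PBServiceException as e:
    print(f"service error {e.errorCode or 'n/a'}: {e.message}")
except PBUserException as e:
    print(f"user error: {e.message} (context={e.context})")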
@@ -1,131 +0,0 @@
#!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

import sys
import pprint
if sys.version_info[0] > 2:
    from urllib.parse import urlparse
else:
    from urlparse import urlparse
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

from pangramia.base_service import BaseService
from pangramia.base_service.ttypes import *

if len(sys.argv) <= 1 or sys.argv[1] == '--help':
    print('')
    print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
    print('')
    print('Functions:')
    print('  bool ping()')
    print('  bool reportError(string message, details)')
    print('  void shutdown()')
    print('')
    sys.exit(0)

pp = pprint.PrettyPrinter(indent=2)
host = 'localhost'
port = 9090
uri = ''
framed = False
ssl = False
validate = True
ca_certs = None
keyfile = None
certfile = None
http = False
argi = 1

if sys.argv[argi] == '-h':
    parts = sys.argv[argi + 1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    argi += 2

if sys.argv[argi] == '-u':
    url = urlparse(sys.argv[argi + 1])
    parts = url[1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    else:
        port = 80
    uri = url[2]
    if url[4]:
        uri += '?%s' % url[4]
    http = True
    argi += 2

if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
    framed = True
    argi += 1

if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
    ssl = True
    argi += 1

if sys.argv[argi] == '-novalidate':
    validate = False
    argi += 1

if sys.argv[argi] == '-ca_certs':
    ca_certs = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-keyfile':
    keyfile = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-certfile':
    certfile = sys.argv[argi+1]
    argi += 2

cmd = sys.argv[argi]
args = sys.argv[argi + 1:]

if http:
    transport = THttpClient.THttpClient(host, port, uri)
else:
    if ssl:
        socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
    else:
        socket = TSocket.TSocket(host, port)
    if framed:
        transport = TTransport.TFramedTransport(socket)
    else:
        transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol(transport)
client = BaseService.Client(protocol)
transport.open()

if cmd == 'ping':
    if len(args) != 0:
        print('ping requires 0 args')
        sys.exit(1)
    pp.pprint(client.ping())

elif cmd == 'reportError':
    if len(args) != 2:
        print('reportError requires 2 args')
        sys.exit(1)
    pp.pprint(client.reportError(args[0], eval(args[1]),))

elif cmd == 'shutdown':
    if len(args) != 0:
        print('shutdown requires 0 args')
        sys.exit(1)
    pp.pprint(client.shutdown())

else:
    print('Unrecognized method %s' % cmd)
    sys.exit(1)

transport.close()
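This is the stock Thrift-generated debug CLI; per its own usage string it was invoked like `python BaseService-remote -h localhost:9090 ping` (host and port here are illustrative), with `-framed`/`-ssl` switching the transport to match the server's configuration.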
@@ -1,564 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []


class Iface(object):
    def ping(self):
        pass

    def reportError(self, message, details):
        """
        Parameters:
         - message
         - details

        """
        pass

    def shutdown(self):
        pass


class Client(Iface):
    def __init__(self, iprot, oprot=None):
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def ping(self):
        self.send_ping()
        return self.recv_ping()

    def send_ping(self):
        self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
        args = ping_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_ping(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = ping_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.serviceExp is not None:
            raise result.serviceExp
        if result.userExp is not None:
            raise result.userExp
        raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result")

    def reportError(self, message, details):
        """
        Parameters:
         - message
         - details

        """
        self.send_reportError(message, details)
        return self.recv_reportError()

    def send_reportError(self, message, details):
        self._oprot.writeMessageBegin('reportError', TMessageType.CALL, self._seqid)
        args = reportError_args()
        args.message = message
        args.details = details
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_reportError(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = reportError_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.serviceExp is not None:
            raise result.serviceExp
        if result.userExp is not None:
            raise result.userExp
        raise TApplicationException(TApplicationException.MISSING_RESULT, "reportError failed: unknown result")

    def shutdown(self):
        self.send_shutdown()

    def send_shutdown(self):
        self._oprot.writeMessageBegin('shutdown', TMessageType.ONEWAY, self._seqid)
        args = shutdown_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()


class Processor(Iface, TProcessor):
    def __init__(self, handler):
        self._handler = handler
        self._processMap = {}
        self._processMap["ping"] = Processor.process_ping
        self._processMap["reportError"] = Processor.process_reportError
        self._processMap["shutdown"] = Processor.process_shutdown
        self._on_message_begin = None

    def on_message_begin(self, func):
        self._on_message_begin = func

    def process(self, iprot, oprot):
        (name, type, seqid) = iprot.readMessageBegin()
        if self._on_message_begin:
            self._on_message_begin(name, type, seqid)
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_ping(self, seqid, iprot, oprot):
        args = ping_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = ping_result()
        try:
            result.success = self._handler.ping()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
            msg_type = TMessageType.REPLY
            result.serviceExp = serviceExp
        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
            msg_type = TMessageType.REPLY
            result.userExp = userExp
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("ping", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_reportError(self, seqid, iprot, oprot):
        args = reportError_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = reportError_result()
        try:
            result.success = self._handler.reportError(args.message, args.details)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except pangramia.yt.exceptions.ttypes.PBServiceException as serviceExp:
            msg_type = TMessageType.REPLY
            result.serviceExp = serviceExp
        except pangramia.yt.exceptions.ttypes.PBUserException as userExp:
            msg_type = TMessageType.REPLY
            result.userExp = userExp
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("reportError", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_shutdown(self, seqid, iprot, oprot):
        args = shutdown_args()
        args.read(iprot)
        iprot.readMessageEnd()
        try:
            self._handler.shutdown()
        except TTransport.TTransportException:
            raise
        except Exception:
            logging.exception('Exception in oneway handler')

# HELPER FUNCTIONS AND STRUCTURES


class ping_args(object):


    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ping_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(ping_args)
ping_args.thrift_spec = (
)


class ping_result(object):
    """
    Attributes:
     - success
     - serviceExp
     - userExp

    """


    def __init__(self, success=None, serviceExp=None, userExp=None,):
        self.success = success
        self.serviceExp = serviceExp
        self.userExp = userExp

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ping_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.serviceExp is not None:
            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
            self.serviceExp.write(oprot)
            oprot.writeFieldEnd()
        if self.userExp is not None:
            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
            self.userExp.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(ping_result)
ping_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
)


class reportError_args(object):
    """
    Attributes:
     - message
     - details

    """


    def __init__(self, message=None, details=None,):
        self.message = message
        self.details = details

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.MAP:
                    self.details = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.details[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('reportError_args')
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
            oprot.writeFieldEnd()
        if self.details is not None:
            oprot.writeFieldBegin('details', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.details))
            for kiter7, viter8 in self.details.items():
                oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
                oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(reportError_args)
reportError_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'message', 'UTF8', None, ),  # 1
    (2, TType.MAP, 'details', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 2
)


class reportError_result(object):
    """
    Attributes:
     - success
     - serviceExp
     - userExp

    """


    def __init__(self, success=None, serviceExp=None, userExp=None,):
        self.success = success
        self.serviceExp = serviceExp
        self.userExp = userExp

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.serviceExp = pangramia.yt.exceptions.ttypes.PBServiceException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.userExp = pangramia.yt.exceptions.ttypes.PBUserException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('reportError_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.serviceExp is not None:
            oprot.writeFieldBegin('serviceExp', TType.STRUCT, 1)
            self.serviceExp.write(oprot)
            oprot.writeFieldEnd()
        if self.userExp is not None:
            oprot.writeFieldBegin('userExp', TType.STRUCT, 2)
            self.userExp.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(reportError_result)
reportError_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'serviceExp', [pangramia.yt.exceptions.ttypes.PBServiceException, None], None, ),  # 1
    (2, TType.STRUCT, 'userExp', [pangramia.yt.exceptions.ttypes.PBUserException, None], None, ),  # 2
)


class shutdown_args(object):


    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('shutdown_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(shutdown_args)
shutdown_args.thrift_spec = (
)
fix_spec(all_structs)
del all_structs
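For context, a minimal sketch of wiring this generated Processor into the standard Thrift Python runtime; `PingHandler`, the host, and the port are assumptions for illustration, not part of this repository:

# Sketch: serving BaseService with the Thrift Python runtime (assumptions noted above).
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from pangramia.base_service import BaseService

class PingHandler(object):
    # Hypothetical implementation of the Iface methods above.
    def ping(self):
        return True

    def reportError(self, message, details):
        return True

    def shutdown(self):
        pass

processor = BaseService.Processor(PingHandler())
server = TServer.TSimpleServer(
    processor,
    TSocket.TServerSocket(host='0.0.0.0', port=9090),
    TTransport.TBufferedTransportFactory(),
    TBinaryProtocol.TBinaryProtocolFactory())
server.serve()  # blocks until interrupted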
@@ -1 +0,0 @@
__all__ = ['ttypes', 'constants', 'BaseService']
@@ -1,14 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@@ -1,20 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
import pangramia.yt.common.ttypes
import pangramia.yt.exceptions.ttypes

from thrift.transport import TTransport
all_structs = []
fix_spec(all_structs)
del all_structs
@ -1,236 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Autogenerated by Thrift Compiler (0.20.0)
|
||||
#
|
||||
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
#
|
||||
# options string: py
|
||||
#
|
||||
|
||||
import sys
|
||||
import pprint
|
||||
if sys.version_info[0] > 2:
|
||||
from urllib.parse import urlparse
|
||||
else:
|
||||
from urlparse import urlparse
|
||||
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
|
||||
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
|
||||
|
||||
from pangramia.yt.admin_ops import YTAccountsOpService
|
||||
from pangramia.yt.admin_ops.ttypes import *
|
||||
|
||||
if len(sys.argv) <= 1 or sys.argv[1] == '--help':
|
||||
print('')
|
||||
print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
|
||||
print('')
|
||||
print('Functions:')
|
||||
print(' bool addAccountPair(string accountId, string proxyId, string machineId, ProxyData proxyData, AccountData accountData)')
|
||||
print(' AccountPairWithState getPair(string machineId)')
|
||||
print(' bool pair(string accountId, string proxyId, string machineId)')
|
||||
print(' bool unpair(string accountId, string proxyId, string machineId)')
|
||||
print(' listAccountPairs(AccountPairState filter)')
|
||||
print(' bool addAccount(string accountId, AccountData accountData)')
|
||||
print(' bool suspendAccount(string accountId)')
|
||||
print(' bool resumeAccount(string accountId)')
|
||||
print(' bool removeAccount(string accountId)')
|
||||
print(' listActiveAccounts()')
|
||||
print(' bool addProxy(string proxyId, ProxyData proxyData)')
|
||||
print(' bool suspendProxy(string proxyId)')
|
||||
print(' bool resumeProxy(string proxyId)')
|
||||
print(' bool removeProxy(string proxyId)')
|
||||
print(' listActiveProxies()')
|
||||
print(' bool ping()')
|
||||
print(' bool reportError(string message, details)')
|
||||
print(' void shutdown()')
|
||||
print('')
|
||||
sys.exit(0)
|
||||
|
||||
pp = pprint.PrettyPrinter(indent=2)
|
||||
host = 'localhost'
|
||||
port = 9090
|
||||
uri = ''
|
||||
framed = False
|
||||
ssl = False
|
||||
validate = True
|
||||
ca_certs = None
|
||||
keyfile = None
|
||||
certfile = None
|
||||
http = False
|
||||
argi = 1
|
||||
|
||||
if sys.argv[argi] == '-h':
|
||||
parts = sys.argv[argi + 1].split(':')
|
||||
host = parts[0]
|
||||
if len(parts) > 1:
|
||||
port = int(parts[1])
|
||||
argi += 2
|
||||
|
||||
if sys.argv[argi] == '-u':
|
||||
url = urlparse(sys.argv[argi + 1])
|
||||
parts = url[1].split(':')
|
||||
host = parts[0]
|
||||
if len(parts) > 1:
|
||||
port = int(parts[1])
|
||||
else:
|
||||
port = 80
|
||||
uri = url[2]
|
||||
if url[4]:
|
||||
uri += '?%s' % url[4]
|
||||
http = True
|
||||
argi += 2
|
||||
|
||||
if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
|
||||
framed = True
|
||||
argi += 1
|
||||
|
||||
if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
|
||||
ssl = True
|
||||
argi += 1
|
||||
|
if sys.argv[argi] == '-novalidate':
    validate = False
    argi += 1

if sys.argv[argi] == '-ca_certs':
    ca_certs = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-keyfile':
    keyfile = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-certfile':
    certfile = sys.argv[argi+1]
    argi += 2

cmd = sys.argv[argi]
args = sys.argv[argi + 1:]

if http:
    transport = THttpClient.THttpClient(host, port, uri)
else:
    if ssl:
        socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
    else:
        socket = TSocket.TSocket(host, port)
    if framed:
        transport = TTransport.TFramedTransport(socket)
    else:
        transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol(transport)
client = YTAccountsOpService.Client(protocol)
transport.open()

if cmd == 'addAccountPair':
    if len(args) != 5:
        print('addAccountPair requires 5 args')
        sys.exit(1)
    pp.pprint(client.addAccountPair(args[0], args[1], args[2], eval(args[3]), eval(args[4]),))

elif cmd == 'getPair':
    if len(args) != 1:
        print('getPair requires 1 args')
        sys.exit(1)
    pp.pprint(client.getPair(args[0],))

elif cmd == 'pair':
    if len(args) != 3:
        print('pair requires 3 args')
        sys.exit(1)
    pp.pprint(client.pair(args[0], args[1], args[2],))

elif cmd == 'unpair':
    if len(args) != 3:
        print('unpair requires 3 args')
        sys.exit(1)
    pp.pprint(client.unpair(args[0], args[1], args[2],))

elif cmd == 'listAccountPairs':
    if len(args) != 1:
        print('listAccountPairs requires 1 args')
        sys.exit(1)
    pp.pprint(client.listAccountPairs(eval(args[0]),))

elif cmd == 'addAccount':
    if len(args) != 2:
        print('addAccount requires 2 args')
        sys.exit(1)
    pp.pprint(client.addAccount(args[0], eval(args[1]),))

elif cmd == 'suspendAccount':
    if len(args) != 1:
        print('suspendAccount requires 1 args')
        sys.exit(1)
    pp.pprint(client.suspendAccount(args[0],))

elif cmd == 'resumeAccount':
    if len(args) != 1:
        print('resumeAccount requires 1 args')
        sys.exit(1)
    pp.pprint(client.resumeAccount(args[0],))

elif cmd == 'removeAccount':
    if len(args) != 1:
        print('removeAccount requires 1 args')
        sys.exit(1)
    pp.pprint(client.removeAccount(args[0],))

elif cmd == 'listActiveAccounts':
    if len(args) != 0:
        print('listActiveAccounts requires 0 args')
        sys.exit(1)
    pp.pprint(client.listActiveAccounts())

elif cmd == 'addProxy':
    if len(args) != 2:
        print('addProxy requires 2 args')
        sys.exit(1)
    pp.pprint(client.addProxy(args[0], eval(args[1]),))

elif cmd == 'suspendProxy':
    if len(args) != 1:
        print('suspendProxy requires 1 args')
        sys.exit(1)
    pp.pprint(client.suspendProxy(args[0],))

elif cmd == 'resumeProxy':
    if len(args) != 1:
        print('resumeProxy requires 1 args')
        sys.exit(1)
    pp.pprint(client.resumeProxy(args[0],))

elif cmd == 'removeProxy':
    if len(args) != 1:
        print('removeProxy requires 1 args')
        sys.exit(1)
    pp.pprint(client.removeProxy(args[0],))

elif cmd == 'listActiveProxies':
    if len(args) != 0:
        print('listActiveProxies requires 0 args')
        sys.exit(1)
    pp.pprint(client.listActiveProxies())

elif cmd == 'ping':
    if len(args) != 0:
        print('ping requires 0 args')
        sys.exit(1)
    pp.pprint(client.ping())

elif cmd == 'reportError':
    if len(args) != 2:
        print('reportError requires 2 args')
        sys.exit(1)
    pp.pprint(client.reportError(args[0], eval(args[1]),))

elif cmd == 'shutdown':
    if len(args) != 0:
        print('shutdown requires 0 args')
        sys.exit(1)
    pp.pprint(client.shutdown())

else:
    print('Unrecognized method %s' % cmd)
    sys.exit(1)

transport.close()
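For reference, a minimal sketch of driving the same YTAccountsOpService from Python directly instead of through the generated helper above. It reuses the script's own defaults (localhost:9090, buffered binary transport); the gen_py package path is an assumption based on the thrift namespace declarations later in this diff.

# Minimal sketch (assumptions: server reachable on the script's default
# localhost:9090; generated package importable as pangramia.yt.admin_ops).
from thrift.transport import TSocket, TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

from pangramia.yt.admin_ops import YTAccountsOpService

transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
client = YTAccountsOpService.Client(TBinaryProtocol(transport))
transport.open()
try:
    print(client.ping())                 # BaseService health check
    print(client.listActiveAccounts())   # -> list of active account ids
finally:
    transport.close()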
File diff suppressed because it is too large
@ -1 +0,0 @@
__all__ = ['ttypes', 'constants', 'YTAccountsOpService']
@ -1,14 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -1,21 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
import pangramia.yt.common.ttypes
import pangramia.yt.exceptions.ttypes
import pangramia.base_service.ttypes

from thrift.transport import TTransport
all_structs = []
fix_spec(all_structs)
del all_structs
@ -1 +0,0 @@
__all__ = ['ttypes', 'constants']
@ -1,14 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -1,905 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys

from thrift.transport import TTransport
all_structs = []


class JobState(object):
    SUCCESS = 0
    FAIL = 1
    BOT_FORBIDDEN_ON_URL_ACCESS = 2
    BOT_FORBIDDEN_ON_FILE_DOWNLOAD = 3
    BOT_CAPTCHA = 4
    BOT_AUTH_RELOGIN_REQUIRED = 5
    BOT_AUTH_SMS_REQUIRED = 6
    BOT_AUTH_DEVICE_QR_REQUIRED = 7
    BOT_ACCOUNT_BANNED = 8
    BOT_IP_BANNED = 9

    _VALUES_TO_NAMES = {
        0: "SUCCESS",
        1: "FAIL",
        2: "BOT_FORBIDDEN_ON_URL_ACCESS",
        3: "BOT_FORBIDDEN_ON_FILE_DOWNLOAD",
        4: "BOT_CAPTCHA",
        5: "BOT_AUTH_RELOGIN_REQUIRED",
        6: "BOT_AUTH_SMS_REQUIRED",
        7: "BOT_AUTH_DEVICE_QR_REQUIRED",
        8: "BOT_ACCOUNT_BANNED",
        9: "BOT_IP_BANNED",
    }

    _NAMES_TO_VALUES = {
        "SUCCESS": 0,
        "FAIL": 1,
        "BOT_FORBIDDEN_ON_URL_ACCESS": 2,
        "BOT_FORBIDDEN_ON_FILE_DOWNLOAD": 3,
        "BOT_CAPTCHA": 4,
        "BOT_AUTH_RELOGIN_REQUIRED": 5,
        "BOT_AUTH_SMS_REQUIRED": 6,
        "BOT_AUTH_DEVICE_QR_REQUIRED": 7,
        "BOT_ACCOUNT_BANNED": 8,
        "BOT_IP_BANNED": 9,
    }


class TokenUpdateMode(object):
    AUTOREFRESH_AND_REMAIN_ANONYMOUS = 0
    AUTOREFRESH_AND_ALLOW_AUTH = 1
    AUTOREFRESH_AND_ONLY_AUTH = 2
    CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH = 3
    CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS = 4
    CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH = 5
    AUTO = 6

    _VALUES_TO_NAMES = {
        0: "AUTOREFRESH_AND_REMAIN_ANONYMOUS",
        1: "AUTOREFRESH_AND_ALLOW_AUTH",
        2: "AUTOREFRESH_AND_ONLY_AUTH",
        3: "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH",
        4: "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS",
        5: "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH",
        6: "AUTO",
    }

    _NAMES_TO_VALUES = {
        "AUTOREFRESH_AND_REMAIN_ANONYMOUS": 0,
        "AUTOREFRESH_AND_ALLOW_AUTH": 1,
        "AUTOREFRESH_AND_ONLY_AUTH": 2,
        "CLEANUP_THEN_AUTOREFRESH_AND_ONLY_AUTH": 3,
        "CLEANUP_THEN_AUTOREFRESH_AND_REMAIN_ANONYMOUS": 4,
        "CLEANUP_THEN_AUTOREFRESH_AND_ALLOW_AUTH": 5,
        "AUTO": 6,
    }


class AccountPairState(object):
    ACTIVE = 0
    PAUSED = 1
    REMOVED = 2
    IN_PROGRESS = 3
    ALL = 4

    _VALUES_TO_NAMES = {
        0: "ACTIVE",
        1: "PAUSED",
        2: "REMOVED",
        3: "IN_PROGRESS",
        4: "ALL",
    }

    _NAMES_TO_VALUES = {
        "ACTIVE": 0,
        "PAUSED": 1,
        "REMOVED": 2,
        "IN_PROGRESS": 3,
        "ALL": 4,
    }


class JobTokenData(object):
    """
    Attributes:
     - infoJson
     - ytdlpCommand
     - socks
     - jobId
     - url
     - cookiesBlob

    """


    def __init__(self, infoJson=None, ytdlpCommand=None, socks=None, jobId=None, url=None, cookiesBlob=None,):
        self.infoJson = infoJson
        self.ytdlpCommand = ytdlpCommand
        self.socks = socks
        self.jobId = jobId
        self.url = url
        self.cookiesBlob = cookiesBlob

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.infoJson = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.socks = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('JobTokenData')
        if self.infoJson is not None:
            oprot.writeFieldBegin('infoJson', TType.STRING, 1)
            oprot.writeString(self.infoJson.encode('utf-8') if sys.version_info[0] == 2 else self.infoJson)
            oprot.writeFieldEnd()
        if self.ytdlpCommand is not None:
            oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 2)
            oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand)
            oprot.writeFieldEnd()
        if self.socks is not None:
            oprot.writeFieldBegin('socks', TType.STRING, 3)
            oprot.writeString(self.socks.encode('utf-8') if sys.version_info[0] == 2 else self.socks)
            oprot.writeFieldEnd()
        if self.jobId is not None:
            oprot.writeFieldBegin('jobId', TType.STRING, 4)
            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
            oprot.writeFieldEnd()
        if self.url is not None:
            oprot.writeFieldBegin('url', TType.STRING, 5)
            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
            oprot.writeFieldEnd()
        if self.cookiesBlob is not None:
            oprot.writeFieldBegin('cookiesBlob', TType.STRING, 6)
            oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class AccountData(object):
    """
    Attributes:
     - username
     - password
     - countryCode

    """


    def __init__(self, username=None, password=None, countryCode=None,):
        self.username = username
        self.password = password
        self.countryCode = countryCode

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AccountData')
        if self.username is not None:
            oprot.writeFieldBegin('username', TType.STRING, 1)
            oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 2)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        if self.countryCode is not None:
            oprot.writeFieldBegin('countryCode', TType.STRING, 3)
            oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.username is None:
            raise TProtocolException(message='Required field username is unset!')
        if self.password is None:
            raise TProtocolException(message='Required field password is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class ProxyData(object):
    """
    Attributes:
     - proxyUrl
     - countryCode

    """


    def __init__(self, proxyUrl=None, countryCode=None,):
        self.proxyUrl = proxyUrl
        self.countryCode = countryCode

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.proxyUrl = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.countryCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ProxyData')
        if self.proxyUrl is not None:
            oprot.writeFieldBegin('proxyUrl', TType.STRING, 1)
            oprot.writeString(self.proxyUrl.encode('utf-8') if sys.version_info[0] == 2 else self.proxyUrl)
            oprot.writeFieldEnd()
        if self.countryCode is not None:
            oprot.writeFieldBegin('countryCode', TType.STRING, 2)
            oprot.writeString(self.countryCode.encode('utf-8') if sys.version_info[0] == 2 else self.countryCode)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.proxyUrl is None:
            raise TProtocolException(message='Required field proxyUrl is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class AccountPairWithState(object):
    """
    Attributes:
     - accountId
     - proxyId
     - accountPairState
     - machineId

    """


    def __init__(self, accountId=None, proxyId=None, accountPairState=None, machineId=None,):
        self.accountId = accountId
        self.proxyId = proxyId
        self.accountPairState = accountPairState
        self.machineId = machineId

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.accountId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.proxyId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.accountPairState = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.machineId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AccountPairWithState')
        if self.accountId is not None:
            oprot.writeFieldBegin('accountId', TType.STRING, 1)
            oprot.writeString(self.accountId.encode('utf-8') if sys.version_info[0] == 2 else self.accountId)
            oprot.writeFieldEnd()
        if self.proxyId is not None:
            oprot.writeFieldBegin('proxyId', TType.STRING, 2)
            oprot.writeString(self.proxyId.encode('utf-8') if sys.version_info[0] == 2 else self.proxyId)
            oprot.writeFieldEnd()
        if self.accountPairState is not None:
            oprot.writeFieldBegin('accountPairState', TType.I32, 3)
            oprot.writeI32(self.accountPairState)
            oprot.writeFieldEnd()
        if self.machineId is not None:
            oprot.writeFieldBegin('machineId', TType.STRING, 4)
            oprot.writeString(self.machineId.encode('utf-8') if sys.version_info[0] == 2 else self.machineId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.accountId is None:
            raise TProtocolException(message='Required field accountId is unset!')
        if self.proxyId is None:
            raise TProtocolException(message='Required field proxyId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class JobData(object):
    """
    Attributes:
     - jobId
     - url
     - cookiesBlob
     - potoken
     - visitorId
     - ytdlpCommand
     - createdTime
     - telemetry
     - state
     - errorMessage
     - socks5Id

    """


    def __init__(self, jobId=None, url=None, cookiesBlob=None, potoken=None, visitorId=None, ytdlpCommand=None, createdTime=None, telemetry=None, state=None, errorMessage=None, socks5Id=None,):
        self.jobId = jobId
        self.url = url
        self.cookiesBlob = cookiesBlob
        self.potoken = potoken
        self.visitorId = visitorId
        self.ytdlpCommand = ytdlpCommand
        self.createdTime = createdTime
        self.telemetry = telemetry
        self.state = state
        self.errorMessage = errorMessage
        self.socks5Id = socks5Id

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.jobId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.url = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.cookiesBlob = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.potoken = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.visitorId = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.ytdlpCommand = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.createdTime = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.MAP:
                    self.telemetry = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.telemetry[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.STRING:
                    self.errorMessage = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.STRING:
                    self.socks5Id = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('JobData')
        if self.jobId is not None:
            oprot.writeFieldBegin('jobId', TType.STRING, 1)
            oprot.writeString(self.jobId.encode('utf-8') if sys.version_info[0] == 2 else self.jobId)
            oprot.writeFieldEnd()
        if self.url is not None:
            oprot.writeFieldBegin('url', TType.STRING, 2)
            oprot.writeString(self.url.encode('utf-8') if sys.version_info[0] == 2 else self.url)
            oprot.writeFieldEnd()
        if self.cookiesBlob is not None:
            oprot.writeFieldBegin('cookiesBlob', TType.STRING, 3)
            oprot.writeString(self.cookiesBlob.encode('utf-8') if sys.version_info[0] == 2 else self.cookiesBlob)
            oprot.writeFieldEnd()
        if self.potoken is not None:
            oprot.writeFieldBegin('potoken', TType.STRING, 4)
            oprot.writeString(self.potoken.encode('utf-8') if sys.version_info[0] == 2 else self.potoken)
            oprot.writeFieldEnd()
        if self.visitorId is not None:
            oprot.writeFieldBegin('visitorId', TType.STRING, 5)
            oprot.writeString(self.visitorId.encode('utf-8') if sys.version_info[0] == 2 else self.visitorId)
            oprot.writeFieldEnd()
        if self.ytdlpCommand is not None:
            oprot.writeFieldBegin('ytdlpCommand', TType.STRING, 6)
            oprot.writeString(self.ytdlpCommand.encode('utf-8') if sys.version_info[0] == 2 else self.ytdlpCommand)
            oprot.writeFieldEnd()
        if self.createdTime is not None:
            oprot.writeFieldBegin('createdTime', TType.STRING, 7)
            oprot.writeString(self.createdTime.encode('utf-8') if sys.version_info[0] == 2 else self.createdTime)
            oprot.writeFieldEnd()
        if self.telemetry is not None:
            oprot.writeFieldBegin('telemetry', TType.MAP, 8)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.telemetry))
            for kiter7, viter8 in self.telemetry.items():
                oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
                oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 9)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.errorMessage is not None:
            oprot.writeFieldBegin('errorMessage', TType.STRING, 10)
            oprot.writeString(self.errorMessage.encode('utf-8') if sys.version_info[0] == 2 else self.errorMessage)
            oprot.writeFieldEnd()
        if self.socks5Id is not None:
            oprot.writeFieldBegin('socks5Id', TType.STRING, 11)
            oprot.writeString(self.socks5Id.encode('utf-8') if sys.version_info[0] == 2 else self.socks5Id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.jobId is None:
            raise TProtocolException(message='Required field jobId is unset!')
        if self.url is None:
            raise TProtocolException(message='Required field url is unset!')
        if self.cookiesBlob is None:
            raise TProtocolException(message='Required field cookiesBlob is unset!')
        if self.potoken is None:
            raise TProtocolException(message='Required field potoken is unset!')
        if self.visitorId is None:
            raise TProtocolException(message='Required field visitorId is unset!')
        if self.ytdlpCommand is None:
            raise TProtocolException(message='Required field ytdlpCommand is unset!')
        if self.createdTime is None:
            raise TProtocolException(message='Required field createdTime is unset!')
        if self.telemetry is None:
            raise TProtocolException(message='Required field telemetry is unset!')
        if self.state is None:
            raise TProtocolException(message='Required field state is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class RichCollectionPagination(object):
    """
    Attributes:
     - hasNext
     - totalCount
     - page
     - pageSize

    """


    def __init__(self, hasNext=None, totalCount=None, page=None, pageSize=None,):
        self.hasNext = hasNext
        self.totalCount = totalCount
        self.page = page
        self.pageSize = pageSize

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BOOL:
                    self.hasNext = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.totalCount = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.page = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.pageSize = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RichCollectionPagination')
        if self.hasNext is not None:
            oprot.writeFieldBegin('hasNext', TType.BOOL, 1)
            oprot.writeBool(self.hasNext)
            oprot.writeFieldEnd()
        if self.totalCount is not None:
            oprot.writeFieldBegin('totalCount', TType.I32, 2)
            oprot.writeI32(self.totalCount)
            oprot.writeFieldEnd()
        if self.page is not None:
            oprot.writeFieldBegin('page', TType.I32, 3)
            oprot.writeI32(self.page)
            oprot.writeFieldEnd()
        if self.pageSize is not None:
            oprot.writeFieldBegin('pageSize', TType.I32, 4)
            oprot.writeI32(self.pageSize)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.hasNext is None:
            raise TProtocolException(message='Required field hasNext is unset!')
        if self.totalCount is None:
            raise TProtocolException(message='Required field totalCount is unset!')
        if self.page is None:
            raise TProtocolException(message='Required field page is unset!')
        if self.pageSize is None:
            raise TProtocolException(message='Required field pageSize is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class RichCollectionJobData(object):
    """
    Attributes:
     - items
     - pagination

    """


    def __init__(self, items=None, pagination=None,):
        self.items = items
        self.pagination = pagination

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.items = []
                    (_etype12, _size9) = iprot.readListBegin()
                    for _i13 in range(_size9):
                        _elem14 = JobData()
                        _elem14.read(iprot)
                        self.items.append(_elem14)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.pagination = RichCollectionPagination()
                    self.pagination.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RichCollectionJobData')
        if self.items is not None:
            oprot.writeFieldBegin('items', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.items))
            for iter15 in self.items:
                iter15.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.pagination is not None:
            oprot.writeFieldBegin('pagination', TType.STRUCT, 2)
            self.pagination.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.items is None:
            raise TProtocolException(message='Required field items is unset!')
        if self.pagination is None:
            raise TProtocolException(message='Required field pagination is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(JobTokenData)
JobTokenData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'infoJson', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'ytdlpCommand', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'socks', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'jobId', 'UTF8', None, ),  # 4
    (5, TType.STRING, 'url', 'UTF8', None, ),  # 5
    (6, TType.STRING, 'cookiesBlob', 'UTF8', None, ),  # 6
)
all_structs.append(AccountData)
AccountData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'username', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'password', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'countryCode', 'UTF8', None, ),  # 3
)
all_structs.append(ProxyData)
ProxyData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'proxyUrl', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'countryCode', 'UTF8', None, ),  # 2
)
all_structs.append(AccountPairWithState)
AccountPairWithState.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'accountId', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'proxyId', 'UTF8', None, ),  # 2
    (3, TType.I32, 'accountPairState', None, None, ),  # 3
    (4, TType.STRING, 'machineId', 'UTF8', None, ),  # 4
)
all_structs.append(JobData)
JobData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'jobId', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'url', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'cookiesBlob', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'potoken', 'UTF8', None, ),  # 4
    (5, TType.STRING, 'visitorId', 'UTF8', None, ),  # 5
    (6, TType.STRING, 'ytdlpCommand', 'UTF8', None, ),  # 6
    (7, TType.STRING, 'createdTime', 'UTF8', None, ),  # 7
    (8, TType.MAP, 'telemetry', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 8
    (9, TType.I32, 'state', None, None, ),  # 9
    (10, TType.STRING, 'errorMessage', 'UTF8', None, ),  # 10
    (11, TType.STRING, 'socks5Id', 'UTF8', None, ),  # 11
)
all_structs.append(RichCollectionPagination)
RichCollectionPagination.thrift_spec = (
    None,  # 0
    (1, TType.BOOL, 'hasNext', None, None, ),  # 1
    (2, TType.I32, 'totalCount', None, None, ),  # 2
    (3, TType.I32, 'page', None, None, ),  # 3
    (4, TType.I32, 'pageSize', None, None, ),  # 4
)
all_structs.append(RichCollectionJobData)
RichCollectionJobData.thrift_spec = (
    None,  # 0
    (1, TType.LIST, 'items', (TType.STRUCT, [JobData, None], False), None, ),  # 1
    (2, TType.STRUCT, 'pagination', [RichCollectionPagination, None], None, ),  # 2
)
fix_spec(all_structs)
del all_structs
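Since every struct above carries a generated thrift_spec, a quick way to sanity-check the wire format is an in-memory round trip. A minimal sketch, using only types defined in this file plus standard Thrift transports; the field values are illustrative.

# Round-trip sketch for the generated structs (in-memory, no server needed).
from thrift.transport import TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

job = JobData(jobId='job-1', url='https://example.com/v/1', cookiesBlob='',
              potoken='', visitorId='visitor-1', ytdlpCommand='yt-dlp ...',
              createdTime='2025-01-01T00:00:00Z', telemetry={},
              state=JobState.SUCCESS)
job.validate()  # raises TProtocolException if a required field is unset

wbuf = TTransport.TMemoryBuffer()
job.write(TBinaryProtocol(wbuf))

decoded = JobData()
decoded.read(TBinaryProtocol(TTransport.TMemoryBuffer(wbuf.getvalue())))
assert decoded == job  # generated __eq__ compares instance __dict__s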
@ -1 +0,0 @@
__all__ = ['ttypes', 'constants']
@ -1,14 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -1,254 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys

from thrift.transport import TTransport
all_structs = []


class PBServiceException(TException):
    """
    Attributes:
     - message
     - errorCode
     - context

    """


    def __init__(self, message=None, errorCode=None, context=None,):
        super(PBServiceException, self).__setattr__('message', message)
        super(PBServiceException, self).__setattr__('errorCode', errorCode)
        super(PBServiceException, self).__setattr__('context', context)

    def __setattr__(self, *args):
        raise TypeError("can't modify immutable instance")

    def __delattr__(self, *args):
        raise TypeError("can't modify immutable instance")

    def __hash__(self):
        return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, ))

    @classmethod
    def read(cls, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None:
            return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec])
        iprot.readStructBegin()
        message = None
        errorCode = None
        context = None
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    context = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val6 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        context[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        return cls(
            message=message,
            errorCode=errorCode,
            context=context,
        )

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('PBServiceException')
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
            oprot.writeFieldEnd()
        if self.errorCode is not None:
            oprot.writeFieldBegin('errorCode', TType.STRING, 2)
            oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode)
            oprot.writeFieldEnd()
        if self.context is not None:
            oprot.writeFieldBegin('context', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context))
            for kiter7, viter8 in self.context.items():
                oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
                oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.message is None:
            raise TProtocolException(message='Required field message is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class PBUserException(TException):
    """
    Attributes:
     - message
     - errorCode
     - context

    """


    def __init__(self, message=None, errorCode=None, context=None,):
        super(PBUserException, self).__setattr__('message', message)
        super(PBUserException, self).__setattr__('errorCode', errorCode)
        super(PBUserException, self).__setattr__('context', context)

    def __setattr__(self, *args):
        raise TypeError("can't modify immutable instance")

    def __delattr__(self, *args):
        raise TypeError("can't modify immutable instance")

    def __hash__(self):
        return hash(self.__class__) ^ hash((self.message, self.errorCode, self.context, ))

    @classmethod
    def read(cls, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and cls.thrift_spec is not None:
            return iprot._fast_decode(None, iprot, [cls, cls.thrift_spec])
        iprot.readStructBegin()
        message = None
        errorCode = None
        context = None
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    errorCode = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    context = {}
                    (_ktype10, _vtype11, _size9) = iprot.readMapBegin()
                    for _i13 in range(_size9):
                        _key14 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val15 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        context[_key14] = _val15
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        return cls(
            message=message,
            errorCode=errorCode,
            context=context,
        )

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('PBUserException')
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
            oprot.writeFieldEnd()
        if self.errorCode is not None:
            oprot.writeFieldBegin('errorCode', TType.STRING, 2)
            oprot.writeString(self.errorCode.encode('utf-8') if sys.version_info[0] == 2 else self.errorCode)
            oprot.writeFieldEnd()
        if self.context is not None:
            oprot.writeFieldBegin('context', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.context))
            for kiter16, viter17 in self.context.items():
                oprot.writeString(kiter16.encode('utf-8') if sys.version_info[0] == 2 else kiter16)
                oprot.writeString(viter17.encode('utf-8') if sys.version_info[0] == 2 else viter17)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.message is None:
            raise TProtocolException(message='Required field message is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(PBServiceException)
PBServiceException.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'message', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'errorCode', 'UTF8', None, ),  # 2
    (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
all_structs.append(PBUserException)
PBUserException.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'message', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'errorCode', 'UTF8', None, ),  # 2
    (3, TType.MAP, 'context', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
fix_spec(all_structs)
del all_structs
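Both exception classes are immutable TException subclasses, so callers typically just catch and log them. A hedged sketch, assuming a `client` stub like the ones shown elsewhere in this diff; the method and field names come from the IDL and struct definitions above.

# Sketch: handling the two exception types declared in the service throws
# clauses (see the .thrift files later in this diff). `client` is assumed.
try:
    client.resumeAccount('acct-01')
except PBUserException as exc:
    print('rejected by service: %s (code=%s)' % (exc.message, exc.errorCode))
except PBServiceException as exc:
    print('service failure: %s, context=%r' % (exc.message, exc.context))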
@ -1,166 +0,0 @@
#!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

import sys
import pprint
if sys.version_info[0] > 2:
    from urllib.parse import urlparse
else:
    from urlparse import urlparse
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.tokens_ops.ttypes import *

if len(sys.argv) <= 1 or sys.argv[1] == '--help':
    print('')
    print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
    print('')
    print('Functions:')
    print('  JobTokenData getOrRefreshTokenWithReport(string accountId, string oldUrl, JobState status, string details, string jobId, TokenUpdateMode updateType, string url)')
    print('  JobTokenData getOrRefreshToken(string accountId, TokenUpdateMode updateType, string url)')
    print('  JobTokenData getLatestToken(string accountId)')
    print('  JobTokenData refreshToken(string accountId, TokenUpdateMode updateType, string url)')
    print('  bool reportState(string url, JobState status, string details, string jobId)')
    print('  bool ping()')
    print('  bool reportError(string message, details)')
    print('  void shutdown()')
    print('')
    sys.exit(0)

pp = pprint.PrettyPrinter(indent=2)
host = 'localhost'
port = 9090
uri = ''
framed = False
ssl = False
validate = True
ca_certs = None
keyfile = None
certfile = None
http = False
argi = 1

if sys.argv[argi] == '-h':
    parts = sys.argv[argi + 1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    argi += 2

if sys.argv[argi] == '-u':
    url = urlparse(sys.argv[argi + 1])
    parts = url[1].split(':')
    host = parts[0]
    if len(parts) > 1:
        port = int(parts[1])
    else:
        port = 80
    uri = url[2]
    if url[4]:
        uri += '?%s' % url[4]
    http = True
    argi += 2

if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
    framed = True
    argi += 1

if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
    ssl = True
    argi += 1

if sys.argv[argi] == '-novalidate':
    validate = False
    argi += 1

if sys.argv[argi] == '-ca_certs':
    ca_certs = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-keyfile':
    keyfile = sys.argv[argi+1]
    argi += 2

if sys.argv[argi] == '-certfile':
    certfile = sys.argv[argi+1]
    argi += 2

cmd = sys.argv[argi]
args = sys.argv[argi + 1:]

if http:
    transport = THttpClient.THttpClient(host, port, uri)
else:
    if ssl:
        socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
    else:
        socket = TSocket.TSocket(host, port)
    if framed:
        transport = TTransport.TFramedTransport(socket)
    else:
        transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol(transport)
client = YTTokenOpService.Client(protocol)
transport.open()

if cmd == 'getOrRefreshTokenWithReport':
    if len(args) != 7:
        print('getOrRefreshTokenWithReport requires 7 args')
        sys.exit(1)
    pp.pprint(client.getOrRefreshTokenWithReport(args[0], args[1], eval(args[2]), args[3], args[4], eval(args[5]), args[6],))

elif cmd == 'getOrRefreshToken':
    if len(args) != 3:
        print('getOrRefreshToken requires 3 args')
        sys.exit(1)
    pp.pprint(client.getOrRefreshToken(args[0], eval(args[1]), args[2],))

elif cmd == 'getLatestToken':
    if len(args) != 1:
        print('getLatestToken requires 1 args')
        sys.exit(1)
    pp.pprint(client.getLatestToken(args[0],))

elif cmd == 'refreshToken':
    if len(args) != 3:
        print('refreshToken requires 3 args')
        sys.exit(1)
    pp.pprint(client.refreshToken(args[0], eval(args[1]), args[2],))

elif cmd == 'reportState':
    if len(args) != 4:
        print('reportState requires 4 args')
        sys.exit(1)
    pp.pprint(client.reportState(args[0], eval(args[1]), args[2], args[3],))

elif cmd == 'ping':
    if len(args) != 0:
        print('ping requires 0 args')
        sys.exit(1)
    pp.pprint(client.ping())

elif cmd == 'reportError':
    if len(args) != 2:
        print('reportError requires 2 args')
        sys.exit(1)
    pp.pprint(client.reportError(args[0], eval(args[1]),))

elif cmd == 'shutdown':
    if len(args) != 0:
        print('shutdown requires 0 args')
        sys.exit(1)
    pp.pprint(client.shutdown())

else:
    print('Unrecognized method %s' % cmd)
    sys.exit(1)

transport.close()
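A programmatic equivalent of the script's getOrRefreshToken call, as a minimal sketch. Assumptions: the server uses the script defaults (localhost:9090), and the TokenUpdateMode enum is importable from the common ttypes module shown earlier in this diff; the account id and URL are illustrative.

# Sketch: calling getOrRefreshToken directly instead of via the helper above.
from thrift.transport import TSocket, TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.common.ttypes import TokenUpdateMode  # assumed module path

transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
client = YTTokenOpService.Client(TBinaryProtocol(transport))
transport.open()
try:
    token = client.getOrRefreshToken('acct-01', TokenUpdateMode.AUTO,
                                     'https://example.com/v/1')
    print(token.jobId, token.socks, token.ytdlpCommand)
finally:
    transport.close()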
File diff suppressed because it is too large
@ -1 +0,0 @@
__all__ = ['ttypes', 'constants', 'YTTokenOpService']
@ -1,14 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
from .ttypes import *
@ -1,21 +0,0 @@
#
# Autogenerated by Thrift Compiler (0.20.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec

import sys
import pangramia.yt.common.ttypes
import pangramia.yt.exceptions.ttypes
import pangramia.base_service.ttypes

from thrift.transport import TTransport
all_structs = []
fix_spec(all_structs)
del all_structs
@ -1,94 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.pangramia.yt</groupId>
    <artifactId>thrift-services</artifactId>
    <version>1.6.2-SNAPSHOT</version>

    <properties>
        <thrift.version>0.16.0</thrift.version>
        <java.version>11</java.version>
    </properties>

    <dependencies>
        <!-- Thrift library -->
        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
            <version>${thrift.version}</version>
        </dependency>

        <!-- SLF4J for logging -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.36</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Thrift compiler plugin -->
            <plugin>
                <groupId>org.apache.thrift.tools</groupId>
                <artifactId>maven-thrift-plugin</artifactId>
                <version>0.1.11</version>

                <configuration>
                    <thriftExecutable>/usr/local/bin/thrift</thriftExecutable>
                    <thriftSourceRoot>${project.basedir}</thriftSourceRoot>
                    <generator>java</generator>
                    <outputDirectory>${project.build.directory}/generated-sources/thrift</outputDirectory>
                </configuration>
                <executions>
                    <execution>
                        <id>thrift-sources-java</id>
                        <phase>generate-sources</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>thrift-sources-py</id>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                        <configuration>
                            <generator>py</generator>
                            <outputDirectory>${project.basedir}/gen_py</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <!-- Java compiler plugin -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>${java.version}</source>
                    <target>${java.version}</target>
                </configuration>
            </plugin>

            <!-- Clean plugin to remove gen_py directory -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-clean-plugin</artifactId>
                <version>3.2.0</version>
                <configuration>
                    <filesets>
                        <fileset>
                            <directory>${project.basedir}/gen_py</directory>
                        </fileset>
                    </filesets>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
@ -1,19 +0,0 @@
namespace py pangramia.base_service
namespace java com.pangramia.base_service

include "../data/common.thrift"
include "../data/exceptions.thrift"

service BaseService {
    // Common health check method
    bool ping() throws (1: exceptions.PBServiceException serviceExp,
                        2: exceptions.PBUserException userExp),

    // Common error reporting
    bool reportError(1: string message,
                     2: map<string, string> details) throws (1: exceptions.PBServiceException serviceExp,
                                                             2: exceptions.PBUserException userExp)

    // Add this to fix AsyncProcessor issues
    oneway void shutdown()
}
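For orientation, this is how a client would typically exercise these RPCs through the Thrift-generated Python bindings. A minimal sketch, not code from this repository: the module path pangramia.base_service.BaseService follows the py generator's conventions for the namespace above, and the host/port are placeholders.

from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from pangramia.base_service import BaseService  # generated module; path assumed from the py namespace

# Endpoint is illustrative; use the actual service address.
transport = TTransport.TBufferedTransport(TSocket.TSocket("localhost", 9090))
client = BaseService.Client(TBinaryProtocol.TBinaryProtocol(transport))

transport.open()
try:
    # ping() returns a bool; failures surface as PBServiceException / PBUserException.
    print(client.ping())
finally:
    transport.close()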
@ -1,63 +0,0 @@
namespace py pangramia.yt.admin_ops
namespace java com.pangramia.yt.admin_ops

include "../data/common.thrift"
include "../data/exceptions.thrift"
include "base_service.thrift"

// Proxy and Account management
service YTAccountsOpService extends base_service.BaseService {

    // AccountPairs
    bool addAccountPair(1: string accountId, 2: string proxyId, 3: string machineId, 4: common.ProxyData proxyData, 5: optional common.AccountData accountData)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    common.AccountPairWithState getPair(1: string machineId)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    bool pair(1: string accountId, 2: string proxyId, 3: string machineId)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    bool unpair(1: string accountId, 2: string proxyId, 3: string machineId)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    list<common.AccountPairWithState> listAccountPairs(1: optional common.AccountPairState filter)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    // ManageAccounts
    bool addAccount(1: string accountId, 2: optional common.AccountData accountData)
        throws (1: exceptions.PBServiceException serviceExp,
                2: exceptions.PBUserException userExp),

    bool suspendAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                     2: exceptions.PBUserException userExp),

    bool resumeAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                    2: exceptions.PBUserException userExp),

    bool removeAccount(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                    2: exceptions.PBUserException userExp),

    list<string> listActiveAccounts() throws (1: exceptions.PBServiceException serviceExp,
                                              2: exceptions.PBUserException userExp),

    // ManageProxy
    bool addProxy(1: string proxyId, 2: common.ProxyData proxyData) throws (1: exceptions.PBServiceException serviceExp,
                                                                            2: exceptions.PBUserException userExp),

    bool suspendProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp,
                                                 2: exceptions.PBUserException userExp),

    bool resumeProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp,
                                                2: exceptions.PBUserException userExp),

    bool removeProxy(1: string proxyId) throws (1: exceptions.PBServiceException serviceExp,
                                                2: exceptions.PBUserException userExp),

    list<string> listActiveProxies() throws (1: exceptions.PBServiceException serviceExp,
                                             2: exceptions.PBUserException userExp),
}
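As above, a hedged sketch of how the account/proxy pairing flow might be driven from the generated Python bindings (module path inferred from the py namespace; the IDs, endpoint, and empty ProxyData are illustrative only):

from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from pangramia.yt.admin_ops import YTAccountsOpService  # generated module; path assumed
from pangramia.yt.common.ttypes import ProxyData  # defined in data/common.thrift

transport = TTransport.TBufferedTransport(TSocket.TSocket("localhost", 9090))
client = YTAccountsOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))

transport.open()
try:
    # Register a proxy, bind it to an account on a worker machine, then inspect state.
    client.addProxy("proxy-1", ProxyData())  # real ProxyData fields live in common.thrift
    client.pair("account-1", "proxy-1", "machine-1")
    for p in client.listAccountPairs(None):  # None = no state filter
        print(p)
finally:
    transport.close()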
@ -1,36 +0,0 @@
namespace py pangramia.yt.tokens_ops
namespace java com.pangramia.yt.tokens_ops

include "../data/common.thrift"
include "../data/exceptions.thrift"
include "base_service.thrift"

service YTTokenOpService extends base_service.BaseService {

    common.JobTokenData getOrRefreshTokenWithReport(1: string accountId,
                                                    2: string oldUrl,
                                                    3: common.JobState status,
                                                    4: optional string details,
                                                    5: optional string jobId,
                                                    6: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
                                                    7: optional string url) throws (1: exceptions.PBServiceException serviceExp,
                                                                                    2: exceptions.PBUserException userExp)

    common.JobTokenData getOrRefreshToken(1: string accountId,
                                          2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
                                          3: optional string url) throws (1: exceptions.PBServiceException serviceExp,
                                                                          2: exceptions.PBUserException userExp)

    common.JobTokenData getLatestToken(1: string accountId) throws (1: exceptions.PBServiceException serviceExp,
                                                                    2: exceptions.PBUserException userExp),
    common.JobTokenData refreshToken(1: string accountId,
                                     2: optional common.TokenUpdateMode updateType = common.TokenUpdateMode.AUTO,
                                     3: optional string url) throws (1: exceptions.PBServiceException serviceExp,
                                                                     2: exceptions.PBUserException userExp)
    bool reportState(1: string url,
                     2: common.JobState status,
                     3: optional string details,
                     4: optional string jobId) throws (1: exceptions.PBServiceException serviceExp,
                                                       2: exceptions.PBUserException userExp)

}
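And a matching sketch for the token service (again: module path assumed from the py namespace, account ID and URL illustrative; TokenUpdateMode.AUTO mirrors the default declared in the IDL):

from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from pangramia.yt.tokens_ops import YTTokenOpService  # generated module; path assumed
from pangramia.yt.common.ttypes import TokenUpdateMode

transport = TTransport.TBufferedTransport(TSocket.TSocket("localhost", 9090))
client = YTTokenOpService.Client(TBinaryProtocol.TBinaryProtocol(transport))

transport.open()
try:
    # Returns a common.JobTokenData struct; AUTO is the IDL default shown above.
    token = client.getOrRefreshToken("account-1", TokenUpdateMode.AUTO,
                                     "https://www.youtube.com/watch?v=example")
    print(token)
finally:
    transport.close()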
@ -98,101 +98,6 @@ def get_info_json(token_data):

        logger.info(f"Using infoJson from server response ({len(token_data.infoJson)} bytes)")
        return token_data.infoJson

    # Try multiple possible file paths
    possible_paths = [
        os.path.join('context-data', f"info_json_{video_id}.json"),
        os.path.join('context-data', f"info_{video_id}.json"),
        os.path.join('.', f"info_json_{video_id}.json"),
        os.path.join('.', 'context-data', f"info_json_{video_id}.json")
    ]

    # Add current directory to possible paths
    import glob
    for pattern in [f"info_json_{video_id}.json", f"info_json_{video_id}_*.json"]:
        possible_paths.extend(glob.glob(pattern))
        possible_paths.extend(glob.glob(os.path.join('context-data', pattern)))

    # Remove duplicates while preserving order
    seen = set()
    possible_paths = [p for p in possible_paths if not (p in seen or seen.add(p))]

    # Try each path
    for info_json_path in possible_paths:
        if os.path.exists(info_json_path):
            logger.info(f"Found info.json file: {info_json_path}")
            try:
                with open(info_json_path, 'r', encoding='utf-8') as f:
                    content = f.read()
                    if content and len(content) > 10:  # Basic validation
                        logger.info(f"Successfully read info.json from {info_json_path} ({len(content)} bytes)")
                        # Add the infoJson attribute to the token_data object for future use
                        try:
                            setattr(token_data, 'infoJson', content)
                            logger.info(f"Added infoJson attribute to token_data with length: {len(content)}")
                        except Exception as e:
                            logger.error(f"Error adding infoJson attribute to token_data: {e}")
                        return content
            except Exception as e:
                logger.error(f"Error reading info.json file {info_json_path}: {e}")

    # If we get here, try to find any info_json file in the context directory
    try:
        context_dir = 'context-data'
        info_json_files = glob.glob(os.path.join(context_dir, "info_json_*.json"))
        if info_json_files:
            # Sort by modification time, newest first
            info_json_files.sort(key=os.path.getmtime, reverse=True)
            newest_file = info_json_files[0]
            logger.info(f"Found newest info.json file: {newest_file}")
            with open(newest_file, 'r', encoding='utf-8') as f:
                content = f.read()
                if content and len(content) > 10:  # Basic validation
                    logger.info(f"Successfully read info.json from {newest_file} ({len(content)} bytes)")
                    # Add the infoJson attribute to the token_data object for future use
                    try:
                        setattr(token_data, 'infoJson', content)
                        logger.info(f"Added infoJson attribute to token_data with length: {len(content)}")
                    except Exception as e:
                        logger.error(f"Error adding infoJson attribute to token_data: {e}")
                    return content
    except Exception as e:
        logger.error(f"Error searching for info.json files: {e}")

    # Try to add the attribute if it's missing
    if not hasattr(token_data, 'infoJson'):
        try:
            # Try using __dict__ to add the attribute
            if hasattr(token_data, '__dict__'):
                token_data.__dict__['infoJson'] = "{}"
                logger.info("Added infoJson attribute to token_data.__dict__")
            else:
                # Try using setattr
                setattr(token_data, 'infoJson', "{}")
                logger.info("Added empty infoJson attribute to token_data using setattr")
        except Exception as e:
            logger.error(f"Error adding infoJson attribute to token_data: {e}")
            # Create a new object with the same attributes plus infoJson
            try:
                from pangramia.yt.common.ttypes import JobTokenData
                new_token = JobTokenData()
                # Copy all attributes
                for attr in dir(token_data):
                    if not attr.startswith('__') and not callable(getattr(token_data, attr)):
                        try:
                            setattr(new_token, attr, getattr(token_data, attr))
                        except Exception:
                            pass
                # Add infoJson
                new_token.infoJson = "{}"
                logger.info("Created new token object with infoJson attribute")
                # Replace token_data with new_token
                token_data = new_token
            except Exception as e2:
                logger.error(f"Error creating new token object: {e2}")

    logger.warning("Could not find valid info.json data")
    return "{}"


def is_valid_json(json_str):
    """Check if a string is valid JSON and not empty"""
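The hunk cuts off at the docstring of is_valid_json, so its body is not part of this diff. For readers following along, a minimal implementation consistent with that docstring might look like this hypothetical sketch (not the repository's actual code):

import json

def is_valid_json(json_str):
    """Check if a string is valid JSON and not empty"""
    # Sketch only: reject empty/whitespace strings, then try to parse.
    if not json_str or not json_str.strip():
        return False
    try:
        json.loads(json_str)
    except (ValueError, TypeError):
        return False
    return True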
@ -806,7 +711,8 @@ If the proxy connection fails, token generation will stop immediately with an er
        sys.exit(1)
    except Exception as e:  # Catch errors during saving or command generation
        logger.error(f"Error processing valid info.json: {str(e)}")
        # Removed traceback.format_exc() call which caused the NoneType error
        # Re-raise the exception to be handled by the main error handler
        raise
    finally:
        if transport:
            transport.close()