yt-dlp-dags/dags/ytdlp_mgmt_proxy_account.py
"""
DAG to manage the state of proxies and accounts used by the ytdlp-ops-server.
"""
from __future__ import annotations
import logging
from datetime import datetime
import socket
from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.models.variable import Variable
from airflow.providers.redis.hooks.redis import RedisHook
# Configure logging
logger = logging.getLogger(__name__)
# Import and apply Thrift exceptions patch for Airflow compatibility
try:
from thrift_exceptions_patch import patch_thrift_exceptions
patch_thrift_exceptions()
logger.info("Applied Thrift exceptions patch for Airflow compatibility.")
except ImportError:
logger.warning("Could not import thrift_exceptions_patch. Compatibility may be affected.")
except Exception as e:
logger.error(f"Error applying Thrift exceptions patch: {e}")
# Thrift imports
try:
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from pangramia.yt.tokens_ops import YTTokenOpService
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
except ImportError as e:
logger.critical(f"Could not import Thrift modules: {e}. Ensure ytdlp-ops-auth package is installed.")
# Fail DAG parsing if thrift modules are not available
raise

DEFAULT_YT_AUTH_SERVICE_IP = Variable.get("YT_AUTH_SERVICE_IP", default_var="16.162.82.212")
# Variable.get() returns a string once the variable is set in Airflow, so cast to int
# to keep the default consistent with the integer "port" Param declared below.
DEFAULT_YT_AUTH_SERVICE_PORT = int(Variable.get("YT_AUTH_SERVICE_PORT", default_var=9080))
DEFAULT_REDIS_CONN_ID = "redis_default"
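
# To point this DAG at a different environment, set the two Airflow Variables read above
# (illustrative values, not project defaults):
#
#   airflow variables set YT_AUTH_SERVICE_IP 10.0.0.5
#   airflow variables set YT_AUTH_SERVICE_PORT 9080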


# Helper function to connect to Redis, similar to other DAGs
def _get_redis_client(redis_conn_id: str):
    """Gets a Redis client from an Airflow connection."""
    try:
        # Use the imported RedisHook
        redis_hook = RedisHook(redis_conn_id=redis_conn_id)
        # get_conn returns a redis.Redis client
        return redis_hook.get_conn()
    except Exception as e:
        logger.error(f"Failed to connect to Redis using connection '{redis_conn_id}': {e}")
        # Use the imported AirflowException
        raise AirflowException(f"Redis connection failed: {e}")
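
# Connectivity sketch (assumes the Airflow connection "redis_default" points at a reachable Redis
# instance): _get_redis_client("redis_default").ping() returning True confirms both the Airflow
# connection and the Redis server are usable.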


def format_timestamp(ts_str: str) -> str:
    """Formats a string timestamp into a human-readable date string."""
    if not ts_str:
        return ""
    try:
        ts_float = float(ts_str)
        if ts_float <= 0:
            return ""
        # Use datetime from the imported 'from datetime import datetime'
        dt_obj = datetime.fromtimestamp(ts_float)
        return dt_obj.strftime('%Y-%m-%d %H:%M:%S')
    except (ValueError, TypeError):
        return ts_str  # Return original string if conversion fails
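
# Illustrative behaviour (the rendered date depends on the worker's local timezone):
#   format_timestamp("1715000000")   -> "2024-05-06 15:53:20" on a UTC+3 worker
#   format_timestamp("")             -> ""
#   format_timestamp("not-a-number") -> "not-a-number" (returned unchanged)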


def get_thrift_client(host: str, port: int):
    """Helper function to create and connect a Thrift client."""
    transport = TSocket.TSocket(host, port)
    transport.setTimeout(30 * 1000)  # 30s timeout
    transport = TTransport.TFramedTransport(transport)
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = YTTokenOpService.Client(protocol)
    transport.open()
    logger.info(f"Connected to Thrift server at {host}:{port}")
    return client, transport
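
# Typical call pattern (mirrors manage_system_callable below) -- the transport must always be
# closed, even when the RPC raises:
#
#   client, transport = get_thrift_client(host, port)
#   try:
#       statuses = client.getProxyStatus(None)  # None lists all server identities
#   finally:
#       if transport.isOpen():
#           transport.close()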


def _list_proxy_statuses(client, server_identity):
    """Lists the status of proxies."""
    logger.info(f"Listing proxy statuses for server: {server_identity or 'ALL'}")
    statuses = client.getProxyStatus(server_identity)
    if not statuses:
        logger.info("No proxy statuses found.")
        return

    from tabulate import tabulate

    status_list = []
    # This is forward-compatible: it checks for new attributes before using them.
    has_extended_info = hasattr(statuses[0], 'recentAccounts') or hasattr(statuses[0], 'recentMachines')
    headers = ["Server", "Proxy URL", "Status", "Success", "Failures", "Last Success", "Last Failure"]
    if has_extended_info:
        headers.extend(["Recent Accounts", "Recent Machines"])
    for s in statuses:
        status_item = {
            "Server": s.serverIdentity,
            "Proxy URL": s.proxyUrl,
            "Status": s.status,
            "Success": s.successCount,
            "Failures": s.failureCount,
            "Last Success": format_timestamp(s.lastSuccessTimestamp),
            "Last Failure": format_timestamp(s.lastFailureTimestamp),
        }
        if has_extended_info:
            recent_accounts = getattr(s, 'recentAccounts', [])
            recent_machines = getattr(s, 'recentMachines', [])
            status_item["Recent Accounts"] = "\n".join(recent_accounts) if recent_accounts else "N/A"
            status_item["Recent Machines"] = "\n".join(recent_machines) if recent_machines else "N/A"
        status_list.append(status_item)

    print("\n--- Proxy Statuses ---")
    # The f-string with a newline ensures the table starts on a new line in the logs.
    print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
    print("----------------------\n")
    if not has_extended_info:
        logger.warning("Server does not seem to support 'recentAccounts' or 'recentMachines' fields yet.")
        print("NOTE: To see Recent Accounts/Machines, the server's `getProxyStatus` method must be updated to return these fields.")


def _list_account_statuses(client, account_id, redis_conn_id):
    """Lists the status of accounts, enriching with live data from Redis."""
    logger.info(f"Listing account statuses for account: {account_id or 'ALL'}")
    redis_client = None
    try:
        redis_client = _get_redis_client(redis_conn_id)
        logger.info("Successfully connected to Redis to fetch detailed account status.")
    except Exception as e:
        logger.warning(f"Could not connect to Redis to get detailed status. Will show basic status. Error: {e}")
        redis_client = None

    try:
        # The thrift method takes accountId (specific) or accountPrefix.
        # If account_id is provided, we use it. If not, we get all by leaving both params as None.
        statuses = client.getAccountStatus(accountId=account_id, accountPrefix=None)
        if not statuses:
            print("\n--- Account Statuses ---\nNo account statuses found.\n------------------------\n")
            return

        from tabulate import tabulate

        status_list = []
        for s in statuses:
            status_str = s.status
            # If an account is resting, get the live countdown from Redis for accuracy.
            if redis_client and 'RESTING' in status_str:
                try:
                    status_key = f"account_status:{s.accountId}"
                    # The server stores resting expiry time in 'resting_until'.
                    expiry_ts_bytes = redis_client.hget(status_key, "resting_until")
                    if expiry_ts_bytes:
                        expiry_ts = float(expiry_ts_bytes)
                        now = datetime.now().timestamp()
                        if now >= expiry_ts:
                            status_str = "ACTIVE (was RESTING)"
                        else:
                            remaining_seconds = int(expiry_ts - now)
                            if remaining_seconds > 3600:
                                status_str = f"RESTING (active in {remaining_seconds // 3600}h {remaining_seconds % 3600 // 60}m)"
                            elif remaining_seconds > 60:
                                status_str = f"RESTING (active in {remaining_seconds // 60}m {remaining_seconds % 60}s)"
                            else:
                                status_str = f"RESTING (active in {remaining_seconds}s)"
                except Exception as e:
                    logger.warning(f"Could not parse resting time for {s.accountId} from Redis: {e}. Using server status.")

            # Determine the last activity timestamp for sorting
            last_success = float(s.lastSuccessTimestamp) if s.lastSuccessTimestamp else 0
            last_failure = float(s.lastFailureTimestamp) if s.lastFailureTimestamp else 0
            last_activity = max(last_success, last_failure)

            status_item = {
                "Account ID": s.accountId,
                "Status": status_str,
                "Success": s.successCount,
                "Failures": s.failureCount,
                "Last Success": format_timestamp(s.lastSuccessTimestamp),
                "Last Failure": format_timestamp(s.lastFailureTimestamp),
                "Last Proxy": s.lastUsedProxy or "N/A",
                "Last Machine": s.lastUsedMachine or "N/A",
                "_last_activity": last_activity,  # Add a temporary key for sorting
            }
            status_list.append(status_item)

        # Sort the list by the last activity timestamp in descending order
        status_list.sort(key=lambda item: item.get('_last_activity', 0), reverse=True)
        # Remove the temporary sort key before printing
        for item in status_list:
            del item['_last_activity']

        print("\n--- Account Statuses ---")
        # The f-string with a newline ensures the table starts on a new line in the logs.
        print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
        print("------------------------\n")
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to get account statuses: {e.message}", exc_info=True)
        print(f"\nERROR: Could not retrieve account statuses. Server returned: {e.message}\n")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting account statuses: {e}", exc_info=True)
        print(f"\nERROR: An unexpected error occurred: {e}\n")


def manage_system_callable(**context):
    """Main callable to interact with the system management endpoints."""
    params = context["params"]
    entity = params["entity"]
    action = params["action"]
    host = params["host"]
    port = params["port"]
    server_identity = params.get("server_identity")
    proxy_url = params.get("proxy_url")
    account_id = params.get("account_id")

    # --- Validate Action/Entity Combination and Parameters ---
    valid_actions = {
        "proxy": ["list_statuses", "ban", "unban", "unban_all", "delete_from_redis"],
        "account": ["list_statuses", "ban", "unban", "unban_all", "delete_from_redis"],
        "all": ["list_statuses"],
    }
    if action not in valid_actions.get(entity, []):
        raise ValueError(
            f"The action '{action}' is not valid for entity '{entity}'.\n"
            f"Valid actions for '{entity}' are: {', '.join(valid_actions.get(entity, ['None']))}."
        )

    # Validate required parameters for the chosen action
    if entity == "proxy":
        if action in ["ban", "unban", "unban_all"] and not server_identity:
            raise ValueError(f"A 'server_identity' is required for proxy action '{action}'.")
        if action in ["ban", "unban"] and not proxy_url:
            raise ValueError(f"A 'proxy_url' is required for proxy action '{action}'.")
    if entity == "account":
        if action in ["ban", "unban"] and not account_id:
            raise ValueError(f"An 'account_id' is required for account action '{action}'.")

    # Handle direct Redis actions separately to avoid creating an unnecessary Thrift connection.
    if action == "delete_from_redis":
        redis_conn_id = params["redis_conn_id"]
        redis_client = _get_redis_client(redis_conn_id)
        if entity == "account":
            account_prefix = params.get("account_id")  # Repurpose account_id param as an optional prefix
            pattern = f"account_status:{account_prefix}*" if account_prefix else "account_status:*"
            logger.warning(f"Searching for account status keys in Redis with pattern: '{pattern}'")
            keys_to_delete = [key for key in redis_client.scan_iter(pattern)]
            if not keys_to_delete:
                print(f"\nNo accounts found matching pattern '{pattern}'.\n")
                return
            print(f"\nWARNING: Found {len(keys_to_delete)} accounts to remove from Redis.")
            for key in keys_to_delete[:10]:
                print(f" - {key.decode('utf-8')}")
            if len(keys_to_delete) > 10:
                print(f" ... and {len(keys_to_delete) - 10} more.")
            deleted_count = redis_client.delete(*keys_to_delete)
            print(f"\nSuccessfully removed {deleted_count} accounts from Redis.\n")
        elif entity == "proxy":
            proxy_url = params.get("proxy_url")
            server_identity = params.get("server_identity")
            if not proxy_url:
                raise ValueError("A 'proxy_url' is required for proxy action 'delete_from_redis'.")
            if not server_identity:
                raise ValueError("A 'server_identity' is required for proxy action 'delete_from_redis'.")
            proxy_state_key = f"proxies:{server_identity}"
            proxy_failure_key = f"proxy_failures:{proxy_url}"
            logger.warning(f"Deleting proxy '{proxy_url}' state from hash '{proxy_state_key}' and failure key '{proxy_failure_key}' from Redis.")
            with redis_client.pipeline() as pipe:
                pipe.hdel(proxy_state_key, proxy_url)
                pipe.delete(proxy_failure_key)
                results = pipe.execute()
            hdel_result = results[0]
            del_result = results[1]
            print(f"\nSuccessfully removed proxy '{proxy_url}' from state hash (result: {hdel_result}) and deleted failure key (result: {del_result}).\n")
        return  # End execution for this action

    client, transport = None, None
    try:
        client, transport = get_thrift_client(host, port)
        if entity == "proxy":
            if action == "list_statuses":
                _list_proxy_statuses(client, server_identity)
            elif action == "ban":
                if not proxy_url: raise ValueError("A 'proxy_url' is required.")
                logger.info(f"Banning proxy '{proxy_url}' for server '{server_identity}'...")
                client.banProxy(proxy_url, server_identity)
                print(f"Successfully sent request to ban proxy '{proxy_url}'.")
            elif action == "unban":
                if not proxy_url: raise ValueError("A 'proxy_url' is required.")
                logger.info(f"Unbanning proxy '{proxy_url}' for server '{server_identity}'...")
                client.unbanProxy(proxy_url, server_identity)
                print(f"Successfully sent request to unban proxy '{proxy_url}'.")
            elif action == "unban_all":
                logger.info(f"Unbanning all proxy statuses for server '{server_identity}'...")
                client.resetAllProxyStatuses(server_identity)
                print(f"Successfully sent request to unban all proxy statuses for '{server_identity}'.")
        elif entity == "account":
            if action == "list_statuses":
                _list_account_statuses(client, account_id, params["redis_conn_id"])
            elif action == "ban":
                if not account_id: raise ValueError("An 'account_id' is required.")
                reason = f"Manual ban from Airflow mgmt DAG by {socket.gethostname()}"
                logger.info(f"Banning account '{account_id}'...")
                client.banAccount(accountId=account_id, reason=reason)
                print(f"Successfully sent request to ban account '{account_id}'.")
            elif action == "unban":
                if not account_id: raise ValueError("An 'account_id' is required.")
                reason = f"Manual un-ban from Airflow mgmt DAG by {socket.gethostname()}"
                logger.info(f"Unbanning account '{account_id}'...")
                client.unbanAccount(accountId=account_id, reason=reason)
                print(f"Successfully sent request to unban account '{account_id}'.")
            elif action == "unban_all":
                account_prefix = account_id  # Repurpose account_id param as an optional prefix
                logger.info(f"Unbanning all account statuses to ACTIVE (prefix: '{account_prefix or 'ALL'}')...")
                all_statuses = client.getAccountStatus(accountId=None, accountPrefix=account_prefix)
                if not all_statuses:
                    print(f"No accounts found with prefix '{account_prefix or 'ALL'}' to unban.")
                    return
                accounts_to_unban = [s.accountId for s in all_statuses]
                logger.info(f"Found {len(accounts_to_unban)} accounts to unban.")
                print(f"Found {len(accounts_to_unban)} accounts. Sending unban request for each...")
                unban_count = 0
                fail_count = 0
                for acc_id in accounts_to_unban:
                    try:
                        reason = f"Manual unban_all from Airflow mgmt DAG by {socket.gethostname()}"
                        client.unbanAccount(accountId=acc_id, reason=reason)
                        logger.info(f" - Sent unban for '{acc_id}'.")
                        unban_count += 1
                    except Exception as e:
                        logger.error(f" - Failed to unban account '{acc_id}': {e}")
                        fail_count += 1
                print(f"\nSuccessfully sent unban requests for {unban_count} accounts.")
                if fail_count > 0:
                    print(f"Failed to send unban requests for {fail_count} accounts. See logs for details.")
                # Optionally, list statuses again to confirm
                print("\n--- Listing statuses after unban_all ---")
                _list_account_statuses(client, account_prefix, params["redis_conn_id"])
        elif entity == "all":
            if action == "list_statuses":
                print("\nListing all entities...")
                _list_proxy_statuses(client, server_identity)
                _list_account_statuses(client, account_id, params["redis_conn_id"])
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Thrift error performing action '{action}': {e.message}", exc_info=True)
        raise
    except NotImplementedError as e:
        logger.error(f"Feature not implemented: {e}", exc_info=True)
        raise
    except Exception as e:
        logger.error(f"Error performing action '{action}': {e}", exc_info=True)
        raise
    finally:
        if transport and transport.isOpen():
            transport.close()
            logger.info("Thrift connection closed.")


with DAG(
    dag_id="ytdlp_mgmt_proxy_account",
    start_date=days_ago(1),
    schedule=None,
    catchup=False,
    tags=["ytdlp", "mgmt", "master"],
    doc_md="""
### YT-DLP Proxy and Account Manager DAG

This DAG provides tools to manage the state of **proxies and accounts** used by the `ytdlp-ops-server`.

Select an `entity` and an `action` to perform. Note that not all actions are available for all entities.

---

#### Actions for `entity: proxy`

- `list_statuses`: View status of all proxies, optionally filtered by `server_identity`.
- `ban`: Ban a specific proxy for a given `server_identity`. Requires `proxy_url`.
- `unban`: Un-ban a specific proxy. Requires `proxy_url`.
- `unban_all`: Resets the status of all proxies for a given `server_identity` to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes a proxy's state from Redis for a specific `server_identity`. This removes its state (ACTIVE/BANNED) and its failure history. The server will re-create it with a default `ACTIVE` state on its next refresh if the proxy is still in the server's configuration. Use this to reset a single proxy's state completely. Requires `proxy_url` and `server_identity`.

#### Actions for `entity: account`

- `list_statuses`: View status of all accounts, optionally filtered by `account_id` (as a prefix).
- `ban`: Ban a specific account. Requires `account_id`.
- `unban`: Un-ban a specific account. Requires `account_id`.
- `unban_all`: Sets the status of all accounts (or those matching a prefix in `account_id`) to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes account status keys from Redis. This permanently removes the account from being tracked by the system. This is different from `unban`. Use with caution.

#### Actions for `entity: all`

- `list_statuses`: A convenience to view statuses for both proxies and accounts in one run.

---

**When to use `delete_from_redis`?**

- **For Accounts:** Account state is managed entirely within Redis. Deleting an account's key is a permanent removal from the system's tracking. This is different from `unban`, which just resets the status. Use this when you want to completely remove an account.
- **For Proxies:** Proxies are defined in the server's startup configuration. Redis only stores their *state* (e.g., `BANNED` or `ACTIVE`) and failure history. Deleting a proxy's state from Redis will cause the server to re-create it with a default `ACTIVE` state on its next refresh cycle. This action is useful for completely resetting a single proxy that may be stuck or has a long failure history, without having to reset all proxies for that server.
    """,
    params={
        "host": Param(
            DEFAULT_YT_AUTH_SERVICE_IP,
            type="string",
            description="The hostname of the ytdlp-ops-server service. Default is from Airflow variable YT_AUTH_SERVICE_IP or hardcoded.",
        ),
        "port": Param(
            DEFAULT_YT_AUTH_SERVICE_PORT,
            type="integer",
            description="The port of the ytdlp-ops-server service (Envoy load balancer). Default is from Airflow variable YT_AUTH_SERVICE_PORT or hardcoded.",
        ),
        "entity": Param(
            "account",
            type="string",
            enum=["account", "proxy", "all"],
            description="The type of entity to manage.",
        ),
        "action": Param(
            "list_statuses",
            type="string",
            enum=["list_statuses", "ban", "unban", "unban_all", "delete_from_redis"],
            description="The management action to perform. See the DAG documentation for which actions are valid for each entity.",
        ),
        "server_identity": Param(
            "ytdlp-ops-airflow-service",
            type=["null", "string"],
            description="The identity of the server instance (for proxy management).",
        ),
        "proxy_url": Param(
            None,
            type=["null", "string"],
            description="The proxy URL to act upon (e.g., 'socks5://host:port').",
        ),
        "account_id": Param(
            None,
            type=["null", "string"],
            description="The account ID to act upon. For `unban_all` or `delete_from_redis` on accounts, this can be an optional prefix.",
        ),
        "redis_conn_id": Param(
            DEFAULT_REDIS_CONN_ID,
            type="string",
            title="Redis Connection ID",
            description="The Airflow connection ID for the Redis server (used for 'delete_from_redis' and for fetching detailed account status).",
        ),
    },
) as dag:
    system_management_task = PythonOperator(
        task_id="system_management_task",
        python_callable=manage_system_callable,
    )
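

# Local debugging sketch: DAG.test() (Airflow 2.5+) runs the task in-process without a scheduler.
# It assumes the Thrift service and the Redis connection are reachable from this machine.
if __name__ == "__main__":
    dag.test(run_conf={"entity": "account", "action": "list_statuses"})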