# Snapshot metadata from the file listing (not part of the script):
# 2025-12-26 10:05:00 +03:00 — 892 lines, 42 KiB, Python
#!/usr/bin/env python3
"""
Tool for managing the ytdlp-ops-server via its Thrift management interface.
"""
import argparse
import json
import logging
import os
import re
import sys
import time
from datetime import datetime
try:
from dotenv import load_dotenv
except ImportError:
load_dotenv = None
try:
from tabulate import tabulate
except ImportError:
print("'tabulate' library not found. Please install it with: pip install tabulate", file=sys.stderr)
sys.exit(1)
try:
import redis
except ImportError:
print("'redis' library not found. Please install it with: pip install redis", file=sys.stderr)
sys.exit(1)
# Add project's thrift gen_py path
script_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.abspath(os.path.join(script_dir, '..'))
sys.path.insert(0, os.path.join(project_root, 'thrift_model', 'gen_py'))
try:
from yt_ops_services.client_utils import get_thrift_client, format_timestamp
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
from .profile_manager_tool import ProfileManager, format_duration
from .stress_policy import utils as sp_utils
except ImportError:
print("Could not import Thrift modules. Ensure this script is run in an environment where 'yt_ops_services' is installed.", file=sys.stderr)
sys.exit(1)
logger = logging.getLogger('manage_tool')
# --- Helper Functions (adapted from regression.py) ---
def _get_redis_client(redis_host, redis_port, redis_password):
"""Gets a Redis client."""
if not redis_host:
return None
try:
client = redis.Redis(host=redis_host, port=redis_port, password=redis_password, decode_responses=True)
client.ping()
logger.info(f"Successfully connected to Redis at {client.connection_pool.connection_kwargs.get('host')}:{client.connection_pool.connection_kwargs.get('port')}")
return client
except redis.exceptions.ConnectionError as e:
logger.error(f"Failed to connect to Redis: {e}")
return None
except Exception as e:
logger.error(f"An unexpected error occurred while connecting to Redis: {e}")
return None
def _list_proxy_statuses(client, server_identity=None, auth_manager=None, download_manager=None):
    """Lists proxy statuses by connecting directly to the Thrift service.

    Args:
        client: Connected Thrift management client.
        server_identity: Optional server identity filter passed to the server.
        auth_manager: Optional ProfileManager for the auth simulation env;
            enriches each row with its ACTIVE/RESTING state and countdown.
        download_manager: Optional ProfileManager for the download simulation
            env; same enrichment as auth_manager.

    Prints a table to stdout; errors are logged, never raised.
    """
    print("\n--- Proxy Statuses ---")
    try:
        statuses = client.getProxyStatus(server_identity)
        if not statuses:
            print("No proxy statuses found.")
            return
        # Enrich with Redis data from simulation environments.
        all_proxy_urls = [s.proxyUrl for s in statuses]
        auth_proxy_states, download_proxy_states = {}, {}
        auth_work_minutes, download_work_minutes = 0, 0
        if auth_manager:
            auth_proxy_states = auth_manager.get_proxy_states(all_proxy_urls)
            # proxy_work_minutes drives the "(ends in ...)" countdown for ACTIVE proxies.
            work_minutes_str = auth_manager.get_config('proxy_work_minutes')
            if work_minutes_str and work_minutes_str.isdigit():
                auth_work_minutes = int(work_minutes_str)
        if download_manager:
            download_proxy_states = download_manager.get_proxy_states(all_proxy_urls)
            work_minutes_str = download_manager.get_config('proxy_work_minutes')
            if work_minutes_str and work_minutes_str.isdigit():
                download_work_minutes = int(work_minutes_str)
        status_list = []
        # This is forward-compatible: it checks for new attributes before using them.
        has_extended_info = hasattr(statuses[0], 'recentAccounts') or hasattr(statuses[0], 'recentMachines')
        # NOTE(review): these headers are rebuilt from status_list[0].keys()
        # further below, so this initial list is effectively unused.
        headers = ["Server", "Proxy URL", "Status", "Success", "Failures", "Last Success", "Last Failure"]
        if auth_manager: headers.append("Auth State")
        if download_manager: headers.append("Download State")
        if has_extended_info:
            headers.extend(["Recent Accounts", "Recent Machines"])
        for s in statuses:
            status_item = {
                "Server": s.serverIdentity,
                "Proxy URL": s.proxyUrl,
                "Status": s.status,
                "Success": s.successCount,
                "Failures": s.failureCount,
                "Last Success": format_timestamp(s.lastSuccessTimestamp),
                "Last Failure": format_timestamp(s.lastFailureTimestamp),
            }
            now = time.time()
            if auth_manager:
                state_data = auth_proxy_states.get(s.proxyUrl, {})
                state = state_data.get('state', 'N/A')
                rest_until = state_data.get('rest_until', 0)
                work_start = state_data.get('work_start_timestamp', 0)
                state_str = state
                # Append a human-readable countdown for RESTING/ACTIVE states.
                if state == 'RESTING' and rest_until > now:
                    state_str += f"\n(ends in {format_duration(rest_until - now)})"
                elif state == 'ACTIVE' and work_start > 0 and auth_work_minutes > 0:
                    work_end_time = work_start + (auth_work_minutes * 60)
                    if work_end_time > now:
                        state_str += f"\n(ends in {format_duration(work_end_time - now)})"
                status_item["Auth State"] = state_str
            if download_manager:
                # Same countdown logic as the auth block, using the download env's state.
                state_data = download_proxy_states.get(s.proxyUrl, {})
                state = state_data.get('state', 'N/A')
                rest_until = state_data.get('rest_until', 0)
                work_start = state_data.get('work_start_timestamp', 0)
                state_str = state
                if state == 'RESTING' and rest_until > now:
                    state_str += f"\n(ends in {format_duration(rest_until - now)})"
                elif state == 'ACTIVE' and work_start > 0 and download_work_minutes > 0:
                    work_end_time = work_start + (download_work_minutes * 60)
                    if work_end_time > now:
                        state_str += f"\n(ends in {format_duration(work_end_time - now)})"
                status_item["Download State"] = state_str
            if has_extended_info:
                recent_accounts = getattr(s, 'recentAccounts', [])
                recent_machines = getattr(s, 'recentMachines', [])
                status_item["Recent Accounts"] = "\n".join(recent_accounts) if recent_accounts else "N/A"
                status_item["Recent Machines"] = "\n".join(recent_machines) if recent_machines else "N/A"
            status_list.append(status_item)
        # Stabilize column widths to reduce jitter in --watch mode.
        if status_list:
            headers = list(status_list[0].keys())
            table_data = [[item.get(h) for h in headers] for item in status_list]
            # Calculate max width for each column based on its content, accounting for multi-line cells.
            columns = list(zip(*([headers] + table_data)))
            maxwidths = []
            for col in columns:
                max_w = 0
                for cell in col:
                    cell_w = max((len(line) for line in str(cell).split('\n')), default=0) if cell else 0
                    if cell_w > max_w:
                        max_w = cell_w
                maxwidths.append(max_w)
            # Enforce a minimum width for columns that can have variable content.
            MIN_WIDTH = 25
            if "Recent Accounts" in headers:
                idx = headers.index("Recent Accounts")
                maxwidths[idx] = max(MIN_WIDTH, maxwidths[idx])
            if "Recent Machines" in headers:
                idx = headers.index("Recent Machines")
                maxwidths[idx] = max(MIN_WIDTH, maxwidths[idx])
            print(tabulate(table_data, headers=headers, tablefmt='grid', maxcolwidths=maxwidths))
        else:
            # NOTE(review): status_list is always non-empty here (statuses was
            # non-empty and the loop appends one row per status), so this
            # branch appears unreachable.
            print(tabulate(status_list, headers='keys', tablefmt='grid'))
        if not has_extended_info:
            print("\nNOTE: To see Recent Accounts/Machines, the server's `getProxyStatus` method must be updated to return these fields.")
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to get proxy statuses: {e.message}")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting proxy statuses: {e}", exc_info=True)
def _list_account_statuses(client, redis_client, account_id=None):
    """Lists account statuses from Thrift, optionally enriched with live Redis data.

    Args:
        client: Connected Thrift management client.
        redis_client: Optional Redis client; when present, RESTING statuses are
            replaced with a live countdown read from `account_status:<id>`.
        account_id: Optional exact account ID filter.

    Rows are sorted most-recently-active first (max of last success/failure).
    Prints to stdout; errors are logged, never raised.
    """
    print(f"\n--- Account Statuses ---")
    try:
        statuses = client.getAccountStatus(accountId=account_id, accountPrefix=None)
        if not statuses:
            print("No account statuses found.")
            return
        status_list = []
        for s in statuses:
            status_str = s.status
            if redis_client and 'RESTING' in status_str:
                try:
                    status_key = f"account_status:{s.accountId}"
                    expiry_ts_bytes = redis_client.hget(status_key, "resting_until")
                    if expiry_ts_bytes:
                        expiry_ts = float(expiry_ts_bytes)
                        now = datetime.now().timestamp()
                        if now >= expiry_ts:
                            # Rest period already elapsed; the server status is stale.
                            status_str = "ACTIVE (was RESTING)"
                        else:
                            # Render the remaining rest time at h/m or m/s or s granularity.
                            remaining_seconds = int(expiry_ts - now)
                            if remaining_seconds > 3600:
                                status_str = f"RESTING (active in {remaining_seconds // 3600}h {remaining_seconds % 3600 // 60}m)"
                            elif remaining_seconds > 60:
                                status_str = f"RESTING (active in {remaining_seconds // 60}m {remaining_seconds % 60}s)"
                            else:
                                status_str = f"RESTING (active in {remaining_seconds}s)"
                except Exception as e:
                    # Best effort: fall back to the server-reported status on any Redis/parse issue.
                    logger.warning(f"Could not parse resting time for {s.accountId} from Redis: {e}. Using server status.")
            # _last_activity is a sort key only; it is stripped before rendering.
            last_success = float(s.lastSuccessTimestamp) if s.lastSuccessTimestamp else 0
            last_failure = float(s.lastFailureTimestamp) if s.lastFailureTimestamp else 0
            last_activity = max(last_success, last_failure)
            status_list.append({
                "Account ID": s.accountId, "Status": status_str, "Success": s.successCount,
                "Failures": s.failureCount, "Last Success": format_timestamp(s.lastSuccessTimestamp),
                "Last Failure": format_timestamp(s.lastFailureTimestamp), "Last Proxy": s.lastUsedProxy or "N/A",
                "_last_activity": last_activity,
            })
        status_list.sort(key=lambda item: item.get('_last_activity', 0), reverse=True)
        for item in status_list:
            if '_last_activity' in item:
                del item['_last_activity']
        # Stabilize column widths to reduce jitter in --watch mode.
        if status_list:
            headers = list(status_list[0].keys())
            table_data = [[item.get(h) for h in headers] for item in status_list]
            columns = list(zip(*([headers] + table_data)))
            maxwidths = [max((len(str(x)) for x in col), default=0) if col else 0 for col in columns]
            # Enforce a minimum width for the Status column to prevent it from changing size.
            STATUS_MIN_WIDTH = 30
            if "Status" in headers:
                idx = headers.index("Status")
                maxwidths[idx] = max(STATUS_MIN_WIDTH, maxwidths[idx])
            print(tabulate(table_data, headers=headers, tablefmt='grid', maxcolwidths=maxwidths))
        else:
            print(tabulate(status_list, headers='keys', tablefmt='grid'))
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to get account statuses: {e.message}")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting account statuses: {e}", exc_info=True)
def _list_client_statuses(redis_client):
    """Lists client statistics from Redis.

    Reads the 'client_stats' hash (client name -> JSON stats blob) and prints
    a per-client success/failure table, sorted by client name. Does nothing
    when Redis is not configured; errors are logged, never raised.
    """
    if not redis_client:
        return
    print("\n--- Client Statuses (from Redis) ---")
    try:
        stats_key = "client_stats"
        all_stats_raw = redis_client.hgetall(stats_key)
        if not all_stats_raw:
            print("No client stats found in Redis.")
            return
        status_list = []
        for client, stats_json in all_stats_raw.items():
            try:
                stats = json.loads(stats_json)
                def format_latest(data):
                    # Render "timestamp (video_id)" for a latest_success/latest_failure entry.
                    if not data: return "N/A"
                    ts = format_timestamp(data.get('timestamp'))
                    url = data.get('url', 'N/A')
                    video_id = sp_utils.get_video_id(url)
                    if video_id == "unknown_video_id":
                        video_id = "N/A"
                    return f"{ts} ({video_id})"
                status_list.append({
                    "Client": client, "Success": stats.get('success_count', 0),
                    "Failures": stats.get('failure_count', 0),
                    "Last Success": format_latest(stats.get('latest_success')),
                    "Last Failure": format_latest(stats.get('latest_failure')),
                })
            except (json.JSONDecodeError, AttributeError):
                # A malformed blob gets an ERROR row instead of aborting the whole listing.
                status_list.append({"Client": client, "Success": "ERROR", "Failures": "ERROR", "Last Success": "Parse Error", "Last Failure": "Parse Error"})
        status_list.sort(key=lambda item: item.get('Client', ''))
        print(tabulate(status_list, headers='keys', tablefmt='grid'))
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting client statuses: {e}", exc_info=True)
def _list_activity_counters(redis_client):
    """Lists current activity rates for proxies and accounts from Redis.

    Each entity has a sorted-set key whose member scores are event timestamps;
    1m/5m/1h counts are computed with ZCOUNT relative to `now`. Entities with
    zero activity in the last hour are omitted from the output.
    """
    if not redis_client:
        print("\n--- Activity Counters ---\nRedis is not configured. Cannot show activity counters.\n---------------------------\n")
        return
    print("\n--- Activity Counters ---")
    now = time.time()
    def process_keys(pattern, entity_name):
        # Scan, count, and print one table for all keys matching `pattern`.
        try:
            keys = redis_client.scan_iter(pattern)
        except redis.exceptions.RedisError as e:
            logger.error(f"Redis error scanning for keys with pattern '{pattern}': {e}")
            return
        status_list = []
        for key in keys:
            # Key layout is "activity:per_<kind>:<entity_id>"; keep only the id part.
            entity_id = key.split(':', 2)[-1]
            try:
                count_1m = redis_client.zcount(key, now - 60, now)
                count_5m = redis_client.zcount(key, now - 300, now)
                count_1h = redis_client.zcount(key, now - 3600, now)
                if count_1h == 0: # Don't show entities with no recent activity
                    continue
                status_list.append({
                    entity_name: entity_id,
                    "Activity (Last 1m)": count_1m,
                    "Activity (Last 5m)": count_5m,
                    "Activity (Last 1h)": count_1h,
                })
            except redis.exceptions.RedisError as e:
                # Skip just this key; keep processing the rest of the scan.
                logger.error(f"Redis error processing key '{key}': {e}")
        status_list.sort(key=lambda item: item.get(entity_name, ''))
        print(f"\n--- {entity_name} Activity Counters ---")
        if not status_list:
            print(f"No recent activity found for {entity_name.lower()}s.")
        else:
            print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
        print("-----------------------------------\n")
    try:
        process_keys("activity:per_proxy:*", "Proxy URL")
        process_keys("activity:per_account:*", "Account ID")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting activity counters: {e}", exc_info=True)
        print(f"\nERROR: An unexpected error occurred: {e}\n")
def get_system_status(args):
    """Connects to services and prints status tables.

    Builds optional ProfileManager instances for the auth/download simulation
    environments (per --auth-env/--download-env/--env precedence), then prints
    proxy, account, and client status tables. The Thrift transport is always
    closed on exit; failures are logged, never raised.
    """
    logger.info("--- Getting System Status ---")
    client, transport = None, None
    redis_client = _get_redis_client(args.redis_host, args.redis_port, args.redis_password)
    auth_manager, download_manager = None, None
    def _create_manager(env_name):
        # Build a ProfileManager for env_name; an explicit --key-prefix wins
        # over the "<env>_profile_mgmt_" default. Requires a Redis host.
        if not env_name or not args.redis_host: return None
        if args.key_prefix:
            key_prefix = args.key_prefix
        else:
            key_prefix = f"{env_name}_profile_mgmt_"
        return ProfileManager(args.redis_host, args.redis_port, args.redis_password, key_prefix)
    # Precedence: --auth-env > --env
    auth_env_to_use = args.auth_env or args.env
    if auth_env_to_use:
        auth_manager = _create_manager(auth_env_to_use)
    # Precedence: --download-env > --env
    download_env_to_use = args.download_env or args.env
    if download_env_to_use:
        # If it's the same env, reuse the manager instance
        if download_env_to_use == auth_env_to_use and auth_manager:
            download_manager = auth_manager
        else:
            download_manager = _create_manager(download_env_to_use)
    try:
        client, transport = get_thrift_client(args.host, args.port)
        _list_proxy_statuses(client, args.server_identity, auth_manager=auth_manager, download_manager=download_manager)
        _list_account_statuses(client, redis_client)
        _list_client_statuses(redis_client)
    except Exception as e:
        logger.error(f"Could not get system status: {e}")
    finally:
        # Always release the Thrift transport, even when a listing failed.
        if transport and transport.isOpen():
            transport.close()
def main_activity_counters(args):
    """Entry point for the 'activity-counters' command.

    Connects to Redis (if configured) and prints the current per-proxy and
    per-account activity rate tables. Always returns 0.
    """
    conn = _get_redis_client(args.redis_host, args.redis_port, args.redis_password)
    _list_activity_counters(conn)
    return 0
def main_status(args):
    """Entry point for the 'status' command.

    Without --watch, prints the status once. With --watch N, clears the
    terminal and re-renders every N seconds until Ctrl+C. Returns 0.
    """
    if not args.watch:
        get_system_status(args)
        return 0
    clear_cmd = 'cls' if os.name == 'nt' else 'clear'
    try:
        while True:
            os.system(clear_cmd)
            print(f"--- System Status (auto-refreshing every {args.watch} seconds, press Ctrl+C to exit) ---")
            print(f"--- Last updated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} ---")
            get_system_status(args)
            # Live one-line countdown between refreshes.
            for remaining in reversed(range(1, args.watch + 1)):
                sys.stdout.write(f"\rRefreshing in {remaining} seconds... ")
                sys.stdout.flush()
                time.sleep(1)
            # Blank out the countdown line before redrawing the screen.
            sys.stdout.write("\r" + " " * 30 + "\r")
            sys.stdout.flush()
    except KeyboardInterrupt:
        print("\nStopping status watch.")
    return 0
def main_ban_proxy(args):
    """Entry point for 'ban-proxy': ban one proxy URL for one server identity.

    Returns 0 on success, 1 when the server refuses or a Thrift error occurs.
    """
    client, transport = None, None
    try:
        client, transport = get_thrift_client(args.host, args.port)
        if not client.banProxy(args.proxy_url, args.server_identity):
            logger.error("Server reported failure to ban proxy.")
            return 1
        print(f"Successfully banned proxy '{args.proxy_url}' for server '{args.server_identity}'.")
        return 0
    except (PBServiceException, PBUserException) as exc:
        logger.error(f"Failed to ban proxy: {exc.message}")
        return 1
    finally:
        # Always release the Thrift transport, even on error paths.
        if transport and transport.isOpen():
            transport.close()
def main_unban_proxy(args):
    """Entry point for 'unban-proxy': lift the ban on one proxy URL.

    Returns 0 on success, 1 when the server refuses or a Thrift error occurs.
    """
    client, transport = None, None
    try:
        client, transport = get_thrift_client(args.host, args.port)
        if not client.unbanProxy(args.proxy_url, args.server_identity):
            logger.error("Server reported failure to unban proxy.")
            return 1
        print(f"Successfully unbanned proxy '{args.proxy_url}' for server '{args.server_identity}'.")
        return 0
    except (PBServiceException, PBUserException) as exc:
        logger.error(f"Failed to unban proxy: {exc.message}")
        return 1
    finally:
        # Always release the Thrift transport, even on error paths.
        if transport and transport.isOpen():
            transport.close()
def main_reset_proxies(args):
    """Entry point for 'reset-proxies': reset all proxy statuses for a server.

    Returns 0 on success, 1 when the server refuses or a Thrift error occurs.
    """
    client, transport = None, None
    try:
        client, transport = get_thrift_client(args.host, args.port)
        if not client.resetAllProxyStatuses(args.server_identity):
            logger.error("Server reported failure to reset proxies.")
            return 1
        print(f"Successfully reset all proxy statuses for server '{args.server_identity}'.")
        return 0
    except (PBServiceException, PBUserException) as exc:
        logger.error(f"Failed to reset proxies: {exc.message}")
        return 1
    finally:
        # Always release the Thrift transport, even on error paths.
        if transport and transport.isOpen():
            transport.close()
def main_ban_all_proxies(args):
    """Main logic for the 'ban-all-proxies' command.

    With --server-identity, bans all proxies for that single server.
    Otherwise, discovers every server identity from getProxyStatus(None)
    and sends a ban-all request for each, continuing past per-server
    failures. Returns 0 on completion, 1 on a top-level Thrift error.
    """
    client, transport = None, None
    try:
        client, transport = get_thrift_client(args.host, args.port)
        if args.server_identity:
            print(f"Banning all proxies for server '{args.server_identity}'...")
            client.banAllProxies(args.server_identity)
            print(f"Successfully sent request to ban all proxies for '{args.server_identity}'.")
        else:
            print("Banning all proxies for ALL servers...")
            all_statuses = client.getProxyStatus(None)
            if not all_statuses:
                print("No proxies found for any server. Nothing to ban.")
                return 0
            all_server_identities = sorted(list(set(s.serverIdentity for s in all_statuses)))
            print(f"Found {len(all_server_identities)} server identities. Sending ban request for each...")
            success_count, fail_count = 0, 0
            for identity in all_server_identities:
                try:
                    client.banAllProxies(identity)
                    print(f" - Sent ban_all for '{identity}'.")
                    success_count += 1
                except Exception as e:
                    # Keep going: one failing identity must not block the rest.
                    logger.error(f" - Failed to ban all proxies for '{identity}': {e}")
                    fail_count += 1
            print(f"\nSuccessfully sent ban_all requests for {success_count} server identities.")
            if fail_count > 0:
                print(f"Failed to send ban_all requests for {fail_count} server identities. See logs for details.")
        return 0
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to ban all proxies: {e.message}")
        return 1
    finally:
        if transport and transport.isOpen():
            transport.close()
def main_unban_all_proxies(args):
    """Main logic for the 'unban-all-proxies' command.

    "Unban" is implemented as a status reset via resetAllProxyStatuses.
    With --server-identity, resets that single server; otherwise discovers
    every server identity from getProxyStatus(None) and resets each one,
    continuing past per-server failures. Returns 0 on completion, 1 on a
    top-level Thrift error.
    """
    client, transport = None, None
    try:
        client, transport = get_thrift_client(args.host, args.port)
        if args.server_identity:
            print(f"Unbanning all proxies for server '{args.server_identity}'...")
            client.resetAllProxyStatuses(args.server_identity)
            print(f"Successfully sent request to unban all proxies for '{args.server_identity}'.")
        else:
            print("Unbanning all proxies for ALL servers...")
            all_statuses = client.getProxyStatus(None)
            if not all_statuses:
                print("No proxies found for any server. Nothing to unban.")
                return 0
            all_server_identities = sorted(list(set(s.serverIdentity for s in all_statuses)))
            print(f"Found {len(all_server_identities)} server identities. Sending unban request for each...")
            success_count, fail_count = 0, 0
            for identity in all_server_identities:
                try:
                    client.resetAllProxyStatuses(identity)
                    print(f" - Sent unban_all for '{identity}'.")
                    success_count += 1
                except Exception as e:
                    # Keep going: one failing identity must not block the rest.
                    logger.error(f" - Failed to unban all proxies for '{identity}': {e}")
                    fail_count += 1
            print(f"\nSuccessfully sent unban_all requests for {success_count} server identities.")
            if fail_count > 0:
                print(f"Failed to send unban_all requests for {fail_count} server identities. See logs for details.")
        return 0
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to unban all proxies: {e.message}")
        return 1
    finally:
        if transport and transport.isOpen():
            transport.close()
def main_ban_account(args):
    """Entry point for 'ban-account': ban a single account on the server.

    The ban reason records the invoking OS user when available.
    Returns 0 on success, 1 on a Thrift error.
    """
    client, transport = None, None
    try:
        client, transport = get_thrift_client(args.host, args.port)
        operator = os.getlogin() if hasattr(os, 'getlogin') else 'unknown_user'
        client.banAccount(accountId=args.account_id, reason=f"Manual ban from yt-ops-client by {operator}")
        print(f"Successfully sent request to ban account '{args.account_id}'.")
        return 0
    except (PBServiceException, PBUserException) as exc:
        logger.error(f"Failed to ban account: {exc.message}")
        return 1
    finally:
        # Always release the Thrift transport, even on error paths.
        if transport and transport.isOpen():
            transport.close()
def main_unban_account(args):
    """Main logic for the 'unban-account' command.

    Unbans the account via Thrift and then writes the account's current
    success count to 'success_count_at_activation' in Redis, so post-unban
    success accounting starts from the current value. Redis is therefore
    mandatory. Returns 0 on success, 1 on any failure.
    """
    client, transport = None, None
    redis_client = _get_redis_client(args.redis_host, args.redis_port, args.redis_password)
    if not redis_client:
        logger.error("Redis connection is required to correctly unban an account (to reset success_count_at_activation).")
        return 1
    try:
        client, transport = get_thrift_client(args.host, args.port)
        reason = f"Manual un-ban from yt-ops-client by {os.getlogin() if hasattr(os, 'getlogin') else 'unknown_user'}"
        # Fetch the current status first so we can snapshot successCount before unbanning.
        statuses = client.getAccountStatus(accountId=args.account_id, accountPrefix=None)
        if not statuses:
            logger.error(f"Account '{args.account_id}' not found.")
            return 1
        current_success_count = statuses[0].successCount or 0
        client.unbanAccount(accountId=args.account_id, reason=reason)
        print(f"Successfully sent request to unban account '{args.account_id}'.")
        redis_client.hset(f"account_status:{args.account_id}", "success_count_at_activation", current_success_count)
        print(f"Set 'success_count_at_activation' for '{args.account_id}' to {current_success_count}.")
        return 0
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to unban account: {e.message}")
        return 1
    finally:
        if transport and transport.isOpen():
            transport.close()
def main_unban_all_accounts(args):
    """Main logic for the 'unban-all-accounts' command.

    Unbans every account matching the optional prefix (args.account_id),
    updating each account's 'success_count_at_activation' in Redis as
    main_unban_account does. Continues past per-account failures.
    Returns 0 on completion, 1 on missing Redis or a top-level Thrift error.
    """
    client, transport = None, None
    redis_client = _get_redis_client(args.redis_host, args.redis_port, args.redis_password)
    if not redis_client:
        logger.error("Redis connection is required to correctly unban accounts.")
        return 1
    try:
        client, transport = get_thrift_client(args.host, args.port)
        account_prefix = args.account_id # can be prefix
        print(f"Unbanning all accounts (prefix: '{account_prefix or 'ALL'}')...")
        all_statuses = client.getAccountStatus(accountId=None, accountPrefix=account_prefix)
        if not all_statuses:
            print(f"No accounts found with prefix '{account_prefix or 'ALL'}' to unban.")
            return 0
        accounts_to_unban = [s.accountId for s in all_statuses]
        # Map for looking up each account's pre-unban successCount snapshot.
        account_map = {s.accountId: s for s in all_statuses}
        print(f"Found {len(accounts_to_unban)} accounts. Sending unban request for each...")
        unban_count, fail_count = 0, 0
        reason = f"Manual unban_all from yt-ops-client by {os.getlogin() if hasattr(os, 'getlogin') else 'unknown_user'}"
        for acc_id in accounts_to_unban:
            try:
                client.unbanAccount(accountId=acc_id, reason=reason)
                current_success_count = account_map[acc_id].successCount or 0
                redis_client.hset(f"account_status:{acc_id}", "success_count_at_activation", current_success_count)
                unban_count += 1
            except Exception as e:
                # Keep going: one failing account must not block the rest.
                logger.error(f" - Failed to unban account '{acc_id}': {e}")
                fail_count += 1
        print(f"\nSuccessfully sent unban requests for {unban_count} accounts.")
        if fail_count > 0:
            print(f"Failed to send unban requests for {fail_count} accounts. See logs for details.")
        return 0
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Failed to unban all accounts: {e.message}")
        return 1
    finally:
        if transport and transport.isOpen():
            transport.close()
def main_delete(args):
    """Main logic for 'delete' commands.

    Dispatches on args.delete_entity ('proxies', 'accounts', 'client-stats',
    'activity-counters'). Thrift-backed deletions go through the server;
    'client-stats' and 'activity-counters' are deleted directly in Redis.
    Requires --yes as a destructive-action guard. Returns 0 on success,
    1 on refusal/missing Redis/Thrift error.
    """
    if not args.yes:
        print("This is a destructive action. Use --yes to confirm.", file=sys.stderr)
        return 1
    client, transport = None, None
    try:
        # For Redis-only actions, we don't need a Thrift client.
        if args.delete_entity not in ['client-stats', 'activity-counters']:
            client, transport = get_thrift_client(args.host, args.port)
        if args.delete_entity == 'proxies':
            if args.proxy_url and args.server_identity:
                # Delete a single (proxy, server) pair.
                print(f"Deleting proxy '{args.proxy_url}' for server '{args.server_identity}'...")
                result = client.deleteProxyFromRedis(args.proxy_url, args.server_identity)
                if result:
                    print(f"Successfully deleted proxy '{args.proxy_url}' for server '{args.server_identity}' from Redis.")
                else:
                    print(f"Failed to delete proxy '{args.proxy_url}'. It may not have existed.")
            else:
                # Bulk delete, optionally scoped to one server identity.
                print(f"Deleting all proxies from Redis (server filter: {args.server_identity or 'ALL'})...")
                result = client.deleteAllProxiesFromRedis(args.server_identity)
                print(f"Successfully deleted {result} proxy key(s) from Redis.")
        elif args.delete_entity == 'accounts':
            if args.account_id:
                if args.prefix:
                    # --prefix: treat --account-id as a prefix for bulk deletion.
                    print(f"Deleting accounts with prefix '{args.account_id}' from Redis...")
                    result = client.deleteAllAccountsFromRedis(args.account_id)
                    print(f"Successfully deleted {result} account(s) with prefix '{args.account_id}' from Redis.")
                else:
                    print(f"Deleting account '{args.account_id}' from Redis...")
                    result = client.deleteAccountFromRedis(args.account_id)
                    if result:
                        print(f"Successfully deleted account '{args.account_id}' from Redis.")
                    else:
                        print(f"Failed to delete account '{args.account_id}'. It may not have existed.")
            else: # Delete all
                print("Deleting ALL accounts from Redis...")
                result = client.deleteAllAccountsFromRedis(None)
                print(f"Successfully deleted {result} account(s) from Redis.")
        elif args.delete_entity == 'client-stats':
            redis_client = _get_redis_client(args.redis_host, args.redis_port, args.redis_password)
            if not redis_client:
                logger.error("Redis connection is required to delete client stats.")
                return 1
            print("Deleting all client stats from Redis...")
            result = redis_client.delete("client_stats")
            if result > 0:
                print("Successfully deleted 'client_stats' key from Redis.")
            else:
                print("Key 'client_stats' not found in Redis. Nothing to delete.")
        elif args.delete_entity == 'activity-counters':
            redis_client = _get_redis_client(args.redis_host, args.redis_port, args.redis_password)
            if not redis_client:
                logger.error("Redis connection is required to delete activity counters.")
                return 1
            print("Deleting all activity counters from Redis...")
            deleted_count = 0
            for pattern in ["activity:per_proxy:*", "activity:per_account:*"]:
                # Delete in chunks of 500 to avoid one huge DEL command.
                keys_to_delete_chunk = []
                for key in redis_client.scan_iter(pattern):
                    keys_to_delete_chunk.append(key)
                    if len(keys_to_delete_chunk) >= 500:
                        deleted_count += redis_client.delete(*keys_to_delete_chunk)
                        keys_to_delete_chunk = []
                # Flush any remaining keys from the final partial chunk.
                if keys_to_delete_chunk:
                    deleted_count += redis_client.delete(*keys_to_delete_chunk)
            if deleted_count > 0:
                print(f"Successfully deleted {deleted_count} activity counter keys from Redis.")
            else:
                print("No activity counter keys found to delete.")
        return 0
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Thrift error performing delete action: {e.message}")
        return 1
    finally:
        if transport and transport.isOpen():
            transport.close()
def add_manage_parser(subparsers):
    """Add the parser for the 'manage' command.

    Registers the 'manage' subparser and all of its sub-commands (status,
    activity-counters, proxy/account ban management, destructive deletes).
    Shared connection flags live on a non-help parent parser so every
    sub-command inherits them.

    Args:
        subparsers: The argparse subparsers object to register into.

    Returns:
        The configured 'manage' ArgumentParser.
    """
    parser = subparsers.add_parser(
        'manage',
        description='Manage the ytdlp-ops-server.',
        formatter_class=argparse.RawTextHelpFormatter,
        help='Manage the ytdlp-ops-server.'
    )
    # Common arguments for all manage subcommands
    common_parser = argparse.ArgumentParser(add_help=False)
    common_parser.add_argument('--env-file', help='Path to a .env file to load environment variables from.')
    common_parser.add_argument('--host', default=None, help="Thrift management server host. Defaults to MASTER_HOST_IP env var or 127.0.0.1.")
    common_parser.add_argument('--port', type=int, default=9090, help='Thrift management server port.')
    common_parser.add_argument('--redis-host', default=None, help='Redis host for client stats. Defaults to REDIS_HOST env var.')
    common_parser.add_argument('--redis-port', type=int, default=None, help='Redis port. Defaults to REDIS_PORT env var or 6379.')
    common_parser.add_argument('--redis-password', default=None, help='Redis password. Defaults to REDIS_PASSWORD env var.')
    common_parser.add_argument('--verbose', action='store_true', help='Enable verbose output.')
    common_parser.add_argument('--env', default=None, help="Default environment name for Redis key prefix. Used if --auth-env or --download-env are not specified.")
    common_parser.add_argument('--auth-env', help="Environment for the Auth simulation to enrich status from.")
    common_parser.add_argument('--download-env', help="Environment for the Download simulation to enrich status from.")
    common_parser.add_argument('--key-prefix', default=None, help='Explicit key prefix for Redis. Overrides --env and any defaults.')
    manage_subparsers = parser.add_subparsers(dest='manage_command', help='Available management commands', required=True)
    # --- Status and Listing Commands ---
    status_parser = manage_subparsers.add_parser('status', help='View system status.', parents=[common_parser])
    status_parser.add_argument('--server-identity', help='Filter status for a specific server identity.')
    # nargs='?' with const=5 means a bare --watch refreshes every 5 seconds.
    status_parser.add_argument('--watch', type=int, nargs='?', const=5, help='Periodically refresh status every N seconds. Default: 5.')
    activity_parser = manage_subparsers.add_parser('activity-counters', help='View current activity rates for proxies and accounts.', parents=[common_parser])
    # --- Proxy Management Commands ---
    ban_proxy_parser = manage_subparsers.add_parser('ban-proxy', help='Ban a proxy.', parents=[common_parser])
    ban_proxy_parser.add_argument('proxy_url', help='The full proxy URL to ban (e.g., "socks5://host:port").')
    ban_proxy_parser.add_argument('--server-identity', required=True, help='The server identity for which to ban the proxy.')
    unban_proxy_parser = manage_subparsers.add_parser('unban-proxy', help='Unban a proxy.', parents=[common_parser])
    unban_proxy_parser.add_argument('proxy_url', help='The full proxy URL to unban.')
    unban_proxy_parser.add_argument('--server-identity', required=True, help='The server identity for which to unban the proxy.')
    ban_all_proxies_parser = manage_subparsers.add_parser('ban-all-proxies', help='Ban all proxies for one or all servers.', parents=[common_parser])
    ban_all_proxies_parser.add_argument('--server-identity', help='Optional server identity to ban all proxies for. If omitted, bans for all servers.')
    unban_all_proxies_parser = manage_subparsers.add_parser('unban-all-proxies', help='Unban all proxies for one or all servers.', parents=[common_parser])
    unban_all_proxies_parser.add_argument('--server-identity', help='Optional server identity to unban all proxies for. If omitted, unbans for all servers.')
    # --- Account Management Commands ---
    ban_account_parser = manage_subparsers.add_parser('ban-account', help='Ban an account.', parents=[common_parser])
    ban_account_parser.add_argument('account_id', help='The account ID to ban.')
    unban_account_parser = manage_subparsers.add_parser('unban-account', help='Unban an account.', parents=[common_parser])
    unban_account_parser.add_argument('account_id', help='The account ID to unban.')
    unban_all_accounts_parser = manage_subparsers.add_parser('unban-all-accounts', help='Unban all accounts, optionally filtered by a prefix.', parents=[common_parser])
    unban_all_accounts_parser.add_argument('account_id', nargs='?', help='Optional account prefix to filter which accounts to unban.')
    # --- Destructive Delete Commands ---
    delete_parser = manage_subparsers.add_parser('delete', help='(Destructive) Delete entities from Redis.')
    delete_subparsers = delete_parser.add_subparsers(dest='delete_entity', help='Entity to delete', required=True)
    # Create a parent for the confirmation flag, so it can be used on sub-subcommands
    confirm_parser = argparse.ArgumentParser(add_help=False)
    confirm_parser.add_argument('--yes', action='store_true', help='Confirm the destructive action.')
    delete_proxies_parser = delete_subparsers.add_parser('proxies', help='Delete one or all proxies from Redis.', parents=[common_parser, confirm_parser])
    delete_proxies_parser.add_argument('--proxy-url', help='The proxy URL to delete.')
    delete_proxies_parser.add_argument('--server-identity', help='The server identity of the proxy to delete. Required if --proxy-url is given. If omitted, deletes all proxies for all servers.')
    delete_accounts_parser = delete_subparsers.add_parser('accounts', help='Delete one or all accounts from Redis.', parents=[common_parser, confirm_parser])
    delete_accounts_parser.add_argument('--account-id', help='The account ID to delete. If --prefix is used, this is treated as a prefix.')
    delete_accounts_parser.add_argument('--prefix', action='store_true', help='Treat --account-id as a prefix to delete multiple accounts.')
    delete_client_stats_parser = delete_subparsers.add_parser('client-stats', help='Delete all client stats from Redis.', parents=[common_parser, confirm_parser])
    delete_activity_counters_parser = delete_subparsers.add_parser('activity-counters', help='Delete all activity counter stats from Redis.', description='Deletes all activity counter stats (keys matching "activity:*") from Redis. This does NOT delete account or proxy statuses.', parents=[common_parser, confirm_parser])
    return parser
def main_manage(args):
    """Main dispatcher for the 'manage' command.

    Loads .env configuration (when python-dotenv is available), resolves
    connection defaults from the environment, logs the effective settings,
    and routes to the handler for the selected sub-command.

    Args:
        args: Parsed argparse namespace produced by add_manage_parser.

    Returns:
        The handler's exit code (0 on success), or 1 on setup failure.
    """
    if load_dotenv:
        # load_dotenv() with no args will search for a .env file.
        # If args.env_file is provided, it will use that specific file.
        was_loaded = load_dotenv(args.env_file)
        if was_loaded:
            logger.info(f"Loaded environment variables from {args.env_file or '.env file'}")
        elif args.env_file:
            # If a specific file was requested but not found, it's an error.
            logger.error(f"The specified --env-file was not found: {args.env_file}")
            return 1
    elif args.env_file:
        logger.warning("'python-dotenv' is not installed, but --env-file was provided. Please install it with: pip install python-dotenv")
    else:
        logger.info("'python-dotenv' not installed. Relying on shell environment variables and command-line arguments.")
    # Set defaults for args that were not provided, now that .env is loaded.
    if args.host is None:
        args.host = os.getenv('MASTER_HOST_IP', '127.0.0.1')
    if args.redis_host is None:
        # Default REDIS_HOST to the management host if not explicitly set
        args.redis_host = os.getenv('REDIS_HOST', args.host)
    if args.redis_port is None:
        args.redis_port = int(os.getenv('REDIS_PORT', 6379))
    if args.redis_password is None:
        args.redis_password = os.getenv('REDIS_PASSWORD')
    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)
    # Log the effective connection parameters being used.
    logger.info(f"Using management host: {args.host}:{args.port}")
    if args.redis_host:
        redis_password_status = "provided" if args.redis_password else "not provided"
        logger.info(f"Using Redis host: {args.redis_host}:{args.redis_port} (password: {redis_password_status})")
    else:
        logger.warning("Redis host not configured (via --redis-host or REDIS_HOST env var). Redis-dependent features will be unavailable.")
    # Route to the sub-command handler via a dispatch table instead of a long
    # if/elif chain; argparse guarantees manage_command is one of these names.
    handlers = {
        'status': main_status,
        'activity-counters': main_activity_counters,
        'ban-proxy': main_ban_proxy,
        'unban-proxy': main_unban_proxy,
        'ban-all-proxies': main_ban_all_proxies,
        'unban-all-proxies': main_unban_all_proxies,
        'ban-account': main_ban_account,
        'unban-account': main_unban_account,
        'unban-all-accounts': main_unban_all_accounts,
        'delete': main_delete,
    }
    handler = handlers.get(args.manage_command)
    if handler is None:
        return 1  # Should not be reached: argparse rejects unknown commands.
    return handler(args)