yt-dlp-dags/airflow/dags/ytdlp_mgmt_proxy_account.py

679 lines
31 KiB
Python

"""
DAG to manage the state of proxies and accounts used by the ytdlp-ops-server.
"""
from __future__ import annotations
# --- Add project root to path to allow for yt-ops-client imports ---
import sys
# The yt-ops-client package is installed in editable mode in /app
if '/app' not in sys.path:
sys.path.insert(0, '/app')
import logging
import json
import re
import time
from datetime import datetime
import socket
from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.dagbag import DagBag
from airflow.models.dagrun import DagRun
from airflow.models.param import Param
from airflow.models.taskinstance import TaskInstance
from airflow.operators.python import PythonOperator
from airflow.decorators import task
from airflow.utils.dates import days_ago
from airflow.models.variable import Variable
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.session import create_session
# Configure logging
logger = logging.getLogger(__name__)
# Import and apply Thrift exceptions patch for Airflow compatibility
try:
from thrift_exceptions_patch import patch_thrift_exceptions
patch_thrift_exceptions()
logger.info("Applied Thrift exceptions patch for Airflow compatibility.")
except ImportError:
logger.warning("Could not import thrift_exceptions_patch. Compatibility may be affected.")
except Exception as e:
logger.error(f"Error applying Thrift exceptions patch: {e}")
# Thrift imports (kept for DEPRECATED proxy management)
try:
from ytops_client.profile_manager_tool import ProfileManager, format_duration, format_timestamp
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
from yt_ops_services.client_utils import get_thrift_client
except ImportError as e:
logger.critical(f"Could not import project modules: {e}. Ensure yt-ops-client and services are installed correctly.")
# Fail DAG parsing if thrift modules are not available
raise
DEFAULT_MANAGEMENT_SERVICE_IP = Variable.get("MANAGEMENT_SERVICE_HOST", default_var="172.17.0.1")
DEFAULT_MANAGEMENT_SERVICE_PORT = Variable.get("MANAGEMENT_SERVICE_PORT", default_var=9080)
DEFAULT_REDIS_CONN_ID = "redis_default"
# Version tracking for debugging
DAG_VERSION = "1.7.1" # Updated to handle Redis configuration errors
# Helper function to connect to Redis, similar to other DAGs
def _get_redis_client(redis_conn_id: str):
    """Return a redis.Redis client built from an Airflow connection.

    Args:
        redis_conn_id: Airflow connection ID pointing at the Redis server.

    Returns:
        A connected ``redis.Redis`` client (as produced by ``RedisHook.get_conn``).

    Raises:
        AirflowException: if the hook cannot be built or the connection fails.
    """
    try:
        # RedisHook.get_conn() returns a redis.Redis client.
        return RedisHook(redis_conn_id=redis_conn_id).get_conn()
    except Exception as e:
        logger.error(f"Failed to connect to Redis using connection '{redis_conn_id}': {e}")
        # Chain the original exception so the root cause survives in the traceback.
        raise AirflowException(f"Redis connection failed: {e}") from e
def _list_proxy_statuses(client, server_identity):
    """Print a grid table of proxy statuses fetched from the Thrift service.

    DEPRECATED path: proxy state is owned by the standalone policy-enforcer;
    this remains for legacy inspection only.
    """
    logger.info(f"Listing proxy statuses for server: {server_identity or 'ALL'}")
    logger.warning("DEPRECATED: Proxy management is now handled by the standalone policy-enforcer.")
    logger.info("NOTE: Proxy statuses are read from server's internal state via Thrift service")
    try:
        statuses = client.getProxyStatus(server_identity)
    except PBServiceException as e:
        if "Redis is not configured for this server" not in e.message:
            # Unrelated service error -- propagate to the caller.
            raise
        logger.error(f"Redis not configured on server: {e.message}")
        print(f"\nERROR: Server configuration issue - {e.message}\n")
        print("This server does not have Redis configured for proxy management.\n")
        return
    except Exception as e:
        logger.error(f"Unexpected error getting proxy statuses: {e}", exc_info=True)
        print(f"\nERROR: Unexpected error getting proxy statuses: {e}\n")
        return
    if not statuses:
        logger.info("No proxy statuses found.")
        return
    from tabulate import tabulate
    # Forward-compatible: only surface the extra columns when the server
    # actually returns them.
    has_extended_info = hasattr(statuses[0], 'recentAccounts') or hasattr(statuses[0], 'recentMachines')
    headers = ["Server", "Proxy URL", "Status", "Success", "Failures", "Last Success", "Last Failure"]
    if has_extended_info:
        headers.extend(["Recent Accounts", "Recent Machines"])
    rows = []
    for status in statuses:
        row = {
            "Server": status.serverIdentity,
            "Proxy URL": status.proxyUrl,
            "Status": status.status,
            "Success": status.successCount,
            "Failures": status.failureCount,
            "Last Success": format_timestamp(status.lastSuccessTimestamp),
            "Last Failure": format_timestamp(status.lastFailureTimestamp),
        }
        if has_extended_info:
            accounts = getattr(status, 'recentAccounts', [])
            machines = getattr(status, 'recentMachines', [])
            row["Recent Accounts"] = "\n".join(accounts) if accounts else "N/A"
            row["Recent Machines"] = "\n".join(machines) if machines else "N/A"
        rows.append(row)
    print("\n--- Proxy Statuses ---")
    # Leading newline keeps the table aligned in the task log output.
    print(f"\n{tabulate(rows, headers='keys', tablefmt='grid')}")
    print("----------------------\n")
    if not has_extended_info:
        logger.warning("Server does not seem to support 'recentAccounts' or 'recentMachines' fields yet.")
        print("NOTE: To see Recent Accounts/Machines, the server's `getProxyStatus` method must be updated to return these fields.")
def _list_account_statuses(pm: ProfileManager, account_id_prefix: str | None):
    """Print a grid table of v2 profile states read from Redis via ProfileManager."""
    logger.info(f"Listing v2 profile statuses from Redis for prefix: {account_id_prefix or 'ALL'}")
    try:
        profiles = pm.list_profiles()
        if not profiles:
            print("\n--- V2 Profile Statuses ---\nNo profiles found.\n---------------------------\n")
            return
        from tabulate import tabulate
        rows = []
        now = time.time()
        for profile in profiles:
            # Honour the optional name-prefix filter.
            if account_id_prefix and not profile['name'].startswith(account_id_prefix):
                continue
            state_label = profile.get('state', 'UNKNOWN')
            # Annotate timed states with the remaining duration, if any.
            if state_label == 'RESTING':
                rest_until = profile.get('rest_until', 0)
                if rest_until > now:
                    state_label += f" ({format_duration(rest_until - now)} left)"
            elif state_label == 'COOLDOWN':
                cooldown_until = profile.get('cooldown_until', 0)
                if cooldown_until > now:
                    state_label += f" ({format_duration(cooldown_until - now)} left)"
            rows.append({
                "Name": profile.get('name'),
                "Status": state_label,
                "Proxy": profile.get('proxy', 'N/A'),
                "Success": profile.get('success', 0),
                "Failures": profile.get('failure', 0),
                "Last Activity": format_timestamp(profile.get('last_activity_ts', 0)),
                "Owner": profile.get('owner', 'None'),
                "Lock Time": format_duration(now - profile.get('lock_ts', 0)) if profile.get('state') == 'LOCKED' else 'N/A',
            })
        rows.sort(key=lambda item: item.get('Name', ''))
        print("\n--- V2 Profile Statuses ---")
        print(f"\n{tabulate(rows, headers='keys', tablefmt='grid')}")
        print("---------------------------\n")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting v2 profile statuses: {e}", exc_info=True)
        print(f"\nERROR: An unexpected error occurred: {e}\n")
def _list_client_statuses(redis_conn_id):
    """Print per-client success/failure statistics from the Redis 'client_stats' hash."""
    logger.info("Listing client statuses from Redis key 'client_stats'")
    try:
        redis_client = _get_redis_client(redis_conn_id)
        all_stats_raw = redis_client.hgetall("client_stats")
        if not all_stats_raw:
            print("\n--- Client Statuses ---\nNo client stats found in Redis.\n-----------------------\n")
            return
        from tabulate import tabulate

        def describe_event(data):
            # Summarise a latest-success/failure record as a short multi-line cell.
            if not data:
                return "N/A"
            ts = format_timestamp(data.get('timestamp'))
            url = data.get('url') or 'N/A'
            machine = data.get('machine_id', 'N/A')
            match = re.search(r'v=([a-zA-Z0-9_-]{11})', url)
            video_id = match.group(1) if match else 'N/A'
            return f"{ts}\nMachine: {machine}\nVideo ID: {video_id}"

        rows = []
        for name_bytes, payload_bytes in all_stats_raw.items():
            client_name = name_bytes.decode('utf-8')
            try:
                stats = json.loads(payload_bytes.decode('utf-8'))
                rows.append({
                    "Client": client_name,
                    "Success": stats.get('success_count', 0),
                    "Failures": stats.get('failure_count', 0),
                    "Last Success": describe_event(stats.get('latest_success')),
                    "Last Failure": describe_event(stats.get('latest_failure')),
                })
            except (json.JSONDecodeError, AttributeError) as e:
                # Keep the row visible so a corrupt entry is not silently hidden.
                logger.error(f"Could not parse stats for client '{client_name}': {e}")
                rows.append({
                    "Client": client_name, "Success": "ERROR", "Failures": "ERROR",
                    "Last Success": "Could not parse data", "Last Failure": "Could not parse data"
                })
        rows.sort(key=lambda item: item.get('Client', ''))
        print("\n--- Client Statuses ---")
        print(f"\n{tabulate(rows, headers='keys', tablefmt='grid')}")
        print("-----------------------\n")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting client statuses: {e}", exc_info=True)
        print(f"\nERROR: An unexpected error occurred: {e}\n")
def _list_activity_counters(redis_conn_id: str):
    """Print per-proxy and per-account activity rates (1m/5m/1h windows) from Redis."""
    logger.info("Listing activity counters from Redis keys 'activity:per_proxy:*' and 'activity:per_account:*'")
    try:
        redis_client = _get_redis_client(redis_conn_id)
        from tabulate import tabulate
        now = time.time()

        def render_counters(pattern, entity_name):
            # Scan the sorted-set keys matching the pattern and tabulate counts.
            rows = []
            for key_bytes in redis_client.scan_iter(pattern):
                key = key_bytes.decode('utf-8')
                entity_id = key.split(':', 2)[-1]
                # Trim entries older than ~1h (60s slack) so the set stays bounded.
                redis_client.zremrangebyscore(key, '-inf', now - 3660)
                counts = {
                    "Activity (Last 1m)": redis_client.zcount(key, now - 60, now),
                    "Activity (Last 5m)": redis_client.zcount(key, now - 300, now),
                    "Activity (Last 1h)": redis_client.zcount(key, now - 3600, now),
                }
                if counts["Activity (Last 1h)"] == 0:
                    # Hide entities with no activity in the last hour.
                    continue
                rows.append({entity_name: entity_id, **counts})
            rows.sort(key=lambda item: item.get(entity_name, ''))
            print(f"\n--- {entity_name} Activity Counters ---")
            if not rows:
                print(f"No recent activity found for {entity_name.lower()}s.")
            else:
                print(f"\n{tabulate(rows, headers='keys', tablefmt='grid')}")
            print("-----------------------------------\n")

        render_counters("activity:per_proxy:*", "Proxy URL")
        render_counters("activity:per_account:*", "Account ID")
    except Exception as e:
        logger.error(f"An unexpected error occurred while getting activity counters: {e}", exc_info=True)
        print(f"\nERROR: An unexpected error occurred: {e}\n")
def _create_profiles_from_json(**context):
    """Create v2 profiles by shelling out to `ytops-client setup-profiles`.

    Reads the `create_profiles_json` DAG param, converts it to YAML (the format
    the setup tool consumes), writes it to a temp policy file, and runs the CLI.

    Raises:
        AirflowException: empty/invalid payload, or a non-zero CLI exit code.
    """
    import subprocess
    import tempfile
    import yaml
    params = context['params']
    raw_payload = params.get('create_profiles_json')
    if not raw_payload:
        raise AirflowException("Parameter 'create_profiles_json' is empty.")
    try:
        # The UI accepts JSON but the setup tool uses YAML, so parse and dump.
        # The json.loads round-trip also validates the payload.
        payload = json.loads(raw_payload)
        yaml_payload = yaml.dump(payload)
    except (json.JSONDecodeError, yaml.YAMLError) as e:
        raise AirflowException(f"Invalid JSON/YAML in 'create_profiles_json': {e}")
    with tempfile.NamedTemporaryFile(mode='w+', delete=True, suffix='.yaml', prefix='airflow-profile-setup-') as policy_file:
        policy_file.write(yaml_payload)
        policy_file.flush()
        logger.info(f"Created temporary policy file for profile setup: {policy_file.name}")
        cmd = [
            'ytops-client', 'setup-profiles',
            '--policy', policy_file.name,
        ]
        # Pass through Redis connection params if provided
        if params.get('redis_conn_id') != DEFAULT_REDIS_CONN_ID:
            logger.warning("Custom Redis connection is not supported for `create_profiles` yet. It will use the default from .env or localhost.")
        logger.info(f"Running command: {' '.join(cmd)}")
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
        if result.stdout:
            print(f"\n--- yt-ops-client setup-profiles STDOUT ---\n{result.stdout}\n----------------------------------------\n")
        if result.stderr:
            print(f"\n--- yt-ops-client setup-profiles STDERR ---\n{result.stderr}\n----------------------------------------\n")
        if result.returncode != 0:
            raise AirflowException(f"Profile creation failed with exit code {result.returncode}.")
def manage_system_callable(**context):
    """Main callable to interact with the system management endpoints.

    Dispatches on the DAG params ``entity`` and ``action``:
      - ``account``: v2 profile actions performed directly in Redis via
        ``ProfileManager`` (requires ``redis_env``).
      - ``proxy``: DEPRECATED legacy proxy actions over the Thrift service.
      - ``client``: client statistics stored in the Redis ``client_stats`` hash.
      - ``activity_counters``: read-only activity rates from Redis.
      - ``accounts_and_proxies``: DEPRECATED combined status listing.

    Raises:
        ValueError: invalid entity/action combination or missing parameter.
        AirflowException: if the ProfileManager (Redis) cannot be initialized.
    """
    # Log version for debugging
    logger.info(f"Running ytdlp_mgmt_proxy_account DAG version {DAG_VERSION}")
    params = context["params"]
    entity = params["entity"]
    action = params["action"]
    # For Thrift actions, use the new management host/port
    if entity not in ["activity_counters", "account"]:
        host = params["management_host"]
        port = params["management_port"]
    else:
        host, port = None, None  # Not needed for meta actions
    server_identity = params.get("server_identity")
    proxy_url = params.get("proxy_url")
    account_id = params.get("account_id")  # Used as prefix for v2 profiles
    redis_env = params.get("redis_env")
    # --- Validate Action/Entity Combination and Parameters ---
    valid_actions = {
        "proxy": ["list_with_status", "ban", "unban", "ban_all", "unban_all", "delete_from_redis"],
        "account": ["list_with_status", "create_profiles", "ban", "unban", "activate", "pause", "delete", "delete_all"],
        "client": ["list_with_status", "delete_from_redis"],
        "accounts_and_proxies": ["list_with_status", "ban", "unban", "ban_all", "unban_all", "delete_from_redis"],
        "activity_counters": ["list_with_status"],
    }
    if action not in valid_actions.get(entity, []):
        raise ValueError(
            f"The action '{action}' is not valid for entity '{entity}'.\n"
            f"Valid actions for '{entity}' are: {', '.join(valid_actions.get(entity, ['None']))}."
        )
    # Validate required parameters for the chosen action
    if entity == "proxy":
        if action in ["ban", "unban"] and not server_identity:
            raise ValueError(f"A 'server_identity' is required for proxy action '{action}'.")
        if action in ["ban", "unban"] and not proxy_url:
            raise ValueError(f"A 'proxy_url' is required for proxy action '{action}'.")
    if entity == "account":
        if action in ["ban", "unban", "pause", "activate", "delete"] and not account_id:
            raise ValueError(f"An 'account_id' (profile name) is required for account action '{action}'.")
    # --- ProfileManager setup for v2 account actions ---
    pm = None
    if entity == "account":
        try:
            redis_hook = RedisHook(redis_conn_id=params["redis_conn_id"])
            if redis_env:
                # The key prefix isolates each environment's profile set in Redis.
                key_prefix = f"{redis_env}_profile_mgmt_"
            else:
                raise ValueError("A 'redis_env' (e.g., 'sim_auth') must be provided for v2 profile actions.")
            pm = ProfileManager(redis_hook=redis_hook, key_prefix=key_prefix)
            logger.info(f"Initialized ProfileManager for env '{redis_env}' (Redis key prefix: '{key_prefix}')")
        except Exception as e:
            raise AirflowException(f"Failed to initialize ProfileManager: {e}")
    # --- Handle Activity Counter action ---
    if entity == "activity_counters":
        if action == "list_with_status":
            _list_activity_counters(params["redis_conn_id"])
            return  # End execution
        else:
            raise ValueError(f"Action '{action}' is not valid for entity 'activity_counters'. Only 'list_with_status' is supported.")
    # Handle direct Redis deletion actions
    if action == "delete_from_redis":
        if entity == "client":
            logger.info("Deleting all client stats from Redis...")
            redis_client = _get_redis_client(params["redis_conn_id"])
            result = redis_client.delete("client_stats")
            if result > 0:
                print(f"\nSuccessfully deleted 'client_stats' key from Redis.\n")
            else:
                print(f"\nKey 'client_stats' not found in Redis. Nothing to delete.\n")
            return
        # All other delete actions are handled by Thrift for now.
        client, transport = None, None
        try:
            client, transport = get_thrift_client(host, port)
            if entity == "proxy":
                logger.warning("DEPRECATED: Proxy management is now handled by the standalone policy-enforcer.")
                proxy_url = params.get("proxy_url")
                server_identity = params.get("server_identity")
                if proxy_url and server_identity:
                    logger.info(f"Deleting proxy '{proxy_url}' for server '{server_identity}' from Redis via Thrift service...")
                    result = client.deleteProxyFromRedis(proxy_url, server_identity)
                    if result:
                        print(f"\nSuccessfully deleted proxy '{proxy_url}' for server '{server_identity}' from Redis.\n")
                    else:
                        print(f"\nFailed to delete proxy '{proxy_url}' for server '{server_identity}' from Redis.\n")
                else:
                    # No specific proxy given: delete all proxies, optionally
                    # scoped to one server identity.
                    logger.info("Deleting all proxies from Redis via Thrift service...")
                    result = client.deleteAllProxiesFromRedis(server_identity)
                    if server_identity:
                        print(f"\nSuccessfully deleted all proxies for server '{server_identity}' from Redis. Count: {result}\n")
                    else:
                        print(f"\nSuccessfully deleted all proxies from Redis across ALL servers. Count: {result}\n")
        except (PBServiceException, PBUserException) as e:
            logger.error(f"Thrift error performing delete action: {e.message}", exc_info=True)
            print(f"\nERROR: Thrift service error: {e.message}\n")
            raise
        except Exception as e:
            logger.error(f"Error performing delete action: {e}", exc_info=True)
            print(f"\nERROR: An unexpected error occurred: {e}\n")
            raise
        finally:
            if transport and transport.isOpen():
                transport.close()
                logger.info("Thrift connection closed.")
        return
    # --- Main Action Handler ---
    client, transport = None, None
    try:
        # Connect to Thrift only if needed
        if entity == "proxy":
            client, transport = get_thrift_client(host, port)
        if entity == "client":
            if action == "list_with_status":
                _list_client_statuses(params["redis_conn_id"])
        elif entity == "proxy":
            logger.warning("DEPRECATED: Proxy management is now handled by the standalone policy-enforcer. These actions are for legacy support.")
            if action == "list_with_status":
                _list_proxy_statuses(client, server_identity)
            elif action == "ban":
                if not proxy_url: raise ValueError("A 'proxy_url' is required.")
                logger.info(f"Banning proxy '{proxy_url}' for server '{server_identity}'...")
                client.banProxy(proxy_url, server_identity)
                print(f"Successfully sent request to ban proxy '{proxy_url}'.")
            elif action == "unban":
                if not proxy_url: raise ValueError("A 'proxy_url' is required.")
                logger.info(f"Unbanning proxy '{proxy_url}' for server '{server_identity}'...")
                client.unbanProxy(proxy_url, server_identity)
                print(f"Successfully sent request to unban proxy '{proxy_url}'.")
            elif action == "ban_all":
                if server_identity:
                    logger.info(f"Banning all proxies for server '{server_identity}'...")
                    client.banAllProxies(server_identity)
                    print(f"Successfully sent request to ban all proxies for '{server_identity}'.")
                else:
                    raise ValueError("A 'server_identity' is required for 'ban_all' on proxies.")
            elif action == "unban_all":
                if server_identity:
                    logger.info(f"Unbanning all proxy statuses for server '{server_identity}'...")
                    client.resetAllProxyStatuses(server_identity)
                    print(f"Successfully sent request to unban all proxy statuses for '{server_identity}'.")
                else:
                    raise ValueError("A 'server_identity' is required for 'unban_all' on proxies.")
        elif entity == "account":
            if action == "list_with_status":
                _list_account_statuses(pm, account_id)
            elif action == "create_profiles":
                # This action is handled by a separate PythonOperator
                pass
            elif action == "ban":
                logger.info(f"Banning profile '{account_id}' in env '{redis_env}'...")
                pm.update_profile_state(account_id, "BANNED", f"Manual ban from Airflow mgmt DAG")
                print(f"Successfully set state of profile '{account_id}' to BANNED.")
            elif action == "unban" or action == "activate":
                logger.info(f"Activating profile '{account_id}' in env '{redis_env}'...")
                pm.update_profile_state(account_id, "ACTIVE", f"Manual activation from Airflow mgmt DAG")
                print(f"Successfully set state of profile '{account_id}' to ACTIVE.")
            elif action == "pause":
                logger.info(f"Pausing (resting) profile '{account_id}' in env '{redis_env}'...")
                pm.update_profile_state(account_id, "RESTING", f"Manual pause from Airflow mgmt DAG")
                print(f"Successfully set state of profile '{account_id}' to RESTING.")
            elif action == "delete":
                logger.info(f"Deleting profile '{account_id}' in env '{redis_env}'...")
                pm.delete_profile(account_id)
                print(f"Successfully deleted profile '{account_id}'.")
            elif action == "delete_all":
                logger.warning(f"DESTRUCTIVE: Deleting all profiles with prefix '{account_id}' in env '{redis_env}'...")
                profiles = pm.list_profiles()
                deleted_count = 0
                for p in profiles:
                    # An empty/None prefix deletes every profile in the environment.
                    if not account_id or p['name'].startswith(account_id):
                        pm.delete_profile(p['name'])
                        deleted_count += 1
                print(f"Successfully deleted {deleted_count} profile(s).")
        elif entity == "accounts_and_proxies":
            logger.warning("DEPRECATED: Combined 'accounts_and_proxies' actions are no longer supported in v2. Please manage accounts and proxies separately.")
            if action == "list_with_status":
                print("\n--- Listing statuses for Proxies, V2 Profiles, and Clients ---")
                # NOTE(review): `client` and `pm` are only initialized for the
                # "proxy" and "account" entities respectively, so both are None
                # here; the two listings below appear to report errors and
                # continue -- confirm whether this combined view is still needed.
                _list_proxy_statuses(client, server_identity)
                _list_account_statuses(pm, account_id)
                _list_client_statuses(params["redis_conn_id"])
        return
    except (PBServiceException, PBUserException) as e:
        logger.error(f"Thrift error performing action '{action}': {e.message}", exc_info=True)
        raise
    except NotImplementedError as e:
        logger.error(f"Feature not implemented: {e}", exc_info=True)
        raise
    except Exception as e:
        logger.error(f"Error performing action '{action}': {e}", exc_info=True)
        raise
    finally:
        # Always release the Thrift transport if one was opened.
        if transport and transport.isOpen():
            transport.close()
            logger.info("Thrift connection closed.")
with DAG(
    dag_id="ytdlp_mgmt_proxy_account",
    # Route management tasks to the dedicated management worker queue.
    default_args={"queue": "queue-mgmt"},
    start_date=days_ago(1),
    schedule=None,  # manual trigger only
    catchup=False,
    tags=["ytdlp", "mgmt", "master"],
    doc_md="""
### YT-DLP v2 Profile and System Manager
This DAG provides tools to manage the state of **v2 profiles** (formerly accounts) and other system components.
Select an `entity` and an `action` to perform.
**V2 Profile Management (`entity: account`):**
- All account/profile actions are now performed directly on Redis using the `ProfileManager`.
- A `redis_env` (e.g., `sim_auth` or `sim_download`) is **required** to target the correct set of profiles.
- Actions include `list`, `create`, `ban`, `activate`, `pause`, and `delete`.
**Legacy Proxy Management (`entity: proxy`):**
- **DEPRECATED**: Proxy state is now managed automatically by the standalone `policy-enforcer` service.
- These actions are provided for legacy support and interact with the old Thrift service. They may be removed in the future.
""",
    params={
        "management_host": Param(DEFAULT_MANAGEMENT_SERVICE_IP, type="string", title="Management Service Host (DEPRECATED)", description="The hostname or IP of the management service. Used only for legacy proxy actions."),
        "management_port": Param(DEFAULT_MANAGEMENT_SERVICE_PORT, type="integer", title="Management Service Port (DEPRECATED)", description="The port of the dedicated management service."),
        "entity": Param(
            "account",
            type="string",
            enum=["account", "proxy", "client", "activity_counters", "accounts_and_proxies"],
            description="The type of entity to manage.",
        ),
        "action": Param(
            "list_with_status",
            type="string",
            enum=["list_with_status", "create_profiles", "ban", "unban", "activate", "pause", "delete", "delete_all", "ban_all", "unban_all", "delete_from_redis"],
            description="""The management action to perform.
---
#### Actions for `entity: account` (V2 Profiles)
- `list_with_status`: View status of all profiles, optionally filtered by `account_id` as a prefix.
- `create_profiles`: Creates new profiles from a JSON payload. See `create_profiles_json` param.
- `ban`: Sets a profile's state to BANNED. Requires `account_id`.
- `unban`/`activate`: Sets a profile's state to ACTIVE. Requires `account_id`.
- `pause`: Sets a profile's state to RESTING. Requires `account_id`.
- `delete`: Deletes a single profile. Requires `account_id`.
- `delete_all`: **(Destructive)** Deletes all profiles, or those matching the `account_id` as a prefix.
#### Actions for `entity: proxy` (DEPRECATED)
- `list_with_status`, `ban`, `unban`, `ban_all`, `unban_all`, `delete_from_redis`.
#### Actions for `entity: client`
- `list_with_status`: View success/failure statistics for each client type.
- `delete_from_redis`: **(Destructive)** Deletes all client stats from Redis.
#### Actions for `entity: activity_counters`
- `list_with_status`: View current activity rates for proxies and accounts.
""",
        ),
        "redis_env": Param(
            "sim_auth",
            type="string",
            enum=["sim_auth", "sim_download"],
            title="[V2 Profiles] Redis Environment",
            description="The environment for v2 profile management (e.g., 'sim_auth'). Determines the Redis key prefix.",
        ),
        "account_id": Param(
            None,
            type=["null", "string"],
            description="For v2 profiles: The profile name (e.g., 'auth_user_0') or a prefix for `list` and `delete_all`.",
        ),
        "create_profiles_json": Param(
            """{
"auth_profile_setup": {
"env": "sim_auth",
"cleanup_before_run": false,
"pools": [
{
"prefix": "auth_user",
"proxy": "sslocal-rust-1090:1090",
"count": 2
}
]
}
}""",
            type="string",
            title="[V2 Profiles] Create Profiles JSON",
            description="For action `create_profiles`. A JSON payload defining the profiles to create. This is passed to `yt-ops-client setup-profiles`.",
            # Render as a multi-line JSON editor in the trigger-form UI.
            **{'ui_widget': 'json', 'multi_line': True}
        ),
        "server_identity": Param(
            None,
            type=["null", "string"],
            description="[DEPRECATED] The server identity for proxy management.",
        ),
        "proxy_url": Param(
            None,
            type=["null", "string"],
            description="[DEPRECATED] The proxy URL to act upon.",
        ),
        "redis_conn_id": Param(
            DEFAULT_REDIS_CONN_ID,
            type="string",
            title="Redis Connection ID",
            description="The Airflow connection ID for the Redis server.",
        ),
    },
) as dag:

    @task.branch(task_id="branch_on_action")
    def branch_on_action(**context):
        """Route the run to the profile-creation task or the generic management task."""
        action = context["params"]["action"]
        if action == "create_profiles":
            return "create_profiles_task"
        return "system_management_task"

    create_profiles_task = PythonOperator(
        task_id="create_profiles_task",
        python_callable=_create_profiles_from_json,
    )

    system_management_task = PythonOperator(
        task_id="system_management_task",
        python_callable=manage_system_callable,
    )

    # Branch first; exactly one of the two downstream tasks runs per trigger.
    branch_on_action() >> [create_profiles_task, system_management_task]