# pyGoEdge-UserPanel/core/goedge_client.py
# (Repository snapshot captured 2025-11-18 03:36:49 +08:00 — 653 lines, 30 KiB, Python.)
import base64
import datetime
import json
import logging
import os
from typing import Any, Dict, List, Optional
import requests
from urllib.parse import urlparse
from requests import RequestException
from django.utils import timezone
from .models import SystemSettings
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = (5, 15) # (connect_timeout, read_timeout)
class GoEdgeClient:
    """Thin client for the GoEdge admin API.

    Responsibilities:
    - token acquisition and caching (SystemSettings row + environment fallback)
    - website creation (HTTP reverse proxy)
    - traffic statistics (daily bandwidth peaks, access logs, ...)
    """

    def __init__(self):
        # Read configuration from the singleton SystemSettings row; fall back
        # to environment variables for credentials and a pre-issued token.
        cfg = self._get_settings()
        self.base_url: str = cfg.goedge_base_url or ''
        self.admin_access_key_id: str = cfg.admin_access_key_id or os.getenv('GOEDGE_ACCESS_KEY_ID', '')
        self.admin_access_key: str = cfg.admin_access_key or os.getenv('GOEDGE_ACCESS_KEY', '')
        self._token: str = cfg.edge_access_token or os.getenv('GOEDGE_ACCESS_TOKEN', '')
        self._token_exp: Optional[datetime.datetime] = cfg.edge_token_expires_at
        # Validate the base URL up front so misconfiguration fails fast,
        # not at the first API call.
        self._validate_base_url()
@staticmethod
def _get_settings() -> SystemSettings:
# 只取第一条设置
settings = SystemSettings.objects.order_by('id').first()
if not settings:
settings = SystemSettings.objects.create()
return settings
def _headers(self) -> Dict[str, str]:
token = self._ensure_token()
return {
'Content-Type': 'application/json',
'X-Edge-Access-Token': token,
}
def _ensure_token(self) -> str:
# 若令牌存在且未知过期或仍在有效期,则优先使用令牌
if self._token:
if not self._token_exp or self._token_exp > timezone.now():
return self._token
# 通过 AccessKeyId/AccessKey 获取令牌
if not (self.base_url and self.admin_access_key_id and self.admin_access_key):
raise RuntimeError('GoEdge API配置不完整需 base_url、access_key_id、access_key 或提供现成令牌')
url = self._join('/APIAccessTokenService/getAPIAccessToken')
payload = {
'type': 'admin',
'accessKeyId': self.admin_access_key_id,
'accessKey': self.admin_access_key,
}
resp = requests.post(url, headers={'Content-Type': 'application/json'}, data=json.dumps(payload))
try:
resp = requests.post(url, headers={'Content-Type': 'application/json'}, data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'获取AccessToken请求失败{e}')
resp.raise_for_status()
data = resp.json()
if int(data.get('code', 500)) != 200:
raise RuntimeError(f'获取AccessToken失败: {data}')
token = data['data']['token']
expires_at = data['data'].get('expiresAt')
# 更新缓存与数据库
self._token = token
if expires_at:
try:
exp_dt = datetime.datetime.fromtimestamp(int(expires_at), tz=timezone.utc)
except Exception:
exp_dt = timezone.now() + datetime.timedelta(hours=6)
else:
exp_dt = timezone.now() + datetime.timedelta(hours=6)
self._token_exp = exp_dt
settings = self._get_settings()
settings.edge_access_token = token
settings.edge_token_expires_at = exp_dt
settings.save(update_fields=['edge_access_token', 'edge_token_expires_at'])
return token
def _join(self, path: str) -> str:
return (self.base_url.rstrip('/') + '/' + path.lstrip('/'))
def _validate_base_url(self) -> None:
"""校验 base_url 的格式与端口范围,便于快速定位配置错误。"""
if not self.base_url:
raise RuntimeError('GoEdge API配置不完整缺少 base_url (GOEDGE_ADMIN_API_BASE_URL)')
p = urlparse(self.base_url)
if not p.scheme or not p.netloc:
raise RuntimeError(f'GOEDGE_ADMIN_API_BASE_URL无效{self.base_url}')
# 校验端口
if ':' in p.netloc:
host, port_str = p.netloc.rsplit(':', 1)
try:
port = int(port_str)
if port < 1 or port > 65535:
raise ValueError('port out of range')
except Exception:
raise RuntimeError(f'GOEDGE_ADMIN_API_BASE_URL端口非法需在1-65535{self.base_url}')
# ---------- 业务方法 ----------
def create_basic_http_server(
self,
domains: List[str],
origin_addrs: List[str],
user_id: int = 0,
enable_websocket: bool = False,
node_cluster_id: int = 0,
ssl_cert_ids: Optional[List[int]] = None,
) -> int:
"""调用 createBasicHTTPServer 快速创建HTTP网站并返回 serverId。"""
# 计算有效的 nodeClusterId优先入参其次 SystemSettings最后环境变量
effective_cluster_id = node_cluster_id
if not effective_cluster_id:
s = self._get_settings()
effective_cluster_id = s.default_node_cluster_id or 0
if not effective_cluster_id:
env_val = os.getenv('GOEDGE_DEFAULT_NODE_CLUSTER_ID', '').strip()
if env_val:
try:
effective_cluster_id = int(env_val)
except Exception:
raise RuntimeError(f"GOEDGE_DEFAULT_NODE_CLUSTER_ID 非法:{env_val}")
if not effective_cluster_id:
raise RuntimeError("未配置 nodeClusterId请在 SystemSettings.default_node_cluster_id 或 .env(GOEDGE_DEFAULT_NODE_CLUSTER_ID) 设置有效集群ID")
url = self._join('/ServerService/createBasicHTTPServer')
payload = {
'nodeClusterId': effective_cluster_id,
'userId': user_id,
'domains': domains,
'sslCertIds': ssl_cert_ids or [],
'originAddrs': origin_addrs,
'enableWebsocket': enable_websocket,
}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'创建HTTP网站请求失败{e}')
resp.raise_for_status()
data = resp.json()
server_id = data.get('serverId')
if not server_id:
raise RuntimeError(f'创建HTTP网站失败: {data}')
return int(server_id)
def create_server_http_proxy(
self,
name: str,
domains: List[str],
reverse_proxy_json: Dict[str, Any],
user_id: int = 0,
node_cluster_id: int = 0,
server_group_ids: Optional[List[int]] = None,
) -> int:
"""更灵活的 createServer 创建 httpProxy 类型服务。"""
url = self._join('/ServerService/createServer')
# serverNamesJSON 与 reverseProxyJSON 需要 Base64
server_names_json = base64.b64encode(json.dumps({
'names': [{'name': d} for d in domains]
}).encode('utf-8')).decode('utf-8')
reverse_proxy_b64 = base64.b64encode(json.dumps(reverse_proxy_json).encode('utf-8')).decode('utf-8')
# 计算有效的 nodeClusterId优先入参其次 SystemSettings最后环境变量
effective_cluster_id = node_cluster_id
if not effective_cluster_id:
s = self._get_settings()
effective_cluster_id = s.default_node_cluster_id or 0
if not effective_cluster_id:
env_val = os.getenv('GOEDGE_DEFAULT_NODE_CLUSTER_ID', '').strip()
if env_val:
try:
effective_cluster_id = int(env_val)
except Exception:
raise RuntimeError(f"GOEDGE_DEFAULT_NODE_CLUSTER_ID 非法:{env_val}")
if not effective_cluster_id:
raise RuntimeError("未配置 nodeClusterId请在 SystemSettings.default_node_cluster_id 或 .env(GOEDGE_DEFAULT_NODE_CLUSTER_ID) 设置有效集群ID")
payload = {
'type': 'httpProxy',
'name': name,
'description': name,
'userId': user_id,
'nodeClusterId': effective_cluster_id,
'serverNamesJSON': server_names_json,
'reverseProxyJSON': reverse_proxy_b64,
'serverGroupIds': server_group_ids or [],
}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'创建网站请求失败:{e}')
resp.raise_for_status()
data = resp.json()
server_id = data.get('serverId')
if not server_id:
raise RuntimeError(f'创建网站失败: {data}')
return int(server_id)
def find_latest_server_daily_stats(self, server_id: int, days: int = 1) -> List[Dict[str, Any]]:
url = self._join('/ServerDailyStatService/findLatestServerDailyStats')
payload = {'serverId': server_id, 'days': days}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'读取每日统计请求失败:{e}')
resp.raise_for_status()
data = resp.json()
return data.get('stats', [])
def find_latest_server_hourly_stats(self, server_id: int, hours: int = 24) -> List[Dict[str, Any]]:
url = self._join('/ServerDailyStatService/findLatestServerHourlyStats')
payload = {'serverId': int(server_id), 'hours': int(hours)}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'读取每小时请求数失败:{e}')
resp.raise_for_status()
data = resp.json()
return data.get('stats', [])
def find_daily_bandwidth_stats(self, server_id: int, days: int = 1, algo: str = 'avg') -> List[Dict[str, Any]]:
url = self._join('/ServerBandwidthStatService/findDailyServerBandwidthStats')
payload = {'serverId': server_id, 'days': days, 'algo': algo}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'读取带宽统计请求失败:{e}')
resp.raise_for_status()
data = resp.json()
return data.get('stats', [])
# ---------- 配置查询与更新Web / HTTPS / 策略) ----------
def find_server_web_id(self, server_id: int) -> Optional[int]:
"""通过 findEnabledServerConfig 解析 serverJSON 获取 webId。"""
url = self._join('/ServerService/findEnabledServerConfig')
payload = {'serverId': server_id}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询网站配置失败:{e}')
resp.raise_for_status()
data = resp.json()
b64 = data.get('serverJSON')
if not b64:
return None
try:
conf = json.loads(base64.b64decode(b64).decode('utf-8'))
except Exception:
return None
web_id = conf.get('webId')
if web_id is None:
# 兼容嵌套结构(某些版本可能放在 web 字段)
web_id = (conf.get('web') or {}).get('webId')
try:
return int(web_id) if web_id is not None else None
except Exception:
return None
def update_http_web_access_log(self, http_web_id: int, is_on: bool, policy_id: Optional[int] = None) -> None:
"""更新 Web 访问日志设置当开启时可引用既有访问日志策略ID。"""
url = self._join('/HTTPWebService/updateHTTPWebAccessLog')
payload_obj: Dict[str, Any] = {'isOn': bool(is_on)}
if is_on and policy_id:
payload_obj['policyRef'] = {'isOn': True, 'policyId': int(policy_id)}
access_log_b64 = base64.b64encode(json.dumps(payload_obj).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'accessLogJSON': access_log_b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新访问日志配置失败:{e}')
resp.raise_for_status()
def update_http_web_websocket(self, http_web_id: int, is_on: bool) -> None:
"""更新 WebSocket 开关(最简配置)。"""
url = self._join('/HTTPWebService/updateHTTPWebWebsocket')
ws_b64 = base64.b64encode(json.dumps({'isOn': bool(is_on)}).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'websocketJSON': ws_b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新WebSocket配置失败{e}')
resp.raise_for_status()
def find_server_ssl_policy_id(self, server_id: int) -> Optional[int]:
"""通过网站配置解析 sslPolicyId。可能存在 https.sslPolicyId 或 https.sslPolicyRef.policyId。"""
url = self._join('/ServerService/findEnabledServerConfig')
payload = {'serverId': server_id}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询网站配置失败:{e}')
resp.raise_for_status()
data = resp.json()
b64 = data.get('serverJSON')
if not b64:
return None
try:
conf = json.loads(base64.b64decode(b64).decode('utf-8'))
except Exception:
return None
https_conf = conf.get('https') or {}
ssl_policy_id = None
if isinstance(https_conf, dict):
ssl_policy_id = https_conf.get('sslPolicyId')
if not ssl_policy_id:
ref = https_conf.get('sslPolicyRef') or {}
ssl_policy_id = ref.get('sslPolicyId') or ref.get('policyId')
try:
return int(ssl_policy_id) if ssl_policy_id is not None else None
except Exception:
return None
def update_ssl_policy_http3(self, ssl_policy_id: int, enabled: bool) -> None:
"""切换 SSL 策略的 HTTP/3 开关。"""
url = self._join('/SSLPolicyService/updateSSLPolicy')
payload = {'sslPolicyId': int(ssl_policy_id), 'http3Enabled': bool(enabled)}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新SSL策略失败{e}')
resp.raise_for_status()
def update_http_web_firewall(self, http_web_id: int, is_on: bool, policy_id: Optional[int] = None) -> None:
"""更新 Web 防火墙WAF设置可引用既有防火墙策略ID。"""
url = self._join('/HTTPWebService/updateHTTPWebFirewall')
firewall_obj: Dict[str, Any] = {'isOn': bool(is_on)}
if is_on and policy_id:
firewall_obj['firewallPolicyRef'] = {'isOn': True, 'policyId': int(policy_id)}
firewall_b64 = base64.b64encode(json.dumps(firewall_obj).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'firewallJSON': firewall_b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新防火墙配置失败:{e}')
resp.raise_for_status()
# ---------- 查询状态与策略校验 ----------
def find_http_web_config(self, http_web_id: int) -> Optional[Dict[str, Any]]:
"""查询 HTTPWeb 配置并返回解析后的 JSON。"""
url = self._join('/HTTPWebService/findEnabledHTTPWebConfig')
payload = {'httpWebId': http_web_id}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询HTTPWeb配置失败{e}')
resp.raise_for_status()
data = resp.json()
b64 = data.get('httpWebJSON')
if not b64:
return None
try:
conf = json.loads(base64.b64decode(b64).decode('utf-8'))
except Exception:
return None
return conf
def get_server_feature_status(self, server_id: int) -> Dict[str, Any]:
"""汇总服务器的特性开关状态访问日志、WebSocket、WAF、HTTP/3"""
status: Dict[str, Any] = {
'serverId': server_id,
'webId': None,
'accessLog': {'isOn': None, 'policyId': None},
'websocket': {'isOn': None},
'firewall': {'isOn': None, 'policyId': None},
'sslPolicy': {'id': None, 'http3Enabled': None},
}
# webId
try:
web_id = self.find_server_web_id(server_id)
status['webId'] = web_id
except Exception:
logger.exception('find webId failed')
web_id = None
# httpWeb 状态
try:
if web_id:
web_conf = self.find_http_web_config(web_id) or {}
# 访问日志
al = web_conf.get('accessLog') or web_conf.get('accessLogRef') or {}
status['accessLog']['isOn'] = bool(al.get('isOn')) if isinstance(al, dict) else None
status['accessLog']['policyId'] = (al.get('policyId') or al.get('policyRef', {}).get('policyId')) if isinstance(al, dict) else None
# WebSocket
ws = web_conf.get('websocket') or web_conf.get('websocketRef') or {}
status['websocket']['isOn'] = bool(ws.get('isOn')) if isinstance(ws, dict) else None
# 防火墙
fw = web_conf.get('firewall') or web_conf.get('firewallRef') or web_conf.get('firewallPolicyRef') or {}
status['firewall']['isOn'] = bool(fw.get('isOn')) if isinstance(fw, dict) else None
status['firewall']['policyId'] = (fw.get('policyId') or fw.get('firewallPolicyId') or fw.get('policyRef', {}).get('policyId')) if isinstance(fw, dict) else None
except Exception:
logger.exception('parse httpWeb config failed')
# SSL策略与HTTP/3
try:
ssl_id = self.find_server_ssl_policy_id(server_id)
status['sslPolicy']['id'] = ssl_id
if ssl_id is not None:
url = self._join('/SSLPolicyService/findEnabledSSLPolicyConfig')
payload = {'sslPolicyId': int(ssl_id)}
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
resp.raise_for_status()
data = resp.json()
b64 = data.get('sslPolicyJSON')
if b64:
try:
conf = json.loads(base64.b64decode(b64).decode('utf-8'))
status['sslPolicy']['http3Enabled'] = bool(conf.get('http3Enabled')) if isinstance(conf, dict) else None
except Exception:
status['sslPolicy']['http3Enabled'] = None
except Exception:
logger.exception('query sslPolicy/http3 failed')
return status
def check_access_log_policy_exists(self, policy_id: int) -> bool:
"""校验访问日志策略是否存在/启用。"""
url = self._join('/HTTPAccessLogPolicyService/findHTTPAccessLogPolicy')
payload = {'httpAccessLogPolicyId': int(policy_id)}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询访问日志策略失败:{e}')
resp.raise_for_status()
data = resp.json()
policy = data.get('httpAccessLogPolicy') or data.get('policy')
return bool(policy and (policy.get('id') or policy.get('policyId')))
def check_firewall_policy_exists(self, policy_id: int) -> bool:
"""校验防火墙策略是否存在/启用。"""
url = self._join('/HTTPFirewallPolicyService/findEnabledHTTPFirewallPolicy')
payload = {'httpFirewallPolicyId': int(policy_id)}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询防火墙策略失败:{e}')
resp.raise_for_status()
data = resp.json()
policy = data.get('httpFirewallPolicy') or data.get('policy')
return bool(policy and (policy.get('id') or policy.get('policyId')))
def update_http_web_cache(self, http_web_id: int, cache_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebCache')
cache_b64 = base64.b64encode(json.dumps(cache_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'cacheJSON': cache_b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新缓存配置失败:{e}')
resp.raise_for_status()
def update_http_web_locations(self, http_web_id: int, locations_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebLocations')
locations_b64 = base64.b64encode(json.dumps(locations_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'locationsJSON': locations_b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新路径规则失败:{e}')
resp.raise_for_status()
def update_http_web_rewrite_rules(self, http_web_id: int, rewrite_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebRewriteRules')
rewrite_b64 = base64.b64encode(json.dumps(rewrite_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'rewriteRulesJSON': rewrite_b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新重写规则失败:{e}')
resp.raise_for_status()
def update_http_web_redirect_to_https(self, http_web_id: int, redirect_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebRedirectToHTTPS')
b64 = base64.b64encode(json.dumps(redirect_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'redirectToHTTPSJSON': b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新HTTPS跳转失败{e}')
resp.raise_for_status()
def update_http_web_referers(self, http_web_id: int, referers_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebReferers')
b64 = base64.b64encode(json.dumps(referers_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'referersJSON': b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新防盗链配置失败:{e}')
resp.raise_for_status()
def update_http_web_remote_addr(self, http_web_id: int, remote_addr_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebRemoteAddr')
b64 = base64.b64encode(json.dumps(remote_addr_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'remoteAddrJSON': b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新客户端IP解析失败{e}')
resp.raise_for_status()
def update_http_web_shutdown(self, http_web_id: int, shutdown_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebShutdown')
b64 = base64.b64encode(json.dumps(shutdown_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'shutdownJSON': b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新停服配置失败:{e}')
resp.raise_for_status()
def update_http_web_request_limit(self, http_web_id: int, request_limit_conf: Dict[str, Any]) -> None:
url = self._join('/HTTPWebService/updateHTTPWebRequestLimit')
b64 = base64.b64encode(json.dumps(request_limit_conf).encode('utf-8')).decode('utf-8')
payload = {'httpWebId': http_web_id, 'requestLimitJSON': b64}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'更新请求限速失败:{e}')
resp.raise_for_status()
def list_http_access_logs(
self,
server_id: int,
day: Optional[str] = None,
size: int = 50,
hour_from: Optional[str] = None,
hour_to: Optional[str] = None,
reverse: bool = False,
ip: Optional[str] = None,
keyword: Optional[str] = None,
request_id: Optional[str] = None,
partition: Optional[int] = None,
) -> Dict[str, Any]:
url = self._join('/HTTPAccessLogService/listHTTPAccessLogs')
payload: Dict[str, Any] = {
'serverId': int(server_id),
'size': int(size),
}
if day:
payload['day'] = day
if hour_from:
payload['hourFrom'] = hour_from
if hour_to:
payload['hourTo'] = hour_to
if reverse:
payload['reverse'] = True
if ip:
payload['ip'] = ip
if keyword:
payload['keyword'] = keyword
if request_id:
payload['requestId'] = request_id
if partition is not None:
payload['partition'] = int(partition)
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询访问日志失败:{e}')
resp.raise_for_status()
data = resp.json()
return {
'logs': data.get('httpAccessLogs') or [],
'requestId': data.get('requestId'),
'hasMore': bool(data.get('hasMore')),
}
def find_http_access_log_partitions(self, day: str) -> Dict[str, Any]:
url = self._join('/HTTPAccessLogService/findHTTPAccessLogPartitions')
payload = {'day': day}
try:
resp = requests.post(url, headers=self._headers(), data=json.dumps(payload), timeout=DEFAULT_TIMEOUT)
except RequestException as e:
raise RuntimeError(f'查询访问日志分区失败:{e}')
resp.raise_for_status()
data = resp.json()
return {
'partitions': data.get('partitions') or [],
'reversePartitions': data.get('reversePartitions') or [],
}
def aggregate_status_codes(
self,
server_id: int,
day: Optional[str] = None,
hour_from: Optional[str] = None,
hour_to: Optional[str] = None,
size: int = 1000,
max_pages_per_partition: int = 200,
) -> Dict[str, Any]:
if not day:
day = timezone.now().strftime('%Y%m%d')
parts = []
try:
part_res = self.find_http_access_log_partitions(day)
parts = part_res.get('partitions') or []
except Exception:
parts = [None]
counts: Dict[str, int] = {}
bins: Dict[str, int] = {'2xx': 0, '3xx': 0, '4xx': 0, '5xx': 0}
for p in parts:
req_id = None
pages = 0
while pages < max_pages_per_partition:
pages += 1
try:
res = self.list_http_access_logs(
server_id=server_id,
day=day,
size=size,
hour_from=hour_from,
hour_to=hour_to,
reverse=False,
request_id=req_id,
partition=(p if p is not None else None),
)
except Exception:
break
logs = res.get('logs') or []
for item in logs:
code = str(item.get('status')) if isinstance(item, dict) else None
if not code:
continue
try:
c = int(code)
except Exception:
continue
counts[code] = counts.get(code, 0) + 1
if 200 <= c <= 299:
bins['2xx'] += 1
elif 300 <= c <= 399:
bins['3xx'] += 1
elif 400 <= c <= 499:
bins['4xx'] += 1
elif 500 <= c <= 599:
bins['5xx'] += 1
req_id = res.get('requestId')
has_more = bool(res.get('hasMore'))
if not has_more:
break
total = sum(counts.values())
top = sorted(counts.items(), key=lambda x: -x[1])[:10]
return {'total': total, 'bins': bins, 'top': top}
__all__ = ['GoEdgeClient']