From 8824aa1fb1a00d28f23e35ba26314edfbf7f0324 Mon Sep 17 00:00:00 2001
From: svefnz
Date: Sun, 5 Apr 2026 10:23:02 +0800
Subject: [PATCH] first commit
---
.DS_Store | Bin 0 -> 18436 bytes
.env.example | 2 +
README.md | 144 +
README_v1.md | 170 +
api_server.py | 736 +++
auto_pool_maintainer.py | 5413 +++++++++++++++++++++
config.example.json | 102 +
dev_services.ps1 | 553 +++
dev_services.sh | 398 ++
docker-compose.yml | 35 +
frontend/.DS_Store | Bin 0 -> 10244 bytes
frontend/Dockerfile | 18 +
frontend/README.md | 65 +
frontend/dist/.DS_Store | Bin 0 -> 8196 bytes
frontend/dist/assets/index-CraRoSIX.css | 1 +
frontend/dist/assets/index-DPSNYdMF.js | 2 +
frontend/dist/index.html | 13 +
frontend/index.html | 12 +
frontend/nginx.conf | 20 +
frontend/package.json | 19 +
frontend/pnpm-lock.yaml | 1291 +++++
frontend/pnpm-workspace.yaml | 2 +
frontend/src/.DS_Store | Bin 0 -> 6148 bytes
frontend/src/app.tsx | 284 ++
frontend/src/components/config-panel.tsx | 322 ++
frontend/src/components/login-gate.tsx | 39 +
frontend/src/components/monitor-panel.tsx | 110 +
frontend/src/components/terminal-log.tsx | 39 +
frontend/src/lib/config-schema.ts | 580 +++
frontend/src/main.tsx | 7 +
frontend/src/mock/data.ts | 51 +
frontend/src/services/api.ts | 127 +
frontend/src/services/events.ts | 3 +
frontend/src/styles/base.css | 29 +
frontend/src/styles/layout.css | 816 ++++
frontend/src/styles/tokens.css | 16 +
frontend/src/types/api.ts | 142 +
frontend/src/types/runtime.ts | 63 +
frontend/tsconfig.json | 24 +
frontend/tsconfig.node.json | 9 +
frontend/vite.config.ts | 16 +
logs/.DS_Store | Bin 0 -> 10244 bytes
output_fixed/.DS_Store | Bin 0 -> 6148 bytes
output_tokens/.DS_Store | Bin 0 -> 6148 bytes
package-lock.json | 6 +
requirements.txt | 14 +
tests/.DS_Store | Bin 0 -> 6148 bytes
tests/test_flow_minimal_refactor.py | 1250 +++++
48 files changed, 12943 insertions(+)
create mode 100644 .DS_Store
create mode 100644 .env.example
create mode 100644 README.md
create mode 100644 README_v1.md
create mode 100644 api_server.py
create mode 100644 auto_pool_maintainer.py
create mode 100644 config.example.json
create mode 100644 dev_services.ps1
create mode 100644 dev_services.sh
create mode 100644 docker-compose.yml
create mode 100644 frontend/.DS_Store
create mode 100644 frontend/Dockerfile
create mode 100644 frontend/README.md
create mode 100644 frontend/dist/.DS_Store
create mode 100644 frontend/dist/assets/index-CraRoSIX.css
create mode 100644 frontend/dist/assets/index-DPSNYdMF.js
create mode 100644 frontend/dist/index.html
create mode 100644 frontend/index.html
create mode 100644 frontend/nginx.conf
create mode 100644 frontend/package.json
create mode 100644 frontend/pnpm-lock.yaml
create mode 100644 frontend/pnpm-workspace.yaml
create mode 100644 frontend/src/.DS_Store
create mode 100644 frontend/src/app.tsx
create mode 100644 frontend/src/components/config-panel.tsx
create mode 100644 frontend/src/components/login-gate.tsx
create mode 100644 frontend/src/components/monitor-panel.tsx
create mode 100644 frontend/src/components/terminal-log.tsx
create mode 100644 frontend/src/lib/config-schema.ts
create mode 100644 frontend/src/main.tsx
create mode 100644 frontend/src/mock/data.ts
create mode 100644 frontend/src/services/api.ts
create mode 100644 frontend/src/services/events.ts
create mode 100644 frontend/src/styles/base.css
create mode 100644 frontend/src/styles/layout.css
create mode 100644 frontend/src/styles/tokens.css
create mode 100644 frontend/src/types/api.ts
create mode 100644 frontend/src/types/runtime.ts
create mode 100644 frontend/tsconfig.json
create mode 100644 frontend/tsconfig.node.json
create mode 100644 frontend/vite.config.ts
create mode 100644 logs/.DS_Store
create mode 100644 output_fixed/.DS_Store
create mode 100644 output_tokens/.DS_Store
create mode 100644 package-lock.json
create mode 100644 requirements.txt
create mode 100644 tests/.DS_Store
create mode 100644 tests/test_flow_minimal_refactor.py
diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..7e5233225670822ccec2801ec86a233d02371895
GIT binary patch
literal 18436
zcmeHPYit}>6+UO|#52hx89(AUi=Awx}1pNU8RH!H_An_28AN)Z=f^+B2Zua5b
z{s@X3_g?AlxpVJ1_s%_Y?mhFJnOOk9bkW!YkOBaiGNYObASnQGqDji-#M)VINNrSu
zl<%S3PVxW&6rf1IS(t6$78NT^0}`DO%XIBP1vk8MH?N-sm%#)hx9AsgwwoiKXL1GG
z@`GgZS17f$uUXsCp>(Y8cr-KbE@lEfDEevLJIjv7UDwW)%X;pNWiDp5o*~x>bjvZl
zP{1-wVmUWyIflEKc8iYThq4*opeVAE)%vbpy*IgUFuwOd^4ehh>cPFq!T7#|d#_zn
z*N6n(xihAY`_>0cO^+DgHK?lN7fFSh9}ShmQ2sL
zTt_<@RUWEnM-+qxga+P<8i;l}dHq{)@?y?H1408WHNf_VIAumt5nbXm+&buo-9EB*
zb_*FMDiaO}>*5hjMRbYNP?TgsQKDQFBZz@qoa*C@ooFheOPq3XKrRj#jf@dO!Ekhv
zi-+uhDNe<&(16fDqXr1?6E+G61^M*ruNt&hnUR$*Mfeq8E92MKway5^%F4R+>V}OR
z+I4O2>h10A+0wVQ`)&OLS*<=P%BA44V$Qw(F(sqM6k;`Gjy!)(sSrK-6%ri~r?3`yABeS!<8BDPe@}6tkqpoiSgxRL6
zTBm39%j3~_=fV>S%!L!qtV@HllNl*63I7a7dktMpJsLv85hv{ifx9rRd!FvhndvK2
z7EOpvA{;AI(DQ_56A#_s2BRcWck_}4iLwpJJ>^nFG4Y)7`>?Epm^6_ZTS$%sXU
zcf$}&z
z>MHEfX8NSM`pa2mFm+8+GrsEuWERv}R?~{UIc8E{R>>Tu9{tKi
zYS8>Dk5%-ju_?Nm3lw(oqxi+)hhnkwEJSij_k)pw=}`FOM^T()h}vfSdEGNd7Af2m
z)gGhTaP?9>6FO>Gfm?1oNfDQ6*4+`q2;(hXTIZ(NXlzW@elF9cu$+=bAud{5Jkq69
zmO&!5iyK+uxl8e|6b652$s~C_-PjDkUKoZ`Z~;8H3ZH<_!jteF_#wOqzl7hxU*Mnc
zZ$w;+3a-QT*nB87tO++u;2Sa)_lw)VIqN)YI1R!=36qGKKTBwfu0f|2;AV5H%@0RQL4Xt5BFwYvYH5{reSJ
zLw;~beXl|RL#>@34yY+b(I7nNAL>6U(?ZhZ|Cfco3U9!l;RgJNEPNw&V;}CsByI7B
z@i=YoBV^MVoWS$6(d%T{%&r|Q;3f3&gLsW>dkG)I>-cf}Bz_7%L-zdzvT$bKU&F8C
zllTq%9zKoF;P>%)d;wp?m+;4A@xQ>=@K5+x{9BzJpRToI{8z0VFSx_QQK*}t3G48)
zFCD4pFy4ftFkkI>Zml4e(-4@vJj(UzQu;I$XhK&nPAjI%R4uKGtiGi$igxFxsbvW@Kz;WU-
zd^KpXDPIj{Z2vU71!(B24mq}iO%8i*)dcJ-^Qm#sa5sCpce72nX5pGe*!Gsfw$+O%
zdM+k{wx$T$);^Y4=`Oiq#dWUu*~k@Fwb!`dhV~lQ3*i%+ifoXa)s_UHh3z73~!C&Ld-;}@BxyPCF
zZrXR9v)@qnIaJlBLaXEzxfU~A#_hM_7LH#ye&P6U$?>;dUc&VY*T4MhgDV}Ux3KFb
z&cCI^vhKfyM8ge$!2j$fsGQdFLW+I=ceMHU|7CVTg$9HMR-p#4eK<3mriqn5uBe|Q
z`_?W_`2uB@7~c}76%i_gpW+cOdwKRaWyUue(F=^SM0HD?a?#uVAz=Bx8pIpcx9JV*
crkK0QEkFzYWp4nQ@;A7Df&bac1GrB4e`_j8IsgCw
literal 0
HcmV?d00001
diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..ae24556
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,2 @@
+# Optional: if omitted, backend will auto-generate a token into docker-data/backend/admin_token.txt
+# APP_ADMIN_TOKEN=your-strong-admin-token
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c28361c
--- /dev/null
+++ b/README.md
@@ -0,0 +1,144 @@
+# gpt-auto
+
+一个用于**账号池维护(清理 + 补号)**的本地工具,包含:
+
+- Python 后端(`api_server.py` + `auto_pool_maintainer.py`)
+- Preact 前端控制台(`frontend/`)
+- 一键启动脚本(`dev_services.sh`)
+
+---
+
+## 新手快速上手(最短路径)
+
+> 目标:第一次就能正确拉起项目并进入前端面板。
+
+### 1) 安装依赖
+
+在项目根目录执行:
+
+```bash
+# 1. Python 依赖
+python3 -m venv .venv
+./.venv/bin/pip install -r requirements.txt
+
+# 2. 前端依赖
+cd frontend
+pnpm install
+cd ..
+```
+
+### 2) 准备配置文件
+
+```bash
+cp config.example.json config.json
+```
+
+然后至少修改以下关键项(不改会跑不起来):
+
+- `clean.base_url`:你的 CLIProxyAPI 地址(例如 `http://127.0.0.1:8317`)
+- `clean.token`:CLIProxyAPI 管理 token
+- `mail.provider` + 对应 provider 的配置(`mail.api_base/api_key/domain/domains` 或 `cfmail.api_base/api_key/domains` 等)
+
+### 3) 启动项目
+
+```bash
+./dev_services.sh fg
+```
+
+启动成功后:
+
+- 前端地址:`http://127.0.0.1:8173`
+- 后端 API:`http://127.0.0.1:8318`
+
+首次启动后端会生成 `admin_token.txt`,把里面的 token 复制到前端登录框(`X-Admin-Token`)。
+
+---
+
+## 关键配置说明(只讲重要的)
+
+配置文件:`config.json`
+
+### `clean`(账号探测/清理)
+
+- `base_url` / `token`:CLIProxyAPI 连接信息(必填)
+- `target_type`:目标账号类型(通常为 `codex`)
+- `sample_size`:随机抽样探测数量,`0` 表示全量探测
+- `used_percent_threshold`:超阈值判定
+
+### `maintainer`(补号目标)
+
+- `min_candidates`:目标可用号数量(低于它就补号)
+- `loop_interval_seconds`:循环模式下每轮检查间隔
+
+### `run`(补号执行参数)
+
+- `workers`:补号并发
+- `failure_threshold_for_cooldown` / `failure_cooldown_seconds`:连续失败冷却策略
+ - 对支持多域名的邮箱 provider,这两个参数作用于“单个域名”的熔断与恢复
+
+### `mail`(邮箱提供方)
+
+- `provider`:`cfmail / self_hosted_mail_api / duckmail / tempmail_lol / yyds_mail`
+- 不同 provider 需要填写对应 section 的鉴权字段
+- `otp_timeout_seconds` / `poll_interval_seconds`:验证码等待与轮询间隔
+- 支持多域名的 provider:`self_hosted_mail_api / duckmail / yyds_mail`
+- `domains`:可选数组,配置多个域名时按顺序轮询;如果填写,优先级高于单个 `domain`
+- `tempmail_lol` 当前不支持自定义多域名切换
+
+### `cfmail`(CF 自建邮箱)
+
+- `api_base`:CF Mail Worker 接口地址
+- `api_key`:CF Mail 管理密钥,请求时会发到 `x-admin-auth`
+- `domains`:域名列表,按顺序轮询;填写后优先于单个 `domain`
+- `cfmail` 是独立 provider,不复用 `self_hosted_mail_api`
+- 节点熔断直接复用 `run.failure_threshold_for_cooldown / run.failure_cooldown_seconds`
+
+---
+
+## 常用命令
+
+```bash
+# 前台启动(推荐调试)
+./dev_services.sh fg
+
+# 后台启动
+./dev_services.sh bg
+
+# 查看状态
+./dev_services.sh status
+
+# 停止后台服务
+./dev_services.sh stop
+```
+
+单次执行维护任务(不走前端):
+
+```bash
+./.venv/bin/python auto_pool_maintainer.py --config config.json --log-dir logs
+```
+
+---
+
+## 日志与产物
+
+- 维护日志:`logs/pool_maintainer_*.log`
+- 服务托管日志:`logs/dev-services/`
+- 本地 token/账号输出(当 `output.save_local=true` 时):
+ - `output_fixed/`
+ - `output_tokens/`
+
+---
+
+## 排错建议(高频)
+
+1. 前端打开但接口 401:通常是 `X-Admin-Token` 错误,重新读取 `admin_token.txt`
+2. 补号不触发:先看日志里的 `清理后统计: candidates=... 阈值=...`
+3. 启动失败:先看 `logs/dev-services/backend.log` / `frontend.log`
+4. OAuth 偶发慢:优先看日志中的 `oauth_mail_otp_timeout` 是否增多(邮箱链路波动)
+
+---
+
+## 安全提示
+
+- `config.json`、`admin_token.txt` 可能包含敏感信息,不要公开上传。
+- 对外发布代码时,建议仅保留 `config.example.json`。
diff --git a/README_v1.md b/README_v1.md
new file mode 100644
index 0000000..d2e660e
--- /dev/null
+++ b/README_v1.md
@@ -0,0 +1,170 @@
+# gpt-auto
+
+一个用于**账号池维护(清理 + 补号)**的本地工具,包含:
+
+- Python 后端(`api_server.py` + `auto_pool_maintainer.py`)
+- Preact 前端控制台(`frontend/`)
+- 一键启动脚本(`dev_services.sh`)
+
+---
+
+## 新手快速上手(最短路径)
+
+> 目标:第一次就能正确拉起项目并进入前端面板。
+
+### 1) 安装依赖
+
+在项目根目录执行:
+
+```bash
+# 1. Python 依赖(推荐使用 uv)
+uv venv .venv
+uv pip install -r requirements.txt
+
+# 如果你不使用 uv,也可以继续用 venv + pip
+# python3 -m venv .venv
+# ./.venv/bin/pip install -r requirements.txt
+
+# 2. 前端依赖
+cd frontend
+pnpm install
+cd ..
+```
+
+说明:
+
+- `uv` 这里只用于安装 Python 后端依赖;前端依赖仍然使用 `pnpm`
+- 当前仓库没有 `pyproject.toml` / `uv.lock`,因此这里使用 `uv pip install -r requirements.txt`,而不是 `uv sync`
+- `uv venv .venv` 创建的虚拟环境与现有启动脚本兼容,无需额外修改脚本
+
+### 2) 准备配置文件
+
+```bash
+cp config.example.json config.json
+```
+
+然后至少修改以下关键项(不改会跑不起来):
+
+- `clean.base_url`:你的 CLIProxyAPI 地址(例如 `http://127.0.0.1:8317`)
+- `clean.token`:CLIProxyAPI 管理 token
+- `mail.provider` + 对应 provider 的配置(`mail.api_base/api_key/domain` 等)
+
+### 3) 启动项目
+
+```bash
+# macOS / Linux
+./dev_services.sh fg
+
+# Windows PowerShell
+.\dev_services.ps1 fg
+```
+
+启动成功后:
+
+- 前端地址:`http://127.0.0.1:8173`
+- 后端 API:`http://127.0.0.1:8318`
+
+首次启动后端会生成 `admin_token.txt`,把里面的 token 复制到前端登录框(`X-Admin-Token`)。
+
+---
+
+## 关键配置说明(只讲重要的)
+
+配置文件:`config.json`
+
+### `clean`(账号探测/清理)
+
+- `base_url` / `token`:CLIProxyAPI 连接信息(必填)
+- `target_type`:目标账号类型(通常为 `codex`)
+- `used_percent_threshold`:超阈值判定
+
+### `maintainer`(补号目标)
+
+- `min_candidates`:目标可用号数量(低于它就补号)
+- `loop_interval_seconds`:循环模式下每轮检查间隔
+
+### `run`(补号执行参数)
+
+- `workers`:补号并发
+- `failure_threshold_for_cooldown` / `failure_cooldown_seconds`:连续失败冷却策略
+
+### `mail`(邮箱提供方)
+
+- `provider`:`self_hosted_mail_api / duckmail / tempmail_lol / yyds_mail`
+- 不同 provider 需要填写对应 section 的鉴权字段
+- `otp_timeout_seconds` / `poll_interval_seconds`:验证码等待与轮询间隔
+
+---
+
+## 常用命令
+
+```bash
+# win 用户可用 dev_services.ps1 脚本启动
+# 下方示例默认使用 bash 写法,Windows 对应命令见后
+
+# macOS / Linux
+
+# 前台启动(推荐调试)
+./dev_services.sh fg
+
+# 后台启动
+./dev_services.sh bg
+
+# 查看状态
+./dev_services.sh status
+
+# 停止后台服务
+./dev_services.sh stop
+```
+
+Windows PowerShell 对应命令:
+
+```powershell
+# 前台启动(推荐调试)
+.\dev_services.ps1 fg
+
+# 后台启动
+.\dev_services.ps1 bg
+
+# 查看状态
+.\dev_services.ps1 status
+
+# 停止后台服务
+.\dev_services.ps1 stop
+```
+
+单次执行维护任务(不走前端):
+
+```bash
+# macOS / Linux
+./.venv/bin/python auto_pool_maintainer.py --config config.json --log-dir logs
+
+# Windows PowerShell
+.\.venv\Scripts\python.exe auto_pool_maintainer.py --config config.json --log-dir logs
+```
+
+---
+
+## 日志与产物
+
+- 维护日志:`logs/pool_maintainer_*.log`
+- 服务托管日志:`logs/dev-services/`
+- 本地 token/账号输出(当 `output.save_local=true` 时):
+ - `output_fixed/`
+ - `output_tokens/`
+
+---
+
+## 排错建议(高频)
+
+1. 前端打开但接口 401:通常是 `X-Admin-Token` 错误,重新读取 `admin_token.txt`
+2. 补号不触发:先看日志里的 `清理后统计: candidates=... 阈值=...`
+3. 启动失败:先看 `logs/dev-services/backend.log` / `frontend.log`
+4. OAuth 偶发慢:优先看日志中的 `oauth_mail_otp_timeout` 是否增多(邮箱链路波动)
+5. 持久化bug 若出现卡住无法stop,把持久化文件`.maintainer_run_state.json`删掉即可
+---
+
+## 安全提示
+
+- `config.json`、`admin_token.txt` 可能包含敏感信息,不要公开上传。
+- 对外发布代码时,建议仅保留 `config.example.json`。
diff --git a/api_server.py b/api_server.py
new file mode 100644
index 0000000..e4522eb
--- /dev/null
+++ b/api_server.py
@@ -0,0 +1,736 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+import json
+import math
+import os
+import re
+import secrets
+import signal
+import shutil
+import subprocess
+import sys
+import threading
+import time
+from collections import deque
+from datetime import datetime
+from http import HTTPStatus
+from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from auto_pool_maintainer import get_candidates_count
+
+PROJECT_ROOT = Path(__file__).resolve().parent
+APP_DATA_DIR = Path(os.environ.get("APP_DATA_DIR", str(PROJECT_ROOT)))
+CONFIG_PATH = Path(os.environ.get("APP_CONFIG_PATH", str(APP_DATA_DIR / "config.json")))
+LOGS_DIR = Path(os.environ.get("APP_LOG_DIR", str(APP_DATA_DIR / "logs")))
+TEMPLATE_CONFIG_PATH = Path(os.environ.get("APP_TEMPLATE_CONFIG_PATH", str(PROJECT_ROOT / "config.example.json")))
+API_HOST = os.environ.get("APP_HOST", "127.0.0.1")
+API_PORT = int(os.environ.get("APP_PORT", "8318"))
+ADMIN_TOKEN_ENV = os.environ.get("APP_ADMIN_TOKEN", "").strip()
+ADMIN_TOKEN_FILE = Path(os.environ.get("APP_ADMIN_TOKEN_FILE", str(APP_DATA_DIR / "admin_token.txt")))
+RUN_STATE_FILE = Path(os.environ.get("APP_RUN_STATE_FILE", str(APP_DATA_DIR / ".maintainer_run_state.json")))
+MASKED_VALUE = "__MASKED__"
+RUN_PROCESS: Optional[subprocess.Popen[str]] = None
+RUN_MODE: str = ""
+RUN_LOG_PATH: str = ""
+RUN_PROCESS_LOCK = threading.Lock()
+ADMIN_TOKEN_LOCK = threading.Lock()
+ADMIN_TOKEN_CACHE: Optional[str] = None
+
+
+def ensure_runtime_paths() -> None:
+ APP_DATA_DIR.mkdir(parents=True, exist_ok=True)
+ CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
+ LOGS_DIR.mkdir(parents=True, exist_ok=True)
+ ADMIN_TOKEN_FILE.parent.mkdir(parents=True, exist_ok=True)
+ RUN_STATE_FILE.parent.mkdir(parents=True, exist_ok=True)
+
+
+def load_run_state() -> Dict[str, Any]:
+ ensure_runtime_paths()
+ if not RUN_STATE_FILE.exists():
+ return {}
+ try:
+ data = json.loads(RUN_STATE_FILE.read_text(encoding="utf-8"))
+ except Exception:
+ return {}
+ return data if isinstance(data, dict) else {}
+
+
+def save_run_state(pid: int, mode: str, log_path: str = "") -> None:
+ ensure_runtime_paths()
+ payload = {
+ "pid": int(pid),
+ "mode": str(mode or ""),
+ "log_path": str(log_path or ""),
+ "updated_at": datetime.now().isoformat(),
+ }
+ RUN_STATE_FILE.write_text(json.dumps(payload, ensure_ascii=False), encoding="utf-8")
+
+
+def clear_run_state() -> None:
+ try:
+ RUN_STATE_FILE.unlink(missing_ok=True)
+ except Exception:
+ pass
+
+
+def is_pid_running(pid: int) -> bool:
+ if pid <= 0:
+ return False
+ try:
+ os.kill(pid, 0)
+ return True
+ except ProcessLookupError:
+ return False
+ except PermissionError:
+ return True
+ except Exception:
+ return False
+
+
+def read_running_state() -> tuple[Optional[int], str, str]:
+ state = load_run_state()
+ raw_pid = state.get("pid")
+ mode = str(state.get("mode") or "")
+ log_path = str(state.get("log_path") or "")
+ try:
+ pid = int(raw_pid)
+ except Exception:
+ return None, mode, log_path
+ if not is_pid_running(pid):
+ clear_run_state()
+ return None, mode, log_path
+ return pid, mode, log_path
+
+
+def terminate_pid(pid: int, timeout_seconds: float = 8.0) -> bool:
+ if not is_pid_running(pid):
+ return True
+ try:
+ os.kill(pid, signal.SIGTERM)
+ except ProcessLookupError:
+ return True
+ except Exception:
+ return False
+
+ deadline = time.time() + max(0.5, timeout_seconds)
+ while time.time() < deadline:
+ if not is_pid_running(pid):
+ return True
+ time.sleep(0.2)
+ try:
+ os.kill(pid, signal.SIGKILL)
+ except ProcessLookupError:
+ return True
+ except Exception:
+ return False
+ return not is_pid_running(pid)
+
+
+def ensure_config_exists() -> None:
+ ensure_runtime_paths()
+ if CONFIG_PATH.exists():
+ return
+ if TEMPLATE_CONFIG_PATH.exists():
+ shutil.copyfile(TEMPLATE_CONFIG_PATH, CONFIG_PATH)
+ return
+ raise RuntimeError(f"配置文件不存在,且模板不存在: {CONFIG_PATH} | {TEMPLATE_CONFIG_PATH}")
+
+
+def get_admin_token() -> str:
+ global ADMIN_TOKEN_CACHE
+
+ with ADMIN_TOKEN_LOCK:
+ if ADMIN_TOKEN_CACHE:
+ return ADMIN_TOKEN_CACHE
+
+ if ADMIN_TOKEN_ENV:
+ ADMIN_TOKEN_CACHE = ADMIN_TOKEN_ENV
+ return ADMIN_TOKEN_CACHE
+
+ ensure_runtime_paths()
+ if ADMIN_TOKEN_FILE.exists():
+ token = ADMIN_TOKEN_FILE.read_text(encoding="utf-8").strip()
+ if token:
+ ADMIN_TOKEN_CACHE = token
+ return ADMIN_TOKEN_CACHE
+
+ token = secrets.token_urlsafe(32)
+ ADMIN_TOKEN_FILE.write_text(f"{token}\n", encoding="utf-8")
+ try:
+ os.chmod(ADMIN_TOKEN_FILE, 0o600)
+ except OSError:
+ pass
+ ADMIN_TOKEN_CACHE = token
+ return ADMIN_TOKEN_CACHE
+
+
+def load_config() -> Dict[str, Any]:
+ ensure_config_exists()
+ with CONFIG_PATH.open("r", encoding="utf-8") as handle:
+ data = json.load(handle)
+ if not isinstance(data, dict):
+ raise RuntimeError("config.json 顶层必须是 JSON 对象")
+ return data
+
+
+def save_config(payload: Dict[str, Any]) -> None:
+ if not isinstance(payload, dict):
+ raise RuntimeError("配置数据必须是 JSON 对象")
+ ensure_runtime_paths()
+ merged = merge_config_with_sensitive_fields(load_config(), payload)
+ with CONFIG_PATH.open("w", encoding="utf-8") as handle:
+ json.dump(merged, handle, ensure_ascii=False, indent=2)
+ handle.write("\n")
+
+
+def mask_sensitive_config(config: Dict[str, Any]) -> Dict[str, Any]:
+ masked = json.loads(json.dumps(config))
+ sensitive_fields = [
+ ("clean", "token"),
+ ("mail", "api_key"),
+ ("cfmail", "api_key"),
+ ("duckmail", "bearer"),
+ ("yyds_mail", "api_key"),
+ ]
+ for section, key in sensitive_fields:
+ sec = masked.get(section)
+ if isinstance(sec, dict) and sec.get(key):
+ sec[key] = MASKED_VALUE
+ return masked
+
+
+def merge_config_with_sensitive_fields(current: Dict[str, Any], incoming: Dict[str, Any]) -> Dict[str, Any]:
+ merged = json.loads(json.dumps(incoming))
+ sensitive_fields = [
+ ("clean", "token"),
+ ("mail", "api_key"),
+ ("cfmail", "api_key"),
+ ("duckmail", "bearer"),
+ ("yyds_mail", "api_key"),
+ ]
+ for section, key in sensitive_fields:
+ current_section = current.get(section) if isinstance(current.get(section), dict) else {}
+ merged_section = merged.get(section) if isinstance(merged.get(section), dict) else {}
+ if merged_section.get(key) == MASKED_VALUE and key in current_section:
+ merged_section[key] = current_section.get(key)
+ merged[section] = merged_section
+
+ return merged
+
+
+def is_sensitive_field_masked(value: Any) -> bool:
+ return isinstance(value, str) and value == MASKED_VALUE
+
+
+def get_latest_log_path() -> Optional[Path]:
+ ensure_runtime_paths()
+ if not LOGS_DIR.exists():
+ return None
+ candidates = sorted(LOGS_DIR.glob("pool_maintainer_*.log"), key=lambda item: item.stat().st_mtime, reverse=True)
+ return candidates[0] if candidates else None
+
+
+def tail_lines(path: Path, max_lines: int = 120) -> List[str]:
+ buffer: deque[str] = deque(maxlen=max_lines)
+ with path.open("r", encoding="utf-8", errors="replace") as handle:
+ for line in handle:
+ line = line.rstrip("\n")
+ if line.strip():
+ buffer.append(line)
+ return list(buffer)
+
+
+def tone_from_log(level: str, message: str) -> str:
+ normalized_level = level.upper()
+ normalized_message = message.lower()
+ if normalized_level in {"ERROR", "CRITICAL"}:
+ return "danger"
+ if normalized_level == "WARNING":
+ return "warning"
+ if "成功" in message or "完成" in message or "已达标" in message:
+ return "success"
+ if "失败" in message or "异常" in message or "错误" in message:
+ return "danger"
+ if "等待" in message or "进度" in message:
+ return "info"
+ if normalized_level == "INFO":
+ return "info"
+ if "warning" in normalized_message:
+ return "warning"
+ return "muted"
+
+
+def parse_log_line(index: int, raw_line: str) -> Dict[str, Any]:
+ match = re.match(r"^(?P\d{4}-\d{2}-\d{2}) (?P\d{2}:\d{2}:\d{2}) \| (?P[A-Z]+) \| (?P.*)$", raw_line)
+ if not match:
+ return {
+ "id": f"log-{index}",
+ "prefix": "[系统]",
+ "timestamp": "[--:--:--]",
+ "message": raw_line,
+ "tone": "muted",
+ }
+
+ level = match.group("level")
+ message = match.group("message")
+ prefix = f"[{level}]"
+ task_match = re.match(r"^(任务\d+)\s*\|\s*(.*)$", message)
+ if task_match:
+ prefix = f"[{task_match.group(1)}] [{level}]"
+ message = task_match.group(2)
+ return {
+ "id": f"log-{index}",
+ "prefix": prefix,
+ "timestamp": f"[{match.group('clock')}]",
+ "message": message,
+ "tone": tone_from_log(level, message),
+ }
+
+
+def build_single_account_timing(raw_lines: List[str], window_size: int = 20) -> Dict[str, Any]:
+ pattern = re.compile(
+ r"注册\+OAuth 成功: .*?\| 注册 (?P\d+(?:\.\d+)?)s \+ OAuth (?P\d+(?:\.\d+)?)s = (?P\d+(?:\.\d+)?)s"
+ )
+ samples: List[Dict[str, float]] = []
+ for line in raw_lines:
+ matched = pattern.search(line)
+ if not matched:
+ continue
+ samples.append(
+ {
+ "reg": float(matched.group("reg")),
+ "oauth": float(matched.group("oauth")),
+ "total": float(matched.group("total")),
+ }
+ )
+
+ result: Dict[str, Any] = {
+ "latest_reg_seconds": None,
+ "latest_oauth_seconds": None,
+ "latest_total_seconds": None,
+ "recent_avg_reg_seconds": None,
+ "recent_avg_oauth_seconds": None,
+ "recent_avg_total_seconds": None,
+ "recent_slow_count": 0,
+ "sample_size": 0,
+ "window_size": max(1, int(window_size)),
+ }
+ if not samples:
+ return result
+
+ latest = samples[-1]
+ recent = samples[-result["window_size"] :]
+ result["latest_reg_seconds"] = round(latest["reg"], 1)
+ result["latest_oauth_seconds"] = round(latest["oauth"], 1)
+ result["latest_total_seconds"] = round(latest["total"], 1)
+ result["recent_avg_reg_seconds"] = round(sum(item["reg"] for item in recent) / len(recent), 1)
+ result["recent_avg_oauth_seconds"] = round(sum(item["oauth"] for item in recent) / len(recent), 1)
+ result["recent_avg_total_seconds"] = round(sum(item["total"] for item in recent) / len(recent), 1)
+ result["recent_slow_count"] = sum(1 for item in recent if item["total"] >= 100.0)
+ result["sample_size"] = len(recent)
+ return result
+
+
+def parse_loop_next_check_in_seconds(raw_lines: List[str]) -> Optional[int]:
+ pattern = re.compile(
+ r"^(?P\d{4}-\d{2}-\d{2}) (?P\d{2}:\d{2}:\d{2}) \| [A-Z]+ \| 循环模式休眠 (?P\d+(?:\.\d+)?)s 后再次检查号池$"
+ )
+ now_ts = time.time()
+ for line in reversed(raw_lines):
+ matched = pattern.match(line.strip())
+ if not matched:
+ continue
+ try:
+ sleep_seconds = float(matched.group("seconds"))
+ logged_at = datetime.strptime(
+ f"{matched.group('date')} {matched.group('clock')}",
+ "%Y-%m-%d %H:%M:%S",
+ )
+ next_check_ts = logged_at.timestamp() + sleep_seconds
+ remaining = int(math.ceil(next_check_ts - now_ts))
+ return max(0, remaining)
+ except Exception:
+ continue
+ return None
+
+
+def build_runtime_status() -> Dict[str, Any]:
+ tracked_log_path = ""
+ with RUN_PROCESS_LOCK:
+ process = RUN_PROCESS
+ running = process is not None and process.poll() is None
+ run_mode = RUN_MODE if running else ""
+ if running:
+ tracked_log_path = RUN_LOG_PATH
+ if not running:
+ state_pid, state_mode, state_log_path = read_running_state()
+ if state_pid is not None:
+ running = True
+ run_mode = state_mode
+ tracked_log_path = state_log_path
+
+ status: Dict[str, Any] = {
+ "running": running,
+ "run_mode": run_mode,
+ "loop_running": running and run_mode == "loop",
+ "loop_next_check_in_seconds": None,
+ "phase": "idle",
+ "message": "等待任务启动",
+ "available_candidates": None,
+ "available_candidates_error": "",
+ "completed": 0,
+ "total": 0,
+ "percent": 0,
+ "stats": [
+ {"label": "成功", "value": 0, "icon": "☑", "tone": "success"},
+ {"label": "失败", "value": 0, "icon": "✕", "tone": "danger"},
+ {"label": "剩余", "value": 0, "icon": "⏳", "tone": "pending"},
+ ],
+ "single_account_timing": {
+ "latest_reg_seconds": None,
+ "latest_oauth_seconds": None,
+ "latest_total_seconds": None,
+ "recent_avg_reg_seconds": None,
+ "recent_avg_oauth_seconds": None,
+ "recent_avg_total_seconds": None,
+ "recent_slow_count": 0,
+ "sample_size": 0,
+ "window_size": 20,
+ },
+ "logs": [],
+ "last_log_path": "",
+ }
+
+ latest_log: Optional[Path] = None
+ if tracked_log_path:
+ tracked_path = Path(tracked_log_path)
+ if tracked_path.exists():
+ latest_log = tracked_path
+
+ if latest_log is None:
+ latest_log = get_latest_log_path()
+ if latest_log is None:
+ status["logs"] = [
+ {
+ "id": "log-empty",
+ "prefix": "[系统]",
+ "timestamp": "[--:--:--]",
+ "message": "暂无运行日志",
+ "tone": "muted",
+ }
+ ]
+ return status
+
+ try:
+ config = load_config()
+ base_url = str(((config.get("clean") or {}).get("base_url")) or "").rstrip("/")
+ token = str(((config.get("clean") or {}).get("token")) or "").strip()
+ target_type = str(((config.get("clean") or {}).get("target_type")) or "codex")
+ timeout = int(((config.get("clean") or {}).get("timeout")) or 10)
+ if base_url and token:
+ _, available_candidates = get_candidates_count(
+ base_url=base_url,
+ token=token,
+ target_type=target_type,
+ timeout=timeout,
+ )
+ status["available_candidates"] = available_candidates
+ except Exception as e:
+ status["available_candidates_error"] = str(e)
+
+ status["last_log_path"] = str(latest_log)
+ raw_lines = tail_lines(latest_log)
+ status["logs"] = [parse_log_line(index, line) for index, line in enumerate(raw_lines, start=1)]
+ status["single_account_timing"] = build_single_account_timing(raw_lines, window_size=20)
+ if status.get("loop_running"):
+ status["loop_next_check_in_seconds"] = parse_loop_next_check_in_seconds(raw_lines)
+
+ round_start_pattern = re.compile(r">>> 循环轮次 #\d+ 开始")
+ scan_lines = raw_lines
+ last_round_start_index: Optional[int] = None
+ for index, line in enumerate(raw_lines):
+ if round_start_pattern.search(line):
+ last_round_start_index = index
+ if last_round_start_index is not None:
+ scan_lines = raw_lines[last_round_start_index:]
+
+ progress_patterns = [
+ re.compile(r"补号进度: token (?P\d+)/(?P\d+) \| ✅(?P\d+) ❌(?P\d+) ⏭️(?P\d+)"),
+ re.compile(r"补号完成: token=(?P\d+)/(?P\d+), fail=(?P\d+), skip=(?P\d+)"),
+ ]
+ start_pattern = re.compile(r"开始补号: 目标 token=(?P\d+)")
+
+ success = 0
+ failed = 0
+ skipped = 0
+ total = 0
+
+ for line in reversed(scan_lines):
+ for pattern in progress_patterns:
+ matched = pattern.search(line)
+ if matched:
+ success = int(matched.group("success"))
+ failed = int(matched.group("fail"))
+ skipped = int(matched.group("skip"))
+ total = int(matched.group("total"))
+ break
+ if total:
+ break
+
+ if total == 0:
+ for line in reversed(scan_lines):
+ matched = start_pattern.search(line)
+ if matched:
+ total = int(matched.group("total"))
+ break
+
+ completed = success
+ remaining = max(total - success, 0) if total else 0
+ percent = int((success / total) * 100) if total else 0
+
+ status["completed"] = completed
+ status["total"] = total
+ status["percent"] = percent
+ status["stats"] = [
+ {"label": "成功", "value": success, "icon": "☑", "tone": "success"},
+ {"label": "失败", "value": failed, "icon": "✕", "tone": "danger"},
+ {"label": "剩余", "value": remaining, "icon": "⏳", "tone": "pending"},
+ ]
+
+ if raw_lines:
+ last_message = raw_lines[-1]
+ has_batch_start = "开始补号" in "\n".join(scan_lines)
+
+ if status["running"]:
+ if status.get("loop_running"):
+ status["phase"] = "looping"
+ status["message"] = "循环补号运行中"
+ else:
+ status["phase"] = "maintaining"
+ status["message"] = "补号任务运行中" if has_batch_start else "维护任务运行中"
+ elif "=== 账号池自动维护结束(成功)===" in last_message:
+ status["phase"] = "completed"
+ status["message"] = "最近一次维护已完成"
+ elif "=== 账号池自动维护结束(失败)===" in last_message:
+ status["phase"] = "failed"
+ status["message"] = "最近一次维护失败"
+ elif has_batch_start:
+ status["message"] = "最近一次维护已停止,日志未写入结束标记"
+ else:
+ status["message"] = "已加载最近一次运行日志"
+
+ return status
+
+
+def start_maintainer_process(*, loop_mode: bool = False) -> Dict[str, Any]:
+ global RUN_PROCESS, RUN_MODE, RUN_LOG_PATH
+
+ with RUN_PROCESS_LOCK:
+ if RUN_PROCESS is not None and RUN_PROCESS.poll() is None:
+ return {"ok": True, "started": False, "message": "维护任务已在运行中"}
+ state_pid, state_mode, state_log_path = read_running_state()
+ if state_pid is not None:
+ RUN_MODE = state_mode
+ RUN_LOG_PATH = state_log_path
+ return {
+ "ok": True,
+ "started": False,
+ "pid": state_pid,
+ "mode": state_mode,
+ "message": "维护任务已在运行中",
+ }
+
+ process_env = os.environ.copy()
+ process_env["APP_DATA_DIR"] = str(APP_DATA_DIR)
+ process_env["APP_CONFIG_PATH"] = str(CONFIG_PATH)
+ process_env["APP_LOG_DIR"] = str(LOGS_DIR)
+ planned_log_path = LOGS_DIR / f"pool_maintainer_{datetime.now().strftime('%Y%m%d_%H%M%S_%f')}.log"
+ process_env["APP_LOG_FILE"] = str(planned_log_path)
+
+ command = [sys.executable, str(PROJECT_ROOT / "auto_pool_maintainer.py")]
+ if loop_mode:
+ command.append("--loop")
+ RUN_PROCESS = subprocess.Popen(
+ command,
+ cwd=str(APP_DATA_DIR),
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ text=True,
+ env=process_env,
+ )
+ time.sleep(0.3)
+ if RUN_PROCESS.poll() is not None:
+ exit_code = RUN_PROCESS.returncode
+ RUN_PROCESS = None
+ RUN_MODE = ""
+ RUN_LOG_PATH = ""
+ clear_run_state()
+ return {
+ "ok": False,
+ "started": False,
+ "message": f"维护任务启动失败(进程已退出,code={exit_code})",
+ }
+
+ RUN_MODE = "loop" if loop_mode else "single"
+ RUN_LOG_PATH = str(planned_log_path)
+ save_run_state(RUN_PROCESS.pid, RUN_MODE, RUN_LOG_PATH)
+ return {
+ "ok": True,
+ "started": True,
+ "pid": RUN_PROCESS.pid,
+ "mode": RUN_MODE,
+ "message": "已启动循环补号任务" if loop_mode else "已启动维护任务",
+ }
+
+
+def stop_maintainer_process() -> Dict[str, Any]:
+ global RUN_PROCESS, RUN_MODE, RUN_LOG_PATH
+
+ with RUN_PROCESS_LOCK:
+ if RUN_PROCESS is not None and RUN_PROCESS.poll() is None:
+ target_pid = RUN_PROCESS.pid
+ try:
+ RUN_PROCESS.terminate()
+ try:
+ RUN_PROCESS.wait(timeout=8)
+ except subprocess.TimeoutExpired:
+ RUN_PROCESS.kill()
+ RUN_PROCESS.wait(timeout=5)
+ except Exception as e:
+ return {"ok": False, "stopped": False, "message": f"停止维护任务失败: {e}"}
+ RUN_PROCESS = None
+ RUN_MODE = ""
+ RUN_LOG_PATH = ""
+ clear_run_state()
+ return {"ok": True, "stopped": True, "pid": target_pid, "message": "已停止维护任务"}
+
+ state_pid, state_mode, state_log_path = read_running_state()
+ if state_pid is None:
+ RUN_PROCESS = None
+ RUN_MODE = ""
+ RUN_LOG_PATH = ""
+ clear_run_state()
+ return {"ok": True, "stopped": False, "message": "当前没有运行中的维护任务"}
+ target_pid = state_pid
+ RUN_MODE = state_mode
+ RUN_LOG_PATH = state_log_path
+
+ try:
+ if not terminate_pid(target_pid, timeout_seconds=8.0):
+ return {"ok": False, "stopped": False, "message": f"停止维护任务失败: pid={target_pid}"}
+ except Exception as e:
+ return {"ok": False, "stopped": False, "message": f"停止维护任务失败: {e}"}
+
+ RUN_PROCESS = None
+ RUN_MODE = ""
+ RUN_LOG_PATH = ""
+ clear_run_state()
+
+ return {"ok": True, "stopped": True, "pid": target_pid, "message": "已停止维护任务"}
+
+
+class ApiHandler(BaseHTTPRequestHandler):
+ server_version = "AutoPoolMaintainerAPI/0.1"
+
+ def _send_json(self, payload: Any, status: int = HTTPStatus.OK) -> None:
+ data = json.dumps(payload, ensure_ascii=False).encode("utf-8")
+ self.send_response(status)
+ self.send_header("Content-Type", "application/json; charset=utf-8")
+ self.send_header("Content-Length", str(len(data)))
+ self.send_header("Cache-Control", "no-store")
+ origin = self.headers.get("Origin", "")
+ if origin:
+ self.send_header("Access-Control-Allow-Origin", origin)
+ self.send_header("Vary", "Origin")
+ self.send_header("Access-Control-Allow-Headers", "Content-Type, X-Admin-Token")
+ self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
+ self.end_headers()
+ self.wfile.write(data)
+
+ def _read_json_body(self) -> Dict[str, Any]:
+ length = int(self.headers.get("Content-Length", "0") or "0")
+ raw = self.rfile.read(length) if length > 0 else b"{}"
+ data = json.loads(raw.decode("utf-8") or "{}")
+ if not isinstance(data, dict):
+ raise RuntimeError("请求体必须是 JSON 对象")
+ return data
+
+ def _send_unauthorized(self, message: str = "Unauthorized") -> None:
+ self._send_json({"error": message}, status=HTTPStatus.UNAUTHORIZED)
+
+ def _is_authorized(self) -> bool:
+ expected = get_admin_token()
+ incoming = self.headers.get("X-Admin-Token", "").strip()
+ return incoming == expected
+
+ def _require_auth(self) -> bool:
+ if self.path == "/api/health":
+ return True
+ if self._is_authorized():
+ return True
+ self._send_unauthorized("Invalid or missing X-Admin-Token")
+ return False
+
+ def do_OPTIONS(self) -> None:
+ if not self._require_auth():
+ return
+ self._send_json({"ok": True})
+
+ def do_GET(self) -> None:
+ if not self._require_auth():
+ return
+ if self.path == "/api/config":
+ self._send_json(mask_sensitive_config(load_config()))
+ return
+ if self.path == "/api/runtime/status":
+ self._send_json(build_runtime_status())
+ return
+ if self.path == "/api/health":
+ self._send_json({"ok": True, "time": datetime.now().isoformat()})
+ return
+ self._send_json({"error": "Not Found"}, status=HTTPStatus.NOT_FOUND)
+
+ def do_POST(self) -> None:
+ if not self._require_auth():
+ return
+ if self.path == "/api/config":
+ payload = self._read_json_body()
+ save_config(payload)
+ self._send_json(mask_sensitive_config(load_config()))
+ return
+ if self.path == "/api/runtime/start":
+ self._send_json(start_maintainer_process())
+ return
+ if self.path == "/api/runtime/start-loop":
+ self._send_json(start_maintainer_process(loop_mode=True))
+ return
+ if self.path == "/api/runtime/stop":
+ self._send_json(stop_maintainer_process())
+ return
+ self._send_json({"error": "Not Found"}, status=HTTPStatus.NOT_FOUND)
+
+ def log_message(self, format: str, *args: Any) -> None:
+ return
+
+
+def run_server(host: str = API_HOST, port: int = API_PORT) -> None:
+ ensure_runtime_paths()
+ admin_token = get_admin_token()
+ if ADMIN_TOKEN_ENV:
+ print("Using APP_ADMIN_TOKEN from environment.")
+ else:
+ print(f"Generated admin token saved to: {ADMIN_TOKEN_FILE}")
+ print(f"Generated admin token: {admin_token}")
+ server = ThreadingHTTPServer((host, port), ApiHandler)
+ print(f"API server listening on http://{host}:{port}")
+ server.serve_forever()
+
+
+if __name__ == "__main__":
+ run_server()
diff --git a/auto_pool_maintainer.py b/auto_pool_maintainer.py
new file mode 100644
index 0000000..a903aaf
--- /dev/null
+++ b/auto_pool_maintainer.py
@@ -0,0 +1,5413 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+import argparse
+import asyncio
+import base64
+import csv
+import datetime as dt
+import hashlib
+import json
+import logging
+import os
+import random
+import re
+import secrets
+import string
+import sys
+import threading
+import time
+import uuid
+from collections import Counter
+from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait
+from dataclasses import dataclass
+from email.utils import parsedate_to_datetime
+from pathlib import Path
+from typing import Any, Callable, Dict, List, Optional, Tuple
+from urllib.parse import parse_qs, quote, unquote, urlencode, urlparse
+
+import requests
+from requests.adapters import HTTPAdapter
+from urllib3.util.retry import Retry
+
+try:
+ import aiohttp
+except Exception:
+ aiohttp = None
+
+
# Base origin for the OpenAI auth flow.
OPENAI_AUTH_BASE = "https://auth.openai.com"
# Desktop-Chrome user agent so requests look browser-originated.
USER_AGENT = (
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
    "AppleWebKit/537.36 (KHTML, like Gecko) "
    "Chrome/145.0.0.0 Safari/537.36"
)
# User agent for the management API (mimics the codex CLI).
DEFAULT_MGMT_UA = "codex_cli_rs/0.76.0 (Debian 13.0.0; x86_64) WindowsTerminal"
# Loop-mode scheduling default and lower bound, in seconds.
DEFAULT_LOOP_INTERVAL_SECONDS = 60.0
MIN_LOOP_INTERVAL_SECONDS = 5.0

# Header set for XHR/JSON (fetch-style) requests against the auth origin.
COMMON_HEADERS = {
    "accept": "application/json",
    "accept-language": "en-US,en;q=0.9",
    "content-type": "application/json",
    "origin": OPENAI_AUTH_BASE,
    "user-agent": USER_AGENT,
    "sec-ch-ua": '"Google Chrome";v="145", "Not?A_Brand";v="8", "Chromium";v="145"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"Windows"',
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-origin",
}
# Header set for top-level document navigations (full page loads).
NAVIGATE_HEADERS = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
    "accept-language": "en-US,en;q=0.9",
    "user-agent": USER_AGENT,
    "sec-ch-ua": '"Google Chrome";v="145", "Not?A_Brand";v="8", "Chromium";v="145"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"Windows"',
    "sec-fetch-dest": "document",
    "sec-fetch-mode": "navigate",
    "sec-fetch-site": "same-origin",
    "sec-fetch-user": "?1",
    "upgrade-insecure-requests": "1",
}

# Lowercase substrings marking a flow failure as transient (worth retrying).
TRANSIENT_FLOW_MARKERS_DEFAULT = (
    "sentinel_",
    "oauth_authorization_code_not_found",
    "headers_failed",
    "server disconnected",
    "unexpected_eof_while_reading",
    "unexpected eof while reading",
    "timeout",
    "timed out",
    "transport",
    "remoteprotocolerror",
    "connection reset",
    "temporarily unavailable",
    "network",
    "eof occurred",
    "http_429",
    "http_500",
    "http_502",
    "http_503",
    "http_504",
)

# Lowercase substrings indicating the auth flow demands phone verification.
PHONE_VERIFICATION_MARKERS_DEFAULT = (
    "add_phone",
    "/add-phone",
    "phone_verification",
    "phone-verification",
    "phone/verify",
)

# Field-name substrings whose values get masked in flow traces.
TRACE_REDACT_KEYS = (
    "password",
    "passwd",
    "authorization",
    "cookie",
    "token",
    "secret",
    "session",
    "csrf",
    "code_verifier",
)
# URL query parameters whose values get masked in traced URLs.
TRACE_REDACT_QUERY_KEYS = {
    "code",
    "state",
    "access_token",
    "refresh_token",
    "id_token",
    "session_token",
    "csrfToken",
}
+
+
def is_transient_flow_error(reason: str | None, markers: tuple[str, ...] = TRANSIENT_FLOW_MARKERS_DEFAULT) -> bool:
    """Return True when `reason` contains any known transient-failure marker."""
    normalized = str(reason or "").strip().lower()
    if not normalized:
        return False
    for marker in markers:
        if marker in normalized:
            return True
    return False
+
+
def parse_marker_config(raw: Any, *, fallback: tuple[str, ...]) -> tuple[str, ...]:
    """Normalize a comma-separated string or sequence into lowercase markers.

    Empty entries are dropped; when nothing survives, `fallback` is returned.
    """
    if isinstance(raw, str):
        parts = raw.split(",")
    elif isinstance(raw, (list, tuple)):
        parts = [str(item) for item in raw]
    else:
        parts = []
    cleaned = tuple(part.strip().lower() for part in parts if part.strip())
    return cleaned or fallback
+
+
def parse_choice(raw: Any, *, allowed: tuple[str, ...], fallback: str) -> str:
    """Lowercase `raw` and return it when it is one of `allowed`, else `fallback`."""
    candidate = str(raw or "").strip().lower()
    if candidate in allowed:
        return candidate
    return fallback
+
+
def parse_bool(raw: Any, *, fallback: bool) -> bool:
    """Coerce bools and common truthy/falsey strings; anything else -> `fallback`.

    Note: non-string falsey inputs (0, None, "") coerce to the empty string
    and therefore yield `fallback`, matching the original behavior.
    """
    if isinstance(raw, bool):
        return raw
    text = str(raw or "").strip().lower()
    if text in {"1", "true", "yes", "on"}:
        return True
    if text in {"0", "false", "no", "off"}:
        return False
    return fallback
+
+
def parse_otp_validate_order(raw: Any) -> tuple[str, ...]:
    """Parse the OTP validation order, keeping only the known stage names."""
    parsed = parse_marker_config(raw, fallback=("normal", "sentinel"))
    kept = tuple(stage for stage in parsed if stage in {"normal", "sentinel"})
    return kept or ("normal", "sentinel")
+
+
def requires_phone_verification(
    payload: Dict[str, Any] | None,
    response_text: str = "",
    markers: tuple[str, ...] = PHONE_VERIFICATION_MARKERS_DEFAULT,
) -> bool:
    """Detect whether the auth flow is demanding phone verification.

    Scans the response page type, continue_url and raw body text for any
    configured phone-verification marker (case-insensitive).
    """
    data = payload if isinstance(payload, dict) else {}
    page = data.get("page") or {}
    page_type = ""
    if isinstance(page, dict):
        page_type = str(page.get("type") or "").strip().lower()
    continue_url = str(data.get("continue_url") or "").strip().lower()
    haystack = " ".join([page_type, continue_url, str(response_text or "").lower()])
    for marker in markers:
        needle = str(marker).strip().lower()
        if needle and needle in haystack:
            return True
    return False
+
+
def extract_oauth_callback_params_from_url(url: str) -> Optional[Dict[str, str]]:
    """Pull OAuth callback query params (must include `code`) out of a URL.

    Returns a dict with the first value of every non-empty query key, or
    None when the URL has no usable `code` parameter.
    """
    if not url or "code=" not in url:
        return None
    try:
        query = parse_qs(urlparse(url).query)
    except Exception:
        return None
    codes = query.get("code", [])
    if not codes or not codes[0]:
        return None
    collected = {str(key): str(values[0]) for key, values in query.items() if values and values[0]}
    return collected or None
+
+
def extract_oauth_code_from_url(url: str) -> Optional[str]:
    """Return just the `code` query parameter from a callback URL, if any."""
    params = extract_oauth_callback_params_from_url(url) or {}
    code = str(params.get("code") or "").strip()
    return code or None
+
+
def extract_oauth_callback_params_from_payload(payload: Any) -> Optional[Dict[str, str]]:
    """Deep-search an arbitrary payload for OAuth callback parameters.

    Walks dicts/lists/strings looking for URL-ish fields containing a
    `code` query parameter; bare `code` keys found in dicts are kept as a
    lower-priority fallback, returned only when no URL match exists.
    """
    seen: set[int] = set()
    direct_params_candidates: list[Dict[str, str]] = []

    def _extract_from_text(text: Any) -> Optional[Dict[str, str]]:
        # Try the text as-is and percent-decoded; fall back to a loose regex.
        candidate = str(text or "").strip()
        if not candidate:
            return None
        for variant in (candidate, unquote(candidate)):
            params = extract_oauth_callback_params_from_url(variant)
            if params:
                return params
            if "code=" in variant:
                # Fix: the character classes previously used "\\s" inside a raw
                # string, which excludes the literal characters "\" and "s"
                # (truncating any code containing an "s") rather than whitespace.
                match = re.search(r"[?&]code=([^&\"'\s]+)([^\"'\s]*)", variant)
                if match:
                    query = f"code={match.group(1)}{match.group(2)}"
                    return extract_oauth_callback_params_from_url(f"http://localhost/dummy?{query}")
        return None

    def _walk(value: Any) -> Optional[Dict[str, str]]:
        if isinstance(value, dict):
            value_id = id(value)
            if value_id in seen:  # guard against cyclic payloads
                return None
            seen.add(value_id)

            # A bare "code" key is remembered, but URL-derived params win.
            direct_code = str(value.get("code") or "").strip()
            if direct_code:
                params = {"code": direct_code}
                for key in ("scope", "state"):
                    direct_value = str(value.get(key) or "").strip()
                    if direct_value:
                        params[key] = direct_value
                direct_params_candidates.append(params)

            for key in ("continue_url", "callback_url", "url", "redirect_url"):
                params = _extract_from_text(value.get(key))
                if params:
                    return params

            for nested_value in value.values():
                params = _walk(nested_value)
                if params:
                    return params
            return None

        if isinstance(value, (list, tuple, set)):
            for item in value:
                params = _walk(item)
                if params:
                    return params
            return None

        if isinstance(value, str):
            return _extract_from_text(value)

        return None

    extracted = _walk(payload)
    if extracted:
        return extracted
    return direct_params_candidates[0] if direct_params_candidates else None
+
+
def extract_oauth_callback_params_from_response(
    payload: Dict[str, Any] | None,
    response_headers: Optional[Dict[str, Any]] = None,
    response_url: str = "",
    response_text: str = "",
) -> Optional[Dict[str, str]]:
    """Locate OAuth callback params in a response's payload/headers/URL/body.

    Precedence: URL-ish payload fields, Location header, final URL, deep
    payload walk, then a last-ditch regex over the response body text.
    """
    url_candidates: list[str] = []
    if isinstance(payload, dict):
        for key in ("continue_url", "callback_url", "url", "redirect_url"):
            if payload.get(key):
                url_candidates.append(str(payload[key]))
    if isinstance(response_headers, dict):
        for key in ("Location", "location"):
            if response_headers.get(key):
                url_candidates.append(str(response_headers[key]))
    if response_url:
        url_candidates.append(str(response_url))

    for candidate in url_candidates:
        found = extract_oauth_callback_params_from_url(candidate)
        if found:
            return found

    found = extract_oauth_callback_params_from_payload(payload)
    if found:
        return found

    if response_text and "code=" in response_text:
        # Fix: "\\s" in the original raw-string character classes excluded the
        # literal characters "\" and "s" instead of whitespace, truncating any
        # code containing an "s"; "\s" restores the intended whitespace class.
        match = re.search(r"[?&]code=([^&\"'\s]+)([^\"'\s]*)", response_text)
        if match:
            query = f"code={match.group(1)}{match.group(2)}"
            return extract_oauth_callback_params_from_url(f"http://localhost/dummy?{query}")

    return None
+
+
def extract_oauth_callback_params_from_session_cookies(session: requests.Session) -> Optional[Dict[str, str]]:
    """Hunt for OAuth callback params hidden inside session cookie values.

    Cookie values are expanded breadth-first: each candidate string is
    percent-decoded, split at the first "." (JWT-style prefix), and
    base64url-decoded when it looks like base64; every printable expansion
    is scanned for callback params (JSON payloads are parsed first).
    Returns the first match, or None.
    """
    jar = getattr(session, "cookies", None)
    if jar is None:
        return None

    def _iter_text_candidates(raw_value: Any) -> list[str]:
        # BFS over decodings of one cookie value; `queued`/`processed`
        # prevent re-expanding the same text twice.
        first = str(raw_value or "").strip()
        if not first:
            return []
        queue = [first]
        queued = {first}
        processed: set[str] = set()
        collected: list[str] = []

        def _push(text: str) -> None:
            candidate = str(text or "").strip()
            if not candidate or candidate in processed or candidate in queued:
                return
            queue.append(candidate)
            queued.add(candidate)

        while queue:
            current = queue.pop(0)
            if not current or current in processed:
                continue
            processed.add(current)
            collected.append(current)

            # Expansion 1: percent-decode. Expansion 2: JWT-ish first segment.
            decoded_variants = [unquote(current)]
            if "." in current:
                decoded_variants.append(current.split(".", 1)[0])

            for variant in decoded_variants:
                stripped = str(variant or "").strip()
                if not stripped:
                    continue
                if stripped != current:
                    _push(stripped)
                # Expansion 3: base64url decode, only when the text is pure
                # base64url alphabet and the result is printable.
                base64_candidate = stripped
                if re.fullmatch(r"[A-Za-z0-9_-]+", base64_candidate):
                    padding = (-len(base64_candidate)) % 4
                    try:
                        decoded = base64.urlsafe_b64decode(base64_candidate + ("=" * padding)).decode("utf-8")
                    except Exception:
                        decoded = ""
                    if decoded and all(ch.isprintable() or ch in "\r\n\t" for ch in decoded):
                        _push(decoded)

        return collected

    try:
        cookies = list(jar)
    except Exception:
        return None

    for cookie in cookies:
        # Only inspect cookies whose names plausibly relate to the auth flow.
        cookie_name = str(getattr(cookie, "name", "") or "").strip().lower()
        if cookie_name and not (
            cookie_name.startswith("oai-")
            or any(marker in cookie_name for marker in ("auth", "session", "oauth", "login", "callback", "redirect"))
        ):
            continue
        for candidate in _iter_text_candidates(getattr(cookie, "value", "")):
            # JSON-looking candidates are parsed and walked as structures first.
            if candidate.startswith("{") or candidate.startswith("["):
                try:
                    parsed = json.loads(candidate)
                except Exception:
                    parsed = None
                params = extract_oauth_callback_params_from_payload(parsed)
                if params:
                    return params
            params = extract_oauth_callback_params_from_payload(candidate)
            if params:
                return params
    return None
+
+
def extract_oauth_code_from_response(
    payload: Dict[str, Any] | None,
    response_headers: Optional[Dict[str, Any]] = None,
    response_url: str = "",
    response_text: str = "",
) -> Optional[str]:
    """Convenience wrapper returning only the OAuth `code` from a response."""
    params = extract_oauth_callback_params_from_response(
        payload,
        response_headers=response_headers,
        response_url=response_url,
        response_text=response_text,
    ) or {}
    code = str(params.get("code") or "").strip()
    return code or None
+
+
+def extract_continue_url_from_response(
+ payload: Dict[str, Any] | None,
+ response_headers: Optional[Dict[str, Any]] = None,
+ response_url: str = "",
+) -> str:
+ if isinstance(payload, dict):
+ for key in ("continue_url", "callback_url", "url", "redirect_url"):
+ value = str(payload.get(key) or "").strip()
+ if value:
+ return value
+
+ if isinstance(response_headers, dict):
+ for key in ("Location", "location"):
+ value = str(response_headers.get(key) or "").strip()
+ if value:
+ return value
+
+ return str(response_url or "").strip()
+
+
def parse_mail_timestamp(value: Any) -> Optional[float]:
    """Parse a mail timestamp (epoch seconds/millis, ISO-8601, or RFC 2822).

    Numbers above 1e10 are treated as milliseconds. Returns epoch seconds,
    or None when the value cannot be interpreted.
    """

    def _from_number(number: float) -> Optional[float]:
        if number > 10_000_000_000:
            return number / 1000.0
        return number if number > 0 else None

    if value is None:
        return None
    if isinstance(value, (int, float)):
        return _from_number(float(value))

    text = str(value).strip()
    if not text:
        return None

    try:
        numeric = float(text)
    except ValueError:
        numeric = None
    if numeric is not None:
        result = _from_number(numeric)
        if result is not None:
            return result

    # ISO-8601, with a "Z" -> "+00:00" fallback for older Python versions.
    iso_candidates = [text]
    if text.endswith("Z"):
        iso_candidates.append(text[:-1] + "+00:00")
    for candidate in iso_candidates:
        try:
            parsed = dt.datetime.fromisoformat(candidate)
        except ValueError:
            continue
        if parsed.tzinfo is None:
            parsed = parsed.replace(tzinfo=dt.timezone.utc)
        return parsed.timestamp()

    # Last resort: RFC 2822 date header format.
    try:
        return parsedate_to_datetime(text).timestamp()
    except Exception:
        return None
+
+
def extract_mail_timestamp(payload: Dict[str, Any]) -> Optional[float]:
    """Try the common timestamp fields of a mail payload; first parseable wins."""
    for field in ("received_at", "receivedAt", "created_at", "createdAt", "date", "timestamp"):
        parsed = parse_mail_timestamp(payload.get(field))
        if parsed is not None:
            return parsed
    return None
+
+
def is_mail_recent_enough(payload: Dict[str, Any], not_before_ts: Optional[float]) -> bool:
    """Accept mail without a parseable timestamp; otherwise require ts >= cutoff - 2s."""
    if not_before_ts is None:
        return True
    ts = extract_mail_timestamp(payload)
    return True if ts is None else ts >= float(not_before_ts) - 2.0
+
+
def flow_step_retry_delay(conf: Dict[str, Any], attempt_number: int) -> float:
    """Linear backoff per attempt, floored at 0.05s and clamped to the configured cap."""
    attempt = max(1, int(attempt_number))
    base = float(pick_conf(conf, "flow", "step_retry_delay_base", default=0.2) or 0.2)
    cap = float(pick_conf(conf, "flow", "step_retry_delay_cap", default=0.8) or 0.8)
    delay = max(0.05, base) * attempt
    return min(max(0.05, cap), delay)
+
+
def flow_step_retry_attempts(conf: Dict[str, Any]) -> int:
    """Per-step retry attempt count (config `flow.step_retry_attempts`, minimum 1)."""
    raw = pick_conf(conf, "flow", "step_retry_attempts", default=2)
    return max(1, int(raw or 2))
+
+
def flow_outer_retry_attempts(conf: Dict[str, Any], fallback: int = 2) -> int:
    """Outer-loop retry attempt count (config `flow.outer_retry_attempts`, minimum 1)."""
    raw = pick_conf(conf, "flow", "outer_retry_attempts", default=fallback)
    return max(1, int(raw or fallback))
+
+
def oauth_local_retry_attempts(conf: Dict[str, Any], fallback: int = 3) -> int:
    """Local OAuth retry attempt count (config `flow.oauth_local_retry_attempts`, minimum 1)."""
    raw = pick_conf(conf, "flow", "oauth_local_retry_attempts", default=fallback)
    return max(1, int(raw or fallback))
+
+
def load_json(path: Path) -> Dict[str, Any]:
    """Load a JSON object from `path`; a missing file yields {}.

    Raises RuntimeError when the file parses to anything but a dict.
    """
    if not path.exists():
        return {}
    with path.open("r", encoding="utf-8") as handle:
        data = json.load(handle)
    if isinstance(data, dict):
        return data
    raise RuntimeError(f"配置文件格式错误,顶层必须是对象: {path}")
+
+
+def _mask_trace_secret(value: Any) -> str:
+ text = str(value or "")
+ if not text:
+ return ""
+ if len(text) <= 8:
+ return "***"
+ return f"{text[:4]}...{text[-4:]}(len={len(text)})"
+
+
+def _trim_trace_text(value: Any, limit: int) -> str:
+ text = str(value or "")
+ safe_limit = max(128, int(limit or 0))
+ if len(text) <= safe_limit:
+ return text
+ return f"{text[:safe_limit]}...(truncated,total={len(text)})"
+
+
+def _sanitize_trace_url(url: str, reveal_sensitive: bool, body_limit: int) -> str:
+ text = str(url or "")
+ if not text:
+ return ""
+ try:
+ parsed = urlparse(text)
+ query = parse_qs(parsed.query, keep_blank_values=True)
+ sanitized_query: Dict[str, List[str]] = {}
+ for key, values in query.items():
+ if reveal_sensitive or key not in TRACE_REDACT_QUERY_KEYS:
+ sanitized_query[key] = values
+ else:
+ sanitized_query[key] = [_mask_trace_secret(item) for item in values]
+ encoded_query = urlencode(sanitized_query, doseq=True)
+ return parsed._replace(query=encoded_query).geturl()
+ except Exception:
+ return _trim_trace_text(text, body_limit)
+
+
def sanitize_trace_value(value: Any, *, key: str = "", reveal_sensitive: bool = False, body_limit: int = 4096) -> Any:
    """Recursively sanitize a value for trace output.

    Dicts/sequences are walked (dict keys extend the dotted `key` path);
    scalars keep their type; strings under a sensitive-looking key are
    masked, URL-looking keys get query redaction, everything else is
    length-trimmed to `body_limit`.
    """
    normalized_key = str(key or "").strip().lower()
    # Sensitivity is decided by substring match against TRACE_REDACT_KEYS.
    is_sensitive_key = any(marker in normalized_key for marker in TRACE_REDACT_KEYS)

    if isinstance(value, dict):
        return {
            str(item_key): sanitize_trace_value(
                item_value,
                key=f"{normalized_key}.{item_key}" if normalized_key else str(item_key),
                reveal_sensitive=reveal_sensitive,
                body_limit=body_limit,
            )
            for item_key, item_value in value.items()
        }
    if isinstance(value, (list, tuple, set)):
        # Note: tuples/sets come back as lists (JSON-friendly).
        return [
            sanitize_trace_value(
                item,
                key=normalized_key,
                reveal_sensitive=reveal_sensitive,
                body_limit=body_limit,
            )
            for item in value
        ]
    if value is None or isinstance(value, (bool, int, float)):
        return value
    if isinstance(value, bytes):
        value = value.decode("utf-8", errors="replace")

    text = str(value)
    if not reveal_sensitive and is_sensitive_key:
        return _mask_trace_secret(text)
    if "url" in normalized_key:
        return _sanitize_trace_url(text, reveal_sensitive, body_limit)
    return _trim_trace_text(text, body_limit)
+
+
def describe_session_cookies(session: Any, *, reveal_sensitive: bool = False) -> List[Dict[str, Any]]:
    """Describe up to 20 session cookies with sanitized values for tracing."""
    jar = getattr(session, "cookies", None)
    if jar is None:
        return []
    try:
        cookies = list(jar)
    except Exception:
        return []
    summaries: List[Dict[str, Any]] = []
    for cookie in cookies[:20]:
        cookie_name = str(getattr(cookie, "name", "") or "")
        summaries.append(
            {
                "name": cookie_name,
                "domain": str(getattr(cookie, "domain", "") or ""),
                "path": str(getattr(cookie, "path", "") or ""),
                "value": sanitize_trace_value(
                    getattr(cookie, "value", ""),
                    key=f"cookie.{cookie_name}",
                    reveal_sensitive=reveal_sensitive,
                ),
            }
        )
    return summaries
+
+
def build_response_trace_payload(response: requests.Response, *, reveal_sensitive: bool = False, body_limit: int = 4096) -> Dict[str, Any]:
    """Summarize an HTTP response (status, URL, headers, body preview, redirects) for tracing."""

    def _clean(value: Any, key: str) -> Any:
        return sanitize_trace_value(value, key=key, reveal_sensitive=reveal_sensitive, body_limit=body_limit)

    redirects: List[Dict[str, Any]] = []
    for hop in getattr(response, "history", []) or []:
        hop_headers = getattr(hop, "headers", {}) or {}
        redirects.append(
            {
                "status_code": getattr(hop, "status_code", None),
                "url": _clean(getattr(hop, "url", ""), "history.url"),
                "location": _clean(hop_headers.get("Location", ""), "history.location"),
            }
        )
    return {
        "status_code": getattr(response, "status_code", None),
        "url": _clean(getattr(response, "url", ""), "response.url"),
        "headers": _clean(dict(getattr(response, "headers", {}) or {}), "response.headers"),
        "body_preview": _clean(getattr(response, "text", ""), "response.body"),
        "history": redirects,
    }
+
+
+class FlowTraceRecorder:
+ def __init__(self, file_path: str | Path, *, reveal_sensitive: bool = False, body_limit: int = 4096, enabled: bool = True):
+ self.path = Path(file_path)
+ self.enabled = bool(enabled)
+ self.reveal_sensitive = bool(reveal_sensitive)
+ self.body_limit = max(256, int(body_limit or 0))
+ self._lock = threading.Lock()
+ if self.enabled:
+ self.path.parent.mkdir(parents=True, exist_ok=True)
+
+ def record(self, event: str, **fields: Any) -> None:
+ if not self.enabled:
+ return
+ payload = {
+ "ts": dt.datetime.now(tz=dt.timezone.utc).isoformat(),
+ "event": str(event or "").strip() or "unknown",
+ }
+ for key, value in fields.items():
+ payload[str(key)] = sanitize_trace_value(
+ value,
+ key=str(key),
+ reveal_sensitive=self.reveal_sensitive,
+ body_limit=self.body_limit,
+ )
+ line = json.dumps(payload, ensure_ascii=False)
+ with self._lock:
+ with self.path.open("a", encoding="utf-8") as trace_file:
+ trace_file.write(f"{line}\n")
+
+
def build_flow_trace_recorder(log_dir: Path) -> Optional[FlowTraceRecorder]:
    """Create a flow-trace recorder from APP_FLOW_TRACE* env vars, or None when disabled."""
    if not parse_bool(os.environ.get("APP_FLOW_TRACE", ""), fallback=True):
        return None

    reveal_sensitive = parse_bool(os.environ.get("APP_FLOW_TRACE_RAW", ""), fallback=False)
    try:
        body_limit = max(256, int(os.environ.get("APP_FLOW_TRACE_BODY_LIMIT", "") or 6000))
    except Exception:
        body_limit = 6000

    raw_dir = str(os.environ.get("APP_FLOW_TRACE_DIR", "flow-trace") or "flow-trace").strip()
    trace_dir = Path(raw_dir)
    if not trace_dir.is_absolute():
        trace_dir = (log_dir / trace_dir).resolve()

    stamp = dt.datetime.now().strftime("%Y%m%d_%H%M%S")
    trace_path = trace_dir / f"flow_trace_{stamp}.jsonl"
    recorder = FlowTraceRecorder(trace_path, reveal_sensitive=reveal_sensitive, body_limit=body_limit, enabled=True)
    recorder.record(
        "flow_trace_started",
        path=str(trace_path),
        reveal_sensitive=reveal_sensitive,
        body_limit=body_limit,
        pid=os.getpid(),
    )
    return recorder
+
+
def setup_logger(log_dir: Path) -> tuple[logging.Logger, Path]:
    """Configure the pool_maintainer logger (file + stdout) and attach a flow tracer.

    Honors APP_LOG_FILE (relative paths resolve under `log_dir`); otherwise
    a timestamped log file is created inside `log_dir`.
    """
    override = str(os.environ.get("APP_LOG_FILE", "") or "").strip()
    if override:
        log_path = Path(override)
        if not log_path.is_absolute():
            log_path = (log_dir / log_path).resolve()
        log_path.parent.mkdir(parents=True, exist_ok=True)
    else:
        log_dir.mkdir(parents=True, exist_ok=True)
        stamp = dt.datetime.now().strftime("%Y%m%d_%H%M%S")
        log_path = log_dir / f"pool_maintainer_{stamp}.log"

    logger = logging.getLogger("pool_maintainer")
    logger.setLevel(logging.INFO)
    logger.handlers.clear()

    formatter = logging.Formatter("%(asctime)s | %(levelname)s | %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    for handler in (logging.FileHandler(log_path, encoding="utf-8"), logging.StreamHandler(sys.stdout)):
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # The flow-trace recorder piggybacks on the logger object for easy access.
    flow_trace = build_flow_trace_recorder(log_dir)
    setattr(logger, "flow_trace", flow_trace)
    if flow_trace is not None:
        logger.info("详细流程日志: %s", flow_trace.path)
    return logger, log_path
+
+
def ensure_parent_dir(path: str) -> None:
    """Make sure the directory containing `path` exists (no-op for bare names)."""
    parent = os.path.dirname(path)
    if not parent:
        return
    os.makedirs(parent, exist_ok=True)
+
+
def mgmt_headers(token: str) -> Dict[str, str]:
    """Build Authorization headers for the management API."""
    return {
        "Authorization": f"Bearer {token}",
        "Accept": "application/json",
    }
+
+
def get_item_type(item: Dict[str, Any]) -> str:
    """Return the item's type; also reads the legacy/misspelled key "typo".

    NOTE(review): "typo" looks like a misspelling of "type" that upstream
    data actually carries — confirm against the management API payloads.
    """
    value = item.get("type") or item.get("typo")
    return str(value or "")
+
+
def is_item_disabled(item: Dict[str, Any]) -> bool:
    """True when the item carries a disabled flag, or its status/state says so."""
    if parse_bool(item.get("disabled"), fallback=False):
        return True
    status = str(item.get("status") or item.get("state") or "").strip().lower()
    return status in {"disabled", "inactive"}
+
+
def extract_chatgpt_account_id(item: Dict[str, Any]) -> Optional[str]:
    """Return the first non-empty account id across the known key spellings."""
    for key in ("chatgpt_account_id", "chatgptAccountId", "account_id", "accountId"):
        value = item.get(key)
        if value:
            return str(value)
    return None
+
+
def safe_json_text(text: str) -> Dict[str, Any]:
    """Parse JSON, returning {} on any parse error.

    NOTE(review): a successfully parsed non-dict (e.g. a JSON array) is
    returned as-is despite the Dict annotation; callers here guard with
    isinstance checks — confirm before tightening.
    """
    try:
        parsed = json.loads(text)
    except Exception:
        return {}
    return parsed
+
+
def normalize_status_code(value: Any) -> Optional[int]:
    """Coerce a status code to int; None or unparseable values become None."""
    if value is None:
        return None
    try:
        return int(value)
    except Exception:
        return None
+
+
def normalize_used_percent(value: Any) -> Optional[float]:
    """Clamp a usage percentage to [0, 100], rounded to 2 decimals.

    Returns None for values that cannot be coerced to float and — fix —
    also for NaN, which previously slipped past both clamp comparisons
    and was returned unchanged.
    """
    try:
        num = float(value)
    except Exception:
        return None
    if num != num:  # NaN check: both `< 0` and `> 100` are False for NaN
        return None
    if num < 0:
        return 0.0
    if num > 100:
        return 100.0
    return round(num, 2)
+
+
def parse_usage_body(body_raw: Any) -> tuple[Dict[str, Any], str]:
    """Normalize a usage-response body into (parsed dict, raw text)."""
    if isinstance(body_raw, dict):
        return body_raw, json.dumps(body_raw, ensure_ascii=False)
    if isinstance(body_raw, str):
        parsed = safe_json_text(body_raw)
        if not isinstance(parsed, dict):
            parsed = {}
        return parsed, body_raw
    return {}, str(body_raw or "")
+
+
def analyze_usage_status(
    *,
    status_code: Optional[int],
    body_obj: Dict[str, Any],
    body_text: str,
    used_percent_threshold: int,
) -> Dict[str, Any]:
    """Classify a usage response: peak used percent, quota exhaustion, health.

    `is_quota` is True on explicit limit flags, HTTP 402, quota markers in
    the merged body text, any window at 100%, or a threshold breach.
    """
    rate_limit = body_obj.get("rate_limit")
    if not isinstance(rate_limit, dict):
        rate_limit = {}

    used_values: List[float] = []
    for key in ("primary_window", "secondary_window"):
        window = rate_limit.get(key)
        if isinstance(window, dict):
            pct = normalize_used_percent(window.get("used_percent"))
            if pct is not None:
                used_values.append(pct)

    used_percent = max(used_values) if used_values else None
    over_threshold = used_percent is not None and used_percent >= float(used_percent_threshold)

    limit_reached = bool(rate_limit.get("limit_reached")) or rate_limit.get("allowed") is False
    if not limit_reached:
        limit_reached = any(pct >= 100.0 for pct in used_values)

    merged_text = f"{json.dumps(body_obj, ensure_ascii=False)} {body_text or ''}".lower()
    quota_markers = ("quota exhausted", "limit reached", "payment_required")
    is_quota = bool(limit_reached or status_code == 402 or any(m in merged_text for m in quota_markers))
    is_healthy = bool(status_code == 200 and not is_quota and not over_threshold)

    return {
        "used_percent": used_percent,
        "over_threshold": bool(over_threshold),
        "is_quota": bool(is_quota or over_threshold),
        "is_healthy": is_healthy,
    }
+
+
def decide_clean_action(
    *,
    status_code: Optional[int],
    disabled: bool,
    is_quota: bool,
    over_threshold: bool,
) -> str:
    """Pick a maintenance action: delete on 401, disable on quota, re-enable healthy."""
    if status_code == 401:
        return "delete"
    if is_quota or over_threshold:
        return "disable" if not disabled else "keep"
    if disabled and status_code == 200:
        return "enable"
    return "keep"
+
+
def pick_conf(root: Dict[str, Any], section: str, key: str, *legacy_keys: str, default: Any = None) -> Any:
    """Look up `key` (then legacy aliases) inside root[section], then at top level.

    Returns the first non-None hit, else `default`. Falsy-but-set values
    (0, "", False) are returned as-is.
    """
    section_obj = root.get(section)
    scopes = (section_obj if isinstance(section_obj, dict) else {}, root)
    for scope in scopes:
        for candidate in (key, *legacy_keys):
            value = scope.get(candidate)
            if value is not None:
                return value
    return default
+
+
def pick_conf_list(root: Dict[str, Any], section: str, key: str, *legacy_keys: str) -> List[str]:
    """Resolve a config entry and normalize it into a list of mail domains."""
    raw = pick_conf(root, section, key, *legacy_keys, default=[])
    return normalize_mail_domains(raw)
+
+
def get_candidates_count_from_files(files: List[Dict[str, Any]], target_type: str) -> tuple[int, int]:
    """Count (total files, enabled files matching target_type case-insensitively)."""
    wanted = target_type.lower()
    matching = [
        entry
        for entry in files
        if get_item_type(entry).lower() == wanted and not is_item_disabled(entry)
    ]
    return len(files), len(matching)
+
+
def get_candidates_count(base_url: str, token: str, target_type: str, timeout: int) -> tuple[int, int]:
    """Fetch the auth-file list from the management API and count candidates."""
    url = f"{base_url.rstrip('/')}/v0/management/auth-files"
    resp = requests.get(url, headers=mgmt_headers(token), timeout=timeout)
    resp.raise_for_status()
    raw = resp.json()
    files = raw.get("files", []) if isinstance(raw, dict) else []
    return get_candidates_count_from_files(files, target_type)
+
+
def create_session(proxy: str = "") -> requests.Session:
    """Create a requests session retrying 429/5xx; optional proxy for both schemes."""
    session = requests.Session()
    adapter = HTTPAdapter(
        max_retries=Retry(total=3, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])
    )
    for scheme in ("https://", "http://"):
        session.mount(scheme, adapter)
    if proxy:
        session.proxies = {"http": proxy, "https": proxy}
    return session
+
+
def generate_pkce() -> tuple[str, str]:
    """Generate a PKCE (verifier, S256 challenge) pair, base64url without padding."""
    verifier = base64.urlsafe_b64encode(secrets.token_bytes(64)).rstrip(b"=").decode("ascii")
    challenge_digest = hashlib.sha256(verifier.encode("ascii")).digest()
    challenge = base64.urlsafe_b64encode(challenge_digest).rstrip(b"=").decode("ascii")
    return verifier, challenge
+
+
def generate_datadog_trace() -> Dict[str, str]:
    """Fabricate Datadog RUM trace headers with random 64-bit trace/parent ids."""
    trace_id = random.getrandbits(64)
    parent_id = random.getrandbits(64)
    return {
        "traceparent": f"00-0000000000000000{trace_id:016x}-{parent_id:016x}-01",
        "tracestate": "dd=s:1;o:rum",
        "x-datadog-origin": "rum",
        "x-datadog-parent-id": str(parent_id),
        "x-datadog-sampling-priority": "1",
        "x-datadog-trace-id": str(trace_id),
    }
+
+
def generate_random_password(length: int = 16) -> str:
    """Generate a random password with at least one upper, lower, digit and symbol.

    Fixes: shuffling now uses a CSPRNG (`secrets.SystemRandom`) instead of
    the seedable module-level `random.shuffle`, and lengths below 4 are
    clamped to 4 so the four required character classes always fit
    (previously `length < 4` produced a short/incorrect password).
    """
    safe_length = max(4, int(length))
    chars = string.ascii_letters + string.digits + "!@#$%"
    # One guaranteed character per required class, then random filler.
    pwd = [
        secrets.choice(string.ascii_uppercase),
        secrets.choice(string.ascii_lowercase),
        secrets.choice(string.digits),
        secrets.choice("!@#$%"),
    ]
    pwd.extend(secrets.choice(chars) for _ in range(safe_length - 4))
    secrets.SystemRandom().shuffle(pwd)
    return "".join(pwd)
+
+
def generate_random_name() -> tuple[str, str]:
    """Pick a random (first, last) name from small fixed pools."""
    first_names = ("James", "Robert", "John", "Michael", "David", "Mary", "Jennifer", "Linda", "Emma", "Olivia")
    last_names = ("Smith", "Johnson", "Williams", "Brown", "Jones", "Garcia", "Miller")
    return random.choice(first_names), random.choice(last_names)
+
+
def generate_random_birthday() -> str:
    """Random ISO date with birth year 1996-2006; day capped at 28 to fit any month."""
    year = random.randint(1996, 2006)
    month = random.randint(1, 12)
    day = random.randint(1, 28)
    return dt.date(year, month, day).isoformat()
+
+
class SentinelTokenGenerator:
    """Reimplements the sentinel SDK's client-side proof-of-work token scheme.

    NOTE(review): the hash, config layout and encodings appear to mirror a
    JavaScript SDK bit-for-bit; do not "clean up" constants or ordering
    without verifying against the upstream script.
    """

    # Cap on proof-of-work nonce attempts before giving up.
    MAX_ATTEMPTS = 500000
    # Marker prefix emitted when the PoW search fails.
    ERROR_PREFIX = "wQ8Lk5FbGpA2NcR9dShT6gYjU7VxZ4D"

    def __init__(self, device_id: Optional[str] = None):
        self.device_id = device_id or str(uuid.uuid4())
        self.requirements_seed = str(random.random())
        self.sid = str(uuid.uuid4())

    @staticmethod
    def _fnv1a_32(text: str) -> str:
        """FNV-1a 32-bit hash followed by extra avalanche mixing; hex output.

        NOTE(review): the post-FNV multiply/xor rounds resemble a murmur-style
        finalizer — presumably copied from the JS SDK; keep byte-identical.
        """
        h = 2166136261
        for ch in text:
            h ^= ord(ch)
            h = (h * 16777619) & 0xFFFFFFFF
        h ^= (h >> 16)
        h = (h * 2246822507) & 0xFFFFFFFF
        h ^= (h >> 13)
        h = (h * 3266489909) & 0xFFFFFFFF
        h ^= (h >> 16)
        h &= 0xFFFFFFFF
        return format(h, "08x")

    @staticmethod
    def _base64_encode(data: Any) -> str:
        # Compact JSON (no spaces) then standard base64, matching JS JSON.stringify+btoa.
        js = json.dumps(data, separators=(",", ":"), ensure_ascii=False)
        return base64.b64encode(js.encode("utf-8")).decode("ascii")

    def _get_config(self) -> List[Any]:
        """Build the browser-fingerprint config array the SDK hashes.

        Positional layout matters: index 3 is the nonce slot and index 9 an
        elapsed-time slot, both overwritten by callers.
        NOTE(review): "vendorSub−undefined" contains U+2212 (minus sign), not
        ASCII "-"; it appears intentional — confirm against the JS SDK.
        """
        now = dt.datetime.now(dt.timezone.utc).strftime("%a %b %d %Y %H:%M:%S GMT+0000 (Coordinated Universal Time)")
        perf_now = random.uniform(1000, 50000)
        time_origin = time.time() * 1000 - perf_now
        return [
            "1920x1080",
            now,
            4294705152,
            random.random(),
            USER_AGENT,
            "https://sentinel.openai.com/sentinel/20260124ceb8/sdk.js",
            None,
            None,
            "en-US",
            "en-US,en",
            random.random(),
            "vendorSub−undefined",
            "location",
            "Object",
            perf_now,
            self.sid,
            "",
            random.choice([4, 8, 12, 16]),
            time_origin,
        ]

    def _run_check(self, start_time: float, seed: str, difficulty: str, config: List[Any], nonce: int) -> Optional[str]:
        """One PoW attempt: hash(seed + encoded config) must be <= difficulty prefix."""
        config[3] = nonce
        config[9] = round((time.time() - start_time) * 1000)
        data = self._base64_encode(config)
        hash_hex = self._fnv1a_32(seed + data)
        # Lexicographic prefix comparison implements the difficulty target.
        if hash_hex[: len(difficulty)] <= difficulty:
            return data + "~S"
        return None

    def generate_requirements_token(self) -> str:
        """Token used when no proof-of-work is required ("gAAAAAC" prefix)."""
        cfg = self._get_config()
        cfg[3] = 1
        cfg[9] = round(random.uniform(5, 50))
        return "gAAAAAC" + self._base64_encode(cfg)

    def generate_token(self, seed: Optional[str] = None, difficulty: Optional[str] = None) -> str:
        """Search for a PoW-satisfying token; on exhaustion emit the error-marker token."""
        if seed is None:
            seed = self.requirements_seed
            difficulty = difficulty or "0"
        cfg = self._get_config()
        start = time.time()
        for i in range(self.MAX_ATTEMPTS):
            result = self._run_check(start, seed, difficulty or "0", cfg, i)
            if result:
                return "gAAAAAB" + result
        return "gAAAAAB" + self.ERROR_PREFIX + self._base64_encode(str(None))
+
+
def fetch_sentinel_challenge(session: requests.Session, device_id: str, flow: str = "authorize_continue") -> Optional[Dict[str, Any]]:
    """POST a requirements token to the sentinel backend and return its challenge.

    Returns the parsed JSON dict on HTTP 200, or None on any failure
    (non-200, network error, or non-dict body).
    """
    gen = SentinelTokenGenerator(device_id=device_id)
    body = {"p": gen.generate_requirements_token(), "id": device_id, "flow": flow}
    # text/plain content type mirrors the browser SDK's fetch call.
    headers = {
        "Content-Type": "text/plain;charset=UTF-8",
        "Referer": "https://sentinel.openai.com/backend-api/sentinel/frame.html",
        "User-Agent": USER_AGENT,
        "Origin": "https://sentinel.openai.com",
        "sec-ch-ua": '"Not:A-Brand";v="99", "Google Chrome";v="145", "Chromium";v="145"',
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": '"Windows"',
    }
    try:
        # SECURITY NOTE(review): verify=False disables TLS certificate checks,
        # exposing this call to MITM — presumably for an intercepting proxy;
        # confirm and scope it via an explicit config flag if possible.
        resp = session.post(
            "https://sentinel.openai.com/backend-api/sentinel/req",
            data=json.dumps(body),
            headers=headers,
            timeout=15,
            verify=False,
        )
        if resp.status_code != 200:
            return None
        data = resp.json()
        return data if isinstance(data, dict) else None
    except Exception:
        return None
+
+
def build_sentinel_token(session: requests.Session, device_id: str, flow: str = "authorize_continue") -> Optional[str]:
    """Assemble the sentinel token JSON, solving proof-of-work when demanded."""
    challenge = fetch_sentinel_challenge(session, device_id, flow)
    if not challenge:
        return None
    pow_data = challenge.get("proofofwork", {})
    gen = SentinelTokenGenerator(device_id=device_id)
    needs_pow = isinstance(pow_data, dict) and pow_data.get("required") and pow_data.get("seed")
    if needs_pow:
        p_value = gen.generate_token(seed=pow_data.get("seed"), difficulty=pow_data.get("difficulty", "0"))
    else:
        p_value = gen.generate_requirements_token()
    return json.dumps({"p": p_value, "t": "", "c": challenge.get("token", ""), "id": device_id, "flow": flow})
+
+
+
+def extract_verification_code(content: str) -> Optional[str]:
+ if not content:
+ return None
+ m = re.search(r"background-color:\s*#F3F3F3[^>]*>[\s\S]*?(\d{6})[\s\S]*?
", content)
+ if m:
+ return m.group(1)
+ m = re.search(r"Subject:.*?(\d{6})", content)
+ if m and m.group(1) != "177010":
+ return m.group(1)
+ for pat in [r">\s*(\d{6})\s*<", r"(? str:
+ if isinstance(payload, (dict, list)):
+ raw = json.dumps(payload, ensure_ascii=False, sort_keys=True)
+ else:
+ raw = str(payload or "")
+ return hashlib.sha1(raw.encode("utf-8", "ignore")).hexdigest()
+
+
+def _flatten_mail_content(mail_obj: Dict[str, Any]) -> str:
+ parts: List[str] = []
+ for key in ("subject", "body", "text", "html", "intro"):
+ value = mail_obj.get(key)
+ if isinstance(value, list):
+ parts.extend(str(item or "") for item in value)
+ elif isinstance(value, dict):
+ parts.append(json.dumps(value, ensure_ascii=False))
+ elif value:
+ parts.append(str(value))
+
+ sender = mail_obj.get("from")
+ if isinstance(sender, dict):
+ parts.append(str(sender.get("name") or ""))
+ parts.append(str(sender.get("address") or ""))
+ elif isinstance(sender, list):
+ parts.extend(str(item or "") for item in sender)
+ elif sender:
+ parts.append(str(sender))
+
+ recipients = mail_obj.get("to")
+ if isinstance(recipients, list):
+ for item in recipients:
+ if isinstance(item, dict):
+ parts.append(str(item.get("name") or ""))
+ parts.append(str(item.get("address") or ""))
+ elif item:
+ parts.append(str(item))
+ elif recipients:
+ parts.append(str(recipients))
+
+ return " ".join(part for part in parts if part).strip()
+
+
def build_mail_api_headers(mail_api_key: str) -> Dict[str, str]:
    """Build JSON-accepting headers, adding a Bearer Authorization when a key is set.

    A key already prefixed with "bearer " (any case) is passed through as-is.
    """
    result: Dict[str, str] = {"Accept": "application/json"}
    key = str(mail_api_key or "").strip()
    if not key:
        return result
    if key.lower().startswith("bearer "):
        result["Authorization"] = key
    else:
        result["Authorization"] = f"Bearer {key}"
    return result
+
+
def normalize_mail_provider(value: str) -> str:
    """Map a user-supplied provider name or alias to its canonical identifier.

    Unknown names pass through unchanged (lower-cased/trimmed); empty input
    defaults to the self-hosted mail API provider.
    """
    key = str(value or "").strip().lower()
    alias_map = {
        "": "self_hosted_mail_api",
        "cfmail": "cfmail",
        "mail_api": "self_hosted_mail_api",
        "self_hosted": "self_hosted_mail_api",
        "self_hosted_mail_api": "self_hosted_mail_api",
        "duckmail": "duckmail",
        "tempmail": "tempmail_lol",
        "tempmail_lol": "tempmail_lol",
        "215": "yyds_mail",
        "215.im": "yyds_mail",
        "vip215": "yyds_mail",
        "vip.215.im": "yyds_mail",
        "yyds": "yyds_mail",
        "yyds_mail": "yyds_mail",
    }
    return alias_map.get(key, key)
+
+
def apply_log_context(message: str, log_context: str = "") -> str:
    """Prefix *message* with the trimmed log context, if any was given."""
    context = str(log_context or "").strip()
    return f"{context} | {message}" if context else message
+
+
def normalize_mail_domain(value: str) -> str:
    """Trim whitespace, leading '@'/'.' characters, and trailing dots from a domain."""
    trimmed = str(value or "").strip()
    return trimmed.lstrip("@.").rstrip(".")
+
+
def normalize_mail_domains(values: Any) -> List[str]:
    """Normalize a domain or collection of domains into a de-duplicated list.

    Accepts a single string, a list/tuple, or anything else (treated as empty).
    Order of first appearance is preserved; empty results are dropped.
    """
    if isinstance(values, str):
        raw_items: List[Any] = [values]
    elif isinstance(values, (list, tuple)):
        raw_items = list(values)
    else:
        raw_items = []

    result: List[str] = []
    for item in raw_items:
        cleaned = normalize_mail_domain(str(item or ""))
        if cleaned and cleaned not in result:
            result.append(cleaned)
    return result
+
+
class MailProviderBase:
    """Common interface for temporary-mailbox providers.

    Subclasses must implement :meth:`create_mailbox` and
    :meth:`poll_verification_codes`.  The base class supplies the polling
    loop, a per-thread HTTP session, and no-op failure-tracking hooks that
    domain-aware subclasses override.
    """

    provider_name = "unknown"

    def __init__(self, *, proxy: str, logger: logging.Logger):
        self.proxy = str(proxy or "")
        self.logger = logger
        # One requests session per thread; sessions are not shared across threads.
        self._thread_local = threading.local()

    def create_mailbox(self) -> Optional[Mailbox]:
        """Create a fresh mailbox; subclasses must override."""
        raise NotImplementedError

    @property
    def last_selected_domain(self) -> str:
        # The base provider tracks no domain state.
        return ""

    @property
    def last_selected_target(self) -> str:
        return self.last_selected_domain

    def wait_for_availability(self, worker_id: int = 0) -> None:
        """Block until the provider can serve requests (no-op by default)."""
        return None

    def note_domain_failure(self, domain: str, *, stage: str, detail: str = "") -> None:
        """Record a failure for *domain* (no-op by default)."""
        return None

    def note_domain_success(self, domain: str) -> None:
        """Record a success for *domain* (no-op by default)."""
        return None

    def note_target_failure(self, target: str, *, stage: str, detail: str = "") -> None:
        # Targets map onto domains in the default implementation.
        self.note_domain_failure(target, stage=stage, detail=detail)

    def note_target_success(self, target: str) -> None:
        self.note_domain_success(target)

    def poll_verification_codes(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        seen_ids: Optional[set[str]] = None,
        not_before_ts: Optional[float] = None,
    ) -> List[str]:
        """Return newly seen verification codes; subclasses must override."""
        raise NotImplementedError

    def wait_for_verification_code(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        timeout: int = 120,
        not_before_ts: Optional[float] = None,
        poll_interval_seconds: float = 3.0,
        log_context: str = "",
    ) -> Optional[str]:
        """Poll until a verification code arrives or *timeout* seconds elapse.

        Returns the first code found, or None on timeout.  ``seen_ids`` is
        threaded through every poll so already-processed messages are skipped.
        """
        observed_ids: set[str] = set()
        started_at = time.time()
        # Never poll faster than 0.2s even with a broken/zero configuration.
        interval = max(0.2, float(poll_interval_seconds or 3.0))
        target_email = (email or mailbox.email).strip()
        self.logger.info(
            apply_log_context("正在等待邮箱 %s 的验证码... provider=%s timeout=%ss", log_context),
            target_email or mailbox.email,
            self.provider_name,
            timeout,
        )
        while time.time() - started_at < timeout:
            found = self.poll_verification_codes(
                mailbox,
                email=email,
                seen_ids=observed_ids,
                not_before_ts=not_before_ts,
            )
            if found:
                self.logger.info(apply_log_context("成功获取验证码: %s", log_context), found[0])
                return found[0]
            time.sleep(interval)
        return None

    def describe(self) -> str:
        """Human-readable provider description for logs."""
        return self.provider_name

    def _session(self) -> requests.Session:
        """Return (creating on first use) this thread's HTTP session."""
        cached = getattr(self._thread_local, "session", None)
        if cached is None:
            cached = create_session(proxy=self.proxy)
            self._thread_local.session = cached
        return cached
+
+
class DomainAwareMailProvider(MailProviderBase):
    """Provider base that rotates across several configured mail domains.

    Keeps a per-domain consecutive-failure counter; once a domain reaches
    ``failure_threshold`` failures it enters a cooldown window and is skipped
    by :meth:`acquire_domain` until the window expires.  All shared state is
    guarded by ``_domain_lock`` because providers are used from multiple
    worker threads.
    """

    def __init__(
        self,
        *,
        proxy: str,
        logger: logging.Logger,
        domain: str = "",
        domains: Optional[List[str]] = None,
        failure_threshold: int = 5,
        failure_cooldown_seconds: float = 45.0,
    ):
        super().__init__(proxy=proxy, logger=logger)
        # Prefer the plural `domains` list; fall back to the single `domain`.
        normalized_domains = normalize_mail_domains(domains or [])
        if not normalized_domains:
            normalized_domains = normalize_mail_domains([domain])
        self.domains = normalized_domains
        # Clamp to sane minimums so misconfiguration cannot disable cooldowns.
        self.failure_threshold = max(1, int(failure_threshold or 5))
        self.failure_cooldown_seconds = max(1.0, float(failure_cooldown_seconds or 45.0))
        # domain -> consecutive failures / cooldown expiry (epoch seconds, 0 = none).
        self.domain_failure_counts: Dict[str, int] = {item: 0 for item in self.domains}
        self.domain_cooldown_until: Dict[str, float] = {item: 0.0 for item in self.domains}
        self._domain_lock = threading.Lock()
        self._round_robin_index = 0
        self._last_selected_domain = ""

    @property
    def last_selected_domain(self) -> str:
        # Read under the lock: written by acquire_domain() from worker threads.
        with self._domain_lock:
            return self._last_selected_domain

    def wait_for_availability(self, worker_id: int = 0) -> None:
        """Block until at least one domain is out of cooldown.

        Sleeps in increments of at most 5s so an early-released domain is
        picked up promptly.
        """
        if not self.domains:
            return
        while True:
            now = time.time()
            with self._domain_lock:
                next_ready_at = 0.0
                for candidate in self.domains:
                    cooldown_until = self.domain_cooldown_until.get(candidate, 0.0)
                    if cooldown_until <= now:
                        # At least one domain is usable right now.
                        return
                    # Track the earliest cooldown expiry across all domains.
                    if not next_ready_at or cooldown_until < next_ready_at:
                        next_ready_at = cooldown_until
            wait_seconds = max(0.0, next_ready_at - now)
            self.logger.warning(
                "邮箱域名全部处于冷却期,provider=%s worker=%s 等待 %.1fs",
                self.provider_name,
                worker_id or "-",
                wait_seconds,
            )
            time.sleep(min(wait_seconds, 5.0))

    def acquire_domain(self) -> str:
        """Pick the next usable domain round-robin, blocking through cooldowns.

        Returns "" when no domains are configured at all.
        """
        if not self.domains:
            return ""
        while True:
            now = time.time()
            with self._domain_lock:
                total = len(self.domains)
                next_ready_at = 0.0
                for offset in range(total):
                    # Scan from the round-robin cursor so load spreads evenly.
                    idx = (self._round_robin_index + offset) % total
                    candidate = self.domains[idx]
                    cooldown_until = self.domain_cooldown_until.get(candidate, 0.0)
                    if cooldown_until > now:
                        # Still cooling down; remember the earliest release time.
                        if not next_ready_at or cooldown_until < next_ready_at:
                            next_ready_at = cooldown_until
                        continue
                    self._round_robin_index = (idx + 1) % total
                    self._last_selected_domain = candidate
                    return candidate
            wait_seconds = max(0.0, next_ready_at - now)
            self.logger.warning(
                "邮箱域名全部处于冷却期,provider=%s 等待 %.1fs",
                self.provider_name,
                wait_seconds,
            )
            time.sleep(min(wait_seconds, 5.0))

    def note_domain_failure(self, domain: str, *, stage: str, detail: str = "") -> None:
        """Record one failure for *domain*; start/extend a cooldown at threshold."""
        normalized_domain = normalize_mail_domain(domain)
        # Unknown domains (not configured for this provider) are ignored.
        if not normalized_domain or normalized_domain not in self.domain_failure_counts:
            return
        with self._domain_lock:
            failure_count = self.domain_failure_counts[normalized_domain] + 1
            self.domain_failure_counts[normalized_domain] = failure_count
            should_cooldown = failure_count >= self.failure_threshold
            cooldown_until = self.domain_cooldown_until.get(normalized_domain, 0.0)
            if should_cooldown:
                # Extend, never shorten, an existing cooldown window.
                cooldown_until = max(cooldown_until, time.time() + self.failure_cooldown_seconds)
                self.domain_cooldown_until[normalized_domain] = cooldown_until
        if should_cooldown:
            self.logger.warning(
                "邮箱域名熔断: provider=%s domain=%s stage=%s detail=%s consecutive=%s/%s cooldown_until=%s",
                self.provider_name,
                normalized_domain,
                stage,
                detail or "-",
                failure_count,
                self.failure_threshold,
                dt.datetime.fromtimestamp(cooldown_until).strftime("%Y-%m-%d %H:%M:%S"),
            )
        else:
            self.logger.warning(
                "邮箱域名失败: provider=%s domain=%s stage=%s detail=%s consecutive=%s/%s",
                self.provider_name,
                normalized_domain,
                stage,
                detail or "-",
                failure_count,
                self.failure_threshold,
            )

    def note_domain_success(self, domain: str) -> None:
        """Reset the failure counter and clear any cooldown for *domain*."""
        normalized_domain = normalize_mail_domain(domain)
        if not normalized_domain or normalized_domain not in self.domain_failure_counts:
            return
        with self._domain_lock:
            self.domain_failure_counts[normalized_domain] = 0
            self.domain_cooldown_until[normalized_domain] = 0.0
+
+
class CfmailProvider(DomainAwareMailProvider):
    """Mailbox provider backed by a Cloudflare temp-email style admin API.

    NOTE(review): the original ``__init__`` and the tail of ``CODE_PATTERNS``
    were garbled in the patch; both are reconstructed here from the
    ``build_mail_provider`` call site and the sibling providers — confirm
    against the upstream file.
    """

    provider_name = "cfmail"
    # Ordered from most to least specific; the first match wins per message.
    CODE_PATTERNS = (
        r"Subject:\s*Your ChatGPT code is\s*(\d{6})",
        r"Your ChatGPT code is\s*(\d{6})",
        r"temporary verification code to continue:\s*(\d{6})",
        r"(?<!\d)(\d{6})(?!\d)",
    )

    def __init__(
        self,
        *,
        proxy: str,
        logger: logging.Logger,
        api_base: str,
        api_key: str,
        domain: str = "",
        domains: Optional[List[str]] = None,
        failure_threshold: int = 5,
        failure_cooldown_seconds: float = 45.0,
    ):
        super().__init__(
            proxy=proxy,
            logger=logger,
            domain=domain,
            domains=domains,
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
        self.api_base = str(api_base or "").rstrip("/")
        self.api_key = str(api_key or "").strip()
        # NOTE(review): validation reconstructed by analogy with
        # SelfHostedMailApiProvider — confirm the exact messages upstream.
        if not self.api_base:
            raise RuntimeError("cfmail.api_base 未配置,无法调用 Cloudflare 邮箱 API。")
        if not self.api_key:
            raise RuntimeError("cfmail.api_key 未配置,无法调用 Cloudflare 邮箱 API。")
        if not self.domains:
            raise RuntimeError("cfmail.domain 未配置,无法生成邮箱地址。")

    def _create_address_for_domain(self, domain: str) -> Optional[Mailbox]:
        """Create a random address on *domain* via the admin API; None on failure."""
        local = f"oc{secrets.token_hex(5)}"
        session = self._session()
        try:
            resp = session.post(
                f"{self.api_base}/admin/new_address",
                headers={
                    "x-admin-auth": self.api_key,
                    "Accept": "application/json",
                    "Content-Type": "application/json",
                },
                json={"enablePrefix": True, "name": local, "domain": domain},
                timeout=15,
                verify=False,
            )
            if resp.status_code != 200:
                self.logger.warning("cfmail 创建邮箱失败: domain=%s status=%s body=%s", domain, resp.status_code, resp.text[:200])
                return None
            body = resp.json() if resp.content else {}
            if not isinstance(body, dict):
                return None
            email = str(body.get("address") or "").strip()
            jwt = str(body.get("jwt") or "").strip()
            if not email or not jwt:
                return None
            self.logger.info("生成 Cloudflare 邮箱成功: %s", email)
            # The per-mailbox JWT authenticates the later /api/mails polls.
            return Mailbox(
                email=email,
                token=jwt,
                domain=domain,
                failure_target=domain,
            )
        except Exception as exc:
            self.logger.warning("cfmail 创建邮箱异常: domain=%s error=%s", domain, exc)
            return None

    def create_mailbox(self) -> Optional[Mailbox]:
        """Pick the next available domain and create an address on it."""
        selected_domain = self.acquire_domain()
        return self._create_address_for_domain(selected_domain)

    def _fetch_cfmail_messages(self, mailbox: Mailbox) -> List[Dict[str, Any]]:
        """List up to 10 recent messages for the mailbox; empty list on error."""
        if not mailbox.token:
            return []
        session = self._session()
        try:
            resp = session.get(
                f"{self.api_base}/api/mails",
                params={"limit": 10, "offset": 0},
                headers={"Accept": "application/json", "Content-Type": "application/json", "Authorization": f"Bearer {mailbox.token}"},
                timeout=15,
                verify=False,
            )
            if resp.status_code != 200:
                return []
            body = resp.json() if resp.content else {}
            results = body.get("results") if isinstance(body, dict) else []
            return results if isinstance(results, list) else []
        except Exception:
            return []

    def poll_verification_codes(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        seen_ids: Optional[set[str]] = None,
        not_before_ts: Optional[float] = None,
    ) -> List[str]:
        """Scan recent messages for ChatGPT/OpenAI verification codes.

        Skips messages already in ``seen_ids``, those older than
        ``not_before_ts``, and those addressed to a different recipient.
        """
        messages = self._fetch_cfmail_messages(mailbox)
        codes: List[str] = []
        normalized_email = (email or mailbox.email).strip().lower()
        for message in messages:
            if not isinstance(message, dict):
                continue
            message_id = str(message.get("id") or message.get("createdAt") or "").strip()
            if seen_ids is not None and message_id:
                if message_id in seen_ids:
                    continue
                seen_ids.add(message_id)
            if not is_mail_recent_enough(message, not_before_ts):
                continue

            recipient = str(message.get("address") or "").strip().lower()
            if recipient and normalized_email and recipient != normalized_email:
                continue
            metadata = message.get("metadata") or {}
            content = "\n".join(
                [
                    recipient,
                    str(message.get("raw") or ""),
                    json.dumps(metadata, ensure_ascii=False),
                ]
            )
            # Only consider mail that plausibly came from OpenAI/ChatGPT.
            if "openai" not in content.lower() and "chatgpt" not in content.lower():
                continue
            for pattern in self.CODE_PATTERNS:
                matched = re.search(pattern, content, re.I | re.S)
                if matched:
                    codes.append(matched.group(1))
                    break
        # Preserve order while dropping duplicates.
        return list(dict.fromkeys(codes))

    def describe(self) -> str:
        return f"{self.provider_name}({self.api_base}, domains={','.join(self.domains)})"
+
+
class SelfHostedMailApiProvider(DomainAwareMailProvider):
    """Provider that talks to a self-hosted mail API (``/api/latest``).

    Addresses are generated locally (``oc`` + random hex) on one of the
    configured domains; the API is only consulted when polling for mail.
    """

    provider_name = "self_hosted_mail_api"

    def __init__(
        self,
        *,
        proxy: str,
        logger: logging.Logger,
        api_base: str,
        api_key: str,
        domain: str,
        domains: Optional[List[str]] = None,
        failure_threshold: int = 5,
        failure_cooldown_seconds: float = 45.0,
    ):
        super().__init__(
            proxy=proxy,
            logger=logger,
            domain=domain,
            domains=domains,
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
        self.api_base = str(api_base or "").rstrip("/")
        self.api_key = str(api_key or "").strip()
        # Kept for callers that still read the singular `.domain` attribute.
        self.domain = self.domains[0] if self.domains else ""
        if not self.api_base:
            raise RuntimeError("mail.api_base 未配置,无法调用自建邮箱 API。")
        if not self.api_key:
            raise RuntimeError("mail.api_key 未配置,无法调用自建邮箱 API。")
        if not self.domains:
            raise RuntimeError("mail.domain 未配置,无法生成邮箱地址。")

    def create_mailbox(self) -> Optional[Mailbox]:
        """Mint a random local-part address on the next available domain."""
        chosen = self.acquire_domain()
        address = f"oc{secrets.token_hex(5)}@{chosen}"
        self.logger.info("生成临时邮箱成功: %s", address)
        return Mailbox(email=address, domain=chosen)

    def _fetch_latest_email(self, email: str) -> Optional[Dict[str, Any]]:
        """GET the newest message for *email*; None on any HTTP/parse failure."""
        if not email:
            return None
        client = self._session()
        try:
            response = client.get(
                f"{self.api_base}/api/latest?address={quote(email)}",
                headers=build_mail_api_headers(self.api_key),
                timeout=30,
                verify=False,
            )
            if response.status_code != 200:
                self.logger.warning(
                    "自建邮箱获取邮件失败: status=%s email=%s body=%s",
                    response.status_code,
                    email,
                    (response.text or "")[:200],
                )
                return None
            payload = response.json()
            if not isinstance(payload, dict):
                return None
            mail_obj = payload.get("email")
            # Covers both the {"ok": true, "email": {...}} envelope and a
            # bare {"email": {...}} response.
            if isinstance(mail_obj, dict):
                return mail_obj
            # Some deployments return the message fields at the top level.
            if any(field in payload for field in ("subject", "body", "text", "html")):
                return payload
        except Exception:
            return None
        return None

    def poll_verification_codes(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        seen_ids: Optional[set[str]] = None,
        not_before_ts: Optional[float] = None,
    ) -> List[str]:
        """Return at most one freshly-seen verification code for the mailbox."""
        latest = self._fetch_latest_email(mailbox.email)
        if not latest:
            return []
        if not is_mail_recent_enough(latest, not_before_ts):
            return []

        # De-duplicate by content fingerprint: /api/latest keeps returning
        # the same message until a newer one arrives.
        fingerprint = _mail_content_signature(latest)
        if seen_ids is not None:
            if fingerprint in seen_ids:
                return []
            seen_ids.add(fingerprint)

        code = extract_verification_code(_flatten_mail_content(latest))
        return [code] if code else []

    def describe(self) -> str:
        return f"{self.provider_name}({self.api_base}, domains={','.join(self.domains)})"
+
+
class DuckMailProvider(DomainAwareMailProvider):
    """Provider for a DuckMail (mail.tm-style) API.

    Mailbox creation is a two-step handshake: POST /accounts with a random
    address + password, then POST /token to obtain the JWT used for all
    later /messages polling.
    """

    provider_name = "duckmail"

    def __init__(
        self,
        *,
        proxy: str,
        logger: logging.Logger,
        api_base: str,
        bearer: str,
        domain: str = "duckmail.sbs",
        domains: Optional[List[str]] = None,
        failure_threshold: int = 5,
        failure_cooldown_seconds: float = 45.0,
    ):
        super().__init__(
            proxy=proxy,
            logger=logger,
            domain=domain or "duckmail.sbs",
            domains=domains,
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
        self.api_base = str(api_base or "https://api.duckmail.sbs").rstrip("/")
        self.bearer = str(bearer or "").strip()
        # Kept for callers that still read the singular `.domain` attribute.
        self.domain = self.domains[0] if self.domains else normalize_mail_domain(domain or "duckmail.sbs")
        if not self.bearer:
            raise RuntimeError("duckmail.bearer 未配置,无法创建 DuckMail 邮箱。")

    def create_mailbox(self) -> Optional[Mailbox]:
        """Register a random account and fetch its JWT; None on any failure."""
        selected_domain = self.acquire_domain()
        # Random 8-13 char lowercase-alphanumeric local part.
        local = "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(random.randint(8, 13)))
        email = f"{local}@{selected_domain}"
        # generate_random_password() is a project helper defined elsewhere.
        password = generate_random_password()
        session = self._session()
        # The provisioning bearer authorizes account creation only; message
        # polling uses the per-account JWT fetched below.
        headers = {"Authorization": f"Bearer {self.bearer}", "Accept": "application/json"}
        try:
            resp = session.post(
                f"{self.api_base}/accounts",
                json={"address": email, "password": password},
                headers=headers,
                timeout=30,
                verify=False,
            )
            if resp.status_code not in (200, 201):
                raise RuntimeError(f"HTTP {resp.status_code}: {resp.text[:200]}")

            # Step 2: exchange the credentials for the account JWT.
            token_resp = session.post(
                f"{self.api_base}/token",
                json={"address": email, "password": password},
                timeout=30,
                verify=False,
            )
            if token_resp.status_code != 200:
                raise RuntimeError(f"HTTP {token_resp.status_code}: {token_resp.text[:200]}")
            data = token_resp.json() if token_resp.content else {}
            token = str(data.get("token") or "").strip()
            if not token:
                raise RuntimeError("token 为空")
        except Exception as exc:
            self.logger.warning("DuckMail 创建邮箱失败: %s", exc)
            return None

        self.logger.info("生成 DuckMail 邮箱成功: %s", email)
        return Mailbox(email=email, password=password, token=token, domain=selected_domain)

    def _auth_headers(self, token: str) -> Dict[str, str]:
        """Headers for per-account (JWT) authenticated requests."""
        return {"Authorization": f"Bearer {token}", "Accept": "application/json"}

    def _fetch_messages(self, token: str) -> List[Dict[str, Any]]:
        """List inbox messages; empty list on any error.

        Handles both hydra-style ("hydra:member") and plain envelopes.
        """
        if not token:
            return []
        session = self._session()
        try:
            resp = session.get(
                f"{self.api_base}/messages",
                headers=self._auth_headers(token),
                timeout=30,
                verify=False,
            )
            if resp.status_code != 200:
                return []
            data = resp.json()
            if isinstance(data, dict):
                messages = data.get("hydra:member") or data.get("member") or data.get("data") or []
                return messages if isinstance(messages, list) else []
        except Exception:
            return []
        return []

    def _fetch_message_detail(self, token: str, message_id: str) -> Optional[Dict[str, Any]]:
        """Fetch one message's full body; None on any error.

        ``message_id`` may be a hydra IRI ("/messages/<id>"); only the last
        path segment is used.
        """
        if not token or not message_id:
            return None
        normalized_id = str(message_id).split("/")[-1]
        session = self._session()
        try:
            resp = session.get(
                f"{self.api_base}/messages/{normalized_id}",
                headers=self._auth_headers(token),
                timeout=30,
                verify=False,
            )
            if resp.status_code == 200:
                data = resp.json()
                return data if isinstance(data, dict) else None
        except Exception:
            return None
        return None

    def poll_verification_codes(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        seen_ids: Optional[set[str]] = None,
        not_before_ts: Optional[float] = None,
    ) -> List[str]:
        """Scan up to 12 listed messages, fetching each new one's detail and
        extracting verification codes from it."""
        messages = self._fetch_messages(mailbox.token)
        codes: List[str] = []
        for message in messages[:12]:
            message_id = str(message.get("id") or message.get("@id") or "").strip()
            if seen_ids is not None and message_id:
                if message_id in seen_ids:
                    continue
                seen_ids.add(message_id)

            detail = self._fetch_message_detail(mailbox.token, message_id)
            if not detail:
                continue
            if not is_mail_recent_enough(detail, not_before_ts):
                continue

            content = _flatten_mail_content(detail)
            code = extract_verification_code(content)
            if code:
                codes.append(code)
        # Preserve order while dropping duplicates.
        return list(dict.fromkeys(codes))

    def describe(self) -> str:
        return f"{self.provider_name}({self.api_base}, domains={','.join(self.domains)})"
+
+
class TempMailLolProvider(MailProviderBase):
    """Provider backed by the public tempmail.lol v2 API.

    The API assigns addresses itself, so there is no domain rotation here —
    this extends the plain :class:`MailProviderBase`.
    """

    provider_name = "tempmail_lol"

    def __init__(self, *, proxy: str, logger: logging.Logger, api_base: str):
        super().__init__(proxy=proxy, logger=logger)
        self.api_base = str(api_base or "https://api.tempmail.lol/v2").rstrip("/")

    def create_mailbox(self) -> Optional[Mailbox]:
        """Ask the API for a brand-new inbox; returns None on any failure."""
        client = self._session()
        try:
            response = client.post(
                f"{self.api_base}/inbox/create",
                json={},
                timeout=30,
                verify=False,
            )
            if response.status_code not in (200, 201):
                raise RuntimeError(f"HTTP {response.status_code}: {response.text[:200]}")
            payload = response.json() if response.content else {}
            address = str(payload.get("address") or payload.get("email") or "").strip()
            inbox_token = str(payload.get("token") or "").strip()
            if not address or not inbox_token:
                raise RuntimeError("address/email 或 token 为空")
        except Exception as exc:
            self.logger.warning("TempMail.lol 创建邮箱失败: %s", exc)
            return None

        self.logger.info("生成 TempMail.lol 邮箱成功: %s", address)
        return Mailbox(email=address, token=inbox_token)

    def _fetch_messages(self, token: str) -> List[Dict[str, Any]]:
        """List inbox messages for *token*; empty list on any error."""
        if not token:
            return []
        client = self._session()
        try:
            response = client.get(
                f"{self.api_base}/inbox",
                params={"token": token},
                timeout=30,
                verify=False,
            )
            if response.status_code != 200:
                return []
            payload = response.json() if response.content else {}
            items = payload.get("emails") if isinstance(payload, dict) else []
            return items if isinstance(items, list) else []
        except Exception:
            return []

    def poll_verification_codes(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        seen_ids: Optional[set[str]] = None,
        not_before_ts: Optional[float] = None,
    ) -> List[str]:
        """Scan the newest messages (up to 20) for verification codes."""
        inbox = self._fetch_messages(mailbox.token)
        # Newest first, using whichever timestamp field the API populated.
        newest_first = sorted(
            inbox,
            key=lambda entry: entry.get("date") or entry.get("createdAt") or 0,
            reverse=True,
        )
        found: List[str] = []
        for entry in newest_first[:20]:
            entry_id = str(entry.get("id") or entry.get("date") or entry.get("createdAt") or "").strip()
            if seen_ids is not None and entry_id:
                if entry_id in seen_ids:
                    continue
                seen_ids.add(entry_id)
            if not is_mail_recent_enough(entry, not_before_ts):
                continue

            code = extract_verification_code(_flatten_mail_content(entry))
            if code:
                found.append(code)
        # Preserve order while dropping duplicates.
        return list(dict.fromkeys(found))

    def describe(self) -> str:
        return f"{self.provider_name}({self.api_base})"
+
+
class YYDSMailProvider(DomainAwareMailProvider):
    """Provider for the YYDS (215.im) mail API.

    Uses an optional X-API-Key for account creation and a per-account Bearer
    token for message polling.  Response envelopes vary, so list/detail
    parsing tries several shapes.
    """

    provider_name = "yyds_mail"

    def __init__(
        self,
        *,
        proxy: str,
        logger: logging.Logger,
        api_base: str,
        api_key: str = "",
        domain: str = "",
        domains: Optional[List[str]] = None,
        failure_threshold: int = 5,
        failure_cooldown_seconds: float = 45.0,
    ):
        super().__init__(
            proxy=proxy,
            logger=logger,
            domain=domain,
            domains=domains,
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
        self.api_base = str(api_base or "https://maliapi.215.im/v1").rstrip("/")
        self.api_key = str(api_key or "").strip()
        # Kept for callers that still read the singular `.domain` attribute.
        self.domain = self.domains[0] if self.domains else normalize_mail_domain(domain)

    def _request_headers(self) -> Dict[str, str]:
        """Headers for account-management calls (API key is optional)."""
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        if self.api_key:
            headers["X-API-Key"] = self.api_key
        return headers

    def _temp_headers(self, token: str) -> Dict[str, str]:
        """Headers for per-account (Bearer token) message polling."""
        return {"Accept": "application/json", "Authorization": f"Bearer {token}"}

    def create_mailbox(self) -> Optional[Mailbox]:
        """Create a random-local-part account; None on any failure.

        The domain is only sent when one is configured — otherwise the API
        picks one itself.
        """
        payload: Dict[str, Any] = {"address": f"oc{secrets.token_hex(5)}"}
        selected_domain = self.acquire_domain()
        if selected_domain:
            payload["domain"] = selected_domain

        session = self._session()
        try:
            resp = session.post(
                f"{self.api_base}/accounts",
                json=payload,
                headers=self._request_headers(),
                timeout=30,
                verify=False,
            )
            if resp.status_code not in (200, 201):
                raise RuntimeError(f"HTTP {resp.status_code}: {resp.text[:200]}")
            body = resp.json() if resp.content else {}
            data = body.get("data") if isinstance(body, dict) else {}
            if not isinstance(data, dict):
                raise RuntimeError("返回 data 结构无效")
            email = str(data.get("address") or "").strip()
            token = str(data.get("token") or "").strip()
            account_id = str(data.get("id") or "").strip()
            if not email or not token:
                raise RuntimeError("address 或 token 为空")
        except Exception as exc:
            self.logger.warning("YYDS Mail 创建邮箱失败: %s", exc)
            return None

        # When no domain was configured, record the one the API assigned.
        mailbox_domain = selected_domain or normalize_mail_domain(email.partition("@")[2])
        self.logger.info("生成 YYDS Mail 邮箱成功: %s", email)
        return Mailbox(email=email, token=token, account_id=account_id, domain=mailbox_domain)

    def _fetch_messages(self, token: str) -> List[Dict[str, Any]]:
        """List inbox messages; empty list on any error.

        Tolerates several envelope shapes: data as a list, data as a dict
        holding messages/items/list, or top-level "messages".
        """
        if not token:
            return []
        session = self._session()
        try:
            resp = session.get(
                f"{self.api_base}/messages",
                headers=self._temp_headers(token),
                timeout=30,
                verify=False,
            )
            if resp.status_code != 200:
                return []
            body = resp.json() if resp.content else {}
            if not isinstance(body, dict):
                return []
            data = body.get("data")
            if isinstance(data, list):
                return data
            if isinstance(data, dict):
                messages = data.get("messages") or data.get("items") or data.get("list") or []
                return messages if isinstance(messages, list) else []
            messages = body.get("messages") or []
            return messages if isinstance(messages, list) else []
        except Exception:
            return []

    def _fetch_message_detail(self, token: str, message_id: str) -> Optional[Dict[str, Any]]:
        """Fetch one message's full body; None on any error.

        ``message_id`` may be an IRI path; only the last segment is used and
        it is URL-quoted for the request path.
        """
        if not token or not message_id:
            return None
        normalized_id = str(message_id).split("/")[-1]
        session = self._session()
        try:
            resp = session.get(
                f"{self.api_base}/messages/{quote(normalized_id, safe='')}",
                headers=self._temp_headers(token),
                timeout=30,
                verify=False,
            )
            if resp.status_code != 200:
                return None
            body = resp.json() if resp.content else {}
            data = body.get("data") if isinstance(body, dict) else {}
            return data if isinstance(data, dict) else None
        except Exception:
            return None

    def poll_verification_codes(
        self,
        mailbox: Mailbox,
        *,
        email: str = "",
        seen_ids: Optional[set[str]] = None,
        not_before_ts: Optional[float] = None,
    ) -> List[str]:
        """Scan up to 20 listed messages for verification codes.

        A listing entry that is itself recent enough is scanned inline and
        the detail fetch is skipped; otherwise the detail is fetched and
        re-checked for recency before extraction.
        """
        messages = self._fetch_messages(mailbox.token)
        codes: List[str] = []
        for message in messages[:20]:
            message_id = str(message.get("id") or "").split("/")[-1].strip()
            if seen_ids is not None and message_id:
                if message_id in seen_ids:
                    continue
                seen_ids.add(message_id)

            if is_mail_recent_enough(message, not_before_ts):
                inline_content = _flatten_mail_content(message)
                inline_code = extract_verification_code(inline_content)
                if inline_code:
                    codes.append(inline_code)
                    continue

            detail = self._fetch_message_detail(mailbox.token, message_id)
            if not detail:
                continue
            if not is_mail_recent_enough(detail, not_before_ts):
                continue

            content = _flatten_mail_content(detail)
            code = extract_verification_code(content)
            if code:
                codes.append(code)
        # Preserve order while dropping duplicates.
        return list(dict.fromkeys(codes))

    def describe(self) -> str:
        detail = self.api_base
        if self.domains:
            detail += f", domains={','.join(self.domains)}"
        elif self.domain:
            detail += f", domain={self.domain}"
        return f"{self.provider_name}({detail})"
+
+
def build_mail_provider(conf: Dict[str, Any], proxy: str, logger: logging.Logger) -> MailProviderBase:
    """Instantiate the configured mail provider from *conf*.

    When ``mail.provider`` is unset the provider is auto-detected from which
    config sections are populated, in priority order: self-hosted mail API,
    duckmail, yyds_mail, tempmail_lol, then self-hosted as the final default.
    Raises RuntimeError for an unrecognized provider name.
    """
    raw_provider = str(pick_conf(conf, "mail", "provider", "mail_provider", default="") or "").strip()
    # Shared circuit-breaker tuning for all domain-aware providers.
    failure_threshold = int(pick_conf(conf, "run", "failure_threshold_for_cooldown", default=5) or 5)
    failure_cooldown_seconds = float(pick_conf(conf, "run", "failure_cooldown_seconds", default=45.0) or 45.0)

    if not raw_provider:
        # Auto-detect from whichever provider section has credentials.
        if pick_conf(conf, "mail", "api_base", default=conf.get("mail_api_base")) or pick_conf(
            conf, "mail", "domain", default=conf.get("mail_domain")
        ):
            provider_name = "self_hosted_mail_api"
        elif pick_conf(conf, "duckmail", "bearer", default=conf.get("duckmail_bearer")):
            provider_name = "duckmail"
        elif pick_conf(conf, "yyds_mail", "api_key", default=conf.get("yyds_mail_api_key")) or pick_conf(
            conf, "yyds_mail", "domain", default=conf.get("yyds_mail_domain")
        ):
            provider_name = "yyds_mail"
        elif pick_conf(conf, "tempmail_lol", "api_base", default=conf.get("tempmail_lol_api_base")):
            provider_name = "tempmail_lol"
        else:
            provider_name = "self_hosted_mail_api"
    else:
        provider_name = normalize_mail_provider(raw_provider)

    if provider_name == "self_hosted_mail_api":
        return SelfHostedMailApiProvider(
            proxy=proxy,
            logger=logger,
            api_base=str(pick_conf(conf, "mail", "api_base", default=conf.get("mail_api_base", "")) or "").strip(),
            api_key=str(pick_conf(conf, "mail", "api_key", default=conf.get("mail_api_key", "")) or "").strip(),
            domain=str(pick_conf(conf, "mail", "domain", default=conf.get("mail_domain", "")) or "").strip(),
            domains=pick_conf_list(conf, "mail", "domains", "mail_domains"),
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
    if provider_name == "duckmail":
        return DuckMailProvider(
            proxy=proxy,
            logger=logger,
            api_base=str(
                pick_conf(conf, "duckmail", "api_base", default=conf.get("duckmail_api_base", "https://api.duckmail.sbs"))
                or "https://api.duckmail.sbs"
            ).strip(),
            bearer=str(pick_conf(conf, "duckmail", "bearer", default=conf.get("duckmail_bearer", "")) or "").strip(),
            domain=str(pick_conf(conf, "duckmail", "domain", default=conf.get("duckmail_domain", "duckmail.sbs")) or "duckmail.sbs").strip(),
            domains=pick_conf_list(conf, "duckmail", "domains", "duckmail_domains"),
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
    if provider_name == "cfmail":
        return CfmailProvider(
            proxy=proxy,
            logger=logger,
            api_base=str(pick_conf(conf, "cfmail", "api_base", default=conf.get("cfmail_api_base", "")) or "").strip(),
            api_key=str(pick_conf(conf, "cfmail", "api_key", default=conf.get("cfmail_api_key", "")) or "").strip(),
            domain=str(pick_conf(conf, "cfmail", "domain", default=conf.get("cfmail_domain", "")) or "").strip(),
            domains=pick_conf_list(conf, "cfmail", "domains", "cfmail_domains"),
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )
    if provider_name == "tempmail_lol":
        return TempMailLolProvider(
            proxy=proxy,
            logger=logger,
            api_base=str(
                pick_conf(
                    conf,
                    "tempmail_lol",
                    "api_base",
                    default=conf.get("tempmail_lol_api_base", "https://api.tempmail.lol/v2"),
                )
                or "https://api.tempmail.lol/v2"
            ).strip(),
        )
    if provider_name == "yyds_mail":
        return YYDSMailProvider(
            proxy=proxy,
            logger=logger,
            api_base=str(
                pick_conf(
                    conf,
                    "yyds_mail",
                    "api_base",
                    default=conf.get("yyds_mail_api_base", "https://maliapi.215.im/v1"),
                )
                or "https://maliapi.215.im/v1"
            ).strip(),
            api_key=str(pick_conf(conf, "yyds_mail", "api_key", default=conf.get("yyds_mail_api_key", "")) or "").strip(),
            domain=str(pick_conf(conf, "yyds_mail", "domain", default=conf.get("yyds_mail_domain", "")) or "").strip(),
            domains=pick_conf_list(conf, "yyds_mail", "domains", "yyds_mail_domains"),
            failure_threshold=failure_threshold,
            failure_cooldown_seconds=failure_cooldown_seconds,
        )

    raise RuntimeError(f"不支持的 mail.provider={provider_name}")
+
+
+class ProtocolRegistrar:
    def __init__(self, proxy: str, logger: logging.Logger, conf: Optional[Dict[str, Any]] = None):
        """Prepare a registrar: HTTP session, device identity, and all
        registration-flow settings resolved from *conf* with defaults."""
        self.proxy = proxy
        self.session = create_session(proxy=proxy)
        # Fresh device id per registrar; also seeds the sentinel generator.
        self.device_id = str(uuid.uuid4())
        self.logger = logger
        # Optional flow tracer attached to the logger by the caller.
        self.flow_trace: Optional[FlowTraceRecorder] = getattr(logger, "flow_trace", None)
        self.conf = conf or {}
        self.sentinel_gen = SentinelTokenGenerator(device_id=self.device_id)
        # OAuth/PKCE state, populated as the flow progresses.
        self.code_verifier: Optional[str] = None
        self.state: Optional[str] = None
        self.registration_auth_code = ""
        self.registration_tokens: Optional[Dict[str, Any]] = None
        # Last failure info, set via _set_failure() for upstream reporting.
        self.last_failure_stage = ""
        self.last_failure_detail = ""
        self.chatgpt_base = str(
            pick_conf(self.conf, "registration", "chatgpt_base", default="https://chatgpt.com") or "https://chatgpt.com"
        ).rstrip("/")
        # Per-step retry policy (count plus per-attempt delay callable).
        self.step_retry_attempts = flow_step_retry_attempts(self.conf)
        self.step_retry_delay = lambda attempt: flow_step_retry_delay(self.conf, attempt)
        self.entry_mode = parse_choice(
            pick_conf(self.conf, "registration", "entry_mode", default="chatgpt_web"),
            allowed=("direct_auth", "chatgpt_web"),
            fallback="chatgpt_web",
        )
        self.entry_mode_fallback = parse_bool(
            pick_conf(self.conf, "registration", "entry_mode_fallback", default=True),
            fallback=True,
        )
        # Response markers that indicate a transient (retryable) flow failure.
        self.transient_markers = parse_marker_config(
            pick_conf(self.conf, "flow", "transient_markers", default=TRANSIENT_FLOW_MARKERS_DEFAULT),
            fallback=TRANSIENT_FLOW_MARKERS_DEFAULT,
        )
        self.register_otp_validate_order = parse_otp_validate_order(
            pick_conf(self.conf, "flow", "register_otp_validate_order", default="normal,sentinel")
        )
        # Markers that indicate the flow is demanding phone verification.
        self.phone_markers = parse_marker_config(
            pick_conf(self.conf, "registration", "phone_verification_markers", default=PHONE_VERIFICATION_MARKERS_DEFAULT),
            fallback=PHONE_VERIFICATION_MARKERS_DEFAULT,
        )
        # What to do when account creation hits a phone-verification wall.
        self.register_phone_action = parse_choice(
            pick_conf(
                self.conf,
                "registration",
                "register_create_account_phone_action",
                default="warn_and_continue",
            ),
            allowed=("warn_and_continue", "fail_fast"),
            fallback="warn_and_continue",
        )
+
+ def _set_failure(self, stage: str, detail: str = "") -> None:
+ self.last_failure_stage = str(stage or "").strip()
+ self.last_failure_detail = str(detail or "").strip()
+
+ def _capture_registration_tokens(
+ self,
+ response_payload: Dict[str, Any],
+ response_headers: Optional[Dict[str, Any]] = None,
+ response_url: str = "",
+ response_text: str = "",
+ ) -> None:
+ callback_params = extract_oauth_callback_params_from_response(
+ response_payload,
+ response_headers=response_headers,
+ response_url=response_url,
+ response_text=response_text,
+ )
+ auth_code = str((callback_params or {}).get("code") or "").strip()
+ continue_url = extract_continue_url_from_response(
+ response_payload,
+ response_headers=response_headers,
+ response_url=response_url,
+ )
+ if not auth_code:
+ callback_params = extract_oauth_callback_params_from_session_cookies(self.session)
+ auth_code = str((callback_params or {}).get("code") or "").strip()
+ if not auth_code and continue_url:
+ callback_params = extract_oauth_callback_params_from_consent_session(
+ session=self.session,
+ consent_url=continue_url,
+ oauth_issuer=OPENAI_AUTH_BASE,
+ device_id=self.device_id,
+ flow_trace=self.flow_trace,
+ )
+ auth_code = str((callback_params or {}).get("code") or "").strip()
+ if not auth_code:
+ callback_params = extract_oauth_callback_params_from_consent_session(
+ session=self.session,
+ consent_url=f"{OPENAI_AUTH_BASE}/sign-in-with-chatgpt/codex/consent",
+ oauth_issuer=OPENAI_AUTH_BASE,
+ device_id=self.device_id,
+ flow_trace=self.flow_trace,
+ )
+ auth_code = str((callback_params or {}).get("code") or "").strip()
+ self.registration_auth_code = str(auth_code or "")
+ self.registration_tokens = build_chatgpt_session_token_result(
+ session=self.session,
+ auth_code=auth_code,
+ callback_params=callback_params,
+ chatgpt_base=self.chatgpt_base,
+ logger=self.logger,
+ flow_trace=self.flow_trace,
+ )
+ if self.flow_trace is not None:
+ self.flow_trace.record(
+ "registration_capture_tokens",
+ auth_code=auth_code,
+ continue_url=continue_url,
+ has_tokens=bool(self.registration_tokens),
+ email=(self.registration_tokens or {}).get("email", ""),
+ account_id=(self.registration_tokens or {}).get("account_id", ""),
+ )
+
+ def _is_transient_error(self, reason: str | None) -> bool:
+ return is_transient_flow_error(reason, markers=self.transient_markers)
+
+ def _run_step_with_retry(
+ self,
+ step_name: str,
+ action: Callable[[], tuple[bool, str]],
+ ) -> tuple[bool, str]:
+ max_attempts = max(1, int(self.step_retry_attempts))
+ last_reason = ""
+ for attempt in range(1, max_attempts + 1):
+ ok, reason = action()
+ if ok:
+ return True, ""
+ last_reason = str(reason or "")
+ if attempt < max_attempts and self._is_transient_error(last_reason):
+ self.logger.warning(
+ "步骤%s瞬时失败,第 %s/%s 次失败: %s,局部重试",
+ step_name,
+ attempt,
+ max_attempts,
+ last_reason or "unknown",
+ )
+ time.sleep(self.step_retry_delay(attempt))
+ continue
+ return False, last_reason
+ return False, last_reason or f"{step_name}_failed"
+
+ def _entry_mode_candidates(self) -> list[str]:
+ ordered = [self.entry_mode]
+ if self.entry_mode_fallback:
+ fallback = "chatgpt_web" if self.entry_mode == "direct_auth" else "direct_auth"
+ ordered.append(fallback)
+ unique: list[str] = []
+ for mode in ordered:
+ if mode not in unique:
+ unique.append(mode)
+ return unique
+
+ def _build_headers(self, referer: str, with_sentinel: bool = False) -> Dict[str, str]:
+ h = dict(COMMON_HEADERS)
+ h["referer"] = referer
+ h["oai-device-id"] = self.device_id
+ h.update(generate_datadog_trace())
+ if with_sentinel:
+ h["openai-sentinel-token"] = self.sentinel_gen.generate_token()
+ return h
+
+ def _init_session_via_direct_auth(self, client_id: str, redirect_uri: str) -> tuple[bool, str]:
+ def _do_init() -> tuple[bool, str]:
+ self.session.cookies.set("oai-did", self.device_id, domain=".auth.openai.com")
+ self.session.cookies.set("oai-did", self.device_id, domain="auth.openai.com")
+
+ code_verifier, code_challenge = generate_pkce()
+ self.code_verifier = code_verifier
+ self.state = secrets.token_urlsafe(32)
+
+ params = {
+ "response_type": "code",
+ "client_id": client_id,
+ "redirect_uri": redirect_uri,
+ "scope": "openid profile email offline_access",
+ "code_challenge": code_challenge,
+ "code_challenge_method": "S256",
+ "state": self.state,
+ "screen_hint": "signup",
+ "prompt": "login",
+ }
+ url = f"{OPENAI_AUTH_BASE}/oauth/authorize?{urlencode(params)}"
+ try:
+ resp = self.session.get(url, headers=NAVIGATE_HEADERS, allow_redirects=True, verify=False, timeout=30)
+ except Exception as error:
+ return False, f"oauth_authorize_failed:{error}"
+
+ if resp.status_code not in (200, 302):
+ return False, f"oauth_authorize_http_{resp.status_code}"
+ has_login_session = any(c.name == "login_session" for c in self.session.cookies)
+ if not has_login_session:
+ return False, "login_session_missing"
+ return True, ""
+
+ return self._run_step_with_retry("0a_direct_auth", _do_init)
+
+ def _init_session_via_chatgpt_web(self) -> tuple[bool, str]:
+ chatgpt_base = self.chatgpt_base
+
+ def _do_init() -> tuple[bool, str]:
+ try:
+ self.session.get(f"{chatgpt_base}/", headers=NAVIGATE_HEADERS, timeout=15, verify=False)
+ except Exception as error:
+ return False, f"chatgpt_home_failed:{error}"
+
+ csrf_headers = {
+ "accept": "application/json",
+ "referer": f"{chatgpt_base}/auth/login",
+ "user-agent": USER_AGENT,
+ }
+ try:
+ csrf_resp = self.session.get(f"{chatgpt_base}/api/auth/csrf", headers=csrf_headers, timeout=15, verify=False)
+ except Exception as error:
+ return False, f"chatgpt_csrf_failed:{error}"
+ if csrf_resp.status_code != 200:
+ return False, f"chatgpt_csrf_http_{csrf_resp.status_code}"
+ try:
+ csrf_data = csrf_resp.json()
+ except Exception as error:
+ return False, f"chatgpt_csrf_parse_failed:{error}"
+ csrf_token = str((csrf_data or {}).get("csrfToken") or "").strip() if isinstance(csrf_data, dict) else ""
+ if not csrf_token:
+ return False, "chatgpt_csrf_missing"
+
+ signin_form = urlencode(
+ {
+ "csrfToken": csrf_token,
+ "callbackUrl": f"{chatgpt_base}/",
+ "json": "true",
+ }
+ )
+ signin_headers = {
+ "content-type": "application/x-www-form-urlencoded",
+ "accept": "application/json",
+ "origin": chatgpt_base,
+ "referer": f"{chatgpt_base}/auth/login",
+ "user-agent": USER_AGENT,
+ }
+ try:
+ signin_resp = self.session.post(
+ f"{chatgpt_base}/api/auth/signin/openai",
+ headers=signin_headers,
+ data=signin_form,
+ timeout=15,
+ verify=False,
+ allow_redirects=False,
+ )
+ except Exception as error:
+ return False, f"chatgpt_signin_openai_failed:{error}"
+
+ auth_url = ""
+ try:
+ signin_payload = signin_resp.json()
+ except Exception:
+ signin_payload = {}
+ if isinstance(signin_payload, dict):
+ auth_url = str(signin_payload.get("url") or "").strip()
+ if not auth_url and signin_resp.status_code in (301, 302, 303, 307, 308):
+ auth_url = str(signin_resp.headers.get("Location") or "").strip()
+ if not auth_url:
+ return False, "chatgpt_signin_openai_missing_auth_url"
+
+ try:
+ self.session.get(auth_url, headers=NAVIGATE_HEADERS, timeout=20, verify=False)
+ except Exception as error:
+ return False, f"chatgpt_auth_follow_failed:{error}"
+
+ has_login_session = any(c.name == "login_session" for c in self.session.cookies)
+ if not has_login_session:
+ return False, "login_session_missing"
+ return True, ""
+
+ return self._run_step_with_retry("0a_chatgpt_web", _do_init)
+
+ def _submit_signup_email(self, email: str) -> tuple[bool, str]:
+ def _do_submit() -> tuple[bool, str]:
+ headers = self._build_headers(f"{OPENAI_AUTH_BASE}/create-account")
+ sentinel = build_sentinel_token(self.session, self.device_id, flow="authorize_continue")
+ if sentinel:
+ headers["openai-sentinel-token"] = sentinel
+ try:
+ response = self.session.post(
+ f"{OPENAI_AUTH_BASE}/api/accounts/authorize/continue",
+ json={"username": {"kind": "email", "value": email}, "screen_hint": "signup"},
+ headers=headers,
+ verify=False,
+ timeout=30,
+ )
+ except Exception as error:
+ return False, f"authorize_continue_failed:{error}"
+ if response.status_code != 200:
+ return False, f"authorize_continue_http_{response.status_code}"
+ return True, ""
+
+ return self._run_step_with_retry("0b_authorize_continue", _do_submit)
+
+ def step0_init_oauth_session(self, email: str, client_id: str, redirect_uri: str) -> bool:
+ last_reason = "init_oauth_session_failed"
+ for index, mode in enumerate(self._entry_mode_candidates(), start=1):
+ if mode == "chatgpt_web":
+ ok, reason = self._init_session_via_chatgpt_web()
+ else:
+ ok, reason = self._init_session_via_direct_auth(client_id=client_id, redirect_uri=redirect_uri)
+ if not ok:
+ last_reason = reason or f"{mode}_failed"
+ if index < len(self._entry_mode_candidates()):
+ self.logger.warning("会话入口 %s 失败: %s,尝试下一个入口", mode, last_reason)
+ continue
+
+ ok, reason = self._submit_signup_email(email)
+ if ok:
+ if index > 1:
+ self.logger.info("入口回退成功: mode=%s", mode)
+ return True
+ last_reason = reason or "authorize_continue_failed"
+
+ self.logger.warning("步骤0失败: %s", last_reason)
+ self._set_failure("step0_init_oauth_session", last_reason)
+ return False
+
+ def step2_register_user(self, email: str, password: str) -> bool:
+ def _do_register() -> tuple[bool, str]:
+ headers = self._build_headers(
+ f"{OPENAI_AUTH_BASE}/create-account/password",
+ with_sentinel=True,
+ )
+ try:
+ resp = self.session.post(
+ f"{OPENAI_AUTH_BASE}/api/accounts/user/register",
+ json={"username": email, "password": password},
+ headers=headers,
+ verify=False,
+ timeout=30,
+ )
+ except Exception as error:
+ return False, f"user_register_failed:{error}"
+ if resp.status_code == 200:
+ return True, ""
+ if resp.status_code in (301, 302):
+ loc = str(resp.headers.get("Location") or "")
+ ok_redirect = "email-otp" in loc or "email-verification" in loc
+ if ok_redirect:
+ return True, ""
+ return False, f"user_register_redirect_invalid:{loc}"
+ return False, f"user_register_http_{resp.status_code}"
+
+ ok, reason = self._run_step_with_retry("2_register_user", _do_register)
+ if not ok:
+ self._set_failure("step2_register_user", reason)
+ self.logger.warning("步骤2失败: email=%s reason=%s", email, reason)
+ return ok
+
+ def step3_send_otp(self) -> bool:
+ headers = dict(NAVIGATE_HEADERS)
+ headers["referer"] = f"{OPENAI_AUTH_BASE}/create-account/password"
+
+ def _do_send() -> tuple[bool, str]:
+ try:
+ r_send = self.session.get(
+ f"{OPENAI_AUTH_BASE}/api/accounts/email-otp/send",
+ headers=headers,
+ verify=False,
+ timeout=30,
+ allow_redirects=True,
+ )
+ except Exception as error:
+ return False, f"email_otp_send_failed:{error}"
+ if r_send.status_code not in (200, 204, 301, 302):
+ return False, f"email_otp_send_http_{r_send.status_code}"
+ return True, ""
+
+ ok, reason = self._run_step_with_retry("3_send_otp", _do_send)
+ if not ok:
+ self._set_failure("step3_send_otp", reason)
+ self.logger.warning("步骤3失败: send_otp reason=%s", reason)
+ return False
+
+ def _open_verify_page() -> tuple[bool, str]:
+ try:
+ r_page = self.session.get(
+ f"{OPENAI_AUTH_BASE}/email-verification",
+ headers=headers,
+ verify=False,
+ timeout=30,
+ allow_redirects=True,
+ )
+ except Exception as error:
+ return False, f"email_verification_page_failed:{error}"
+ if r_page.status_code >= 400:
+ return False, f"email_verification_page_http_{r_page.status_code}"
+ return True, ""
+
+ ok, reason = self._run_step_with_retry("3_open_verification_page", _open_verify_page)
+ if not ok:
+ self._set_failure("step3_open_verification_page", reason)
+ self.logger.warning("步骤3失败: open_verification_page reason=%s", reason)
+ return ok
+
+ def step4_validate_otp(self, code: str) -> bool:
+ last_reason = "otp_validate_failed"
+ tried_normal = False
+ for mode in self.register_otp_validate_order:
+ include_sentinel = mode == "sentinel"
+ headers = self._build_headers(
+ f"{OPENAI_AUTH_BASE}/email-verification",
+ with_sentinel=include_sentinel,
+ )
+ if include_sentinel and tried_normal:
+ self.logger.warning("步骤4告警: 普通 OTP 校验失败,尝试 Sentinel fallback")
+
+ def _do_validate() -> tuple[bool, str]:
+ try:
+ response = self.session.post(
+ f"{OPENAI_AUTH_BASE}/api/accounts/email-otp/validate",
+ json={"code": code},
+ headers=headers,
+ verify=False,
+ timeout=30,
+ )
+ except Exception as error:
+ return False, f"email_otp_validate_failed:{error}"
+ if response.status_code == 200:
+ return True, ""
+ return False, f"email_otp_validate_http_{response.status_code}"
+
+ ok, reason = self._run_step_with_retry(f"4_validate_otp_{mode}", _do_validate)
+ if ok:
+ if include_sentinel:
+ self.logger.info("步骤4成功: OTP Sentinel fallback 命中")
+ return True
+ last_reason = reason or last_reason
+ tried_normal = tried_normal or not include_sentinel
+
+ self.logger.warning("步骤4失败: code=%s reason=%s", code, last_reason)
+ self._set_failure("step4_validate_otp", last_reason)
+ return False
+
+ def step5_create_account(self, first_name: str, last_name: str, birthdate: str) -> bool:
+ body = {"name": f"{first_name} {last_name}", "birthdate": birthdate}
+
+ def _do_create() -> tuple[bool, str]:
+ headers = self._build_headers(f"{OPENAI_AUTH_BASE}/about-you", with_sentinel=True)
+ try:
+ response = self.session.post(
+ f"{OPENAI_AUTH_BASE}/api/accounts/create_account",
+ json=body,
+ headers=headers,
+ verify=False,
+ timeout=30,
+ )
+ except Exception as error:
+ return False, f"create_account_failed:{error}"
+
+ response_payload: Dict[str, Any] = {}
+ if str(response.headers.get("content-type") or "").startswith("application/json"):
+ try:
+ payload = response.json()
+ except Exception:
+ payload = {}
+ if isinstance(payload, dict):
+ response_payload = payload
+
+ if requires_phone_verification(
+ response_payload,
+ response.text,
+ markers=self.phone_markers,
+ ):
+ if self.register_phone_action == "fail_fast":
+ return False, "create_account_phone_verification_required"
+ # self.logger.warning("步骤5告警: 命中手机验证风控,按策略保留成功态继续后续 OAuth")
+
+ if self.flow_trace is not None:
+ self.flow_trace.record(
+ "registration_create_account_response",
+ status_code=response.status_code,
+ response=build_response_trace_payload(
+ response,
+ reveal_sensitive=self.flow_trace.reveal_sensitive,
+ body_limit=self.flow_trace.body_limit,
+ ),
+ phone_verification=requires_phone_verification(
+ response_payload,
+ response.text,
+ markers=self.phone_markers,
+ ),
+ )
+
+ if response.status_code == 200:
+ self._capture_registration_tokens(
+ response_payload,
+ response_headers=response.headers,
+ response_url=str(response.url or ""),
+ response_text=response.text,
+ )
+ return True, ""
+ if response.status_code in (301, 302):
+ self._capture_registration_tokens(
+ response_payload,
+ response_headers=response.headers,
+ response_url=str(response.url or ""),
+ response_text=response.text,
+ )
+ return True, ""
+ if response.status_code == 400 and "already_exists" in response.text.lower():
+ return True, ""
+ return False, f"create_account_http_{response.status_code}"
+
+ ok, reason = self._run_step_with_retry("5_create_account", _do_create)
+ if not ok:
+ self._set_failure("step5_create_account", reason)
+ self.logger.warning("步骤5失败: reason=%s", reason)
+ return ok
+
+ def register(
+ self,
+ email: str,
+ password: str,
+ client_id: str,
+ redirect_uri: str,
+ mailbox: Mailbox,
+ mail_provider: MailProviderBase,
+ otp_timeout_seconds: int = 120,
+ otp_poll_interval_seconds: float = 3.0,
+ log_context: str = "",
+ ) -> bool:
+ self.last_failure_stage = ""
+ self.last_failure_detail = ""
+ self.registration_auth_code = ""
+ self.registration_tokens = None
+ first_name, last_name = generate_random_name()
+ birthdate = generate_random_birthday()
+ self.logger.info(
+ apply_log_context("注册流程启动: email=%s provider=%s", log_context),
+ email,
+ mail_provider.provider_name,
+ )
+ if not self.step0_init_oauth_session(email, client_id, redirect_uri):
+ if not self.last_failure_stage:
+ self._set_failure("step0_init_oauth_session")
+ self.logger.warning(apply_log_context("注册失败: step0_init_oauth_session | email=%s", log_context), email)
+ return False
+ self.logger.info(apply_log_context("注册: 会话初始化成功", log_context))
+ time.sleep(1)
+ if not self.step2_register_user(email, password):
+ if not self.last_failure_stage:
+ self._set_failure("step2_register_user")
+ self.logger.warning(apply_log_context("注册失败: step2_register_user | email=%s", log_context), email)
+ return False
+ self.logger.info(apply_log_context("注册: 邮箱/密码提交成功", log_context))
+ time.sleep(1)
+ otp_requested_at = time.time()
+ if not self.step3_send_otp():
+ if not self.last_failure_stage:
+ self._set_failure("step3_send_otp")
+ self.logger.warning(apply_log_context("注册失败: step3_send_otp | email=%s", log_context), email)
+ return False
+ self.logger.info(apply_log_context("注册: 已发送邮箱验证码", log_context))
+ code = mail_provider.wait_for_verification_code(
+ mailbox,
+ email=email,
+ timeout=max(30, int(otp_timeout_seconds or 120)),
+ not_before_ts=otp_requested_at,
+ poll_interval_seconds=otp_poll_interval_seconds,
+ log_context=log_context,
+ )
+ if not code:
+ self._set_failure("register_mail_otp_timeout", f"provider={mail_provider.provider_name}")
+ self.logger.warning(apply_log_context("注册失败: 未收到验证码 | email=%s", log_context), email)
+ return False
+ self.logger.info(apply_log_context("注册: 开始校验邮箱验证码", log_context))
+ if not self.step4_validate_otp(code):
+ if not self.last_failure_stage:
+ self._set_failure("step4_validate_otp")
+ self.logger.warning(apply_log_context("注册失败: step4_validate_otp | email=%s", log_context), email)
+ return False
+ self.logger.info(apply_log_context("注册: 邮箱验证码校验成功", log_context))
+ time.sleep(1)
+ self.logger.info(
+ apply_log_context("注册: 生成用户信息 -> %s %s / %s", log_context),
+ first_name,
+ last_name,
+ birthdate,
+ )
+ ok = self.step5_create_account(first_name, last_name, birthdate)
+ if not ok:
+ if not self.last_failure_stage:
+ self._set_failure("step5_create_account")
+ self.logger.warning(apply_log_context("注册失败: step5_create_account | email=%s", log_context), email)
+ return False
+ self.logger.info(apply_log_context("注册: 账号创建成功", log_context))
+ if has_complete_auth_tokens(self.registration_tokens):
+ self.logger.info(apply_log_context("注册: 已捕获完整 token,准备直接保存", log_context))
+ else:
+ self.logger.info(apply_log_context("注册: 注册完成,继续进入 OAuth", log_context))
+ return ok
+
+ def exchange_codex_tokens(self, client_id: str, redirect_uri: str) -> Optional[Dict[str, Any]]:
+ if not self.code_verifier:
+ self.logger.warning("注册会话缺少 code_verifier,无法直接换取 OAuth token")
+ return None
+
+ consent_url = f"{OPENAI_AUTH_BASE}/sign-in-with-chatgpt/codex/consent"
+ return exchange_codex_tokens_from_session(
+ session=self.session,
+ device_id=self.device_id,
+ code_verifier=self.code_verifier,
+ consent_url=consent_url,
+ oauth_issuer=OPENAI_AUTH_BASE,
+ oauth_client_id=client_id,
+ oauth_redirect_uri=redirect_uri,
+ proxy=self.proxy,
+ )
+
+
def codex_exchange_code(
    code: str,
    code_verifier: str,
    oauth_issuer: str,
    oauth_client_id: str,
    oauth_redirect_uri: str,
    proxy: str,
) -> Optional[Dict[str, Any]]:
    """Redeem an OAuth authorization code for tokens via the PKCE flow.

    POSTs the authorization-code grant to `{oauth_issuer}/oauth/token` through
    a proxied session. Returns the parsed JSON dict on HTTP 200, or None for
    any non-200 status, network error, or non-dict payload.
    """
    grant_form = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": oauth_redirect_uri,
        "client_id": oauth_client_id,
        "code_verifier": code_verifier,
    }
    http = create_session(proxy=proxy)
    try:
        response = http.post(
            f"{oauth_issuer}/oauth/token",
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            data=grant_form,
            verify=False,
            timeout=60,
        )
        if response.status_code != 200:
            return None
        payload = response.json()
    except Exception:
        # Best-effort exchange: any failure collapses to "no tokens".
        return None
    return payload if isinstance(payload, dict) else None
+
+
def request_with_local_retry(
    session: requests.Session,
    method: str,
    url: str,
    *,
    retry_attempts: int,
    error_prefix: str,
    transient_markers: tuple[str, ...] = TRANSIENT_FLOW_MARKERS_DEFAULT,
    logger: Optional[logging.Logger] = None,
    flow_trace: Optional[FlowTraceRecorder] = None,
    **request_kwargs: Any,
) -> tuple[Optional[requests.Response], str]:
    """Issue one HTTP request with bounded, transient-only local retries.

    `method` names a session method ("get"/"post"/...); `request_kwargs` are
    passed through to it. Only failures whose reason string matches
    `transient_markers` are retried, with a capped backoff of
    min(0.8, 0.2 * attempt) seconds. Every attempt/exception/response is
    optionally recorded on `flow_trace`.

    Returns (response, "") on any HTTP response that is not a retryable
    status (the caller inspects the status code), or (None, reason) when the
    request kept failing.
    """
    request_fn = getattr(session, method)
    safe_attempts = max(1, int(retry_attempts))
    for attempt in range(1, safe_attempts + 1):
        started_at = time.time()
        if flow_trace is not None:
            flow_trace.record(
                "http_attempt",
                error_prefix=error_prefix,
                attempt=attempt,
                total_attempts=safe_attempts,
                request={
                    "method": method.upper(),
                    "url": url,
                    "headers": request_kwargs.get("headers", {}),
                    "json": request_kwargs.get("json"),
                    "data": request_kwargs.get("data"),
                    "timeout": request_kwargs.get("timeout"),
                    "allow_redirects": request_kwargs.get("allow_redirects"),
                    "verify": request_kwargs.get("verify"),
                },
                session_cookies=describe_session_cookies(session, reveal_sensitive=flow_trace.reveal_sensitive),
            )
        try:
            response = request_fn(url, **request_kwargs)
        except Exception as error:
            # Network-level failure: classify, trace, and maybe retry.
            reason = f"{error_prefix}_exception:{error}"
            if flow_trace is not None:
                flow_trace.record(
                    "http_exception",
                    error_prefix=error_prefix,
                    attempt=attempt,
                    total_attempts=safe_attempts,
                    elapsed_ms=round((time.time() - started_at) * 1000, 2),
                    reason=reason,
                    error=repr(error),
                    session_cookies=describe_session_cookies(session, reveal_sensitive=flow_trace.reveal_sensitive),
                )
            if attempt < safe_attempts and is_transient_flow_error(reason, transient_markers):
                if logger:
                    logger.warning(
                        "请求%s瞬时异常,第 %s/%s 次失败: %s,局部重试",
                        error_prefix,
                        attempt,
                        safe_attempts,
                        error,
                    )
                if flow_trace is not None:
                    sleep_seconds = min(0.8, 0.2 * attempt)
                    flow_trace.record(
                        "http_retry_scheduled",
                        error_prefix=error_prefix,
                        attempt=attempt,
                        total_attempts=safe_attempts,
                        reason=reason,
                        sleep_seconds=sleep_seconds,
                    )
                # Same capped backoff as traced above.
                time.sleep(min(0.8, 0.2 * attempt))
                continue
            return None, reason

        # Got a response: retry only if the synthetic "<prefix>_http_<code>"
        # reason matches a transient marker; otherwise hand it back.
        reason = f"{error_prefix}_http_{response.status_code}"
        if flow_trace is not None:
            flow_trace.record(
                "http_response",
                error_prefix=error_prefix,
                attempt=attempt,
                total_attempts=safe_attempts,
                elapsed_ms=round((time.time() - started_at) * 1000, 2),
                response=build_response_trace_payload(
                    response,
                    reveal_sensitive=flow_trace.reveal_sensitive,
                    body_limit=flow_trace.body_limit,
                ),
                session_cookies=describe_session_cookies(session, reveal_sensitive=flow_trace.reveal_sensitive),
            )
        if attempt < safe_attempts and is_transient_flow_error(reason, transient_markers):
            if logger:
                logger.warning(
                    "请求%s返回 HTTP %s,第 %s/%s 次重试",
                    error_prefix,
                    response.status_code,
                    attempt,
                    safe_attempts,
                )
            if flow_trace is not None:
                sleep_seconds = min(0.8, 0.2 * attempt)
                flow_trace.record(
                    "http_retry_scheduled",
                    error_prefix=error_prefix,
                    attempt=attempt,
                    total_attempts=safe_attempts,
                    reason=reason,
                    sleep_seconds=sleep_seconds,
                )
            time.sleep(min(0.8, 0.2 * attempt))
            continue
        # Non-retryable response (success or hard error): caller decides.
        return response, ""
    # All attempts exhausted on retryable statuses.
    return None, f"{error_prefix}_failed"
+
+
def validate_otp_with_fallback(
    *,
    session: requests.Session,
    oauth_issuer: str,
    device_id: str,
    code: str,
    base_headers: Dict[str, str],
    retry_attempts: int,
    otp_validate_order: tuple[str, ...],
    transient_markers: tuple[str, ...],
    logger: Optional[logging.Logger] = None,
    flow_trace: Optional[FlowTraceRecorder] = None,
    log_context: str = "",
) -> tuple[Optional[requests.Response], str]:
    """Validate an email OTP, trying each mode in `otp_validate_order`.

    Modes: "normal" posts with `base_headers` as-is; "sentinel" additionally
    attaches an openai-sentinel-token (skipping the mode entirely if the
    token cannot be built). Each mode's POST goes through
    request_with_local_retry for transient retries.

    Returns (response, "") on the first HTTP 200, else (None, last_reason)
    after all modes are exhausted.
    """
    last_reason = "oauth_email_otp_validate_failed"
    tried_normal = False

    for mode in otp_validate_order:
        # Fresh copy so a sentinel header never leaks into the next mode.
        headers = dict(base_headers)
        if flow_trace is not None:
            flow_trace.record(
                "oauth_otp_validate_mode_start",
                mode=mode,
                tried_normal=tried_normal,
            )
        if mode == "sentinel":
            if tried_normal and logger:
                logger.warning(apply_log_context("OAuth: 普通 OTP 校验失败,尝试 Sentinel fallback", log_context))
            sentinel_token = build_sentinel_token(session, device_id, flow="authorize_continue")
            if not sentinel_token:
                # Cannot run sentinel mode without a token; move on.
                last_reason = "oauth_email_otp_validate_sentinel_failed"
                if flow_trace is not None:
                    flow_trace.record("oauth_otp_validate_mode_failed", mode=mode, reason=last_reason)
                continue
            headers["openai-sentinel-token"] = sentinel_token

        response, error = request_with_local_retry(
            session,
            "post",
            f"{oauth_issuer}/api/accounts/email-otp/validate",
            retry_attempts=retry_attempts,
            error_prefix="oauth_email_otp_validate",
            transient_markers=transient_markers,
            logger=logger,
            flow_trace=flow_trace,
            json={"code": code},
            headers=headers,
            verify=False,
            timeout=30,
        )
        if response is None:
            last_reason = error or last_reason
            if flow_trace is not None:
                flow_trace.record("oauth_otp_validate_mode_failed", mode=mode, reason=last_reason)
            tried_normal = tried_normal or mode == "normal"
            continue
        if response.status_code == 200:
            if flow_trace is not None:
                flow_trace.record("oauth_otp_validate_mode_success", mode=mode, status_code=response.status_code)
            return response, ""
        last_reason = f"oauth_email_otp_validate_http_{response.status_code}"
        if flow_trace is not None:
            flow_trace.record("oauth_otp_validate_mode_failed", mode=mode, reason=last_reason)
        tried_normal = tried_normal or mode == "normal"

    return None, last_reason
+
+
def exchange_codex_tokens_from_session(
    session: requests.Session,
    device_id: str,
    code_verifier: str,
    consent_url: str,
    oauth_issuer: str,
    oauth_client_id: str,
    oauth_redirect_uri: str,
    proxy: str,
) -> Optional[Dict[str, Any]]:
    """Turn an authenticated session into Codex OAuth tokens.

    First walks the consent flow on `session` to pull out an authorization
    code, then redeems that code (with the PKCE verifier) at the token
    endpoint. Returns None when no code could be extracted or the exchange
    fails.
    """
    granted_code = extract_auth_code_from_consent_session(
        session=session,
        consent_url=consent_url,
        oauth_issuer=oauth_issuer,
        device_id=device_id,
    )
    if granted_code:
        return codex_exchange_code(
            granted_code,
            code_verifier,
            oauth_issuer=oauth_issuer,
            oauth_client_id=oauth_client_id,
            oauth_redirect_uri=oauth_redirect_uri,
            proxy=proxy,
        )
    return None
+
+
def extract_oauth_callback_params_from_consent_session(
    session: requests.Session,
    consent_url: str,
    oauth_issuer: str,
    device_id: str = "",
    flow_trace: Optional[FlowTraceRecorder] = None,
) -> Optional[Dict[str, str]]:
    """Chase the consent flow until an OAuth callback (code/state) appears.

    Strategy, in order:
      1. GET `consent_url` without following redirects and inspect Location.
      2. If that yields nothing, decode the `oai-client-auth-session` cookie,
         select the first workspace (and, if prompted, the first org/project),
         following each returned redirect/continue_url.
      3. Last resort: GET `consent_url` with redirects enabled and scan the
         final URL plus the redirect history.

    A ConnectionError whose message embeds a localhost URL is treated as the
    callback landing on a local redirect_uri and is parsed from the message.
    Returns the callback params dict, or None if no code was ever found.
    """
    if not consent_url:
        return None

    # Allow issuer-relative consent paths.
    if consent_url.startswith("/"):
        consent_url = f"{oauth_issuer}{consent_url}"

    def _decode_auth_session(session_obj: requests.Session) -> Optional[Dict[str, Any]]:
        """Decode the first dot-separated segment of the
        oai-client-auth-session cookie as base64url JSON; None on any
        failure."""
        for c in session_obj.cookies:
            if c.name == "oai-client-auth-session":
                val = c.value
                first_part = val.split(".")[0] if "." in val else val
                # Re-pad base64url to a multiple of 4 before decoding.
                pad = 4 - len(first_part) % 4
                if pad != 4:
                    first_part += "=" * pad
                try:
                    raw = base64.urlsafe_b64decode(first_part)
                    data = json.loads(raw.decode("utf-8"))
                    return data if isinstance(data, dict) else None
                except Exception:
                    pass
        return None

    def _follow_and_extract_callback_params(
        session_obj: requests.Session,
        url: str,
        max_depth: int = 10,
    ) -> Optional[Dict[str, str]]:
        """Follow redirects one hop at a time (bounded by max_depth),
        checking each Location for OAuth callback params."""
        if max_depth <= 0:
            return None
        try:
            r = session_obj.get(
                url,
                headers=NAVIGATE_HEADERS,
                verify=False,
                timeout=15,
                allow_redirects=False,
            )
            if r.status_code in (301, 302, 303, 307, 308):
                loc = r.headers.get("Location", "")
                callback_params = extract_oauth_callback_params_from_url(loc)
                if callback_params:
                    return callback_params
                if loc.startswith("/"):
                    loc = f"{oauth_issuer}{loc}"
                return _follow_and_extract_callback_params(session_obj, loc, max_depth - 1)
            if r.status_code == 200:
                return extract_oauth_callback_params_from_url(str(r.url))
        except requests.exceptions.ConnectionError as e:
            # A localhost redirect_uri is often unreachable; the target URL
            # (with the code) survives inside the exception message.
            m = re.search(r'(https?://localhost[^\s\'"]+)', str(e))
            if m:
                return extract_oauth_callback_params_from_url(m.group(1))
        except Exception:
            pass
        return None

    callback_params = None
    if flow_trace is not None:
        flow_trace.record("registration_consent_follow_start", consent_url=consent_url)

    # Attempt 1: a single non-following GET on the consent URL.
    try:
        resp_consent = session.get(
            consent_url,
            headers=NAVIGATE_HEADERS,
            verify=False,
            timeout=30,
            allow_redirects=False,
        )
        if resp_consent.status_code in (301, 302, 303, 307, 308):
            loc = resp_consent.headers.get("Location", "")
            callback_params = extract_oauth_callback_params_from_url(loc)
            if not callback_params:
                callback_params = _follow_and_extract_callback_params(session, loc)
    except requests.exceptions.ConnectionError as e:
        m = re.search(r'(https?://localhost[^\s\'"]+)', str(e))
        if m:
            callback_params = extract_oauth_callback_params_from_url(m.group(1))
    except Exception:
        pass

    # Attempt 2: drive the workspace/organization selection API manually.
    if not callback_params:
        session_data = _decode_auth_session(session)
        workspace_id = None
        if session_data:
            workspaces = session_data.get("workspaces", [])
            if isinstance(workspaces, list) and workspaces:
                # Picks the first workspace unconditionally.
                workspace_id = (workspaces[0] or {}).get("id")

        if workspace_id:
            h_consent = dict(COMMON_HEADERS)
            h_consent["referer"] = consent_url
            h_consent["oai-device-id"] = device_id
            h_consent.update(generate_datadog_trace())

            try:
                resp_ws = session.post(
                    f"{oauth_issuer}/api/accounts/workspace/select",
                    json={"workspace_id": workspace_id},
                    headers=h_consent,
                    verify=False,
                    timeout=30,
                    allow_redirects=False,
                )
                if resp_ws.status_code in (301, 302, 303, 307, 308):
                    loc = resp_ws.headers.get("Location", "")
                    callback_params = extract_oauth_callback_params_from_url(loc)
                    if not callback_params:
                        callback_params = _follow_and_extract_callback_params(session, loc)
                elif resp_ws.status_code == 200:
                    ws_data = resp_ws.json()
                    ws_next = str(ws_data.get("continue_url") or "")
                    ws_page = str(((ws_data.get("page") or {}).get("type")) or "")

                    # Some accounts get an extra organization-selection page.
                    if "organization" in ws_next or "organization" in ws_page:
                        org_url = ws_next if ws_next.startswith("http") else f"{oauth_issuer}{ws_next}"

                        org_id = None
                        project_id = None
                        ws_orgs = (ws_data.get("data") or {}).get("orgs", []) if isinstance(ws_data, dict) else []
                        if ws_orgs:
                            # First org (and its first project, if any).
                            org_id = (ws_orgs[0] or {}).get("id")
                            projects = (ws_orgs[0] or {}).get("projects", [])
                            if projects:
                                project_id = (projects[0] or {}).get("id")

                        if org_id:
                            body = {"org_id": org_id}
                            if project_id:
                                body["project_id"] = project_id

                            h_org = dict(COMMON_HEADERS)
                            h_org["referer"] = org_url
                            h_org["oai-device-id"] = device_id
                            h_org.update(generate_datadog_trace())

                            resp_org = session.post(
                                f"{oauth_issuer}/api/accounts/organization/select",
                                json=body,
                                headers=h_org,
                                verify=False,
                                timeout=30,
                                allow_redirects=False,
                            )
                            if resp_org.status_code in (301, 302, 303, 307, 308):
                                loc = resp_org.headers.get("Location", "")
                                callback_params = extract_oauth_callback_params_from_url(loc)
                                if not callback_params:
                                    callback_params = _follow_and_extract_callback_params(session, loc)
                            elif resp_org.status_code == 200:
                                org_data = resp_org.json()
                                org_next = str(org_data.get("continue_url") or "")
                                if org_next:
                                    full_next = org_next if org_next.startswith("http") else f"{oauth_issuer}{org_next}"
                                    callback_params = _follow_and_extract_callback_params(session, full_next)
                        else:
                            # No org id extractable: just follow the org page.
                            callback_params = _follow_and_extract_callback_params(session, org_url)
                    elif ws_next:
                        full_next = ws_next if ws_next.startswith("http") else f"{oauth_issuer}{ws_next}"
                        callback_params = _follow_and_extract_callback_params(session, full_next)
            except Exception:
                # Entire selection branch is best-effort; fall through.
                pass

    # Attempt 3: follow all redirects automatically and scan URL + history.
    if not callback_params:
        try:
            resp_fallback = session.get(
                consent_url,
                headers=NAVIGATE_HEADERS,
                verify=False,
                timeout=30,
                allow_redirects=True,
            )
            callback_params = extract_oauth_callback_params_from_url(str(resp_fallback.url))
            if not callback_params and resp_fallback.history:
                for hist in resp_fallback.history:
                    loc = hist.headers.get("Location", "")
                    callback_params = extract_oauth_callback_params_from_url(loc)
                    if callback_params:
                        break
        except requests.exceptions.ConnectionError as e:
            m = re.search(r'(https?://localhost[^\s\'"]+)', str(e))
            if m:
                callback_params = extract_oauth_callback_params_from_url(m.group(1))
        except Exception:
            pass

    if flow_trace is not None:
        flow_trace.record(
            "registration_consent_follow_result",
            consent_url=consent_url,
            auth_code=str((callback_params or {}).get("code") or ""),
        )

    return callback_params
+
+
def extract_auth_code_from_consent_session(
    session: requests.Session,
    consent_url: str,
    oauth_issuer: str,
    device_id: str = "",
    flow_trace: Optional[FlowTraceRecorder] = None,
) -> Optional[str]:
    """Convenience wrapper: resolve the consent flow and return only the auth code.

    Delegates to extract_oauth_callback_params_from_consent_session and picks the
    "code" entry out of the resulting callback parameters.  Returns None when the
    flow yielded no usable code.
    """
    params = extract_oauth_callback_params_from_consent_session(
        session=session,
        consent_url=consent_url,
        oauth_issuer=oauth_issuer,
        device_id=device_id,
        flow_trace=flow_trace,
    )
    code = str((params or {}).get("code") or "").strip()
    return code if code else None
+
+
def perform_codex_oauth_login_http(
    email: str,
    password: str,
    oauth_issuer: str,
    oauth_client_id: str,
    oauth_redirect_uri: str,
    proxy: str,
    mail_provider: Optional[MailProviderBase] = None,
    mailbox: Optional[Mailbox] = None,
    otp_timeout_seconds: int = 120,
    otp_poll_interval_seconds: float = 2.0,
    local_retry_attempts: int = 1,
    transient_markers: tuple[str, ...] = TRANSIENT_FLOW_MARKERS_DEFAULT,
    otp_validate_order: tuple[str, ...] = ("normal", "sentinel"),
    phone_markers: tuple[str, ...] = PHONE_VERIFICATION_MARKERS_DEFAULT,
    password_phone_action: str = "warn_and_continue",
    otp_phone_action: str = "warn_and_continue",
    logger: Optional[logging.Logger] = None,
    flow_trace: Optional[FlowTraceRecorder] = None,
    failure_state: Optional[Dict[str, str]] = None,
    log_context: str = "",
) -> Optional[Dict[str, Any]]:
    """Drive the full Codex OAuth login over raw HTTP requests.

    Stages, in order: authorize bootstrap -> account identification
    (authorize/continue) -> password verify -> optional email-OTP loop ->
    optional about-you / account creation -> consent -> token exchange.

    Returns the token dict from exchange_codex_tokens_from_session on success,
    or None on any failure; the failing stage name and detail are written into
    ``failure_state`` (if provided) and emitted via the trace recorder/logger.

    NOTE(review): all requests use verify=False; presumably to tolerate the
    proxy's TLS interception — confirm before hardening.
    """
    active_trace = flow_trace or getattr(logger, "flow_trace", None)
    # Reset caller-provided failure state so stale values never leak through.
    if failure_state is not None:
        failure_state.clear()
        failure_state["stage"] = ""
        failure_state["detail"] = ""

    def fail(stage: str, detail: str = "") -> Optional[Dict[str, Any]]:
        # Record the failing stage everywhere (state dict, trace, log) and
        # return None so callers can write `return fail(...)` directly.
        # Closes over `session`, which is assigned below; fail is only invoked
        # after that assignment.
        if failure_state is not None:
            failure_state["stage"] = str(stage or "")
            failure_state["detail"] = str(detail or "")
        if active_trace is not None:
            active_trace.record(
                "oauth_flow_fail",
                email=email,
                stage=stage,
                detail=detail,
                session_cookies=describe_session_cookies(session, reveal_sensitive=active_trace.reveal_sensitive),
            )
        if logger:
            logger.warning(
                apply_log_context("OAuth流程失败: stage=%s email=%s detail=%s", log_context),
                stage,
                email,
                detail or "-",
            )
        return None

    # Normalize tunables to safe values/choices before the flow starts.
    safe_local_retry_attempts = max(1, int(local_retry_attempts))
    safe_transient_markers = parse_marker_config(
        transient_markers,
        fallback=TRANSIENT_FLOW_MARKERS_DEFAULT,
    )
    safe_otp_validate_order = parse_otp_validate_order(otp_validate_order)
    safe_password_phone_action = parse_choice(
        password_phone_action,
        allowed=("warn_and_continue", "fail_fast"),
        fallback="warn_and_continue",
    )
    safe_otp_phone_action = parse_choice(
        otp_phone_action,
        allowed=("warn_and_continue", "fail_fast"),
        fallback="warn_and_continue",
    )
    safe_phone_markers = parse_marker_config(phone_markers, fallback=PHONE_VERIFICATION_MARKERS_DEFAULT)

    session = create_session(proxy=proxy)
    device_id = str(uuid.uuid4())
    if active_trace is not None:
        active_trace.record(
            "oauth_flow_start",
            email=email,
            oauth_issuer=oauth_issuer,
            oauth_client_id=oauth_client_id,
            oauth_redirect_uri=oauth_redirect_uri,
            local_retry_attempts=safe_local_retry_attempts,
            otp_validate_order=safe_otp_validate_order,
            password_phone_action=safe_password_phone_action,
            otp_phone_action=safe_otp_phone_action,
        )
    if logger:
        logger.info(apply_log_context("OAuth流程启动: email=%s", log_context), email)

    # Pin the device id cookie on both the bare and dotted auth domains so it
    # sticks regardless of how the server scopes its Set-Cookie responses.
    session.cookies.set("oai-did", device_id, domain=".auth.openai.com")
    session.cookies.set("oai-did", device_id, domain="auth.openai.com")

    # PKCE pair + CSRF state for the authorize request.
    code_verifier, code_challenge = generate_pkce()
    state = secrets.token_urlsafe(32)

    authorize_params = {
        "response_type": "code",
        "client_id": oauth_client_id,
        "redirect_uri": oauth_redirect_uri,
        "scope": "openid profile email offline_access",
        "code_challenge": code_challenge,
        "code_challenge_method": "S256",
        "state": state,
    }
    authorize_url = f"{oauth_issuer}/oauth/authorize?{urlencode(authorize_params)}"

    # Stage 1: bootstrap the authorize page (collects server-side session state).
    authorize_resp, authorize_error = request_with_local_retry(
        session,
        "get",
        authorize_url,
        retry_attempts=safe_local_retry_attempts,
        error_prefix="authorize_bootstrap_request",
        transient_markers=safe_transient_markers,
        logger=logger,
        flow_trace=active_trace,
        headers=NAVIGATE_HEADERS,
        allow_redirects=True,
        verify=False,
        timeout=30,
    )
    if authorize_resp is None:
        return fail("authorize_bootstrap_request", authorize_error)
    if logger:
        logger.info(apply_log_context("OAuth: authorize 引导成功", log_context))

    # Stage 2: identify the account by email (needs a sentinel anti-bot token).
    headers = dict(COMMON_HEADERS)
    headers["referer"] = f"{oauth_issuer}/log-in"
    headers["oai-device-id"] = device_id
    headers.update(generate_datadog_trace())

    sentinel_email = build_sentinel_token(session, device_id, flow="authorize_continue")
    if not sentinel_email:
        return fail("authorize_continue_sentinel")
    headers["openai-sentinel-token"] = sentinel_email

    resp, continue_error = request_with_local_retry(
        session,
        "post",
        f"{oauth_issuer}/api/accounts/authorize/continue",
        retry_attempts=safe_local_retry_attempts,
        error_prefix="authorize_continue_request",
        transient_markers=safe_transient_markers,
        logger=logger,
        flow_trace=active_trace,
        json={"username": {"kind": "email", "value": email}, "screen_hint": "login"},
        headers=headers,
        verify=False,
        timeout=30,
    )
    if resp is None:
        return fail("authorize_continue_request", continue_error)

    if resp.status_code != 200:
        return fail("authorize_continue_status", f"http={resp.status_code}")
    if logger:
        logger.info(apply_log_context("OAuth: 账号识别成功,开始提交密码", log_context))

    # Parse (best effort) the continue_url the server wants us to visit next.
    authorize_continue_url = ""
    try:
        continue_payload = resp.json()
        authorize_continue_url = str(continue_payload.get("continue_url") or "")
    except Exception:
        authorize_continue_url = ""
        continue_payload = {}
    if active_trace is not None:
        active_trace.record(
            "oauth_authorize_continue_parsed",
            continue_url=authorize_continue_url,
            payload=continue_payload,
        )

    # Follow the continue_url like a browser would, to keep server state in sync.
    if authorize_continue_url:
        follow_resp, follow_error = request_with_local_retry(
            session,
            "get",
            authorize_continue_url,
            retry_attempts=safe_local_retry_attempts,
            error_prefix="authorize_continue_follow_request",
            transient_markers=safe_transient_markers,
            logger=logger,
            flow_trace=active_trace,
            headers=NAVIGATE_HEADERS,
            allow_redirects=True,
            verify=False,
            timeout=30,
        )
        if follow_resp is None:
            return fail("authorize_continue_follow_request", follow_error)

    # Stage 3: submit the password (fresh sentinel token, fresh trace headers).
    headers["referer"] = f"{oauth_issuer}/log-in/password"
    headers.update(generate_datadog_trace())

    sentinel_pwd = build_sentinel_token(session, device_id, flow="password_verify")
    if not sentinel_pwd:
        return fail("password_verify_sentinel")
    headers["openai-sentinel-token"] = sentinel_pwd

    # Timestamp taken BEFORE the verify call: only OTP mails arriving after
    # this moment are considered in the poll loop below.
    otp_requested_at = time.time()
    resp, verify_error = request_with_local_retry(
        session,
        "post",
        f"{oauth_issuer}/api/accounts/password/verify",
        retry_attempts=safe_local_retry_attempts,
        error_prefix="password_verify_request",
        transient_markers=safe_transient_markers,
        logger=logger,
        flow_trace=active_trace,
        json={"password": password},
        headers=headers,
        verify=False,
        timeout=30,
        allow_redirects=False,
    )
    if resp is None:
        return fail("password_verify_request", verify_error)

    if resp.status_code != 200:
        return fail("password_verify_status", f"http={resp.status_code}")
    if logger:
        logger.info(apply_log_context("OAuth: 密码校验通过", log_context))

    # Extract where the flow goes next (page type decides OTP vs consent paths).
    continue_url = None
    page_type = ""
    password_payload: Dict[str, Any] = {}
    try:
        data = resp.json()
        if isinstance(data, dict):
            password_payload = data
            continue_url = str(data.get("continue_url") or "")
            page_type = str(((data.get("page") or {}).get("type")) or "")
    except Exception:
        pass

    # Phone-verification gate after password: either abort or warn-and-continue
    # depending on the configured policy.
    if requires_phone_verification(password_payload, resp.text, markers=safe_phone_markers):
        if safe_password_phone_action == "fail_fast":
            return fail("oauth_phone_verification_required", "password_verify")
        if logger:
            logger.warning(apply_log_context("OAuth 命中手机验证信号,按策略继续后续路径", log_context))
    if active_trace is not None:
        active_trace.record(
            "oauth_password_verify_parsed",
            continue_url=continue_url,
            page_type=page_type,
            phone_verification=requires_phone_verification(password_payload, resp.text, markers=safe_phone_markers),
            payload=password_payload,
        )

    if not continue_url:
        return fail("missing_continue_url")

    # Stage 4 (optional): email OTP challenge.
    if page_type == "email_otp_verification" or "email-verification" in continue_url:
        if not mail_provider or not mailbox:
            return fail("oauth_mailbox_required")
        if logger:
            logger.info(apply_log_context("OAuth: 进入邮箱验证码阶段 -> %s", log_context), mailbox.email)

        otp_entry_url = continue_url if continue_url.startswith("http") else f"{oauth_issuer}/email-verification"
        otp_entry_resp, otp_entry_error = request_with_local_retry(
            session,
            "get",
            otp_entry_url,
            retry_attempts=safe_local_retry_attempts,
            error_prefix="email_verification_bootstrap_request",
            transient_markers=safe_transient_markers,
            logger=logger,
            flow_trace=active_trace,
            headers=NAVIGATE_HEADERS,
            allow_redirects=True,
            verify=False,
            timeout=30,
        )
        if otp_entry_resp is None:
            return fail("email_verification_bootstrap_request", otp_entry_error)

        # Dedupe state for the poll loop: codes already tried, mails already seen.
        tried_codes = set()
        seen_ids: set[str] = set()
        start_time = time.time()

        h_val = dict(COMMON_HEADERS)
        h_val["referer"] = f"{oauth_issuer}/email-verification"
        h_val["oai-device-id"] = device_id
        h_val.update(generate_datadog_trace())

        code = None
        # Clamp timeout/interval to sane floors so misconfiguration can't spin.
        otp_timeout = max(30, int(otp_timeout_seconds or 120))
        poll_interval = max(1.0, float(otp_poll_interval_seconds or 2.0))
        if active_trace is not None:
            active_trace.record(
                "oauth_otp_poll_start",
                otp_entry_url=otp_entry_url,
                otp_timeout=otp_timeout,
                poll_interval=poll_interval,
            )
        if logger:
            logger.info(apply_log_context("OAuth: 正在等待邮箱 %s 的验证码...", log_context), email)
        while time.time() - start_time < otp_timeout:
            # Poll the mailbox; skip codes we already submitted.
            candidate_codes = [
                candidate
                for candidate in mail_provider.poll_verification_codes(
                    mailbox,
                    email=email,
                    seen_ids=seen_ids,
                    not_before_ts=otp_requested_at,
                )
                if candidate and candidate not in tried_codes
            ]
            if not candidate_codes:
                time.sleep(poll_interval)
                continue
            if active_trace is not None:
                active_trace.record("oauth_otp_candidates", candidate_count=len(candidate_codes), candidates=candidate_codes)
            if logger:
                logger.info(apply_log_context("OAuth: 收到验证码候选 %s 个", log_context), len(candidate_codes))

            for try_code in candidate_codes:
                tried_codes.add(try_code)
                resp_val, validate_error = validate_otp_with_fallback(
                    session=session,
                    oauth_issuer=oauth_issuer,
                    device_id=device_id,
                    code=try_code,
                    base_headers=h_val,
                    retry_attempts=safe_local_retry_attempts,
                    otp_validate_order=safe_otp_validate_order,
                    transient_markers=safe_transient_markers,
                    logger=logger,
                    flow_trace=active_trace,
                    log_context=log_context,
                )
                if resp_val is not None and resp_val.status_code == 200:
                    code = try_code
                    # Best-effort parse of the post-OTP payload; failures here
                    # keep the previous continue_url/page_type values.
                    try:
                        data = resp_val.json()
                        if isinstance(data, dict) and requires_phone_verification(data, resp_val.text, markers=safe_phone_markers):
                            if safe_otp_phone_action == "fail_fast":
                                return fail("oauth_phone_verification_required", "email_otp_validate")
                            if logger:
                                logger.warning(apply_log_context("OAuth 验证码后命中手机验证信号,按策略继续", log_context))
                        continue_url = str(data.get("continue_url") or "")
                        page_type = str(((data.get("page") or {}).get("type")) or "")
                    except Exception:
                        pass
                    if active_trace is not None:
                        active_trace.record(
                            "oauth_otp_success",
                            code=try_code,
                            continue_url=continue_url,
                            page_type=page_type,
                        )
                    if logger:
                        logger.info(apply_log_context("OAuth: 成功获取验证码: %s", log_context), try_code)
                        logger.info(apply_log_context("OAuth: 邮箱验证码校验成功", log_context))
                    break
                if resp_val is None and logger:
                    logger.warning(
                        apply_log_context("OAuth OTP 校验失败: code=%s reason=%s", log_context),
                        try_code,
                        validate_error,
                    )
                if active_trace is not None:
                    active_trace.record("oauth_otp_failure", code=try_code, reason=validate_error)

            if code:
                break
            time.sleep(poll_interval)

        if not code:
            return fail("oauth_mail_otp_timeout", f"provider={mail_provider.provider_name}")

    # Stage 5 (optional): the "about-you" profile page — either the server
    # already redirected us toward consent/organization, or we must create the
    # account profile with random name/birthdate.
    if "about-you" in continue_url:
        if logger:
            logger.info(apply_log_context("OAuth: 进入 about-you 阶段", log_context))
        h_about = dict(NAVIGATE_HEADERS)
        h_about["referer"] = f"{oauth_issuer}/email-verification"
        resp_about, about_error = request_with_local_retry(
            session,
            "get",
            f"{oauth_issuer}/about-you",
            retry_attempts=safe_local_retry_attempts,
            error_prefix="about_you_request",
            transient_markers=safe_transient_markers,
            logger=logger,
            flow_trace=active_trace,
            headers=h_about,
            verify=False,
            timeout=30,
            allow_redirects=True,
        )
        if resp_about is None:
            return fail("about_you_request", about_error)

        if "consent" in str(resp_about.url) or "organization" in str(resp_about.url):
            # Server skipped the profile form and landed us further along.
            continue_url = str(resp_about.url)
        else:
            first_name, last_name = generate_random_name()
            birthdate = generate_random_birthday()

            h_create = dict(COMMON_HEADERS)
            h_create["referer"] = f"{oauth_issuer}/about-you"
            h_create["oai-device-id"] = device_id
            h_create.update(generate_datadog_trace())

            resp_create, create_error = request_with_local_retry(
                session,
                "post",
                f"{oauth_issuer}/api/accounts/create_account",
                retry_attempts=safe_local_retry_attempts,
                error_prefix="oauth_create_account",
                transient_markers=safe_transient_markers,
                logger=logger,
                flow_trace=active_trace,
                json={"name": f"{first_name} {last_name}", "birthdate": birthdate},
                headers=h_create,
                verify=False,
                timeout=30,
            )
            if resp_create is None:
                return fail("oauth_create_account", create_error)

            if resp_create.status_code == 200:
                try:
                    data = resp_create.json()
                    continue_url = str(data.get("continue_url") or "")
                except Exception:
                    pass
            elif resp_create.status_code == 400 and "already_exists" in resp_create.text:
                # Account already provisioned: jump straight to the consent page.
                continue_url = f"{oauth_issuer}/sign-in-with-chatgpt/codex/consent"
            if logger and continue_url:
                logger.info(apply_log_context("OAuth: about-you 提交成功", log_context))
            if active_trace is not None:
                active_trace.record(
                    "oauth_create_account_parsed",
                    status_code=resp_create.status_code,
                    continue_url=continue_url,
                )

    # A consent-typed page overrides whatever URL we tracked so far.
    if "consent" in page_type:
        continue_url = f"{oauth_issuer}/sign-in-with-chatgpt/codex/consent"

    if not continue_url or "email-verification" in continue_url:
        return fail("continue_url_invalid", continue_url)

    if logger:
        logger.info(apply_log_context("OAuth: 进入 consent 阶段,准备换取 token", log_context))

    # Stage 6: walk the consent page and exchange the auth code for tokens.
    tokens = exchange_codex_tokens_from_session(
        session=session,
        device_id=device_id,
        code_verifier=code_verifier,
        consent_url=continue_url,
        oauth_issuer=oauth_issuer,
        oauth_client_id=oauth_client_id,
        oauth_redirect_uri=oauth_redirect_uri,
        proxy=proxy,
    )
    if active_trace is not None:
        active_trace.record(
            "oauth_flow_complete",
            success=bool(tokens),
            continue_url=continue_url,
            token_keys=sorted((tokens or {}).keys()),
        )
    if not tokens:
        return fail("oauth_token_exchange_failed", continue_url)
    if logger:
        logger.info(apply_log_context("OAuth: token 换取成功", log_context))
    return tokens
+
+
def decode_jwt_payload(token: str) -> Dict[str, Any]:
    """Best-effort decode of a JWT's payload segment.

    Splits the token on ".", base64url-decodes the middle segment (restoring
    stripped padding), and parses it as JSON.  Returns {} for anything that is
    not a well-formed three-part JWT carrying a JSON object payload.
    """
    try:
        segments = token.split(".")
        if len(segments) != 3:
            return {}
        body = segments[1]
        remainder = len(body) % 4
        if remainder:
            body += "=" * (4 - remainder)
        parsed = json.loads(base64.urlsafe_b64decode(body))
    except Exception:
        return {}
    return parsed if isinstance(parsed, dict) else {}
+
+
def find_jwt_in_data(data: Any, depth: int = 0, max_depth: int = 5) -> str:
    """Recursively search nested data for the first JWT-looking string.

    A string qualifies when its payload decodes to a dict carrying at least one
    of the usual claims ("exp", "iat", "sub", "email").  Recursion stops past
    max_depth; non-container, non-string values yield "".
    """
    if depth > max_depth:
        return ""
    if isinstance(data, str):
        token = data.strip()
        claims = decode_jwt_payload(token)
        if claims and any(key in claims for key in ("exp", "iat", "sub", "email")):
            return token
        return ""
    if isinstance(data, dict):
        children = data.values()
    elif isinstance(data, (list, tuple, set)):
        children = data
    else:
        return ""
    for child in children:
        found = find_jwt_in_data(child, depth=depth + 1, max_depth=max_depth)
        if found:
            return found
    return ""
+
+
def build_chatgpt_session_token_result(
    session: requests.Session,
    auth_code: Optional[str],
    callback_params: Optional[Dict[str, str]] = None,
    chatgpt_base: str = "https://chatgpt.com",
    logger: Optional[logging.Logger] = None,
    flow_trace: Optional[FlowTraceRecorder] = None,
) -> Optional[Dict[str, Any]]:
    """Turn an OAuth callback into a ChatGPT web-session access token.

    Optionally replays the OAuth callback against /api/auth/callback/openai
    (when a "code" is available), then reads /api/auth/session and extracts an
    access token — first from the well-known keys, else by scanning the JSON
    for any JWT-shaped string.

    Returns a token dict with a web access_token (refresh/id tokens are empty
    in this path), or None on any transport, status, or parse failure.
    """
    base = str(chatgpt_base or "https://chatgpt.com").rstrip("/")
    active_trace = flow_trace or getattr(logger, "flow_trace", None)
    session_referer = f"{base}/"

    # Sanitize callback params: stringify and drop blank keys/values.
    effective_callback_params = {
        str(key): str(value)
        for key, value in (callback_params or {}).items()
        if str(key).strip() and str(value).strip()
    }
    if auth_code and "code" not in effective_callback_params:
        effective_callback_params["code"] = str(auth_code)

    if effective_callback_params.get("code"):
        # Rebuild the query with code/scope/state first, then any extras, to
        # mimic the browser's callback URL ordering.
        ordered_items: list[tuple[str, str]] = []
        for key in ("code", "scope", "state"):
            value = effective_callback_params.pop(key, "")
            if value:
                ordered_items.append((key, value))
        for key, value in effective_callback_params.items():
            ordered_items.append((key, value))
        callback_url = f"{base}/api/auth/callback/openai?{urlencode(ordered_items)}"
        if active_trace is not None:
            active_trace.record("chatgpt_callback_start", callback_url=callback_url)
        try:
            callback_resp = session.get(
                callback_url,
                headers=NAVIGATE_HEADERS,
                verify=False,
                timeout=30,
                allow_redirects=True,
            )
        except Exception as error:
            if logger:
                logger.warning("ChatGPT callback 请求失败: %s", error)
            return None
        if callback_resp.status_code >= 400:
            if logger:
                logger.warning("ChatGPT callback 返回异常状态: %s", callback_resp.status_code)
            return None
        # Use the final landed URL as referer for the session fetch below.
        session_referer = str(getattr(callback_resp, "url", "") or "").strip() or session_referer
        if active_trace is not None:
            active_trace.record(
                "chatgpt_callback_response",
                response=build_response_trace_payload(
                    callback_resp,
                    reveal_sensitive=active_trace.reveal_sensitive,
                    body_limit=active_trace.body_limit,
                ),
            )

    session_headers = {
        "accept": "application/json",
        "referer": session_referer,
        "user-agent": USER_AGENT,
    }
    try:
        session_resp = session.get(
            f"{base}/api/auth/session",
            headers=session_headers,
            verify=False,
            timeout=30,
        )
    except Exception as error:
        if logger:
            logger.warning("ChatGPT session 请求失败: %s", error)
        return None

    if session_resp.status_code != 200:
        if logger:
            logger.warning("ChatGPT session 返回异常状态: %s", session_resp.status_code)
        return None
    if active_trace is not None:
        active_trace.record(
            "chatgpt_session_response",
            response=build_response_trace_payload(
                session_resp,
                reveal_sensitive=active_trace.reveal_sensitive,
                body_limit=active_trace.body_limit,
            ),
        )

    try:
        session_data = session_resp.json()
    except Exception as error:
        if logger:
            logger.warning("ChatGPT session JSON 解析失败: %s", error)
        return None

    if not isinstance(session_data, dict):
        return None

    # Prefer the documented keys; fall back to scanning the payload for any JWT.
    access_token = str(session_data.get("accessToken") or session_data.get("access_token") or "").strip()
    if not access_token:
        access_token = find_jwt_in_data(session_data)
    if not access_token:
        return None

    # Derive account id / email / expiry from the JWT claims (session "user"
    # object is the email fallback).
    payload = decode_jwt_payload(access_token)
    auth_info = payload.get("https://api.openai.com/auth", {})
    account_id = extract_chatgpt_account_id(auth_info) if isinstance(auth_info, dict) else None
    user_info = session_data.get("user") or {}
    email = str(payload.get("email") or (user_info.get("email") if isinstance(user_info, dict) else "") or "").strip()
    exp = payload.get("exp")

    return {
        "access_token": access_token,
        "refresh_token": "",
        "id_token": "",
        "email": email,
        "account_id": str(account_id or ""),
        "exp": exp if isinstance(exp, (int, float)) else 0,
    }
+
+
def has_complete_auth_tokens(tokens: Optional[Dict[str, Any]]) -> bool:
    """Return True only when tokens is a dict with non-blank access AND refresh tokens."""
    if not isinstance(tokens, dict):
        return False
    required = ("access_token", "refresh_token")
    return all(str(tokens.get(key) or "").strip() for key in required)
+
+
+class RegisterRuntime:
    def __init__(self, conf: Dict[str, Any], target_tokens: int, logger: logging.Logger):
        """Build the shared runtime state for a registration run.

        Reads every tunable from ``conf`` (via pick_conf), wires up the mail
        provider, and prepares local output paths and the CPA upload endpoint.

        Args:
            conf: parsed configuration mapping.
            target_tokens: number of successful tokens this run should produce.
            logger: destination for all runtime logging.
        """
        self.conf = conf
        self.target_tokens = target_tokens
        self.logger = logger

        # Separate locks per concern (files / success counter / provider
        # health / stats) so workers don't serialize on one global mutex.
        self.file_lock = threading.Lock()
        self.counter_lock = threading.Lock()
        self.health_lock = threading.Lock()
        self.stats_lock = threading.Lock()
        self.token_success_count = 0
        self.stop_event = threading.Event()
        self.provider_consecutive_failures = 0
        self.provider_cooldown_until = 0.0
        # Failure/success bookkeeping, keyed by stage and "stage:detail".
        self.failure_stage_counts: Counter[str] = Counter()
        self.failure_detail_counts: Counter[str] = Counter()
        self.success_counts: Counter[str] = Counter()
        self.last_oauth_failure_stage = ""
        self.last_oauth_failure_detail = ""

        run_workers = int(pick_conf(conf, "run", "workers", default=1) or 1)
        self.concurrent_workers = max(1, run_workers)
        self.proxy = str(pick_conf(conf, "run", "proxy", default="") or "")
        self.mail_provider = build_mail_provider(conf, proxy=self.proxy, logger=logger)
        self.mail_provider_name = self.mail_provider.provider_name
        self.mail_otp_timeout_seconds = int(pick_conf(conf, "mail", "otp_timeout_seconds", default=120) or 120)
        self.mail_poll_interval_seconds = float(pick_conf(conf, "mail", "poll_interval_seconds", default=3.0) or 3.0)

        # OAuth endpoint/client settings with hard-coded production defaults.
        self.oauth_issuer = str(pick_conf(conf, "oauth", "issuer", default="https://auth.openai.com") or "https://auth.openai.com")
        self.oauth_client_id = str(
            pick_conf(conf, "oauth", "client_id", default="app_EMoamEEZ73f0CkXaXp7hrann") or "app_EMoamEEZ73f0CkXaXp7hrann"
        )
        self.oauth_redirect_uri = str(
            pick_conf(conf, "oauth", "redirect_uri", default="http://localhost:1455/auth/callback")
            or "http://localhost:1455/auth/callback"
        )
        self.oauth_retry_attempts = int(pick_conf(conf, "oauth", "retry_attempts", default=3) or 3)
        self.oauth_retry_backoff_base = float(pick_conf(conf, "oauth", "retry_backoff_base", default=2.0) or 2.0)
        self.oauth_retry_backoff_max = float(pick_conf(conf, "oauth", "retry_backoff_max", default=15.0) or 15.0)
        self.oauth_outer_retry_attempts = flow_outer_retry_attempts(conf, fallback=self.oauth_retry_attempts)
        self.oauth_local_retry_attempts = oauth_local_retry_attempts(conf, fallback=3)
        self.flow_transient_markers = parse_marker_config(
            pick_conf(conf, "flow", "transient_markers", default=TRANSIENT_FLOW_MARKERS_DEFAULT),
            fallback=TRANSIENT_FLOW_MARKERS_DEFAULT,
        )
        self.oauth_otp_validate_order = parse_otp_validate_order(
            pick_conf(conf, "flow", "oauth_otp_validate_order", default="normal,sentinel")
        )
        self.oauth_phone_markers = parse_marker_config(
            pick_conf(conf, "registration", "phone_verification_markers", default=PHONE_VERIFICATION_MARKERS_DEFAULT),
            fallback=PHONE_VERIFICATION_MARKERS_DEFAULT,
        )
        self.oauth_password_phone_action = parse_choice(
            pick_conf(conf, "flow", "oauth_password_phone_action", default="warn_and_continue"),
            allowed=("warn_and_continue", "fail_fast"),
            fallback="warn_and_continue",
        )
        self.oauth_otp_phone_action = parse_choice(
            pick_conf(conf, "flow", "oauth_otp_phone_action", default="warn_and_continue"),
            allowed=("warn_and_continue", "fail_fast"),
            fallback="warn_and_continue",
        )
        # OAuth OTP timing inherits the mail defaults; poll interval is capped
        # at 3s so the OAuth loop never polls slower than the mail loop.
        self.oauth_otp_timeout_seconds = int(
            pick_conf(conf, "oauth", "otp_timeout_seconds", default=self.mail_otp_timeout_seconds) or self.mail_otp_timeout_seconds
        )
        self.oauth_otp_poll_interval_seconds = float(
            pick_conf(conf, "oauth", "otp_poll_interval_seconds", default=max(1.0, min(self.mail_poll_interval_seconds, 3.0)))
            or max(1.0, min(self.mail_poll_interval_seconds, 3.0))
        )
        self.failure_threshold_for_cooldown = int(
            pick_conf(conf, "run", "failure_threshold_for_cooldown", default=5) or 5
        )
        self.failure_cooldown_seconds = float(
            pick_conf(conf, "run", "failure_cooldown_seconds", default=45.0) or 45.0
        )
        self.loop_jitter_min_seconds = float(pick_conf(conf, "run", "loop_jitter_min_seconds", default=2.0) or 2.0)
        self.loop_jitter_max_seconds = float(pick_conf(conf, "run", "loop_jitter_max_seconds", default=6.0) or 6.0)

        # Upload endpoint: "upload" section wins, legacy "clean" section is the fallback.
        upload_base = str(pick_conf(conf, "upload", "cli_proxy_api_base", "base_url", default="") or "").strip()
        if not upload_base:
            upload_base = str(pick_conf(conf, "clean", "base_url", default="") or "").strip()
        self.cli_proxy_api_base = upload_base.rstrip("/")

        upload_token = str(pick_conf(conf, "upload", "token", "cpa_password", default="") or "").strip()
        if not upload_token:
            upload_token = str(pick_conf(conf, "clean", "token", "cpa_password", default="") or "").strip()
        self.upload_api_token = upload_token

        self.upload_url = f"{self.cli_proxy_api_base}/v0/management/auth-files" if self.cli_proxy_api_base else ""

        output_cfg = conf.get("output")
        if not isinstance(output_cfg, dict):
            output_cfg = {}

        # save_local accepts either a real bool or a truthy string ("1"/"true"/...).
        save_local_raw = output_cfg.get("save_local", True)
        if isinstance(save_local_raw, bool):
            self.save_local = save_local_raw
        else:
            self.save_local = str(save_local_raw).strip().lower() in ("1", "true", "yes", "on")

        self.run_dir = os.getcwd()
        if self.save_local:
            # Fixed files go under output_fixed/; per-run token JSONs under a
            # unique output_tokens/<target>个账号[-n]/ directory.
            self.fixed_out_dir = os.path.join(self.run_dir, "output_fixed")
            self.tokens_parent_dir = os.path.join(self.run_dir, "output_tokens")
            os.makedirs(self.fixed_out_dir, exist_ok=True)
            os.makedirs(self.tokens_parent_dir, exist_ok=True)
            self.tokens_out_dir = self._ensure_unique_dir(self.tokens_parent_dir, f"{target_tokens}个账号")

            self.accounts_file = self._resolve_output_path(str(output_cfg.get("accounts_file", "accounts.txt")))
            self.csv_file = self._resolve_output_path(str(output_cfg.get("csv_file", "registered_accounts.csv")))
            self.ak_file = self._resolve_output_path(str(output_cfg.get("ak_file", "ak.txt")))
            self.rk_file = self._resolve_output_path(str(output_cfg.get("rk_file", "rk.txt")))
        else:
            # No local persistence: blank all paths so writes are skipped.
            self.fixed_out_dir = ""
            self.tokens_parent_dir = ""
            self.tokens_out_dir = ""
            self.accounts_file = ""
            self.csv_file = ""
            self.ak_file = ""
            self.rk_file = ""
+
+ def _resolve_output_path(self, value: str) -> str:
+ if os.path.isabs(value):
+ return value
+ return os.path.join(self.fixed_out_dir, value)
+
+ def _ensure_unique_dir(self, parent_dir: str, base_name: str) -> str:
+ os.makedirs(parent_dir, exist_ok=True)
+
+ candidates = [os.path.join(parent_dir, base_name)] + [
+ os.path.join(parent_dir, f"{base_name}-{idx}") for idx in range(1, 1000000)
+ ]
+ for candidate in candidates:
+ try:
+ os.makedirs(candidate)
+ return candidate
+ except FileExistsError:
+ continue
+ raise RuntimeError(f"无法创建唯一目录: {parent_dir}/{base_name}")
+
+ def get_token_success_count(self) -> int:
+ with self.counter_lock:
+ return self.token_success_count
+
+ def wait_for_provider_availability(self, worker_id: int = 0) -> None:
+ if self.stop_event.is_set() and self.get_token_success_count() >= self.target_tokens:
+ return
+ self.mail_provider.wait_for_availability(worker_id=worker_id)
+
+ def note_attempt_success(self, success_key: str = "register_oauth_success") -> None:
+ with self.stats_lock:
+ self.success_counts[str(success_key or "register_oauth_success")] += 1
+
+ def note_attempt_failure(self, stage: str, email: str = "", detail: str = "") -> None:
+ normalized_stage = str(stage or "unknown").strip() or "unknown"
+ normalized_detail = str(detail or "").strip()
+ with self.stats_lock:
+ self.failure_stage_counts[normalized_stage] += 1
+ if normalized_detail:
+ self.failure_detail_counts[f"{normalized_stage}:{normalized_detail}"] += 1
+ self.logger.warning(
+ "失败归类: stage=%s detail=%s email=%s",
+ normalized_stage,
+ normalized_detail or "-",
+ email or "-",
+ )
+
+ def snapshot_failure_stats(self) -> tuple[List[tuple[str, int]], List[tuple[str, int]], List[tuple[str, int]]]:
+ with self.stats_lock:
+ return (
+ sorted(self.failure_stage_counts.items(), key=lambda item: (-item[1], item[0])),
+ sorted(self.failure_detail_counts.items(), key=lambda item: (-item[1], item[0])),
+ sorted(self.success_counts.items(), key=lambda item: (-item[1], item[0])),
+ )
+
+ def claim_token_slot(self) -> tuple[bool, int]:
+ with self.counter_lock:
+ if self.token_success_count >= self.target_tokens:
+ return False, self.token_success_count
+ self.token_success_count += 1
+ if self.token_success_count >= self.target_tokens:
+ self.stop_event.set()
+ return True, self.token_success_count
+
+ def release_token_slot(self) -> None:
+ with self.counter_lock:
+ if self.token_success_count > 0:
+ self.token_success_count -= 1
+ if self.token_success_count < self.target_tokens:
+ self.stop_event.clear()
+
    def save_token_json(self, email: str, access_token: str, refresh_token: str = "", id_token: str = "") -> bool:
        """Persist one account's token bundle and (optionally) upload it to CPA.

        Derives account_id and expiry from the access token's JWT claims,
        builds the codex-format token JSON, then either writes it under
        tokens_out_dir and uploads the file, or (when save_local is off)
        streams the JSON straight to the upload endpoint.

        Returns True on full success; False when writing or a required upload
        fails (the exception path also logs and returns False).
        """
        try:
            payload = decode_jwt_payload(access_token)
            auth_info = payload.get("https://api.openai.com/auth", {})
            account_id = auth_info.get("chatgpt_account_id", "") if isinstance(auth_info, dict) else ""

            # Timestamps are rendered in fixed UTC+8 (the +08:00 suffix below).
            exp_timestamp = payload.get("exp", 0)
            expired_str = ""
            if exp_timestamp:
                exp_dt = dt.datetime.fromtimestamp(exp_timestamp, tz=dt.timezone(dt.timedelta(hours=8)))
                expired_str = exp_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")

            now = dt.datetime.now(tz=dt.timezone(dt.timedelta(hours=8)))
            token_data = {
                "type": "codex",
                "email": email,
                "expired": expired_str,
                "id_token": id_token or "",
                "account_id": account_id,
                "access_token": access_token,
                "last_refresh": now.strftime("%Y-%m-%dT%H:%M:%S+08:00"),
                "refresh_token": refresh_token or "",
            }

            if self.save_local:
                # Local-first: write the JSON file, then upload it if configured.
                filename = os.path.join(self.tokens_out_dir, f"{email}.json")
                ensure_parent_dir(filename)
                with open(filename, "w", encoding="utf-8") as f:
                    json.dump(token_data, f, ensure_ascii=False)

                if self.upload_url and self.upload_api_token:
                    uploaded = self.upload_token_json(filename)
                    if not uploaded:
                        self.logger.warning("Token 已保存到本地,但上传 CPA 失败: %s", email)
                        return False
            else:
                # No local copy: the upload is the only persistence, so its
                # failure fails the whole save.
                if self.upload_url and self.upload_api_token:
                    uploaded = self.upload_token_data(f"{email}.json", token_data)
                    if not uploaded:
                        self.logger.warning("Token 直传 CPA 失败: %s", email)
                        return False

            return True
        except Exception as e:
            self.logger.warning("保存 Token JSON 失败: %s", e)
            return False
+
+ def upload_token_json(self, filename: str) -> bool:
+ if not self.upload_url or not self.upload_api_token:
+ return True
+ try:
+ s = create_session(proxy=self.proxy)
+ with open(filename, "rb") as f:
+ files = {"file": (os.path.basename(filename), f, "application/json")}
+ headers = {"Authorization": f"Bearer {self.upload_api_token}"}
+ resp = s.post(self.upload_url, files=files, headers=headers, verify=False, timeout=30)
+ if not (200 <= resp.status_code < 300):
+ self.logger.warning("上传 token 失败: %s %s", resp.status_code, resp.text[:200])
+ return False
+ return True
+ except Exception as e:
+ self.logger.warning("上传 token 异常: %s", e)
+ return False
+
+ def upload_token_data(self, filename: str, token_data: Dict[str, Any]) -> bool:
+ if not self.upload_url or not self.upload_api_token:
+ return True
+ try:
+ s = create_session(proxy=self.proxy)
+ content = json.dumps(token_data, ensure_ascii=False).encode("utf-8")
+ files = {"file": (filename, content, "application/json")}
+ headers = {"Authorization": f"Bearer {self.upload_api_token}"}
+ resp = s.post(self.upload_url, files=files, headers=headers, verify=False, timeout=30)
+ if not (200 <= resp.status_code < 300):
+ self.logger.warning("上传 token 失败: %s %s", resp.status_code, resp.text[:200])
+ return False
+ return True
+ except Exception as e:
+ self.logger.warning("上传 token 异常: %s", e)
+ return False
+
+ def save_tokens(self, email: str, tokens: Dict[str, Any]) -> bool:
+ access_token = str(tokens.get("access_token") or "")
+ refresh_token = str(tokens.get("refresh_token") or "")
+ id_token = str(tokens.get("id_token") or "")
+
+ if self.save_local:
+ try:
+ with self.file_lock:
+ if access_token:
+ ensure_parent_dir(self.ak_file)
+ with open(self.ak_file, "a", encoding="utf-8") as f:
+ f.write(f"{access_token}\n")
+ if refresh_token:
+ ensure_parent_dir(self.rk_file)
+ with open(self.rk_file, "a", encoding="utf-8") as f:
+ f.write(f"{refresh_token}\n")
+ except Exception as e:
+ self.logger.warning("AK/RK 保存失败: %s", e)
+ return False
+
+ if access_token:
+ return self.save_token_json(email, access_token, refresh_token, id_token)
+ return False
+
+ def save_account(self, email: str, password: str) -> None:
+ if not self.save_local:
+ return
+
+ with self.file_lock:
+ ensure_parent_dir(self.accounts_file)
+ ensure_parent_dir(self.csv_file)
+
+ with open(self.accounts_file, "a", encoding="utf-8") as f:
+ f.write(f"{email}:{password}\n")
+
+ file_exists = os.path.exists(self.csv_file)
+ with open(self.csv_file, "a", newline="", encoding="utf-8") as f:
+ writer = csv.writer(f)
+ if not file_exists:
+ writer.writerow(["email", "password", "timestamp"])
+ writer.writerow([email, password, time.strftime("%Y-%m-%d %H:%M:%S")])
+
+ def collect_token_emails(self) -> set[str]:
+ emails = set()
+ if not os.path.isdir(self.tokens_out_dir):
+ return emails
+ for name in os.listdir(self.tokens_out_dir):
+ if not name.endswith(".json"):
+ continue
+ path = os.path.join(self.tokens_out_dir, name)
+ try:
+ with open(path, "r", encoding="utf-8") as f:
+ data = json.load(f)
+ email = data.get("email") or name[:-5]
+ if email:
+ emails.add(str(email))
+ except Exception:
+ continue
+ return emails
+
+ def reconcile_account_outputs_from_tokens(self) -> int:
+ if not self.save_local:
+ return 0
+
+ token_emails = self.collect_token_emails()
+
+ pwd_map: Dict[str, str] = {}
+ if os.path.exists(self.accounts_file):
+ try:
+ with open(self.accounts_file, "r", encoding="utf-8") as f:
+ for line in f:
+ line = line.strip()
+ if not line or ":" not in line:
+ continue
+ email, pwd = line.split(":", 1)
+ pwd_map[email] = pwd
+ except Exception:
+ pass
+
+ ordered_emails = sorted(token_emails)
+ timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
+
+ with self.file_lock:
+ ensure_parent_dir(self.accounts_file)
+ ensure_parent_dir(self.csv_file)
+
+ with open(self.accounts_file, "w", encoding="utf-8") as f:
+ for email in ordered_emails:
+ f.write(f"{email}:{pwd_map.get(email, '')}\n")
+
+ with open(self.csv_file, "w", newline="", encoding="utf-8") as f:
+ writer = csv.writer(f)
+ writer.writerow(["email", "password", "timestamp"])
+ for email in ordered_emails:
+ writer.writerow([email, pwd_map.get(email, ""), timestamp])
+
+ return len(ordered_emails)
+
    def oauth_login_with_retry(self, mailbox: Mailbox, password: str, log_context: str = "") -> Optional[Dict[str, Any]]:
        """Run the Codex OAuth login flow with outer retries and backoff.

        Records the last failure stage/detail on ``self`` so the caller can
        classify the failure. Returns the token dict on success, or ``None``
        when every attempt fails or the stop event fired with the target met.
        """
        attempts = max(1, self.oauth_outer_retry_attempts)
        self.last_oauth_failure_stage = ""
        self.last_oauth_failure_detail = ""
        flow_trace: Optional[FlowTraceRecorder] = getattr(self.logger, "flow_trace", None)
        for attempt in range(1, attempts + 1):
            # Bail out early once another worker has already met the target.
            if self.stop_event.is_set() and self.get_token_success_count() >= self.target_tokens:
                return None

            self.logger.info(apply_log_context("OAuth 尝试 %s/%s: %s", log_context), attempt, attempts, mailbox.email)
            if flow_trace is not None:
                flow_trace.record("oauth_outer_attempt_start", email=mailbox.email, attempt=attempt, total_attempts=attempts)
            # Populated by the flow with "stage"/"detail" keys on failure.
            failure_state: Dict[str, str] = {}
            tokens = perform_codex_oauth_login_http(
                email=mailbox.email,
                password=password,
                oauth_issuer=self.oauth_issuer,
                oauth_client_id=self.oauth_client_id,
                oauth_redirect_uri=self.oauth_redirect_uri,
                proxy=self.proxy,
                mail_provider=self.mail_provider,
                mailbox=mailbox,
                otp_timeout_seconds=self.oauth_otp_timeout_seconds,
                otp_poll_interval_seconds=self.oauth_otp_poll_interval_seconds,
                local_retry_attempts=self.oauth_local_retry_attempts,
                transient_markers=self.flow_transient_markers,
                otp_validate_order=self.oauth_otp_validate_order,
                phone_markers=self.oauth_phone_markers,
                password_phone_action=self.oauth_password_phone_action,
                otp_phone_action=self.oauth_otp_phone_action,
                logger=self.logger,
                flow_trace=flow_trace,
                failure_state=failure_state,
                log_context=log_context,
            )
            if tokens:
                # Success: clear failure bookkeeping left over from earlier attempts.
                self.last_oauth_failure_stage = ""
                self.last_oauth_failure_detail = ""
                if flow_trace is not None:
                    flow_trace.record("oauth_outer_attempt_success", email=mailbox.email, attempt=attempt)
                self.logger.info(apply_log_context("OAuth 尝试 %s/%s 成功: %s", log_context), attempt, attempts, mailbox.email)
                return tokens
            self.last_oauth_failure_stage = str(failure_state.get("stage") or "").strip()
            failure_detail = str(failure_state.get("detail") or "").strip()
            self.last_oauth_failure_detail = failure_detail or self.last_oauth_failure_stage or f"oauth_attempt_{attempt}_failed"
            if flow_trace is not None:
                flow_trace.record(
                    "oauth_outer_attempt_failed",
                    email=mailbox.email,
                    attempt=attempt,
                    stage=self.last_oauth_failure_stage,
                    detail=self.last_oauth_failure_detail,
                )
            if attempt < attempts:
                # Exponential backoff capped at oauth_retry_backoff_max, plus jitter.
                backoff = min(self.oauth_retry_backoff_max, self.oauth_retry_backoff_base ** (attempt - 1))
                jitter = random.uniform(0.2, 0.8)
                self.logger.warning(
                    apply_log_context("OAuth 失败,准备重试: email=%s attempt=%s/%s sleep=%.1fs", log_context),
                    mailbox.email,
                    attempt,
                    attempts,
                    backoff + jitter,
                )
                if flow_trace is not None:
                    flow_trace.record(
                        "oauth_outer_attempt_retry_scheduled",
                        email=mailbox.email,
                        attempt=attempt,
                        sleep_seconds=round(backoff + jitter, 2),
                    )
                time.sleep(backoff + jitter)
        # NOTE(review): this condition only fires when the detail already starts
        # with "oauth_attempts_exhausted", which the loop above never produces —
        # it looks like dead code. Was `if not self.last_oauth_failure_stage:`
        # intended? Confirm before changing.
        if not self.last_oauth_failure_stage and self.last_oauth_failure_detail.startswith("oauth_attempts_exhausted"):
            self.last_oauth_failure_stage = "oauth_attempts_exhausted"
        self.last_oauth_failure_detail = self.last_oauth_failure_detail or f"oauth_attempts_exhausted:{attempts}"
        return None
+
+
def register_one(runtime: RegisterRuntime, worker_id: int = 0) -> tuple[Optional[str], Optional[bool], float, float]:
    """Run one mailbox -> register -> OAuth -> save-token cycle.

    Returns ``(email, outcome, register_seconds, total_seconds)`` where
    ``outcome`` is True on success, False on failure, and None when the
    attempt was skipped (stop requested, or the token slot was no longer
    needed by the time tokens were obtained).
    """
    if runtime.stop_event.is_set() and runtime.get_token_success_count() >= runtime.target_tokens:
        return None, None, 0.0, 0.0

    task_label = f"任务{worker_id}" if worker_id > 0 else ""
    runtime.wait_for_provider_availability(worker_id=worker_id)
    t_start = time.time()
    # Stage 1: obtain a fresh mailbox from the configured provider.
    mailbox = runtime.mail_provider.create_mailbox()
    if not mailbox:
        runtime.logger.warning(apply_log_context("创建邮箱失败: provider=%s", task_label), runtime.mail_provider_name)
        # Providers expose either note_target_failure (newer) or
        # note_domain_failure (older) for failure attribution.
        note_target_failure = getattr(runtime.mail_provider, "note_target_failure", None)
        if callable(note_target_failure):
            note_target_failure(
                getattr(runtime.mail_provider, "last_selected_target", ""),
                stage="create_mailbox",
                detail=f"provider={runtime.mail_provider_name}",
            )
        else:
            note_domain_failure = getattr(runtime.mail_provider, "note_domain_failure", None)
            if callable(note_domain_failure):
                note_domain_failure(
                    getattr(runtime.mail_provider, "last_selected_domain", ""),
                    stage="create_mailbox",
                    detail=f"provider={runtime.mail_provider_name}",
                )
        runtime.note_attempt_failure(stage="create_mailbox", detail=f"provider={runtime.mail_provider_name}")
        return None, False, 0.0, time.time() - t_start
    email = mailbox.email
    runtime.logger.info(
        apply_log_context("邮箱已就绪: provider=%s email=%s", task_label),
        runtime.mail_provider_name,
        email,
    )

    # Stage 2: account registration against the OAuth client.
    password = generate_random_password()
    registrar = ProtocolRegistrar(proxy=runtime.proxy, logger=runtime.logger, conf=runtime.conf)
    reg_ok = registrar.register(
        email=email,
        password=password,
        client_id=runtime.oauth_client_id,
        redirect_uri=runtime.oauth_redirect_uri,
        mailbox=mailbox,
        mail_provider=runtime.mail_provider,
        otp_timeout_seconds=runtime.mail_otp_timeout_seconds,
        otp_poll_interval_seconds=runtime.mail_poll_interval_seconds,
        log_context=task_label,
    )
    t_reg = time.time() - t_start
    if not reg_ok:
        runtime.logger.warning(apply_log_context("注册流程失败: %s", task_label), email)
        register_detail = registrar.last_failure_detail or registrar.last_failure_stage or "unknown"
        # Only an OTP-mail timeout is attributed to the mail target/domain.
        if registrar.last_failure_stage == "register_mail_otp_timeout":
            failure_target = mailbox.failure_target or mailbox.account_name or mailbox.domain
            note_target_failure = getattr(runtime.mail_provider, "note_target_failure", None)
            if callable(note_target_failure):
                note_target_failure(
                    failure_target,
                    stage="register",
                    detail=register_detail,
                )
            else:
                note_domain_failure = getattr(runtime.mail_provider, "note_domain_failure", None)
                if callable(note_domain_failure):
                    note_domain_failure(
                        mailbox.domain,
                        stage="register",
                        detail=register_detail,
                    )
        runtime.note_attempt_failure(stage="register", email=email, detail=register_detail)
        return email, False, t_reg, time.time() - t_start

    # Stage 3: prefer tokens captured during registration; otherwise run the
    # full OAuth login flow with retries.
    registration_tokens = getattr(registrar, "registration_tokens", None)
    use_registration_tokens = has_complete_auth_tokens(registration_tokens)
    flow_trace = getattr(runtime.logger, "flow_trace", None)
    if flow_trace is not None:
        flow_trace.record(
            "register_one_token_source",
            email=email,
            source="registration_complete_auth" if use_registration_tokens else "oauth_retry",
            registration_tokens_captured=bool(registration_tokens),
            registration_tokens_complete=use_registration_tokens,
            registration_token_keys=sorted(registration_tokens.keys()) if isinstance(registration_tokens, dict) else [],
        )
    tokens = registration_tokens if use_registration_tokens else None
    if not tokens:
        tokens = runtime.oauth_login_with_retry(mailbox=mailbox, password=password, log_context=task_label)
    t_total = time.time() - t_start
    if not tokens:
        oauth_detail = runtime.last_oauth_failure_detail or f"attempts={runtime.oauth_outer_retry_attempts}"
        if runtime.last_oauth_failure_stage == "oauth_mail_otp_timeout":
            failure_target = mailbox.failure_target or mailbox.account_name or mailbox.domain
            note_target_failure = getattr(runtime.mail_provider, "note_target_failure", None)
            if callable(note_target_failure):
                note_target_failure(
                    failure_target,
                    stage="oauth",
                    detail=oauth_detail,
                )
            else:
                note_domain_failure = getattr(runtime.mail_provider, "note_domain_failure", None)
                if callable(note_domain_failure):
                    note_domain_failure(
                        mailbox.domain,
                        stage="oauth",
                        detail=oauth_detail,
                    )
        runtime.note_attempt_failure(stage="oauth", email=email, detail=oauth_detail)
        return email, False, t_reg, t_total

    # Stage 4: claim a token slot; None outcome means the target was already
    # met by other workers, so this token is intentionally discarded.
    claimed, current = runtime.claim_token_slot()
    if not claimed:
        return email, None, t_reg, t_total

    saved = runtime.save_tokens(email, tokens)
    if not saved:
        # Saving failed: return the slot so another worker can fill it.
        runtime.release_token_slot()
        runtime.note_attempt_failure(stage="save_tokens", email=email, detail="save_token_json_or_upload_failed")
        return email, False, t_reg, t_total

    # Stage 5: record credentials and report success back to the provider.
    runtime.save_account(email, password)
    runtime.logger.info(apply_log_context("Token 保存成功: %s", task_label), email)
    success_target = mailbox.failure_target or mailbox.account_name or mailbox.domain
    note_target_success = getattr(runtime.mail_provider, "note_target_success", None)
    if callable(note_target_success):
        note_target_success(success_target)
    else:
        note_domain_success = getattr(runtime.mail_provider, "note_domain_success", None)
        if callable(note_domain_success):
            note_domain_success(mailbox.domain)
    runtime.note_attempt_success()
    runtime.logger.info(
        apply_log_context("注册+OAuth 成功: %s | 注册 %.1fs + OAuth %.1fs = %.1fs | token %s/%s", task_label),
        email,
        t_reg,
        t_total - t_reg,
        t_total,
        current,
        runtime.target_tokens,
    )
    return email, True, t_reg, t_total
+
+
def run_batch_register(conf: Dict[str, Any], target_tokens: int, logger: logging.Logger) -> tuple[int, int, int]:
    """Register accounts until ``target_tokens`` tokens have been saved.

    Runs single-threaded when the configured concurrency is 1, otherwise
    fans out over a thread pool, keeping at most ``remaining`` tasks in
    flight. Returns ``(token_success_count, fail_count, reconciled_count)``.
    """
    if target_tokens <= 0:
        return 0, 0, 0

    try:
        runtime = RegisterRuntime(conf=conf, target_tokens=target_tokens, logger=logger)
    except Exception as e:
        logger.error("邮件提供方初始化失败: %s", e)
        return 0, 0, 0

    workers = runtime.concurrent_workers

    logger.info(
        "开始补号: 目标 token=%s, 并发=%s, 邮箱提供方=%s",
        target_tokens,
        workers,
        runtime.mail_provider_name,
    )
    logger.info("Mail Provider Config: %s", runtime.mail_provider.describe())

    ok = 0
    fail = 0
    skip = 0
    attempts = 0
    reg_times: List[float] = []
    total_times: List[float] = []
    lock = threading.Lock()  # guards the ok/fail/skip counters in the pooled path
    batch_start = time.time()

    if workers == 1:
        # Sequential path: loop until the shared counter reaches the target.
        while runtime.get_token_success_count() < target_tokens:
            attempts += 1
            email, success, t_reg, t_total = register_one(runtime, worker_id=1)
            # success is a tri-state: True=ok, False=fail, None=skipped.
            if success is True:
                ok += 1
                reg_times.append(t_reg)
                total_times.append(t_total)
            elif success is False:
                fail += 1
            else:
                skip += 1
            logger.info(
                "补号进度: token %s/%s | ✅%s ❌%s ⏭️%s | 用时 %.1fs",
                runtime.get_token_success_count(),
                target_tokens,
                ok,
                fail,
                skip,
                time.time() - batch_start,
            )
            if runtime.get_token_success_count() >= target_tokens:
                break
            # Random pause between iterations to avoid a regular request cadence.
            jitter_min = min(runtime.loop_jitter_min_seconds, runtime.loop_jitter_max_seconds)
            jitter_max = max(runtime.loop_jitter_min_seconds, runtime.loop_jitter_max_seconds)
            time.sleep(random.uniform(jitter_min, jitter_max))
    else:
        def worker_task(task_index: int, worker_id: int):
            # Stagger task start times slightly (all tasks except the first).
            if task_index > 1:
                jitter = random.uniform(0.2, 1.0)
                time.sleep(jitter)
            if runtime.stop_event.is_set() and runtime.get_token_success_count() >= target_tokens:
                return task_index, None, None, 0.0, 0.0
            email, success, t_reg, t_total = register_one(runtime, worker_id=worker_id)
            return task_index, email, success, t_reg, t_total

        executor = ThreadPoolExecutor(max_workers=workers)
        futures = {}
        next_task_index = 1

        def submit_one() -> bool:
            # Keep the number of in-flight tasks at or below the number of
            # tokens still needed; returns False when nothing was submitted.
            nonlocal next_task_index
            remaining = target_tokens - runtime.get_token_success_count()
            if remaining <= 0:
                return False
            if len(futures) >= remaining:
                return False

            wid = ((next_task_index - 1) % workers) + 1
            fut = executor.submit(worker_task, next_task_index, wid)
            futures[fut] = next_task_index
            next_task_index += 1
            return True

        try:
            # Prime the pool with up to `workers` initial tasks.
            for _ in range(min(workers, target_tokens)):
                if not submit_one():
                    break

            while futures:
                if runtime.get_token_success_count() >= target_tokens:
                    runtime.stop_event.set()

                done_set, _ = wait(list(futures.keys()), return_when=FIRST_COMPLETED, timeout=1.0)
                if not done_set:
                    continue

                for fut in done_set:
                    _ = futures.pop(fut, None)
                    attempts += 1
                    try:
                        _, _, success, t_reg, t_total = fut.result()
                    except Exception as exc:
                        # A crashed worker counts as a failed attempt.
                        success, t_reg, t_total = False, 0.0, 0.0
                        runtime.note_attempt_failure(stage="worker_exception", detail=type(exc).__name__)

                    with lock:
                        if success is True:
                            ok += 1
                            reg_times.append(t_reg)
                            total_times.append(t_total)
                        elif success is False:
                            fail += 1
                        else:
                            skip += 1

                    logger.info(
                        "补号进度: token %s/%s | ✅%s ❌%s ⏭️%s | 用时 %.1fs",
                        runtime.get_token_success_count(),
                        target_tokens,
                        ok,
                        fail,
                        skip,
                        time.time() - batch_start,
                    )

                    # Refill the pool while the target is still unmet.
                    if runtime.get_token_success_count() < target_tokens and not runtime.stop_event.is_set():
                        submit_one()
        finally:
            runtime.stop_event.set()
            # cancel_futures is Python 3.9+; fall back for older interpreters.
            try:
                executor.shutdown(wait=True, cancel_futures=False)
            except TypeError:
                executor.shutdown(wait=True)

    # Rebuild the account output files from the token directory and log stats.
    synced = runtime.reconcile_account_outputs_from_tokens()
    elapsed = time.time() - batch_start
    avg_reg = (sum(reg_times) / len(reg_times)) if reg_times else 0
    avg_total = (sum(total_times) / len(total_times)) if total_times else 0
    logger.info(
        "补号完成: token=%s/%s, fail=%s, skip=%s, attempts=%s, elapsed=%.1fs, avg(注册)=%.1fs, avg(总)=%.1fs, 收敛账号=%s",
        runtime.get_token_success_count(),
        target_tokens,
        fail,
        skip,
        attempts,
        elapsed,
        avg_reg,
        avg_total,
        synced,
    )
    failure_stage_stats, failure_detail_stats, success_stats = runtime.snapshot_failure_stats()
    if success_stats:
        logger.info("成功分类汇总: %s", ", ".join(f"{name}={count}" for name, count in success_stats))
    if failure_stage_stats:
        logger.info("失败阶段汇总: %s", ", ".join(f"{name}={count}" for name, count in failure_stage_stats))
    if failure_detail_stats:
        top_failure_details = failure_detail_stats[:8]
        logger.info(
            "失败细节汇总(Top %s): %s",
            len(top_failure_details),
            ", ".join(f"{name}={count}" for name, count in top_failure_details),
        )
    return runtime.get_token_success_count(), fail, synced
+
+
def fetch_auth_files(base_url: str, token: str, timeout: int) -> List[Dict[str, Any]]:
    """Fetch the auth-file list from the management API.

    Raises for HTTP errors; returns an empty list when the JSON response is
    not the expected {"files": [...]} shape.
    """
    resp = requests.get(f"{base_url}/v0/management/auth-files", headers=mgmt_headers(token), timeout=timeout)
    resp.raise_for_status()
    payload = resp.json()
    if not isinstance(payload, dict):
        return []
    files = payload.get("files", [])
    if isinstance(files, list):
        return files
    return []
+
+
def build_probe_payload(auth_index: str, user_agent: str, chatgpt_account_id: Optional[str] = None) -> Dict[str, Any]:
    """Build the management api-call payload that probes ChatGPT usage.

    "$TOKEN$" is a placeholder for the account's real access token; the
    Chatgpt-Account-Id header is attached only when an account id is known.
    """
    headers: Dict[str, Any] = {
        "Authorization": "Bearer $TOKEN$",
        "Content-Type": "application/json",
        "User-Agent": user_agent or DEFAULT_MGMT_UA,
    }
    if chatgpt_account_id:
        headers["Chatgpt-Account-Id"] = chatgpt_account_id
    return {
        "authIndex": auth_index,
        "method": "GET",
        "url": "https://chatgpt.com/backend-api/wham/usage",
        "header": headers,
    }
+
+
async def probe_account_async(
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    base_url: str,
    token: str,
    item: Dict[str, Any],
    user_agent: str,
    timeout: int,
    retries: int,
    used_percent_threshold: int = 80,
) -> Dict[str, Any]:
    """Probe one auth file's ChatGPT usage via the management api-call proxy.

    Classifies the response (401, quota state, used-percent threshold) and
    records the recommended clean action in the returned result dict.
    Exceptions (including HTTP >= 400 from the proxy itself) are retried up
    to ``retries`` extra times.
    """
    auth_index = item.get("auth_index")
    name = item.get("name") or item.get("id")
    account = item.get("account") or item.get("email") or ""
    disabled = is_item_disabled(item)
    # Result skeleton; fields are filled in as the probe progresses.
    result = {
        "name": name,
        "account": account,
        "auth_index": auth_index,
        "type": get_item_type(item),
        "provider": item.get("provider"),
        "disabled": disabled,
        "status_code": None,
        "invalid_401": False,
        "invalid_used_percent": False,
        "used_percent": None,
        "is_quota": False,
        "is_healthy": False,
        "action": "keep",
        "error": None,
    }
    if not auth_index:
        result["error"] = "missing auth_index"
        return result

    chatgpt_account_id = extract_chatgpt_account_id(item)
    payload = build_probe_payload(str(auth_index), user_agent, chatgpt_account_id)

    for attempt in range(retries + 1):
        try:
            async with semaphore:
                async with session.post(
                    f"{base_url}/v0/management/api-call",
                    headers={**mgmt_headers(token), "Content-Type": "application/json"},
                    json=payload,
                    timeout=timeout,
                ) as resp:
                    text = await resp.text()
                    if resp.status >= 400:
                        # Turn proxy-level HTTP errors into retryable exceptions.
                        raise RuntimeError(f"management api-call http {resp.status}: {text[:200]}")
                    data = safe_json_text(text)
                    sc = normalize_status_code(data.get("status_code"))
                    result["status_code"] = sc
                    result["invalid_401"] = sc == 401
                    body_obj, body_text = parse_usage_body(data.get("body"))
                    usage = analyze_usage_status(
                        status_code=sc,
                        body_obj=body_obj,
                        body_text=body_text,
                        used_percent_threshold=used_percent_threshold,
                    )
                    result["used_percent"] = usage["used_percent"]
                    result["invalid_used_percent"] = usage["over_threshold"]
                    result["is_quota"] = usage["is_quota"]
                    result["is_healthy"] = usage["is_healthy"]
                    result["action"] = decide_clean_action(
                        status_code=sc,
                        disabled=disabled,
                        is_quota=bool(usage["is_quota"]),
                        over_threshold=bool(usage["over_threshold"]),
                    )

                    if sc is None:
                        result["error"] = "missing status_code in api-call response"
                    # NOTE(review): result["error"] set by a failed earlier attempt
                    # is not cleared here, so a success after a retry still carries
                    # the stale error string — confirm whether that is intended.
                    return result
        except Exception as e:
            result["error"] = str(e)
            if attempt >= retries:
                return result
    return result
+
+
async def delete_account_async(
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    base_url: str,
    token: str,
    name: str,
    timeout: int,
) -> Dict[str, Any]:
    """Delete a single auth file by name via the management API.

    Success requires both HTTP 200 and a JSON body whose status is "ok".
    """
    if not name:
        return {"name": None, "deleted": False, "error": "missing name"}
    url = f"{base_url}/v0/management/auth-files?name={quote(name, safe='')}"
    try:
        async with semaphore:
            async with session.delete(url, headers=mgmt_headers(token), timeout=timeout) as resp:
                body = await resp.text()
                succeeded = resp.status == 200 and safe_json_text(body).get("status") == "ok"
                return {
                    "name": name,
                    "deleted": succeeded,
                    "status_code": resp.status,
                    "error": None if succeeded else f"delete failed, response={body[:200]}",
                }
    except Exception as e:
        return {"name": name, "deleted": False, "error": str(e)}
+
+
async def update_account_disabled_async(
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    base_url: str,
    token: str,
    name: str,
    disabled: bool,
    timeout: int,
) -> Dict[str, Any]:
    """Toggle an auth file's disabled flag through the management API.

    Tries the primary PATCH endpoint first; when it answers 404/405/501 (or
    raises), falls through to the legacy "/status" endpoint. A JSON body
    whose "status" field is present but not "ok" also counts as failure.
    """
    if not name:
        return {"name": None, "updated": False, "error": "missing name"}

    payload = {"name": name, "disabled": bool(disabled)}
    headers = {**mgmt_headers(token), "Content-Type": "application/json"}
    primary = f"{base_url}/v0/management/auth-files"
    last_error = "unknown"

    for position, endpoint in enumerate((primary, f"{primary}/status")):
        try:
            async with semaphore:
                async with session.patch(endpoint, headers=headers, json=payload, timeout=timeout) as resp:
                    text = await resp.text()
                    data = safe_json_text(text)
                    # Only the primary endpoint falls through to the legacy one.
                    if position == 0 and resp.status in (404, 405, 501):
                        last_error = f"primary_patch_http_{resp.status}"
                        continue
                    if resp.status >= 400:
                        return {
                            "name": name,
                            "updated": False,
                            "status_code": resp.status,
                            "error": f"patch failed, response={text[:200]}",
                        }
                    if isinstance(data, dict):
                        status = str(data.get("status") or "").strip().lower()
                        if status and status != "ok":
                            return {
                                "name": name,
                                "updated": False,
                                "status_code": resp.status,
                                "error": f"patch status={status}",
                            }
                    return {
                        "name": name,
                        "updated": True,
                        "status_code": resp.status,
                        "error": None,
                    }
        except Exception as e:
            last_error = str(e)

    return {"name": name, "updated": False, "error": last_error}
+
+
def select_probe_candidates(
    candidates: List[Dict[str, Any]],
    sample_size: int,
    rng: Any = None,
) -> List[Dict[str, Any]]:
    """Pick the subset of candidates to probe this round.

    A non-positive sample size, or one at least as large as the candidate
    pool, means "probe everything". Otherwise draws a uniform sample without
    replacement using `rng` (defaults to the `random` module).
    """
    pool = list(candidates)
    limit = max(0, int(sample_size or 0))
    if limit <= 0 or limit >= len(pool):
        return pool
    chooser = random if rng is None else rng
    return list(chooser.sample(pool, limit))
+
+
async def run_probe_async(
    base_url: str,
    token: str,
    target_type: str,
    workers: int,
    timeout: int,
    retries: int,
    user_agent: str,
    used_percent_threshold: int = 80,
    sample_size: int = 0,
    logger: Optional[logging.Logger] = None,
) -> tuple[List[Dict[str, Any]], int, int, int, List[Dict[str, Any]]]:
    """Probe all auth files of ``target_type`` (optionally a random sample).

    Returns ``(probe_results, total_file_count, candidate_count,
    probed_count, raw_files_list)``.
    """
    files = fetch_auth_files(base_url, token, timeout)
    # Keep only files whose type matches (case-insensitive).
    candidates: List[Dict[str, Any]] = []
    for f in files:
        if str(get_item_type(f)).lower() != target_type.lower():
            continue
        candidates.append(f)

    if not candidates:
        if logger:
            logger.info("未找到 type=%s 的候选账号,跳过探测与策略判定", target_type)
        return [], len(files), 0, 0, files

    selected_candidates = select_probe_candidates(candidates, sample_size)
    normalized_sample_size = max(0, int(sample_size or 0))
    if logger and normalized_sample_size > 0:
        if len(selected_candidates) < len(candidates):
            logger.info(
                "本轮随机抽样探测: 已抽样=%s/%s, target_type=%s",
                len(selected_candidates),
                len(candidates),
                target_type,
            )
        else:
            logger.info(
                "本轮探测按全量处理: 抽样数量=%s, 候选总数=%s, target_type=%s",
                normalized_sample_size,
                len(candidates),
                target_type,
            )

    # Connection pool and semaphore are both sized by the worker count.
    connector = aiohttp.TCPConnector(limit=max(1, workers), limit_per_host=max(1, workers))
    client_timeout = aiohttp.ClientTimeout(total=max(1, timeout))
    semaphore = asyncio.Semaphore(max(1, workers))

    probe_results = []
    total_candidates = len(selected_candidates)
    checked = 0
    delete_count = 0
    disable_count = 0
    enable_count = 0

    async with aiohttp.ClientSession(connector=connector, timeout=client_timeout, trust_env=True) as session:
        tasks = [
            asyncio.create_task(
                probe_account_async(
                    session=session,
                    semaphore=semaphore,
                    base_url=base_url,
                    token=token,
                    item=item,
                    user_agent=user_agent,
                    timeout=timeout,
                    retries=retries,
                    used_percent_threshold=used_percent_threshold,
                )
            )
            for item in selected_candidates
        ]
        # Tally recommended actions as probes complete, in completion order.
        for task in asyncio.as_completed(tasks):
            result = await task
            probe_results.append(result)
            checked += 1
            action = str(result.get("action") or "keep")
            if action == "delete":
                delete_count += 1
            elif action == "disable":
                disable_count += 1
            elif action == "enable":
                enable_count += 1

            # Progress log every 50 completions and at the end.
            if logger and (checked % 50 == 0 or checked == total_candidates):
                logger.info(
                    "账号探测进度: 已检查=%s/%s, 待删=%s, 待禁用=%s, 待启用=%s",
                    checked,
                    total_candidates,
                    delete_count,
                    disable_count,
                    enable_count,
                )

    return probe_results, len(files), len(candidates), len(selected_candidates), files
+
+
async def run_delete_async(
    base_url: str,
    token: str,
    names_to_delete: List[str],
    delete_workers: int,
    timeout: int,
) -> tuple[int, int]:
    """Delete the named auth files concurrently.

    Returns (succeeded, failed) counts; (0, 0) when nothing to delete.
    """
    if not names_to_delete:
        return 0, 0

    parallelism = max(1, delete_workers)
    connector = aiohttp.TCPConnector(limit=parallelism, limit_per_host=parallelism)
    client_timeout = aiohttp.ClientTimeout(total=max(1, timeout))
    gate = asyncio.Semaphore(parallelism)

    outcomes: List[Dict[str, Any]] = []
    async with aiohttp.ClientSession(connector=connector, timeout=client_timeout, trust_env=True) as session:
        pending = [
            asyncio.create_task(
                delete_account_async(
                    session=session,
                    semaphore=gate,
                    base_url=base_url,
                    token=token,
                    name=name,
                    timeout=timeout,
                )
            )
            for name in names_to_delete
        ]
        for finished in asyncio.as_completed(pending):
            outcomes.append(await finished)

    deleted = sum(1 for r in outcomes if r.get("deleted"))
    return deleted, len(outcomes) - deleted
+
+
async def run_update_disabled_async(
    base_url: str,
    token: str,
    names: List[str],
    *,
    disabled: bool,
    workers: int,
    timeout: int,
) -> tuple[int, int]:
    """Set the disabled flag on the named auth files concurrently.

    Returns (updated, failed) counts; (0, 0) when nothing to update.
    """
    if not names:
        return 0, 0

    parallelism = max(1, workers)
    connector = aiohttp.TCPConnector(limit=parallelism, limit_per_host=parallelism)
    client_timeout = aiohttp.ClientTimeout(total=max(1, timeout))
    gate = asyncio.Semaphore(parallelism)

    outcomes: List[Dict[str, Any]] = []
    async with aiohttp.ClientSession(connector=connector, timeout=client_timeout, trust_env=True) as session:
        pending = [
            asyncio.create_task(
                update_account_disabled_async(
                    session=session,
                    semaphore=gate,
                    base_url=base_url,
                    token=token,
                    name=name,
                    disabled=disabled,
                    timeout=timeout,
                )
            )
            for name in names
        ]
        for finished in asyncio.as_completed(pending):
            outcomes.append(await finished)

    updated = sum(1 for r in outcomes if r.get("updated"))
    return updated, len(outcomes) - updated
+
+
async def run_clean_401_async(
    *,
    base_url: str,
    token: str,
    target_type: str,
    workers: int,
    delete_workers: int,
    timeout: int,
    retries: int,
    user_agent: str,
    used_percent_threshold: int,
    sample_size: int,
    logger: logging.Logger,
) -> Dict[str, Any]:
    """Probe accounts of ``target_type`` and apply the resulting clean plan.

    Runs the probe pass, derives delete/disable/enable name sets from the
    per-account actions, executes them, then re-fetches the auth-file list.
    Returns a summary dict with plan/success/failure counts and the
    (refreshed) file list.
    """
    probe_results, total_files, target_files, probed_files, files = await run_probe_async(
        base_url=base_url,
        token=token,
        target_type=target_type,
        workers=workers,
        timeout=timeout,
        retries=retries,
        user_agent=user_agent,
        used_percent_threshold=used_percent_threshold,
        sample_size=sample_size,
        logger=logger,
    )

    # De-duplicated, sorted action sets keyed by the per-result "action".
    delete_names = sorted({str(r.get("name")) for r in probe_results if r.get("name") and r.get("action") == "delete"})
    disable_names = sorted({str(r.get("name")) for r in probe_results if r.get("name") and r.get("action") == "disable"})
    enable_names = sorted({str(r.get("name")) for r in probe_results if r.get("name") and r.get("action") == "enable"})

    invalid_401_count = len([r for r in probe_results if r.get("invalid_401")])
    invalid_used_percent_count = len([r for r in probe_results if r.get("invalid_used_percent")])
    quota_count = len([r for r in probe_results if r.get("is_quota")])
    healthy_disabled_count = len([r for r in probe_results if r.get("is_healthy") and r.get("disabled")])

    logger.info(
        "探测完成: 总账号=%s, %s账号=%s, 本轮探测=%s, 401失效=%s, used_percent超标=%s, quota=%s, healthy+disabled=%s",
        total_files,
        target_type,
        target_files,
        probed_files,
        invalid_401_count,
        invalid_used_percent_count,
        quota_count,
        healthy_disabled_count,
    )
    logger.info(
        "清理策略决策: 待删=%s, 待禁用=%s, 待启用=%s",
        len(delete_names),
        len(disable_names),
        len(enable_names),
    )

    # Execute the plan: deletions first, then disable/enable flag updates.
    deleted_ok, deleted_fail = await run_delete_async(
        base_url=base_url,
        token=token,
        names_to_delete=delete_names,
        delete_workers=delete_workers,
        timeout=timeout,
    )

    disabled_ok, disabled_fail = await run_update_disabled_async(
        base_url=base_url,
        token=token,
        names=disable_names,
        disabled=True,
        workers=delete_workers,
        timeout=timeout,
    )
    enabled_ok, enabled_fail = await run_update_disabled_async(
        base_url=base_url,
        token=token,
        names=enable_names,
        disabled=False,
        workers=delete_workers,
        timeout=timeout,
    )
    logger.info(
        "清理动作汇总: 删除(成功=%s 失败=%s) 禁用(成功=%s 失败=%s) 启用(成功=%s 失败=%s)",
        deleted_ok,
        deleted_fail,
        disabled_ok,
        disabled_fail,
        enabled_ok,
        enabled_fail,
    )

    # Re-fetch so callers see the post-cleanup state; fall back to the
    # pre-cleanup list when the refresh fails.
    refreshed_files = files
    try:
        refreshed_files = fetch_auth_files(base_url, token, timeout)
    except Exception as e:
        logger.warning("清理动作后重新拉取 auth-files 失败,回退旧列表: %s", e)
    return {
        "action_total": len(delete_names) + len(disable_names) + len(enable_names),
        "delete_plan": len(delete_names),
        "delete_ok": deleted_ok,
        "delete_fail": deleted_fail,
        "disable_plan": len(disable_names),
        "disable_ok": disabled_ok,
        "disable_fail": disabled_fail,
        "enable_plan": len(enable_names),
        "enable_ok": enabled_ok,
        "enable_fail": enabled_fail,
        "files": refreshed_files,
        "total_files": total_files,
        "target_files": target_files,
        "probed_files": probed_files,
        "invalid_401_count": invalid_401_count,
        "invalid_used_percent_count": invalid_used_percent_count,
    }
+
+
def run_clean_401(conf: Dict[str, Any], logger: logging.Logger) -> Dict[str, Any]:
    """Synchronous entry point for the account cleanup pass.

    Reads the "clean" section of the config, validates required settings and
    drives :func:`run_clean_401_async`. When aiohttp is unavailable the
    cleanup is skipped and only the auth-file list is fetched so the caller
    can still proceed with replenishment. Raises RuntimeError on missing
    config or when the fallback fetch fails.
    """
    base_url = str(pick_conf(conf, "clean", "base_url", default="") or "").rstrip("/")
    token = str(pick_conf(conf, "clean", "token", "cpa_password", default="") or "").strip()
    target_type = str(pick_conf(conf, "clean", "target_type", default="codex") or "codex")
    workers = int(pick_conf(conf, "clean", "workers", default=20) or 20)
    delete_workers = int(pick_conf(conf, "clean", "delete_workers", default=40) or 40)
    timeout = int(pick_conf(conf, "clean", "timeout", default=10) or 10)
    retries = int(pick_conf(conf, "clean", "retries", default=1) or 1)
    user_agent = str(pick_conf(conf, "clean", "user_agent", default=DEFAULT_MGMT_UA) or DEFAULT_MGMT_UA)
    used_percent_threshold = int(pick_conf(conf, "clean", "used_percent_threshold", default=80) or 80)
    sample_size = max(0, int(pick_conf(conf, "clean", "sample_size", default=0) or 0))

    if not base_url or not token:
        raise RuntimeError("clean 配置缺少 base_url 或 token/cpa_password")

    # aiohttp is an optional dependency: without it, degrade to a plain
    # (requests-based) fetch and report an all-zero cleanup summary.
    if aiohttp is None:
        logger.warning("未安装 aiohttp,跳过异步清理流程,回退为仅拉取账号列表继续执行补号。建议安装: pip install -r requirements.txt")
        try:
            files = fetch_auth_files(base_url, token, timeout)
        except Exception as e:
            raise RuntimeError(f"未安装 aiohttp,且拉取 auth-files 失败: {e}") from e
        total_files, candidates = get_candidates_count_from_files(files, target_type)
        return {
            "action_total": 0,
            "delete_plan": 0,
            "delete_ok": 0,
            "delete_fail": 0,
            "disable_plan": 0,
            "disable_ok": 0,
            "disable_fail": 0,
            "enable_plan": 0,
            "enable_ok": 0,
            "enable_fail": 0,
            "files": files,
            "total_files": total_files,
            "target_files": candidates,
            "probed_files": 0,
            "invalid_401_count": 0,
            "invalid_used_percent_count": 0,
        }

    logger.info(
        "开始清理账号: base_url=%s target_type=%s used_percent_threshold=%s sample_size=%s",
        base_url,
        target_type,
        used_percent_threshold,
        sample_size,
    )
    return asyncio.run(
        run_clean_401_async(
            base_url=base_url,
            token=token,
            target_type=target_type,
            workers=workers,
            delete_workers=delete_workers,
            timeout=timeout,
            retries=retries,
            user_agent=user_agent,
            used_percent_threshold=used_percent_threshold,
            sample_size=sample_size,
            logger=logger,
        )
    )
+
+
def get_counts_after_cleanup(
    *,
    base_url: str,
    token: str,
    target_type: str,
    timeout: int,
    deleted_ok: int,
    pre_total: int,
    pre_candidates: int,
    logger: logging.Logger,
    retries: int = 4,
    delay_seconds: float = 1.0,
) -> tuple[int, int]:
    """Re-read pool statistics after cleanup, tolerating stale upstream counts.

    Polls ``get_candidates_count`` up to ``retries`` times, waiting for the
    numbers to drop below the pre-cleanup baseline when deletions succeeded.
    If they never drop (presumably upstream caching/lag — TODO confirm),
    returns a conservative estimate derived from ``deleted_ok`` instead.

    Returns:
        (total, candidates) — observed or conservatively corrected counts.
    """
    total, candidates = pre_total, pre_candidates

    attempts = max(1, retries)
    for attempt in range(1, attempts + 1):
        total, candidates = get_candidates_count(
            base_url=base_url,
            token=token,
            target_type=target_type,
            timeout=timeout,
        )
        # Nothing was deleted, or the stats already reflect the deletions:
        # trust the observed values as-is.
        if deleted_ok <= 0 or total < pre_total or candidates < pre_candidates:
            return total, candidates
        if attempt < retries:
            time.sleep(delay_seconds)

    # Reaching here means deletions happened but the counts never moved —
    # subtract the confirmed deletions ourselves (clamped at zero).
    corrected_total = max(0, pre_total - deleted_ok)
    corrected_candidates = max(0, pre_candidates - deleted_ok)
    logger.warning(
        "删除后统计未及时反映(疑似缓存/延迟),按删除成功数保守修正: observed_total=%s observed_candidates=%s deleted_ok=%s corrected_total=%s corrected_candidates=%s",
        total,
        candidates,
        deleted_ok,
        corrected_total,
        corrected_candidates,
    )
    return corrected_total, corrected_candidates
+
+
def resolve_loop_interval_seconds(conf: Dict[str, Any], cli_value: Optional[float] = None) -> float:
    """Resolve the loop sleep interval in seconds.

    Precedence: explicit CLI value > ``maintainer.loop_interval_seconds`` in
    config > ``DEFAULT_LOOP_INTERVAL_SECONDS``.  Non-numeric values fall back
    to the default, and the result is clamped to ``MIN_LOOP_INTERVAL_SECONDS``.
    """
    raw = (
        cli_value
        if cli_value is not None
        else pick_conf(conf, "maintainer", "loop_interval_seconds", default=DEFAULT_LOOP_INTERVAL_SECONDS)
    )
    try:
        seconds = float(raw)
    except Exception:
        # Malformed config value — use the built-in default.
        seconds = DEFAULT_LOOP_INTERVAL_SECONDS
    return max(MIN_LOOP_INTERVAL_SECONDS, seconds)
+
+
def parse_args() -> argparse.Namespace:
    """Build and parse the command line.

    Default paths honor the APP_DATA_DIR / APP_CONFIG_PATH / APP_LOG_DIR
    environment overrides, falling back to the script's own directory.
    """
    here = Path(__file__).resolve().parent
    data_dir = Path(os.environ.get("APP_DATA_DIR", str(here)))
    cfg_default = Path(os.environ.get("APP_CONFIG_PATH", str(data_dir / "config.json")))
    log_dir_default = Path(os.environ.get("APP_LOG_DIR", str(data_dir / "logs")))

    parser = argparse.ArgumentParser(description="账号池自动维护(三合一:清理+补号+收敛)")
    parser.add_argument("--config", default=str(cfg_default), help="统一配置文件路径")
    parser.add_argument(
        "--min-candidates",
        type=int,
        default=None,
        help="候选账号最小阈值(默认读取 maintainer.min_candidates / 顶层 min_candidates,最终默认 100)",
    )
    parser.add_argument("--timeout", type=int, default=15, help="统计 candidates 时接口超时秒数")
    parser.add_argument("--log-dir", default=str(log_dir_default), help="日志目录")
    parser.add_argument("--loop", action="store_true", help="开启循环维护模式(按固定间隔重复执行清理+补号)")
    parser.add_argument("--loop-interval", type=float, default=None, help="循环模式的检查间隔秒数(默认读取 maintainer.loop_interval_seconds,兜底 60s)")
    return parser.parse_args()
+
+
def run_maintainer_once(args: argparse.Namespace, logger: logging.Logger, config_path: Path) -> int:
    """Execute one full maintenance pass: clean invalid accounts, re-count the
    pool, and register new accounts when the candidate count is below the
    configured threshold.

    Returns a process-style exit code:
      0 success (including a final count still below threshold — only warned),
      2 bad/missing config, 3 cleanup failed, 4 post-clean count failed,
      5 replenishment failed, 6 post-replenish count failed.
    """
    if not config_path.exists():
        logger.error("配置文件不存在: %s", config_path)
        return 2

    conf = load_json(config_path)

    base_url = str(pick_conf(conf, "clean", "base_url", default="") or "").rstrip("/")
    token = str(pick_conf(conf, "clean", "token", "cpa_password", default="") or "").strip()
    target_type = str(pick_conf(conf, "clean", "target_type", default="codex") or "codex")

    # Threshold precedence: CLI flag > maintainer.min_candidates >
    # top-level min_candidates > hard default of 100.
    cfg_min_candidates = pick_conf(conf, "maintainer", "min_candidates", default=None)
    if cfg_min_candidates is None:
        cfg_min_candidates = conf.get("min_candidates")

    if args.min_candidates is not None:
        min_candidates = int(args.min_candidates)
    elif cfg_min_candidates is not None:
        min_candidates = int(cfg_min_candidates)
    else:
        min_candidates = 100

    if min_candidates < 0:
        logger.error("min_candidates 不能小于 0(当前值=%s)", min_candidates)
        return 2
    if not base_url or not token:
        logger.error("缺少 clean.base_url 或 clean.token/cpa_password")
        return 2

    # Stage 1: clean up invalid/over-used accounts and capture the
    # pre-cleanup baseline counts for the stale-stats correction below.
    try:
        clean_summary = run_clean_401(conf, logger)
        deleted_ok = int(clean_summary.get("delete_ok", 0) or 0)
        pre_total_files = int(clean_summary.get("total_files", 0) or 0)
        pre_candidates = int(clean_summary.get("target_files", 0) or 0)
        logger.info(
            "清理阶段汇总: 动作总计=%s | 删除 %s/%s | 禁用 %s/%s | 启用 %s/%s",
            clean_summary.get("action_total", 0),
            clean_summary.get("delete_ok", 0),
            clean_summary.get("delete_plan", 0),
            clean_summary.get("disable_ok", 0),
            clean_summary.get("disable_plan", 0),
            clean_summary.get("enable_ok", 0),
            clean_summary.get("enable_plan", 0),
        )
    except Exception as e:
        logger.error("清理无效账号失败: %s", e)
        logger.info("=== 账号池自动维护结束(失败)===")
        return 3

    # Stage 2: re-count the pool, tolerating stale upstream statistics.
    try:
        total_after_clean, candidates_after_clean = get_counts_after_cleanup(
            base_url=base_url,
            token=token,
            target_type=target_type,
            timeout=args.timeout,
            deleted_ok=deleted_ok,
            pre_total=pre_total_files,
            pre_candidates=pre_candidates,
            logger=logger,
        )
    except Exception as e:
        logger.error("删除后统计失败: %s", e)
        logger.info("=== 账号池自动维护结束(失败)===")
        return 4

    logger.info(
        "清理后统计: 总账号=%s, candidates=%s, 阈值=%s",
        total_after_clean,
        candidates_after_clean,
        min_candidates,
    )

    # Early exit: pool already meets the threshold, no registration needed.
    if candidates_after_clean >= min_candidates:
        logger.info("当前 candidates 已达标,无需补号。")
        logger.info("=== 账号池自动维护结束(成功)===")
        return 0

    gap = min_candidates - candidates_after_clean
    logger.info("当前 candidates 未达标,缺口=%s,开始补号。", gap)

    # Stage 3: register exactly enough new accounts to close the gap.
    try:
        filled, failed, synced = run_batch_register(conf=conf, target_tokens=gap, logger=logger)
        logger.info("补号阶段汇总: 成功token=%s, 失败=%s, 收敛账号=%s", filled, failed, synced)
    except Exception as e:
        logger.error("补号阶段失败: %s", e)
        logger.info("=== 账号池自动维护结束(失败)===")
        return 5

    # Stage 4: final count for reporting only — a shortfall here is logged
    # as a warning but deliberately does not fail the run.
    try:
        total_final, candidates_final = get_candidates_count(
            base_url=base_url,
            token=token,
            target_type=target_type,
            timeout=args.timeout,
        )
    except Exception as e:
        logger.error("补号后统计失败: %s", e)
        logger.info("=== 账号池自动维护结束(失败)===")
        return 6

    logger.info(
        "补号后统计: 总账号=%s, codex账号=%s, codex目标=%s",
        total_final,
        candidates_final,
        min_candidates,
    )
    if candidates_final < min_candidates:
        logger.warning("最终 codex账号数 仍低于阈值,请检查邮箱/OAuth/上传链路。")
    logger.info("=== 账号池自动维护结束(成功)===")
    return 0
+
+
def run_maintainer_loop(args: argparse.Namespace, logger: logging.Logger, config_path: Path) -> int:
    """Run maintenance rounds forever, sleeping a configurable interval between them.

    Never returns under normal operation; the ``int`` annotation only mirrors
    ``run_maintainer_once`` for the caller's benefit.  A failing round is
    logged and the loop continues.
    """
    logger.info("=== 账号池循环维护开始 ===")
    round_no = 0
    while True:
        round_no += 1
        logger.info(">>> 循环轮次 #%s 开始", round_no)
        started_at = time.time()
        code = run_maintainer_once(args=args, logger=logger, config_path=config_path)
        duration = time.time() - started_at
        if code == 0:
            logger.info(">>> 循环轮次 #%s 完成(成功),耗时 %.1fs", round_no, duration)
        else:
            logger.warning(">>> 循环轮次 #%s 完成(失败 code=%s),耗时 %.1fs", round_no, code, duration)

        # Re-read the config every round so interval changes apply without a restart.
        conf: Dict[str, Any] = {}
        if config_path.exists():
            try:
                conf = load_json(config_path)
            except Exception as e:
                logger.warning("循环模式读取配置失败,使用默认间隔: %s", e)
        sleep_seconds = resolve_loop_interval_seconds(conf, args.loop_interval)
        logger.info("循环模式休眠 %.1fs 后再次检查号池", sleep_seconds)
        time.sleep(sleep_seconds)
+
+
def main() -> int:
    """Process entry point: parse CLI args, set up logging, run once or loop.

    Returns the exit code produced by the selected runner.
    """
    # Targets are frequently reached over unverified TLS; silence urllib3 noise.
    requests.packages.urllib3.disable_warnings()  # type: ignore[attr-defined]

    args = parse_args()
    config_path = Path(args.config).resolve()
    logger, log_path = setup_logger(Path(args.log_dir).resolve())
    logger.info("=== 账号池自动维护开始(二合一)===")
    logger.info("配置文件: %s", config_path)
    logger.info("日志文件: %s", log_path)

    runner = run_maintainer_loop if args.loop else run_maintainer_once
    return runner(args=args, logger=logger, config_path=config_path)
+
+
+if __name__ == "__main__":
+ raise SystemExit(main())
diff --git a/config.example.json b/config.example.json
new file mode 100644
index 0000000..81c7dfc
--- /dev/null
+++ b/config.example.json
@@ -0,0 +1,102 @@
+{
+ "cfmail": {
+ "api_base": "",
+ "api_key": "YOUR_CFMAIL_ADMIN_PASSWORD",
+ "domain": "",
+ "domains": [
+ ]
+ },
+ "clean": {
+ "base_url": "https://your-cpa-host.example.com",
+ "token": "YOUR_CPA_PWD",
+ "target_type": "codex",
+ "workers": 20,
+ "sample_size": 0,
+ "delete_workers": 20,
+ "timeout": 10,
+ "retries": 1,
+ "user_agent": "codex_cli_rs/0.76.0 (Debian 13.0.0; x86_64) WindowsTerminal",
+ "used_percent_threshold": 90
+ },
+ "mail": {
+ "provider": "tempmail_lol",
+ "api_base": "https://your-worker.workers.dev",
+ "api_key": "YOUR_MAIL_API_KEY",
+ "domain": "mail.example.com",
+ "domains": [
+ "mail.example.com",
+ "mail-backup.example.com"
+ ],
+ "otp_timeout_seconds": 120,
+ "poll_interval_seconds": 3
+ },
+ "duckmail": {
+ "api_base": "https://api.duckmail.sbs",
+ "bearer": "YOUR_DUCKMAIL_BEARER",
+ "domain": "duckmail.sbs",
+ "domains": [
+ "duckmail.sbs",
+ "duckmail-backup.example.com"
+ ]
+ },
+ "tempmail_lol": {
+ "api_base": "https://api.tempmail.lol/v2"
+ },
+ "yyds_mail": {
+ "api_base": "https://maliapi.215.im/v1",
+ "api_key": "YOUR_YYDS_MAIL_API_KEY",
+ "domain": "",
+ "domains": [
+ "mail-a.example.com",
+ "mail-b.example.com"
+ ]
+ },
+ "maintainer": {
+ "min_candidates": 50,
+ "loop_interval_seconds": 60
+ },
+ "run": {
+ "workers": 8,
+ "proxy": "",
+ "failure_threshold_for_cooldown": 5,
+ "failure_cooldown_seconds": 45,
+ "loop_jitter_min_seconds": 2,
+ "loop_jitter_max_seconds": 6
+ },
+ "flow": {
+ "step_retry_attempts": 2,
+ "step_retry_delay_base": 0.2,
+ "step_retry_delay_cap": 0.8,
+ "outer_retry_attempts": 3,
+ "oauth_local_retry_attempts": 3,
+ "transient_markers": "sentinel_,oauth_authorization_code_not_found,headers_failed,timeout,timed out,server disconnected,unexpected_eof_while_reading,transport,remoteprotocolerror,connection reset,temporarily unavailable,network,eof occurred,http_429,http_500,http_502,http_503,http_504",
+ "register_otp_validate_order": "normal,sentinel",
+ "oauth_otp_validate_order": "normal,sentinel",
+ "oauth_password_phone_action": "warn_and_continue",
+ "oauth_otp_phone_action": "warn_and_continue"
+ },
+ "registration": {
+ "entry_mode": "chatgpt_web",
+ "entry_mode_fallback": true,
+ "chatgpt_base": "https://chatgpt.com",
+ "register_create_account_phone_action": "warn_and_continue",
+ "phone_verification_markers": "add_phone,/add-phone,phone_verification,phone-verification,phone/verify"
+ },
+ "oauth": {
+ "issuer": "https://auth.openai.com",
+ "client_id": "app_EMoamEEZ73f0CkXaXp7hrann",
+ "redirect_uri": "http://localhost:1455/auth/callback",
+ "retry_attempts": 3,
+ "retry_backoff_base": 2,
+ "retry_backoff_max": 15,
+ "otp_timeout_seconds": 120,
+ "otp_poll_interval_seconds": 2
+ },
+ "output": {
+ "accounts_file": "accounts.txt",
+ "csv_file": "registered_accounts.csv",
+ "ak_file": "ak.txt",
+ "rk_file": "rk.txt",
+ "save_local": false
+ }
+}
diff --git a/dev_services.ps1 b/dev_services.ps1
new file mode 100644
index 0000000..734d633
--- /dev/null
+++ b/dev_services.ps1
@@ -0,0 +1,553 @@
+param(
+ [string]$Action = "",
+ [string]$Service = "",
+ [switch]$Background
+)
+
+Set-StrictMode -Version Latest
+$ErrorActionPreference = "Stop"
+
+$ScriptPath = $MyInvocation.MyCommand.Path
+$ProjectRoot = Split-Path -Parent $ScriptPath
+$RuntimeDir = Join-Path $ProjectRoot "logs/dev-services"
+$PidDir = Join-Path $RuntimeDir "pids"
+$Services = @("backend", "frontend")
+
+function Ensure-RuntimeDirectories {
+ New-Item -ItemType Directory -Force -Path $RuntimeDir | Out-Null
+ New-Item -ItemType Directory -Force -Path $PidDir | Out-Null
+}
+
+function Show-Usage {
+ @"
+用法:
+ .\dev_services.ps1 fg 前台启动两个服务,按 Ctrl+C 一键关闭
+ .\dev_services.ps1 bg 后台启动两个服务
+ .\dev_services.ps1 stop 停止由本脚本后台启动的两个服务
+ .\dev_services.ps1 restart 重启后台服务
+ .\dev_services.ps1 status 查看后台服务状态
+
+说明:
+ - 后台模式日志目录: logs/dev-services/
+ - 后台模式 PID 目录: logs/dev-services/pids/
+"@
+}
+
+function Get-ServiceTitle {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ switch ($Name) {
+ "backend" { return "backend" }
+ "frontend" { return "frontend" }
+ default { return $Name }
+ }
+}
+
+function Get-ServiceLogFile {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ Join-Path $RuntimeDir ("{0}.log" -f $Name)
+}
+
+function Get-ServicePidFile {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ Join-Path $PidDir ("{0}.pid" -f $Name)
+}
+
+function Resolve-CommandPath {
+ param([Parameter(Mandatory = $true)][string[]]$Candidates)
+
+ foreach ($candidate in $Candidates) {
+ if ([System.IO.Path]::IsPathRooted($candidate) -and (Test-Path -LiteralPath $candidate)) {
+ return $candidate
+ }
+
+ $command = Get-Command -Name $candidate -ErrorAction SilentlyContinue
+ if ($null -ne $command) {
+ return $command.Source
+ }
+ }
+
+ return $null
+}
+
+function Get-HostPowerShell {
+ $currentPath = (Get-Process -Id $PID).Path
+ if ([string]::IsNullOrWhiteSpace($currentPath)) {
+ $currentPath = Resolve-CommandPath @("powershell.exe", "pwsh.exe")
+ }
+
+ if ([string]::IsNullOrWhiteSpace($currentPath)) {
+ throw "找不到当前 PowerShell 可执行文件。"
+ }
+
+ $fileName = [System.IO.Path]::GetFileName($currentPath)
+ if ($fileName -ieq "pwsh.exe") {
+ return [pscustomobject]@{
+ Executable = $currentPath
+ Arguments = @("-NoProfile", "-File")
+ }
+ }
+
+ return [pscustomobject]@{
+ Executable = $currentPath
+ Arguments = @("-NoProfile", "-ExecutionPolicy", "Bypass", "-File")
+ }
+}
+
+function Get-ServiceDefinition {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ $frontendRoot = Join-Path $ProjectRoot "frontend"
+
+ switch ($Name) {
+ "backend" {
+ $pythonExe = Join-Path $ProjectRoot ".venv\Scripts\python.exe"
+ return [pscustomobject]@{
+ Name = $Name
+ Executable = $pythonExe
+ Arguments = @("api_server.py")
+ WorkingDirectory = $ProjectRoot
+ }
+ }
+ "frontend" {
+ $viteCmd = Join-Path $frontendRoot "node_modules\.bin\vite.cmd"
+ if (Test-Path -LiteralPath $viteCmd) {
+ return [pscustomobject]@{
+ Name = $Name
+ Executable = $viteCmd
+ Arguments = @()
+ WorkingDirectory = $frontendRoot
+ }
+ }
+
+ $pnpmExe = Resolve-CommandPath @("pnpm.cmd", "pnpm.exe", "pnpm")
+ if ([string]::IsNullOrWhiteSpace($pnpmExe)) {
+ throw "缺少前端启动命令: pnpm"
+ }
+
+ return [pscustomobject]@{
+ Name = $Name
+ Executable = $pnpmExe
+ Arguments = @("run", "dev")
+ WorkingDirectory = $frontendRoot
+ }
+ }
+ default {
+ throw ("未知服务: {0}" -f $Name)
+ }
+ }
+}
+
+function Format-ServiceCommand {
+ param([Parameter(Mandatory = $true)]$Definition)
+
+ $parts = @($Definition.Executable) + @($Definition.Arguments)
+ ($parts | ForEach-Object {
+ if ($_ -match "\s") {
+ '"{0}"' -f $_
+ }
+ else {
+ $_
+ }
+ }) -join " "
+}
+
+function Require-Dependencies {
+ $null = Get-HostPowerShell
+
+ $backendPython = Join-Path $ProjectRoot ".venv\Scripts\python.exe"
+ if (-not (Test-Path -LiteralPath $backendPython)) {
+ throw ("缺少 Python 解释器: {0}" -f $backendPython)
+ }
+
+ $frontendRoot = Join-Path $ProjectRoot "frontend"
+ if (-not (Test-Path -LiteralPath $frontendRoot)) {
+ throw ("缺少前端目录: {0}" -f $frontendRoot)
+ }
+
+ $null = Get-ServiceDefinition "frontend"
+}
+
function Test-PidRunning {
    param([Parameter(Mandatory = $true)][int]$ProcessId)

    # A live process resolves via Get-Process; a dead one yields $null
    # (errors suppressed so a missing pid is not terminating).
    $process = Get-Process -Id $ProcessId -ErrorAction SilentlyContinue
    return ($null -ne $process)
}
+
function Get-ServicePid {
    param([Parameter(Mandatory = $true)][string]$Name)

    # Read the recorded PID for a service; returns $null when the pid file
    # is missing, empty, or does not contain an integer.
    $pidFile = Get-ServicePidFile $Name
    if (-not (Test-Path -LiteralPath $pidFile)) {
        return $null
    }

    $content = (Get-Content -LiteralPath $pidFile -Raw).Trim()
    if ([string]::IsNullOrWhiteSpace($content)) {
        return $null
    }

    try {
        return [int]$content
    }
    catch {
        # Corrupt pid file — treat as "no pid recorded".
        return $null
    }
}
+
+function Clear-StalePid {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ $pidFile = Get-ServicePidFile $Name
+ if (-not (Test-Path -LiteralPath $pidFile)) {
+ return
+ }
+
+ $servicePid = Get-ServicePid $Name
+ if ($null -eq $servicePid -or -not (Test-PidRunning $servicePid)) {
+ Remove-Item -LiteralPath $pidFile -Force -ErrorAction SilentlyContinue
+ }
+}
+
+function Test-ServiceRunning {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ $servicePid = Get-ServicePid $Name
+ if ($null -eq $servicePid) {
+ return $false
+ }
+
+ Test-PidRunning $servicePid
+}
+
function Stop-ProcessTree {
    param([Parameter(Mandatory = $true)][int]$ProcessId)

    # No-op when the process is already gone.
    if (-not (Test-PidRunning $ProcessId)) {
        return
    }

    # taskkill /T kills the whole child tree, /F forces termination;
    # output is discarded because only the side effect matters here.
    & taskkill /PID $ProcessId /T /F | Out-Null
}
+
function Stop-ServiceProcess {
    param([Parameter(Mandatory = $true)][string]$Name)

    # Drop a stale pid file first so we never act on a dead recorded pid.
    Clear-StalePid $Name

    $servicePid = Get-ServicePid $Name
    if ($null -eq $servicePid) {
        return
    }

    # Recorded pid but the process is already gone: just clean up the file.
    if (-not (Test-PidRunning $servicePid)) {
        Remove-Item -LiteralPath (Get-ServicePidFile $Name) -Force -ErrorAction SilentlyContinue
        return
    }

    Write-Host ("停止 {0,-12} pid={1}" -f (Get-ServiceTitle $Name), $servicePid)
    Stop-ProcessTree $servicePid

    # Poll up to 10 seconds for the process tree to exit before removing
    # the pid file regardless.
    $deadline = (Get-Date).AddSeconds(10)
    while ((Get-Date) -lt $deadline) {
        if (-not (Test-PidRunning $servicePid)) {
            break
        }
        Start-Sleep -Milliseconds 250
    }

    Remove-Item -LiteralPath (Get-ServicePidFile $Name) -Force -ErrorAction SilentlyContinue
}
+
+function Ensure-NoManagedServicesRunning {
+ $busy = $false
+
+ foreach ($service in $Services) {
+ Clear-StalePid $service
+ if (Test-ServiceRunning $service) {
+ $servicePid = Get-ServicePid $service
+ Write-Error ("{0,-12} 已在运行 pid={1},请先执行 .\dev_services.ps1 stop" -f (Get-ServiceTitle $service), $servicePid)
+ $busy = $true
+ }
+ }
+
+ if ($busy) {
+ throw "已有托管服务正在运行。"
+ }
+}
+
+function Write-LogHeader {
+ param(
+ [Parameter(Mandatory = $true)][string]$Name,
+ [Parameter(Mandatory = $true)]$Definition
+ )
+
+ $logFile = Get-ServiceLogFile $Name
+ $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
+ Add-Content -Path $logFile -Value "" -Encoding UTF8
+ Add-Content -Path $logFile -Value ("[{0}] starting {1}" -f $timestamp, (Get-ServiceTitle $Name)) -Encoding UTF8
+ Add-Content -Path $logFile -Value ("[{0}] command: {1}" -f $timestamp, (Format-ServiceCommand $Definition)) -Encoding UTF8
+}
+
+function Convert-CommandOutput {
+ param($Value)
+
+ if ($null -eq $Value) {
+ return $null
+ }
+
+ if ($Value -is [System.Management.Automation.ErrorRecord]) {
+ return $Value.ToString()
+ }
+
+ return [string]$Value
+}
+
+function Run-ServiceProcess {
+ param(
+ [Parameter(Mandatory = $true)][string]$Name,
+ [switch]$BackgroundMode
+ )
+
+ Ensure-RuntimeDirectories
+ $definition = Get-ServiceDefinition $Name
+ $logFile = Get-ServiceLogFile $Name
+
+ if (-not $BackgroundMode) {
+ Set-Content -Path $logFile -Value $null -Encoding UTF8
+ }
+
+ Write-LogHeader -Name $Name -Definition $definition
+ Set-Location -LiteralPath $definition.WorkingDirectory
+
+ try {
+ & $definition.Executable @($definition.Arguments) 2>&1 |
+ ForEach-Object {
+ $line = Convert-CommandOutput $_
+ if ($null -eq $line) {
+ return
+ }
+
+ Add-Content -Path $logFile -Value $line -Encoding UTF8
+
+ if (-not $BackgroundMode) {
+ Write-Host ("[{0}] {1}" -f (Get-ServiceTitle $Name), $line)
+ }
+ }
+
+ if ($null -ne $LASTEXITCODE) {
+ exit ([int]$LASTEXITCODE)
+ }
+
+ exit 0
+ }
+ catch {
+ $message = $_.Exception.Message
+ Add-Content -Path $logFile -Value $message -Encoding UTF8
+ if (-not $BackgroundMode) {
+ Write-Host ("[{0}] {1}" -f (Get-ServiceTitle $Name), $message)
+ }
+ exit 1
+ }
+}
+
+function Start-ServiceBackground {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ $hostPowerShell = Get-HostPowerShell
+ $logFile = Get-ServiceLogFile $Name
+ Set-Content -Path $logFile -Value $null -Encoding UTF8
+
+ $arguments = @($hostPowerShell.Arguments + @($ScriptPath, "__runservice", $Name, "-Background"))
+ $process = Start-Process -FilePath $hostPowerShell.Executable -ArgumentList $arguments -WorkingDirectory $ProjectRoot -WindowStyle Hidden -PassThru
+
+ Set-Content -Path (Get-ServicePidFile $Name) -Value $process.Id -Encoding ASCII
+ Start-Sleep -Seconds 1
+
+ if (Test-PidRunning $process.Id) {
+ Write-Host ("启动 {0,-12} 成功 pid={1} log={2}" -f (Get-ServiceTitle $Name), $process.Id, $logFile)
+ return
+ }
+
+ Write-Error ("启动 {0} 失败,最近日志:" -f (Get-ServiceTitle $Name))
+ if (Test-Path -LiteralPath $logFile) {
+ Get-Content -Path $logFile -Tail 20 | ForEach-Object { Write-Host $_ }
+ }
+ Remove-Item -LiteralPath (Get-ServicePidFile $Name) -Force -ErrorAction SilentlyContinue
+ throw ("启动 {0} 失败。" -f (Get-ServiceTitle $Name))
+}
+
+function Start-Background {
+ Ensure-RuntimeDirectories
+ Require-Dependencies
+ Ensure-NoManagedServicesRunning
+
+ $started = @()
+ try {
+ foreach ($service in $Services) {
+ Start-ServiceBackground $service
+ $started += $service
+ }
+ }
+ catch {
+ foreach ($startedService in $started) {
+ Stop-ServiceProcess $startedService
+ }
+ throw
+ }
+
+ Write-Host ""
+ Write-Host "后台服务已启动。"
+ Write-Host "停止命令: .\dev_services.ps1 stop"
+ Write-Host "状态命令: .\dev_services.ps1 status"
+}
+
+function Show-Status {
+ Ensure-RuntimeDirectories
+
+ foreach ($service in $Services) {
+ Clear-StalePid $service
+
+ $title = Get-ServiceTitle $service
+ $logFile = Get-ServiceLogFile $service
+
+ if (Test-ServiceRunning $service) {
+ $servicePid = Get-ServicePid $service
+ Write-Host ("{0,-12} running pid={1,-8} log={2}" -f $title, $servicePid, $logFile)
+ }
+ else {
+ Write-Host ("{0,-12} stopped pid={1,-8} log={2}" -f $title, "-", $logFile)
+ }
+ }
+}
+
+function Stop-Background {
+ Ensure-RuntimeDirectories
+
+ foreach ($service in $Services) {
+ Stop-ServiceProcess $service
+ }
+}
+
+function Stop-ForegroundProcesses {
+ param([Parameter(Mandatory = $true)][object[]]$ManagedProcesses)
+
+ if ($ManagedProcesses.Count -eq 0) {
+ return
+ }
+
+ Write-Host ""
+ Write-Host "正在关闭前台服务..."
+
+ foreach ($managed in $ManagedProcesses) {
+ $process = $managed.Process
+ if ($null -eq $process) {
+ continue
+ }
+
+ $process.Refresh()
+ if ($process.HasExited) {
+ continue
+ }
+
+ Stop-ProcessTree $process.Id
+ }
+}
+
+function Start-ForegroundService {
+ param([Parameter(Mandatory = $true)][string]$Name)
+
+ $hostPowerShell = Get-HostPowerShell
+ $arguments = @($hostPowerShell.Arguments + @($ScriptPath, "__runservice", $Name))
+
+ Write-Host ("启动 {0,-12} 前台模式" -f (Get-ServiceTitle $Name))
+ $process = Start-Process -FilePath $hostPowerShell.Executable -ArgumentList $arguments -WorkingDirectory $ProjectRoot -NoNewWindow -PassThru
+
+ [pscustomobject]@{
+ Service = $Name
+ Process = $process
+ }
+}
+
+function Start-Foreground {
+ Ensure-RuntimeDirectories
+ Require-Dependencies
+ Ensure-NoManagedServicesRunning
+
+ $managedProcesses = @()
+
+ try {
+ foreach ($service in $Services) {
+ $managedProcesses += Start-ForegroundService $service
+ }
+
+ Write-Host ""
+ Write-Host "两个服务已进入前台托管模式。按 Ctrl+C 可一键关闭。"
+
+ while ($true) {
+ foreach ($managed in $managedProcesses) {
+ $process = $managed.Process
+ $process.Refresh()
+
+ if (-not $process.HasExited) {
+ continue
+ }
+
+ $exitCode = $process.ExitCode
+ Write-Host ""
+ Write-Host ("{0} 已退出,退出码={1},其余服务也会一并关闭。" -f (Get-ServiceTitle $managed.Service), $exitCode)
+ return $exitCode
+ }
+
+ Start-Sleep -Seconds 1
+ }
+ }
+ finally {
+ Stop-ForegroundProcesses $managedProcesses
+ }
+}
+
+Ensure-RuntimeDirectories
+
+switch ($Action) {
+ "__runservice" {
+ Run-ServiceProcess -Name $Service -BackgroundMode:$Background
+ break
+ }
+ "fg" {
+ exit (Start-Foreground)
+ }
+ "bg" {
+ Start-Background
+ break
+ }
+ "stop" {
+ Stop-Background
+ break
+ }
+ "restart" {
+ Stop-Background
+ Start-Background
+ break
+ }
+ "status" {
+ Show-Status
+ break
+ }
+ "help" {
+ Show-Usage
+ break
+ }
+ "" {
+ Show-Usage
+ break
+ }
+ default {
+ Write-Error ("未知命令: {0}" -f $Action)
+ Write-Host ""
+ Show-Usage
+ exit 1
+ }
+}
diff --git a/dev_services.sh b/dev_services.sh
new file mode 100644
index 0000000..d21734d
--- /dev/null
+++ b/dev_services.sh
@@ -0,0 +1,398 @@
+#!/usr/bin/env bash
+
+set -u
+set -o pipefail
+
+PROJECT_ROOT="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
+RUNTIME_DIR="$PROJECT_ROOT/logs/dev-services"
+PID_DIR="$RUNTIME_DIR/pids"
+
+SERVICES=(backend frontend)
+FG_PIDS=()
+FG_NAMES=()
+CLEANED_UP=0
+
+mkdir -p "$PID_DIR"
+
+usage() {
+ cat <<'EOF'
+用法:
+ ./dev_services.sh fg 前台启动两个服务,按 Ctrl+C 一键关闭
+ ./dev_services.sh bg 后台启动两个服务
+ ./dev_services.sh stop 停止由本脚本后台启动的两个服务
+ ./dev_services.sh restart 重启后台服务
+ ./dev_services.sh status 查看后台服务状态
+
+说明:
+ - 后台模式日志目录: logs/dev-services/
+ - 后台模式 PID 目录: logs/dev-services/pids/
+EOF
+}
+
+service_title() {
+ case "$1" in
+ backend) printf '%s' "backend" ;;
+ frontend) printf '%s' "frontend" ;;
+ *) printf '%s' "$1" ;;
+ esac
+}
+
+service_log_file() {
+ printf '%s/%s.log' "$RUNTIME_DIR" "$1"
+}
+
+service_pid_file() {
+ printf '%s/%s.pid' "$PID_DIR" "$1"
+}
+
+service_command() {
+ local service="$1"
+ local cmd=""
+
+ case "$service" in
+ backend)
+ printf -v cmd 'cd %q && exec %q api_server.py' "$PROJECT_ROOT" "$PROJECT_ROOT/.venv/bin/python"
+ ;;
+ frontend)
+ if [[ -x "$PROJECT_ROOT/frontend/node_modules/.bin/vite" ]]; then
+ printf -v cmd 'cd %q && exec %q' "$PROJECT_ROOT/frontend" "$PROJECT_ROOT/frontend/node_modules/.bin/vite"
+ else
+ printf -v cmd 'cd %q && exec pnpm run dev' "$PROJECT_ROOT/frontend"
+ fi
+ ;;
+ *)
+ echo "未知服务: $service" >&2
+ return 1
+ ;;
+ esac
+
+ printf '%s' "$cmd"
+}
+
+require_command() {
+ if ! command -v "$1" >/dev/null 2>&1; then
+ echo "缺少命令: $1" >&2
+ exit 1
+ fi
+}
+
+check_dependencies() {
+ require_command bash
+
+ if [[ ! -x "$PROJECT_ROOT/.venv/bin/python" ]]; then
+ echo "缺少 Python 解释器: $PROJECT_ROOT/.venv/bin/python" >&2
+ exit 1
+ fi
+
+ if [[ ! -d "$PROJECT_ROOT/frontend" ]]; then
+ echo "缺少前端目录: $PROJECT_ROOT/frontend" >&2
+ exit 1
+ fi
+
+ if [[ ! -x "$PROJECT_ROOT/frontend/node_modules/.bin/vite" ]]; then
+ require_command pnpm
+ fi
+}
+
is_pid_running() {
  # Succeed only for a purely numeric pid that answers signal 0.
  local pid="$1"
  [[ "$pid" =~ ^[0-9]+$ ]] || return 1
  kill -0 "$pid" 2>/dev/null
}
+
+service_pid() {
+ local pid_file
+ pid_file="$(service_pid_file "$1")"
+ [[ -f "$pid_file" ]] || return 1
+
+ local pid
+ pid="$(tr -d '[:space:]' <"$pid_file")"
+ [[ -n "$pid" ]] || return 1
+ printf '%s' "$pid"
+}
+
+service_running() {
+ local pid
+ pid="$(service_pid "$1")" || return 1
+ is_pid_running "$pid"
+}
+
clear_stale_pid() {
  # Remove a pid file whose recorded process is no longer alive.
  local service="$1" pid_file pid
  pid_file="$(service_pid_file "$service")"

  [[ -f "$pid_file" ]] || return 0

  pid="$(tr -d '[:space:]' <"$pid_file")"
  if ! is_pid_running "$pid"; then
    rm -f "$pid_file"
  fi
}
+
+kill_pid_group() {
+ local pid="$1"
+ kill -TERM -- "-$pid" 2>/dev/null || kill -TERM "$pid" 2>/dev/null || true
+}
+
+force_kill_pid_group() {
+ local pid="$1"
+ kill -KILL -- "-$pid" 2>/dev/null || kill -KILL "$pid" 2>/dev/null || true
+}
+
stop_service() {
  # Gracefully stop one managed service: TERM its process group, wait up
  # to ~10s (20 x 0.5s), then KILL as a last resort; always clear the pid file.
  local service="$1"
  clear_stale_pid "$service"

  local pid
  # No recorded pid means nothing to stop.
  pid="$(service_pid "$service")" || return 0

  if ! is_pid_running "$pid"; then
    rm -f "$(service_pid_file "$service")"
    return 0
  fi

  printf '停止 %-12s pid=%s\n' "$(service_title "$service")" "$pid"
  kill_pid_group "$pid"

  local i
  for i in $(seq 1 20); do
    if ! is_pid_running "$pid"; then
      rm -f "$(service_pid_file "$service")"
      return 0
    fi
    sleep 0.5
  done

  # Still alive after the grace period — force-kill the group.
  force_kill_pid_group "$pid"
  rm -f "$(service_pid_file "$service")"
}
+
+ensure_no_managed_services_running() {
+ local busy=0
+ local service
+
+ for service in "${SERVICES[@]}"; do
+ clear_stale_pid "$service"
+ if service_running "$service"; then
+ local pid
+ pid="$(service_pid "$service")"
+ printf '%-12s 已在运行 pid=%s,请先执行 ./dev_services.sh stop\n' "$(service_title "$service")" "$pid" >&2
+ busy=1
+ fi
+ done
+
+ if (( busy != 0 )); then
+ exit 1
+ fi
+}
+
+start_service_background() {
+ local service="$1"
+ local cmd
+ cmd="$(service_command "$service")"
+
+ local log_file pid_file
+ log_file="$(service_log_file "$service")"
+ pid_file="$(service_pid_file "$service")"
+
+ {
+ printf '\n[%s] starting %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$(service_title "$service")"
+ printf '[%s] command: %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$cmd"
+ } >>"$log_file"
+
+ if command -v setsid >/dev/null 2>&1; then
+ setsid bash -lc "$cmd" >>"$log_file" 2>&1 < /dev/null &
+ else
+ bash -lc "$cmd" >>"$log_file" 2>&1 < /dev/null &
+ fi
+ local pid=$!
+ printf '%s\n' "$pid" >"$pid_file"
+
+ sleep 1
+ if is_pid_running "$pid"; then
+ printf '启动 %-12s 成功 pid=%s log=%s\n' "$(service_title "$service")" "$pid" "$log_file"
+ return 0
+ fi
+
+ echo "启动 $(service_title "$service") 失败,最近日志:" >&2
+ tail -n 20 "$log_file" >&2 || true
+ rm -f "$pid_file"
+ return 1
+}
+
start_background() {
  # Start every managed service in the background; if any service fails
  # to start, roll back the ones already launched and exit non-zero.
  check_dependencies
  ensure_no_managed_services_running

  local started=()
  local service
  for service in "${SERVICES[@]}"; do
    if start_service_background "$service"; then
      started+=("$service")
    else
      # Roll back anything already launched so a half-started stack is not
      # left behind.  Guard the expansion: under `set -u`, bash < 4.4
      # treats expanding an empty array as an "unbound variable" error,
      # which would abort this cleanup path when the FIRST service fails.
      if (( ${#started[@]} > 0 )); then
        local started_service
        for started_service in "${started[@]}"; do
          stop_service "$started_service"
        done
      fi
      exit 1
    fi
  done

  echo
  echo "后台服务已启动。"
  echo "停止命令: ./dev_services.sh stop"
  echo "状态命令: ./dev_services.sh status"
}
+
+show_status() {
+ local service
+ for service in "${SERVICES[@]}"; do
+ clear_stale_pid "$service"
+
+ local title pid_file log_file
+ title="$(service_title "$service")"
+ pid_file="$(service_pid_file "$service")"
+ log_file="$(service_log_file "$service")"
+
+ if service_running "$service"; then
+ local pid
+ pid="$(service_pid "$service")"
+ printf '%-12s running pid=%-8s log=%s\n' "$title" "$pid" "$log_file"
+ else
+ printf '%-12s stopped pid=%-8s log=%s\n' "$title" "-" "$log_file"
+ fi
+ done
+}
+
+stop_background() {
+ local service
+ for service in "${SERVICES[@]}"; do
+ stop_service "$service"
+ done
+}
+
+cleanup_foreground() {
+ if (( CLEANED_UP != 0 )); then
+ return 0
+ fi
+ CLEANED_UP=1
+
+ if (( ${#FG_PIDS[@]} == 0 )); then
+ return 0
+ fi
+
+ echo
+ echo "正在关闭前台服务..."
+
+ local pid
+ for pid in "${FG_PIDS[@]}"; do
+ kill -TERM "$pid" 2>/dev/null || true
+ done
+
+ sleep 1
+
+ for pid in "${FG_PIDS[@]}"; do
+ if is_pid_running "$pid"; then
+ kill -KILL "$pid" 2>/dev/null || true
+ fi
+ done
+
+ wait "${FG_PIDS[@]}" 2>/dev/null || true
+}
+
+on_foreground_interrupt() {
+ echo
+ echo "收到中断信号,准备关闭两个服务..."
+ cleanup_foreground
+ exit 130
+}
+
+start_service_foreground() {
+ local service="$1"
+ local cmd
+ cmd="$(service_command "$service")"
+
+ local log_file
+ log_file="$(service_log_file "$service")"
+ : >"$log_file"
+
+ printf '启动 %-12s 前台模式\n' "$(service_title "$service")"
+ bash -lc "$cmd" > >(tee -a "$log_file" | sed -u "s/^/[$(service_title "$service")] /") 2>&1 &
+ FG_PIDS+=("$!")
+ FG_NAMES+=("$service")
+}
+
+monitor_foreground() {
+ while true; do
+ local idx
+ for idx in "${!FG_PIDS[@]}"; do
+ local pid service
+ pid="${FG_PIDS[$idx]}"
+ service="${FG_NAMES[$idx]}"
+
+ if ! is_pid_running "$pid"; then
+ wait "$pid"
+ local status=$?
+ echo
+ echo "$(service_title "$service") 已退出,退出码=$status,其余服务也会一并关闭。"
+ return "$status"
+ fi
+ done
+ sleep 1
+ done
+}
+
+start_foreground() {
+ check_dependencies
+ ensure_no_managed_services_running
+
+ trap on_foreground_interrupt INT TERM
+ trap cleanup_foreground EXIT
+
+ local service
+ for service in "${SERVICES[@]}"; do
+ start_service_foreground "$service"
+ done
+
+ echo
+ echo "两个服务已进入前台托管模式。按 Ctrl+C 可一键关闭。"
+ monitor_foreground
+}
+
+main() {
+ local action="${1:-}"
+
+ case "$action" in
+ fg)
+ start_foreground
+ ;;
+ bg)
+ start_background
+ ;;
+ stop)
+ stop_background
+ ;;
+ restart)
+ stop_background
+ start_background
+ ;;
+ status)
+ show_status
+ ;;
+ -h|--help|help|"")
+ usage
+ ;;
+ *)
+ echo "未知命令: $action" >&2
+ echo >&2
+ usage >&2
+ exit 1
+ ;;
+ esac
+}
+
+main "$@"
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..e6b30f6
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,35 @@
+# Two-service stack: the Python backend API and the static frontend
+# served by nginx.  Only the frontend is published to the host; the
+# frontend reaches the backend over the default compose network.
+services:
+  backend:
+    build:
+      context: .
+      dockerfile: Dockerfile.backend
+    container_name: auto-pool-maintainer-backend
+    restart: unless-stopped
+    environment:
+      # All APP_* paths below point into /app/data, which is bind-mounted
+      # from the host so config and logs survive container rebuilds.
+      APP_DATA_DIR: /app/data
+      APP_CONFIG_PATH: /app/data/config.json
+      APP_LOG_DIR: /app/data/logs
+      APP_TEMPLATE_CONFIG_PATH: /app/config.example.json
+      APP_HOST: 0.0.0.0
+      APP_PORT: "8318"
+      # Taken from the host environment / .env; empty string when unset.
+      APP_ADMIN_TOKEN: ${APP_ADMIN_TOKEN:-}
+    volumes:
+      - ./docker-data/backend:/app/data
+    healthcheck:
+      # Probe the API from inside the container using only the Python
+      # stdlib, so the image does not need curl/wget installed.
+      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8318/api/health', timeout=3)"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+      start_period: 10s
+
+  frontend:
+    build:
+      context: ./frontend
+      dockerfile: Dockerfile
+    container_name: auto-pool-maintainer-frontend
+    restart: unless-stopped
+    depends_on:
+      # Wait for the backend healthcheck above to pass before starting.
+      backend:
+        condition: service_healthy
+    ports:
+      # Host port 8080 -> nginx port 80 inside the container.
+      - "8080:80"
diff --git a/frontend/.DS_Store b/frontend/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..4d5910dcd4fa360cdc3d0ce61f81421987bf707d
GIT binary patch
literal 10244
zcmeHMYitx%6u#fI&>1?=DU^q63ky|IY-C%W1%d4YdDGf$=>ufh-5KeG=}g&~-J-Rz
zF)=<+jQ%pd{%Rx=H9m;O#6$%(>JLQ?#`ps@@ew02{xLDZbLY<1b_*C2V}Q&}=AL`c
zx#!+FXTCFM?j6P$nsa(RV^PMKOc$q`O4WG^m-A~E*CVcLl1-=2t6?70sel7(#2^!preBNuMVpGEdbF{
zV*7>Kd^^B(q5+KubW~81P=Qb-DpV0{F(A~59}ULE13D_GP-h4>9|%@PutR~rI?W&T
zhcm2m2?$;TY0%jmRI14bq46vfw?
zcU2_PlS`WibEbEPRx8Usw`P7s-J{b@>~M5Go36boi}8!*MdO?2nL!PEETI)BVAT1ZCIMFh;>%1R3qJa;b(1Iz+6+MMtbO34v;Bs
zz+1l|64}d_-JWv2wn4+DTY5iOyM<(ZZhtCkv<(pq1MVi``j3wITxp|jdQL&OjjZV9
z(>3Y3Z^KNjvLezR>5#QgWqK5*oi_>k^!P*sD76LW*T$EKX6@StqkhYGs8Sn`1w&+3
z!x~sC+s^i}EE{GIv&Y#{_6mEConW7{Z`hCQSM~>hsZcNt<)}gp79fg+Sb|k(#2Tzc
zE81}@Ij|cD=p1@Oh8qeTayoi_ZGTy|8_y`~46MTs?
z_!i&c7o3xr1ZkR7CC!%RNDHK>v`DIzR!Xa+xU@y;kh-NFapzJY(E0*P0wSfTG4|rH
z+Q8kxdGnYpmQ%Gu(
z-&O0Ymnjr@T8Ze^Rj*QH4WhF~y;-3U)JjFSMvW_q#{5ooOZ7&X!bp?_i(OI)N_iMG`5(HAl6LK^%mOyD&n?9>qO4jE8UpkKj>4
z_mhNgj_&91JdWZ8yoT3t9B<$qyo(b!i4O?xpW$nKkDu^!5sG(AL@|CJkKzHRwKX`t
z=3kS)vA0e(O_b{wl@IohOE>UCe=I`siL7lo_I{d=M^p)=+bqjmAqh9IWV!n`{d~7N%PN8#7F`=lPVp)KhJM;i5!)BuKPl(g|s{NIEge
zo~0a9NTx+8mP^8NF%OgoVU(Z$Z@lpT|BFo#%0dr>9=OasfYR1PYcm;9*y|VD!_V5I
zbnT;y7d~!OP+x^A|2Uqge;iL8zJa=)E*|Qz%f^Ru-xfaU=vz!
zjfwGsm*|u6_GUB^i5f3NeIQXmjryRd!Ke?YiI*5j^u@$P|1)RS(5;P$5wCMLbN)I1
zJB7ng}ZP6+A0I;irG03^#v
z?H9VIbATTc4P+va6GHl-)TcN-AP7Zp#DEAVdz8DAOayX5ND
z-<=^LWLSq82s1D>1DrM$EX`c{871fUW0?`lbiH`|14w1%Gb$=8mCCBhYm-CHXwpl2
zdABW{+s#`&j$>tt_vy?o(-=)@we61WrA^z&`2nVGkjtJv)7G8QHYacEt{kN+wzfT^WVs{Je;+sX;%HoeD@Idf}Ob@cd**efk?S2bb#G`O?Pd*IH|_m>1=BS%meFIUhm9J<6vdCK
zyC4$j&u7eodDFXIYm{YQ+i+1t-J{bab~rlkP1l~2h5zD7-!`wHPTlQdDgZC%C)s>q@SMWFfGLm
zbQ`XUME3Gwx2IjNbHuP|O0NfFx07wq9ZKhn&QXG6pxs1T|Iv}22W`|%&nX(W(JBUb
zcdfeauVIc>QyuAwbj#YiGCc~@E|^3CdVGQjN@MX6G$xjc+s5MuqyC+rP^B>u4_su{
zz?xYH+s^i}92;YIvHRH(_AGmionY^?PuW-OJN6@h3MiO~D%7F@3lYU4EX8WHU@g|6
z16{ZV-PnzN$RG|R%xWQm0fsoR4GwCH_
zxrN{(pmcN^LT%c-WvjaVUqh(Vvjv6b&0i4Zyjruq^;ZZA%obsKF8F=KPvVJ>cpniZ
zXgx(lyRg1NU0CE{DL;FOD#jt1GgGS+-iy_XWul5ULo}DDm&rsDtwJ*$M^zYm$!$>2Ed`$ox?$0Pzd
bAtY(2{N*14{@5StaQ_eYe?sejXaD~NsDGTe
literal 0
HcmV?d00001
diff --git a/frontend/dist/assets/index-CraRoSIX.css b/frontend/dist/assets/index-CraRoSIX.css
new file mode 100644
index 0000000..6512233
--- /dev/null
+++ b/frontend/dist/assets/index-CraRoSIX.css
@@ -0,0 +1 @@
+:root{--bg: #f4f8fc;--panel: rgba(255, 255, 255, .92);--panel-strong: #ffffff;--line: #d7e4f2;--line-soft: #e8f0f8;--text: #10233f;--muted: #6f87a4;--muted-strong: #4f6987;--accent: #15916e;--danger: #ef5077;--pending: #1e3152;--terminal: #191b29;--shadow: 0 14px 42px rgba(108, 135, 166, .12);--radius: 18px}*{box-sizing:border-box}html,body,#app{min-height:100%}html,body{margin:0}body{padding:28px 24px;font-family:IBM Plex Sans,PingFang SC,Noto Sans SC,Microsoft YaHei,sans-serif;color:var(--text);background:radial-gradient(circle at top left,rgba(177,220,255,.24),transparent 28%),linear-gradient(180deg,#f9fcff 0%,var(--bg) 100%)}button,input,select{font:inherit}.page-shell{max-width:1280px;margin:0 auto}.login-shell{min-height:100vh;display:flex;align-items:center;justify-content:center}.login-card{width:min(420px,100%);padding:28px;border:1px solid var(--line);border-radius:20px;background:linear-gradient(180deg,#fffffffa,#f6fafff5);box-shadow:var(--shadow)}.login-title{font-size:24px;font-weight:700;color:#153150}.login-subtitle{margin-top:8px;margin-bottom:20px;color:#6f87a4;font-size:14px}.login-error{margin-top:-4px;margin-bottom:14px;color:#b24b52;font-size:13px}.login-button{width:100%}.page-notice{margin-bottom:16px;padding:12px 16px;border:1px solid #d7e4f2;border-radius:14px;background:#ffffffeb;color:#335376;box-shadow:var(--shadow)}.page-grid{display:grid;grid-template-columns:430px minmax(0,1fr);gap:24px;align-items:start}.main-stack{display:grid;align-content:start;min-width:0}.card{background:linear-gradient(180deg,#fffffffa,#fbfdfff2);border:1px solid var(--line);border-radius:var(--radius);box-shadow:var(--shadow);overflow:hidden;-webkit-backdrop-filter:blur(10px);backdrop-filter:blur(10px)}.card-head{display:flex;align-items:center;justify-content:space-between;padding:20px 24px 18px;border-bottom:1px solid 
var(--line);background:linear-gradient(180deg,#fffffffa,#f7faffe6)}.card-title{display:inline-flex;align-items:center;gap:10px;font-size:16px;font-weight:700;letter-spacing:.01em}.title-icon{display:inline-flex;align-items:center;justify-content:center;width:18px;height:18px;font-size:14px}.settings-body{padding:24px;max-height:calc(100vh - 160px);overflow:auto}.settings-card{min-width:0}.settings-summary{display:grid;grid-template-columns:repeat(3,minmax(0,1fr));gap:10px;margin-bottom:16px}.summary-chip{padding:10px 12px;border:1px solid #deebf8;border-radius:14px;background:linear-gradient(180deg,#fdfefe,#eef5fb)}.summary-label{display:block;margin-bottom:4px;color:#6f87a4;font-size:12px}.summary-value{display:block;color:#163151;font-size:14px;font-weight:700}.settings-tabs{display:grid;grid-template-columns:repeat(3,minmax(0,1fr));gap:8px;margin-bottom:16px;padding:6px;border:1px solid #deebf8;border-radius:16px;background:linear-gradient(180deg,#f9fbfe,#eef4fb)}.settings-tab{height:38px;border:0;border-radius:11px;background:transparent;color:#6f87a4;font-size:14px;font-weight:700;cursor:pointer;transition:background .14s ease,color .14s ease,transform .14s ease}.settings-tab:hover{transform:translateY(-1px)}.settings-tab.active{background:#fff;color:#143150;box-shadow:0 8px 18px #6e8bae24}.config-group{margin-bottom:16px;padding:0;border:1px solid #e0eaf5;border-radius:14px;background:linear-gradient(180deg,#f9fcfff5,#f4f9fee6)}.config-group:last-of-type{margin-bottom:0}.group-title{font-size:13px;font-weight:700;letter-spacing:.03em;color:#4f7196}.group-toggle{width:100%;display:flex;align-items:center;justify-content:space-between;gap:12px;padding:15px 14px;border:0;background:transparent;cursor:pointer}.group-toggle:hover{background:#eaf2fb80}.group-caret{color:#6f87a4;font-size:18px;line-height:1;transition:transform .16s ease}.group-caret.open{transform:rotate(180deg)}.config-group.expanded .field-row,.config-group.expanded 
.group-content,.config-group.expanded>.field,.config-group.expanded>.checkbox-group{padding-left:14px;padding-right:14px}.config-group.expanded .field-row,.config-group.expanded .group-content{padding-bottom:10px}.group-key,.field-key{display:inline-flex;align-items:center;justify-content:center;margin-left:6px;padding:2px 7px;border-radius:999px;background:#eaf2fb;border:1px solid #d6e4f2;color:#6483a8;font-family:IBM Plex Mono,JetBrains Mono,monospace;font-size:11px;font-weight:600}.field-key{margin-left:8px;transform:translateY(-1px)}.field{display:block;margin-bottom:14px}.field-row{display:grid;gap:12px}.field-row.single-col{grid-template-columns:1fr}.field-row.two-cols{grid-template-columns:repeat(2,minmax(0,1fr))}.field-row.three-cols{grid-template-columns:repeat(3,minmax(0,1fr))}.field-row .field{margin-bottom:8px}.field-label{display:block;margin-bottom:8px;color:var(--muted-strong);font-size:13px;line-height:1.35}.field-hint{display:block;margin-top:8px;color:#8da1bb;font-size:12px;line-height:1.45}input[type=text],input[type=password],select,input[type=number]{width:100%;height:40px;padding:0 14px;border:1px solid #d6e2f0;border-radius:11px;background:#fff;color:var(--text);font-size:14px;outline:none;box-shadow:inset 0 1px #ffffffe6}input[type=text]:focus,input[type=password]:focus,select:focus,input[type=number]:focus{border-color:#8eb8ef;box-shadow:0 0 0 3px #73a4e024}.checkbox-group{margin-top:4px}.checkbox-group.compact{margin-bottom:6px}.check-row{display:flex;align-items:center;gap:10px;color:var(--muted-strong);font-size:15px}.check-row input{width:14px;height:14px;accent-color:var(--accent)}.settings-actions{display:flex;gap:10px;margin-top:18px;position:sticky;bottom:-24px;padding-top:12px;background:linear-gradient(180deg,#f4f8fc00,#f4f8fcf5 36%,#f4f8fc)}.button,.tool-button,.link-button{border:0;border-radius:11px;font:inherit;cursor:pointer;transition:transform .14s ease,box-shadow .14s ease,background .14s 
ease}.button:hover,.tool-button:hover,.link-button:hover{transform:translateY(-1px)}.button:disabled,.tool-button:disabled,.link-button:disabled{cursor:not-allowed;opacity:.65;transform:none}.button.primary{padding:11px 18px;background:linear-gradient(135deg,#179774,#0f7f61);color:#fff;box-shadow:0 12px 22px #15916e40}.button.secondary,.button.tertiary,.button.warning{padding:11px 18px;background:#eef4fb;color:var(--text);border:1px solid #d6e3f0}.button.warning{background:#fff2ee;color:#a34b2c;border-color:#f3c8ba}.button.tertiary{padding:8px 14px;font-size:14px;font-weight:600}.link-button{background:transparent;color:#4d6c92;padding:0;font-size:14px}.monitor-body{padding:22px 16px 18px;display:flex;flex-direction:column;min-height:0}.progress-head{display:flex;align-items:baseline;justify-content:space-between;gap:12px;margin-bottom:10px}.progress-title{color:#5b7391;font-size:13px;font-weight:700}.runtime-banner{display:flex;align-items:center;gap:10px;margin-bottom:16px;padding:10px 12px;border-radius:12px;background:#eef4fb;color:#526e90;font-size:14px}.runtime-banner.active{background:#e3f5ee;color:#1c6b53}.inventory-banner{display:flex;align-items:center;justify-content:space-between;margin-bottom:14px;padding:10px 12px;border-radius:12px;border:1px solid #dfe9f4;background:linear-gradient(180deg,#fff,#f2f7fc)}.inventory-label{color:#637b98;font-size:13px;font-weight:600}.inventory-value{color:#153150;font-size:18px;font-weight:700}.runtime-dot{width:10px;height:10px;border-radius:50%;background:#8ca6c7;box-shadow:0 0 0 4px #8ca6c724}.runtime-banner.active .runtime-dot{background:#35be7c;box-shadow:0 0 0 4px #35be7c24}.runtime-mode-banner{display:flex;align-items:center;gap:10px;margin-bottom:14px;padding:9px 12px;border-radius:12px;border:1px solid 
#dbe6f2;background:#f7fafd;color:#4b6788;font-size:13px}.runtime-mode-label{color:#6f89a7;font-weight:600}.runtime-mode-value{color:#153150;font-weight:700}.runtime-mode-next{margin-left:auto;color:#5a7290;font-weight:600}.progress-meta{display:flex;align-items:center;gap:12px;padding:0 2px;font-size:15px;font-weight:700;color:#0f2240}.progress-meta span:last-child{font-size:13px;font-weight:700;color:#5b7391}.progress-track{margin-top:10px;width:100%;height:8px;border-radius:999px;background:#deebf8;overflow:hidden}.progress-value{height:100%;border-radius:999px;background:linear-gradient(90deg,#15916e,#1bb790)}.stat-strip{display:grid;grid-template-columns:repeat(3,minmax(0,1fr));gap:10px;padding:14px 0}.mini-stat{display:flex;align-items:center;justify-content:space-between;gap:12px;padding:10px 12px;border:1px solid #dfebf6;border-radius:14px;background:linear-gradient(180deg,#fdfefe,#eef5fb)}.mini-stat-label{color:#617a98;font-size:13px;font-weight:600}.mini-stat-value{font-size:20px;font-weight:700;color:#0f2140}.mini-stat.success{border-color:#d2eee3;background:linear-gradient(180deg,#f9fffc,#eaf7f1)}.mini-stat.danger{border-color:#f4d8df;background:linear-gradient(180deg,#fffafb,#fcf0f3)}.mini-stat.pending{border-color:#e7e2cf;background:linear-gradient(180deg,#fffef9,#f6f2e4)}.timing-strip{display:grid;grid-template-columns:repeat(2,minmax(0,1fr));gap:10px;margin-bottom:12px}.timing-item{display:flex;align-items:center;justify-content:space-between;gap:12px;padding:8px 10px;border:1px dashed #d3e2f0;border-radius:10px;background:#f7fbff}.timing-label{color:#5f7997;font-size:12px;font-weight:600}.timing-value{color:#112746;font-size:13px;font-weight:700}.terminal{height:clamp(320px,52vh,560px);padding:14px 16px;border-radius:13px;background:linear-gradient(180deg,#1e2030,var(--terminal));border:1px solid #2b3044;box-shadow:inset 0 1px #ffffff08;overflow:auto}.log-row{font-family:IBM Plex Mono,JetBrains 
Mono,SFMono-Regular,monospace;font-size:15px;line-height:1.9;white-space:nowrap;color:#d7e3ff}.log-dim{color:#91a3c8}.log-info{color:#b6c8ff}.log-success{color:#9fe870}.log-warning{color:#ffd575}.log-danger{color:#ff8ea6}.card-tools{display:flex;align-items:center;gap:10px}.tool-button{padding:6px 10px;background:transparent;color:#4d6c92;font-size:14px}.table-wrap{overflow:hidden}table{width:100%;border-collapse:collapse;table-layout:fixed}thead th{padding:10px 16px;text-align:left;font-size:14px;font-weight:700;color:#506b8a;background:linear-gradient(180deg,#eef4fb,#e7eef7);border-bottom:1px solid #dde8f3}tbody td{padding:12px 16px;font-size:15px;border-bottom:1px solid #edf3f8;vertical-align:middle}tbody tr:hover{background:#f3f8fde0}.col-id{width:48px}.col-password{width:150px}.col-status{width:82px}.email-cell,.password-cell{display:flex;align-items:center;gap:8px;min-width:0}.email-cell span:first-child,.password-cell span:first-child{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.masked{filter:blur(4px);-webkit-user-select:none;user-select:none}.mini-icon{width:24px;height:24px;display:inline-flex;align-items:center;justify-content:center;border:1px solid #d9e4ef;background:#f7faff;border-radius:8px;color:#6f87a4;cursor:pointer;flex:0 0 auto}.status-dot{display:inline-block;width:13px;height:13px;border-radius:50%;background:#bbcad9;box-shadow:0 0 0 4px #bbcad92e}.status-dot.active{background:linear-gradient(180deg,#7fe098,#34c36c);box-shadow:0 0 0 4px #3bc66c29}@media(max-width:1120px){.page-grid{grid-template-columns:1fr}.monitor-body{padding:22px 18px 18px}}@media(max-width:720px){body{padding:18px 14px}.card-head,.settings-body{padding-left:16px;padding-right:16px}.field-row.two-cols,.field-row.three-cols,.settings-summary,.settings-tabs,.stat-strip,.timing-strip{grid-template-columns:1fr}.terminal{height:320px}thead th,tbody td{padding-left:12px;padding-right:12px;font-size:14px}}
diff --git a/frontend/dist/assets/index-DPSNYdMF.js b/frontend/dist/assets/index-DPSNYdMF.js
new file mode 100644
index 0000000..5e2493e
--- /dev/null
+++ b/frontend/dist/assets/index-DPSNYdMF.js
@@ -0,0 +1,2 @@
+var Qe=Object.defineProperty;var et=(e,t,a)=>t in e?Qe(e,t,{enumerable:!0,configurable:!0,writable:!0,value:a}):e[t]=a;var we=(e,t,a)=>et(e,typeof t!="symbol"?t+"":t,a);(function(){const t=document.createElement("link").relList;if(t&&t.supports&&t.supports("modulepreload"))return;for(const l of document.querySelectorAll('link[rel="modulepreload"]'))o(l);new MutationObserver(l=>{for(const n of l)if(n.type==="childList")for(const c of n.addedNodes)c.tagName==="LINK"&&c.rel==="modulepreload"&&o(c)}).observe(document,{childList:!0,subtree:!0});function a(l){const n={};return l.integrity&&(n.integrity=l.integrity),l.referrerPolicy&&(n.referrerPolicy=l.referrerPolicy),l.crossOrigin==="use-credentials"?n.credentials="include":l.crossOrigin==="anonymous"?n.credentials="omit":n.credentials="same-origin",n}function o(l){if(l.ep)return;l.ep=!0;const n=a(l);fetch(l.href,n)}})();var oe,w,De,F,Se,je,ze,Be,me,_e,ce,te={},ae=[],tt=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i,re=Array.isArray;function H(e,t){for(var a in t)e[a]=t[a];return e}function fe(e){e&&e.parentNode&&e.parentNode.removeChild(e)}function at(e,t,a){var o,l,n,c={};for(n in t)n=="key"?o=t[n]:n=="ref"?l=t[n]:c[n]=t[n];if(arguments.length>2&&(c.children=arguments.length>3?oe.call(arguments,2):a),typeof e=="function"&&e.defaultProps!=null)for(n in e.defaultProps)c[n]===void 0&&(c[n]=e.defaultProps[n]);return Z(e,c,o,l,null)}function Z(e,t,a,o,l){var n={type:e,props:t,key:a,ref:o,__k:null,__:null,__b:0,__e:null,__c:null,constructor:void 0,__v:l??++De,__i:-1,__u:0};return l==null&&w.vnode!=null&&w.vnode(n),n}function K(e){return e.children}function Q(e,t){this.props=e,this.context=t}function W(e,t){if(t==null)return e.__?W(e.__,e.__i+1):null;for(var a;tt&&F.sort(ze),e=F.shift(),t=F.length,nt(e)}finally{F.length=ne.__r=0}}function We(e,t,a,o,l,n,c,_,p,i,m){var 
r,f,d,g,S,T,k,y=o&&o.__k||ae,M=t.length;for(p=lt(a,t,y,p,M),r=0;r0?c=e.__k[n]=Z(c.type,c.props,c.key,c.ref?c.ref:null,c.__v):e.__k[n]=c,p=n+f,c.__=e,c.__b=e.__b+1,_=null,(i=c.__i=ot(c,a,p,r))!=-1&&(r--,(_=a[i])&&(_.__u|=2)),_==null||_.__v==null?(i==-1&&(l>m?f--:lp?f--:f++,c.__u|=4))):e.__k[n]=null;if(r)for(n=0;n(m?1:0)){for(l=a-1,n=a+1;l>=0||n=0?l--:n++])!=null&&(2&i.__u)==0&&_==i.key&&p==i.type)return c}return-1}function Ce(e,t,a){t[0]=="-"?e.setProperty(t,a??""):e[t]=a==null?"":typeof a!="number"||tt.test(t)?a:a+"px"}function Y(e,t,a,o,l){var n,c;e:if(t=="style")if(typeof a=="string")e.style.cssText=a;else{if(typeof o=="string"&&(e.style.cssText=o=""),o)for(t in o)a&&t in a||Ce(e.style,t,"");if(a)for(t in a)o&&a[t]==o[t]||Ce(e.style,t,a[t])}else if(t[0]=="o"&&t[1]=="n")n=t!=(t=t.replace(Be,"$1")),c=t.toLowerCase(),t=c in e||t=="onFocusOut"||t=="onFocusIn"?c.slice(2):t.slice(2),e.l||(e.l={}),e.l[t+n]=a,a?o?a.u=o.u:(a.u=me,e.addEventListener(t,n?ce:_e,n)):e.removeEventListener(t,n?ce:_e,n);else{if(l=="http://www.w3.org/2000/svg")t=t.replace(/xlink(H|:h)/,"h").replace(/sName$/,"s");else if(t!="width"&&t!="height"&&t!="href"&&t!="list"&&t!="form"&&t!="tabIndex"&&t!="download"&&t!="rowSpan"&&t!="colSpan"&&t!="role"&&t!="popover"&&t in e)try{e[t]=a??"";break e}catch{}typeof a=="function"||(a==null||a===!1&&t[4]!="-"?e.removeAttribute(t):e.setAttribute(t,t=="popover"&&a==1?"":a))}}function Te(e){return function(t){if(this.l){var a=this.l[t.type+e];if(t.t==null)t.t=me++;else if(t.t0?e:re(e)?e.map(Ge):H({},e)}function rt(e,t,a,o,l,n,c,_,p){var i,m,r,f,d,g,S,T=a.props||te,k=t.props,y=t.type;if(y=="svg"?l="http://www.w3.org/2000/svg":y=="math"?l="http://www.w3.org/1998/Math/MathML":l||(l="http://www.w3.org/1999/xhtml"),n!=null){for(i=0;i=a.__.length&&a.__.push({}),a.__[e]}function L(e){return le=1,ct(Je,e)}function ct(e,t,a){var o=ve(V++,2);if(o.t=e,!o.__c&&(o.__=[Je(void 0,t),function(_){var 
p=o.__N?o.__N[0]:o.__[0],i=o.t(p,_);p!==i&&(o.__N=[i,o.__[1]],o.__c.setState({}))}],o.__c=P,!P.__f)){var l=function(_,p,i){if(!o.__c.__H)return!0;var m=o.__c.__H.__.filter(function(f){return f.__c});if(m.every(function(f){return!f.__N}))return!n||n.call(this,_,p,i);var r=o.__c.props!==_;return m.some(function(f){if(f.__N){var d=f.__[0];f.__=f.__N,f.__N=void 0,d!==f.__[0]&&(r=!0)}}),n&&n.call(this,_,p,i)||r};P.__f=!0;var n=P.shouldComponentUpdate,c=P.componentWillUpdate;P.componentWillUpdate=function(_,p,i){if(this.__e){var m=n;n=void 0,l(_,p,i),n=m}c&&c.call(this,_,p,i)},P.shouldComponentUpdate=l}return o.__N||o.__}function G(e,t){var a=ve(V++,3);!$.__s&&Xe(a.__H,t)&&(a.__=e,a.u=t,P.__H.__h.push(a))}function Oe(e){return le=5,ut(function(){return{current:e}},[])}function ut(e,t){var a=ve(V++,7);return Xe(a.__H,t)&&(a.__=e(),a.__H=t,a.__h=e),a.__}function dt(){for(var e;e=Ye.shift();){var t=e.__H;if(e.__P&&t)try{t.__h.some(ee),t.__h.some(de),t.__h=[]}catch(a){t.__h=[],$.__e(a,e.__v)}}}$.__b=function(e){P=null,Pe&&Pe(e)},$.__=function(e,t){e&&t.__k&&t.__k.__m&&(e.__m=t.__k.__m),Le&&Le(e,t)},$.__r=function(e){$e&&$e(e),V=0;var t=(P=e.__c).__H;t&&(se===P?(t.__h=[],P.__h=[],t.__.some(function(a){a.__N&&(a.__=a.__N),a.u=a.__N=void 0})):(t.__h.some(ee),t.__h.some(de),t.__h=[],V=0)),se=P},$.diffed=function(e){Me&&Me(e);var t=e.__c;t&&t.__H&&(t.__H.__h.length&&(Ye.push(t)!==1&&Ae===$.requestAnimationFrame||((Ae=$.requestAnimationFrame)||pt)(dt)),t.__H.__.some(function(a){a.u&&(a.__H=a.u),a.u=void 0})),se=P=null},$.__c=function(e,t){t.some(function(a){try{a.__h.some(ee),a.__h=a.__h.filter(function(o){return!o.__||de(o)})}catch(o){t.some(function(l){l.__h&&(l.__h=[])}),t=[],$.__e(o,a.__v)}}),Ne&&Ne(e,t)},$.unmount=function(e){Re&&Re(e);var t,a=e.__c;a&&a.__H&&(a.__H.__.some(function(o){try{ee(o)}catch(l){t=l}}),a.__H=void 0,t&&$.__e(t,a.__v))};var He=typeof requestAnimationFrame=="function";function pt(e){var 
t,a=function(){clearTimeout(o),He&&cancelAnimationFrame(t),setTimeout(e)},o=setTimeout(a,35);He&&(t=requestAnimationFrame(a))}function ee(e){var t=P,a=e.__c;typeof a=="function"&&(e.__c=void 0,a()),P=t}function de(e){var t=P;e.__c=e.__(),P=t}function Xe(e,t){return!e||e.length!==t.length||t.some(function(a,o){return a!==e[o]})}function Je(e,t){return typeof t=="function"?t(e):t}function mt(e){const{sectionKey:t,field:a,onValueChange:o}=e;return a.type==="select"?s("select",{value:String(a.value),onInput:l=>o(t,a.key,l.currentTarget.value),children:(a.options??[]).map(l=>s("option",{value:l.value,children:l.label},l.value))}):a.type==="checkbox"?s("label",{class:"check-row",children:[s("input",{type:"checkbox",checked:!!a.value,onInput:l=>o(t,a.key,l.currentTarget.checked)}),s("span",{children:[a.label," ",s("span",{class:"field-key",children:a.key})]})]}):a.type==="textarea"?s("textarea",{value:String(a.value),onInput:l=>o(t,a.key,l.currentTarget.value)}):s("input",{type:a.type,value:String(a.value),placeholder:a.sensitive&&String(a.value)==="__MASKED__"?"已保存,留空或保持不变将沿用原值":"",onInput:l=>{const n=l.currentTarget,c=a.type==="number"?Number(n.value):n.value;o(t,a.key,c)}})}function ft(e){var h,C,A,N,j,B;const{sections:t,onValueChange:a,onSave:o,onStart:l,onStartLoop:n,onStop:c,onLogout:_,busy:p=!1,running:i=!1,loopRunning:m=!1,hasStoredToken:r=!1}=e,[f,d]=L("common"),[g,S]=L({priority:!0,clean:!1,mail:!0,cfmail:!1,self_hosted_mail_api:!1,duckmail:!1,tempmail_lol:!1,yyds_mail:!1,run:!1,registration:!1,flow:!1,oauth:!1,output:!1}),T={priority:"common",clean:"common",mail:"mail",cfmail:"mail",self_hosted_mail_api:"mail",duckmail:"mail",tempmail_lol:"mail",yyds_mail:"mail",run:"advanced",registration:"advanced",flow:"advanced",oauth:"advanced",output:"advanced"},k={common:"常用",mail:"邮箱",advanced:"高级"},y=((C=(h=t.find(v=>v.key==="mail"))==null?void 0:h.fields.find(v=>v.key==="provider"))==null?void 0:C.value)??"self_hosted_mail_api",M={cfmail:"CF 
Mail",self_hosted_mail_api:"自建 Mail API",duckmail:"DuckMail",tempmail_lol:"TempMail.lol",yyds_mail:"YYDS Mail"},R=t.filter(v=>T[v.key]!==f?!1:v.key==="self_hosted_mail_api"?y==="self_hosted_mail_api":v.key==="cfmail"?y==="cfmail":v.key==="duckmail"?y==="duckmail":v.key==="tempmail_lol"?y==="tempmail_lol":v.key==="yyds_mail"?y==="yyds_mail":!0),E=[{label:"当前邮箱",value:M[String(y)]??String(y)},{label:"维护目标",value:String(((N=(A=t.find(v=>v.key==="priority"))==null?void 0:A.fields.find(v=>v.key==="min_candidates"))==null?void 0:N.value)??"")},{label:"补号并发",value:String(((B=(j=t.find(v=>v.key==="run"))==null?void 0:j.fields.find(v=>v.key==="workers"))==null?void 0:B.value)??"")}],q=v=>{S(U=>({...U,[v]:!(U[v]??!1)}))};return G(()=>{(y==="cfmail"||y==="self_hosted_mail_api"||y==="duckmail"||y==="tempmail_lol"||y==="yyds_mail")&&S(v=>({...v,[String(y)]:!0}))},[y]),G(()=>{f!=="mail"&&(y==="cfmail"||y==="self_hosted_mail_api"||y==="duckmail"||y==="tempmail_lol"||y==="yyds_mail")&&S(v=>({...v,[String(y)]:!0}))},[f,y]),s("aside",{class:"card settings-card",children:[s("div",{class:"card-head",children:[s("div",{class:"card-title",children:[s("span",{class:"title-icon",children:"📝"}),s("span",{children:"维护配置"})]}),r?s("button",{class:"link-button",type:"button",onClick:_,children:"退出登录"}):null]}),s("div",{class:"settings-body",children:[s("div",{class:"settings-summary",children:E.map(v=>s("div",{class:"summary-chip",children:[s("span",{class:"summary-label",children:v.label}),s("span",{class:"summary-value",children:v.value})]},v.label))}),s("div",{class:"settings-tabs",children:Object.keys(k).map(v=>s("button",{type:"button",class:`settings-tab${f===v?" active":""}`,onClick:()=>d(v),children:k[v]},v))}),R.map(v=>{const U=g[v.key]??!1;return s("section",{class:`config-group${U?" 
expanded":""}`,children:[s("button",{class:"group-toggle",type:"button",onClick:()=>q(v.key),children:[s("span",{class:"group-title",children:[v.label,s("span",{class:"group-key",children:v.key})]}),s("span",{class:`group-caret${U?" open":""}`,children:"⌄"})]}),U?s("div",{class:"group-content field-row single-col",children:v.fields.map(I=>s("label",{class:`field${I.type==="checkbox"?" checkbox-group compact":""}`,children:[I.type!=="checkbox"?s("span",{class:"field-label",children:[I.label,s("span",{class:"field-key",children:I.key})]}):null,s(mt,{sectionKey:v.key,field:I,onValueChange:a}),I.hint?s("span",{class:"field-hint",children:I.hint}):null]},I.key))}):null]},v.key)}),s("div",{class:"settings-actions",children:[s("button",{class:"button primary",type:"button",onClick:l,disabled:p||i,children:"开始维护"}),s("button",{class:"button primary",type:"button",onClick:n,disabled:p||i,children:m?"循环补号运行中":"循环补号"}),s("button",{class:"button warning",type:"button",onClick:c,disabled:p||!i,children:"停止维护"}),s("button",{class:"button secondary",type:"button",onClick:o,disabled:p,children:"保存配置"})]})]})]})}function yt(e){const{busy:t=!1,error:a="",onSubmit:o}=e,[l,n]=L("");return s("div",{class:"login-shell",children:s("form",{class:"login-card",onSubmit:async _=>{_.preventDefault(),await o(l.trim())},children:[s("div",{class:"login-title",children:"管理登录"}),s("div",{class:"login-subtitle",children:"请输入管理令牌以访问控制台"}),s("label",{class:"field",children:[s("span",{class:"field-label",children:"Admin Token"}),s("input",{type:"password",value:l,onInput:_=>n(_.currentTarget.value),placeholder:"请输入 X-Admin-Token"})]}),a?s("div",{class:"login-error",children:a}):null,s("button",{class:"button primary login-button",type:"submit",disabled:t||!l.trim(),children:t?"验证中...":"进入控制台"})]})})}function ht(e){const{lines:t}=e,a=Oe(null),o=Oe(!0),l=()=>{const n=a.current;if(!n)return;const c=n.scrollHeight-n.scrollTop-n.clientHeight;o.current=c<32};return 
G(()=>{a.current&&o.current&&(a.current.scrollTop=a.current.scrollHeight)},[t]),s("div",{class:"terminal",ref:a,onScroll:l,children:t.map(n=>s("div",{class:"log-row",children:[s("span",{class:"log-dim",children:n.prefix})," ",s("span",{class:`log-${n.tone}`,children:n.timestamp})," ",s("span",{children:n.message})]},n.id))})}function vt(e){var f,d,g;const{monitor:t,onClearLogs:a}=e,o=((f=t.stats.find(S=>S.tone==="success"))==null?void 0:f.value)??0,l=((d=t.stats.find(S=>S.tone==="danger"))==null?void 0:d.value)??0,n=((g=t.stats.find(S=>S.tone==="pending"))==null?void 0:g.value)??0,c=Math.max(0,Math.max(n,t.total-o)),_=t.singleAccountTiming,p=S=>typeof S=="number"?`${S.toFixed(1)}s`:"--",i=t.loopNextCheckInSeconds,m=t.loopRunning?"循环补号":t.running?"单次维护":"未运行",r=typeof i=="number"?`${Math.max(0,i)}s`:"--";return s("section",{class:"card monitor-card",children:[s("div",{class:"card-head",children:[s("div",{class:"card-title",children:[s("span",{class:"title-icon",children:"💻"}),s("span",{children:"监控台"})]}),s("button",{class:"link-button",type:"button",onClick:a,children:"清空"})]}),s("div",{class:"monitor-body",children:[s("div",{class:`runtime-banner ${t.running?"active":""}`,children:[s("span",{class:"runtime-dot"}),s("span",{children:t.message})]}),s("div",{class:"runtime-mode-banner",children:[s("span",{class:"runtime-mode-label",children:"运行模式"}),s("span",{class:"runtime-mode-value",children:m}),t.loopRunning?s("span",{class:"runtime-mode-next",children:["下次检查: ",r]}):null]}),s("div",{class:"inventory-banner",children:[s("span",{class:"inventory-label",children:"CPA 可用账号"}),s("span",{class:"inventory-value",children:t.availableCandidates===null?"--":t.availableCandidates})]}),s("div",{class:"progress-head",children:[s("div",{class:"progress-title",children:"补号进度"}),s("div",{class:"progress-meta",children:[s("span",{children:["已补 ",o," / 目标 
",t.total]}),s("span",{children:[t.percent,"%"]})]})]}),s("div",{class:"progress-track",children:s("div",{class:"progress-value",style:{width:`${t.percent}%`}})}),s("div",{class:"stat-strip",children:[s("div",{class:"mini-stat success",children:[s("span",{class:"mini-stat-label",children:"补号成功"}),s("span",{class:"mini-stat-value",children:o})]}),s("div",{class:"mini-stat danger",children:[s("span",{class:"mini-stat-label",children:"补号失败"}),s("span",{class:"mini-stat-value",children:l})]}),s("div",{class:"mini-stat pending",children:[s("span",{class:"mini-stat-label",children:"待补数量"}),s("span",{class:"mini-stat-value",children:c})]})]}),s("div",{class:"timing-strip",children:[s("div",{class:"timing-item",children:[s("span",{class:"timing-label",children:"最近单号总耗时"}),s("span",{class:"timing-value",children:p(_.latestTotalSeconds)})]}),s("div",{class:"timing-item",children:[s("span",{class:"timing-label",children:"最近单号注册/OAuth"}),s("span",{class:"timing-value",children:[p(_.latestRegSeconds)," / ",p(_.latestOauthSeconds)]})]}),s("div",{class:"timing-item",children:[s("span",{class:"timing-label",children:["近",_.windowSize,"条均值(总)"]}),s("span",{class:"timing-value",children:p(_.recentAvgTotalSeconds)})]}),s("div",{class:"timing-item",children:[s("span",{class:"timing-label",children:"慢号(≥100s)"}),s("span",{class:"timing-value",children:[_.recentSlowCount," / ",_.sampleSize]})]})]}),s(ht,{lines:t.logs})]})]})}const u={cfmail:{api_base:"https://mail.example.com",api_key:"",domain:"",domains:[]},clean:{base_url:"CPA地址",token:"CPA登录密码",target_type:"codex",workers:20,sample_size:0,delete_workers:20,timeout:10,retries:1,user_agent:"codex_cli_rs/0.76.0 (Debian 13.0.0; x86_64) 
WindowsTerminal",used_percent_threshold:95},mail:{provider:"tempmail_lol",api_base:"https://your-worker.workers.dev",api_key:"your-mail-api-key",domain:"mail.example.com",domains:[],otp_timeout_seconds:120,poll_interval_seconds:3},duckmail:{api_base:"https://api.duckmail.sbs",bearer:"",domain:"duckmail.sbs",domains:[]},tempmail_lol:{api_base:"https://api.tempmail.lol/v2"},yyds_mail:{api_base:"https://maliapi.215.im/v1",api_key:"",domain:"",domains:[]},maintainer:{min_candidates:50,loop_interval_seconds:60},run:{workers:8,proxy:"",failure_threshold_for_cooldown:5,failure_cooldown_seconds:45,loop_jitter_min_seconds:2,loop_jitter_max_seconds:6},flow:{step_retry_attempts:2,step_retry_delay_base:.2,step_retry_delay_cap:.8,outer_retry_attempts:3,oauth_local_retry_attempts:3,transient_markers:"sentinel_,oauth_authorization_code_not_found,headers_failed,timeout,timed out,server disconnected,unexpected_eof_while_reading,transport,remoteprotocolerror,connection reset,temporarily unavailable,network,eof occurred,http_429,http_500,http_502,http_503,http_504",register_otp_validate_order:"normal,sentinel",oauth_otp_validate_order:"normal,sentinel",oauth_password_phone_action:"warn_and_continue",oauth_otp_phone_action:"warn_and_continue"},registration:{entry_mode:"chatgpt_web",entry_mode_fallback:!0,chatgpt_base:"https://chatgpt.com",register_create_account_phone_action:"warn_and_continue",phone_verification_markers:"add_phone,/add-phone,phone_verification,phone-verification,phone/verify"},oauth:{issuer:"https://auth.openai.com",client_id:"app_EMoamEEZ73f0CkXaXp7hrann",redirect_uri:"http://localhost:1455/auth/callback",retry_attempts:3,retry_backoff_base:2,retry_backoff_max:15,otp_timeout_seconds:120,otp_poll_interval_seconds:2},output:{accounts_file:"accounts.txt",csv_file:"registered_accounts.csv",ak_file:"ak.txt",rk_file:"rk.txt",save_local:!1}};function x(e,t){if(typeof e=="number"&&Number.isFinite(e))return e;const a=Number(e);return Number.isFinite(a)?a:t}function 
b(e,t){return typeof e=="string"?e:t}function Ee(e,t){return typeof e=="boolean"?e:t}function X(e,t=[]){if(!Array.isArray(e))return t;const a=e.map(o=>typeof o=="string"?o.trim():"").filter(Boolean);return a.length?a:t}function bt(e){return String(e??"").split(/\r?\n/).map(t=>t.trim()).filter(Boolean)}function J(e){return e.join(`
+`)}function be(e){const t=e??{},a=t.cfmail??{},o=t.clean??{},l=t.mail??{},n=t.duckmail??{},c=t.tempmail_lol??{},_=t.yyds_mail??{},p=t.maintainer??{},i=t.run??{},m=t.flow??{},r=t.registration??{},f=t.oauth??{},d=t.output??{};return{cfmail:{api_base:b(a.api_base,u.cfmail.api_base),api_key:b(a.api_key,u.cfmail.api_key),domain:b(a.domain,u.cfmail.domain),domains:X(a.domains,u.cfmail.domains)},clean:{base_url:b(o.base_url,u.clean.base_url),token:b(o.token,u.clean.token),target_type:b(o.target_type,u.clean.target_type),workers:x(o.workers,u.clean.workers),sample_size:x(o.sample_size,u.clean.sample_size),delete_workers:x(o.delete_workers,u.clean.delete_workers),timeout:x(o.timeout,u.clean.timeout),retries:x(o.retries,u.clean.retries),user_agent:b(o.user_agent,u.clean.user_agent??""),used_percent_threshold:x(o.used_percent_threshold,u.clean.used_percent_threshold)},mail:{provider:b(l.provider,u.mail.provider),api_base:b(l.api_base,u.mail.api_base),api_key:b(l.api_key,u.mail.api_key),domain:b(l.domain,u.mail.domain),domains:X(l.domains,u.mail.domains),otp_timeout_seconds:x(l.otp_timeout_seconds,u.mail.otp_timeout_seconds),poll_interval_seconds:x(l.poll_interval_seconds,u.mail.poll_interval_seconds)},duckmail:{api_base:b(n.api_base,u.duckmail.api_base),bearer:b(n.bearer,u.duckmail.bearer),domain:b(n.domain,u.duckmail.domain),domains:X(n.domains,u.duckmail.domains)},tempmail_lol:{api_base:b(c.api_base,u.tempmail_lol.api_base)},yyds_mail:{api_base:b(_.api_base,u.yyds_mail.api_base),api_key:b(_.api_key,u.yyds_mail.api_key),domain:b(_.domain,u.yyds_mail.domain),domains:X(_.domains,u.yyds_mail.domains)},maintainer:{min_candidates:x(p.min_candidates,u.maintainer.min_candidates),loop_interval_seconds:x(p.loop_interval_seconds,u.maintainer.loop_interval_seconds)},run:{workers:x(i.workers,u.run.workers),proxy:b(i.proxy,u.run.proxy),failure_threshold_for_cooldown:x(i.failure_threshold_for_cooldown,u.run.failure_threshold_for_cooldown),failure_cooldown_seconds:x(i.failure_cooldown_seco
nds,u.run.failure_cooldown_seconds),loop_jitter_min_seconds:x(i.loop_jitter_min_seconds,u.run.loop_jitter_min_seconds),loop_jitter_max_seconds:x(i.loop_jitter_max_seconds,u.run.loop_jitter_max_seconds)},flow:{step_retry_attempts:x(m.step_retry_attempts,u.flow.step_retry_attempts),step_retry_delay_base:x(m.step_retry_delay_base,u.flow.step_retry_delay_base),step_retry_delay_cap:x(m.step_retry_delay_cap,u.flow.step_retry_delay_cap),outer_retry_attempts:x(m.outer_retry_attempts,u.flow.outer_retry_attempts),oauth_local_retry_attempts:x(m.oauth_local_retry_attempts,u.flow.oauth_local_retry_attempts),transient_markers:b(m.transient_markers,u.flow.transient_markers),register_otp_validate_order:b(m.register_otp_validate_order,u.flow.register_otp_validate_order),oauth_otp_validate_order:b(m.oauth_otp_validate_order,u.flow.oauth_otp_validate_order),oauth_password_phone_action:b(m.oauth_password_phone_action,u.flow.oauth_password_phone_action),oauth_otp_phone_action:b(m.oauth_otp_phone_action,u.flow.oauth_otp_phone_action)},registration:{entry_mode:b(r.entry_mode,u.registration.entry_mode),entry_mode_fallback:Ee(r.entry_mode_fallback,u.registration.entry_mode_fallback),chatgpt_base:b(r.chatgpt_base,u.registration.chatgpt_base),register_create_account_phone_action:b(r.register_create_account_phone_action,u.registration.register_create_account_phone_action),phone_verification_markers:b(r.phone_verification_markers,u.registration.phone_verification_markers)},oauth:{issuer:b(f.issuer,u.oauth.issuer),client_id:b(f.client_id,u.oauth.client_id),redirect_uri:b(f.redirect_uri,u.oauth.redirect_uri),retry_attempts:x(f.retry_attempts,u.oauth.retry_attempts),retry_backoff_base:x(f.retry_backoff_base,u.oauth.retry_backoff_base),retry_backoff_max:x(f.retry_backoff_max,u.oauth.retry_backoff_max),otp_timeout_seconds:x(f.otp_timeout_seconds,u.oauth.otp_timeout_seconds),otp_poll_interval_seconds:x(f.otp_poll_interval_seconds,u.oauth.otp_poll_interval_seconds)},output:{accounts_file:b(d.accounts_
file,u.output.accounts_file),csv_file:b(d.csv_file,u.output.csv_file),ak_file:b(d.ak_file,u.output.ak_file),rk_file:b(d.rk_file,u.output.rk_file),save_local:Ee(d.save_local,u.output.save_local)}}}function ke(e){return[{key:"priority",label:"核心配置",fields:[{key:"base_url",label:"CPA 接口地址",type:"text",value:e.clean.base_url},{key:"token",label:"CPA 访问令牌",type:"password",value:e.clean.token,sensitive:!0},{key:"min_candidates",label:"最小候选账号数",type:"number",value:e.maintainer.min_candidates,hint:"表示账号池希望长期保有的最低可用账号数。清理完成后若当前候选账号低于该值,系统会自动补号。"},{key:"loop_interval_seconds",label:"循环补号间隔(秒)",type:"number",value:e.maintainer.loop_interval_seconds,hint:"点击“循环补号”按钮后,每轮检查完会休眠该秒数再重新检测。"},{key:"proxy",label:"代理地址",type:"text",value:e.run.proxy,hint:"示例: http://127.0.0.1:7890 或 socks5://127.0.0.1:1080"}]},{key:"clean",label:"清理配置",columns:2,fields:[{key:"target_type",label:"目标账号类型",type:"text",value:e.clean.target_type},{key:"timeout",label:"请求超时",type:"number",value:e.clean.timeout},{key:"workers",label:"探测并发",type:"number",value:e.clean.workers},{key:"sample_size",label:"抽样数量",type:"number",value:e.clean.sample_size,hint:"0 表示全量探测;大于 0 时,每轮仅随机抽取这部分账号做可用性探测。"},{key:"delete_workers",label:"删除并发",type:"number",value:e.clean.delete_workers},{key:"retries",label:"重试次数",type:"number",value:e.clean.retries},{key:"used_percent_threshold",label:"用量阈值",type:"number",value:e.clean.used_percent_threshold,hint:"用于识别高消耗账号。若账号的 used_percent 
大于等于该值,会在清理阶段优先禁用(不直接删除)。"}]},{key:"mail",label:"邮箱配置",columns:2,fields:[{key:"provider",label:"邮箱提供方",type:"select",value:e.mail.provider,options:[{label:"cfmail",value:"cfmail"},{label:"self_hosted_mail_api",value:"self_hosted_mail_api"},{label:"duckmail",value:"duckmail"},{label:"tempmail_lol",value:"tempmail_lol"},{label:"yyds_mail",value:"yyds_mail"}]},{key:"otp_timeout_seconds",label:"验证码超时",type:"number",value:e.mail.otp_timeout_seconds},{key:"poll_interval_seconds",label:"轮询间隔",type:"number",value:e.mail.poll_interval_seconds}]},{key:"cfmail",label:"CF Mail 配置",columns:2,fields:[{key:"api_base",label:"接口地址",type:"text",value:e.cfmail.api_base},{key:"api_key",label:"接口密钥",type:"password",value:e.cfmail.api_key,sensitive:!0},{key:"domain",label:"邮箱域名",type:"text",value:e.cfmail.domain},{key:"domains",label:"邮箱域名列表",type:"textarea",value:J(e.cfmail.domains),hint:"每行一个域名;填写后优先于单个 domain。"}]},{key:"self_hosted_mail_api",label:"自建 Mail API 配置",columns:2,fields:[{key:"api_base",label:"邮件 API 地址",type:"text",value:e.mail.api_base},{key:"domain",label:"邮箱域名",type:"text",value:e.mail.domain},{key:"domains",label:"邮箱域名列表",type:"textarea",value:J(e.mail.domains),hint:"每行一个域名;填写后优先于单个 domain。"},{key:"api_key",label:"邮件 API 密钥",type:"password",value:e.mail.api_key,sensitive:!0}]},{key:"duckmail",label:"DuckMail 配置",columns:2,fields:[{key:"api_base",label:"接口地址",type:"text",value:e.duckmail.api_base},{key:"domain",label:"邮箱域名",type:"text",value:e.duckmail.domain},{key:"domains",label:"邮箱域名列表",type:"textarea",value:J(e.duckmail.domains),hint:"每行一个域名;填写后优先于单个 domain。"},{key:"bearer",label:"访问凭证",type:"password",value:e.duckmail.bearer,sensitive:!0}]},{key:"tempmail_lol",label:"TempMail.lol 配置",fields:[{key:"api_base",label:"接口地址",type:"text",value:e.tempmail_lol.api_base}]},{key:"yyds_mail",label:"YYDS Mail 
配置",columns:2,fields:[{key:"api_base",label:"接口地址",type:"text",value:e.yyds_mail.api_base},{key:"domain",label:"邮箱域名",type:"text",value:e.yyds_mail.domain},{key:"domains",label:"邮箱域名列表",type:"textarea",value:J(e.yyds_mail.domains),hint:"每行一个域名;填写后优先于单个 domain。"},{key:"api_key",label:"访问密钥",type:"password",value:e.yyds_mail.api_key,sensitive:!0}]},{key:"run",label:"运行参数",columns:2,fields:[{key:"workers",label:"补号并发数",type:"number",value:e.run.workers},{key:"failure_threshold_for_cooldown",label:"连续失败阈值",type:"number",value:e.run.failure_threshold_for_cooldown},{key:"failure_cooldown_seconds",label:"冷却时长",type:"number",value:e.run.failure_cooldown_seconds},{key:"loop_jitter_min_seconds",label:"最小抖动秒数",type:"number",value:e.run.loop_jitter_min_seconds},{key:"loop_jitter_max_seconds",label:"最大抖动秒数",type:"number",value:e.run.loop_jitter_max_seconds}]},{key:"registration",label:"注册流程策略",columns:2,fields:[{key:"entry_mode",label:"注册入口模式",type:"select",value:e.registration.entry_mode,options:[{label:"chatgpt_web",value:"chatgpt_web"},{label:"direct_auth",value:"direct_auth"}]},{key:"entry_mode_fallback",label:"入口失败自动回退",type:"checkbox",value:e.registration.entry_mode_fallback},{key:"chatgpt_base",label:"ChatGPT 入口域名",type:"text",value:e.registration.chatgpt_base},{key:"register_create_account_phone_action",label:"注册命中手机验证",type:"select",value:e.registration.register_create_account_phone_action,options:[{label:"warn_and_continue",value:"warn_and_continue"},{label:"fail_fast",value:"fail_fast"}]},{key:"phone_verification_markers",label:"手机验证识别关键词",type:"text",value:e.registration.phone_verification_markers}]},{key:"flow",label:"流程重试策略",columns:2,fields:[{key:"step_retry_attempts",label:"注册步骤局部重试",type:"number",value:e.flow.step_retry_attempts},{key:"step_retry_delay_base",label:"步骤重试基数",type:"number",value:e.flow.step_retry_delay_base},{key:"step_retry_delay_cap",label:"步骤重试上限",type:"number",value:e.flow.step_retry_delay_cap},{key:"outer_retry_attempts",label:"OAuth 
外层重试",type:"number",value:e.flow.outer_retry_attempts},{key:"oauth_local_retry_attempts",label:"OAuth 局部重试",type:"number",value:e.flow.oauth_local_retry_attempts},{key:"register_otp_validate_order",label:"注册 OTP 校验顺序",type:"text",value:e.flow.register_otp_validate_order},{key:"oauth_otp_validate_order",label:"OAuth OTP 校验顺序",type:"text",value:e.flow.oauth_otp_validate_order},{key:"oauth_password_phone_action",label:"OAuth 密码阶段手机验证",type:"select",value:e.flow.oauth_password_phone_action,options:[{label:"warn_and_continue",value:"warn_and_continue"},{label:"fail_fast",value:"fail_fast"}]},{key:"oauth_otp_phone_action",label:"OAuth OTP阶段手机验证",type:"select",value:e.flow.oauth_otp_phone_action,options:[{label:"warn_and_continue",value:"warn_and_continue"},{label:"fail_fast",value:"fail_fast"}]},{key:"transient_markers",label:"瞬时错误关键词",type:"text",value:e.flow.transient_markers}]},{key:"oauth",label:"OAuth 配置",columns:2,fields:[{key:"issuer",label:"认证服务地址",type:"text",value:e.oauth.issuer},{key:"client_id",label:"客户端 ID",type:"text",value:e.oauth.client_id},{key:"redirect_uri",label:"回调地址",type:"text",value:e.oauth.redirect_uri},{key:"retry_attempts",label:"重试次数",type:"number",value:e.oauth.retry_attempts},{key:"retry_backoff_base",label:"退避基数",type:"number",value:e.oauth.retry_backoff_base},{key:"retry_backoff_max",label:"最大退避",type:"number",value:e.oauth.retry_backoff_max},{key:"otp_timeout_seconds",label:"登录验证码超时",type:"number",value:e.oauth.otp_timeout_seconds},{key:"otp_poll_interval_seconds",label:"登录轮询间隔",type:"number",value:e.oauth.otp_poll_interval_seconds}]},{key:"output",label:"输出配置",columns:2,fields:[{key:"accounts_file",label:"账号文件",type:"text",value:e.output.accounts_file},{key:"csv_file",label:"CSV 文件",type:"text",value:e.output.csv_file},{key:"ak_file",label:"Access Token 文件",type:"text",value:e.output.ak_file},{key:"rk_file",label:"Refresh Token 
文件",type:"text",value:e.output.rk_file},{key:"save_local",label:"本地保存",type:"checkbox",value:e.output.save_local}]}]}function kt(e){const t=structuredClone(u);for(const a of e){const o=a.key==="self_hosted_mail_api"?"mail":a.key;if(a.key==="priority"){for(const n of a.fields)n.key==="base_url"||n.key==="token"?t.clean[n.key]=n.value:n.key==="min_candidates"?t.maintainer.min_candidates=Number(n.value):n.key==="loop_interval_seconds"?t.maintainer.loop_interval_seconds=Number(n.value):n.key==="proxy"&&(t.run.proxy=String(n.value));continue}const l=t[o];if(l)for(const n of a.fields){if(n.key==="domains"){l[n.key]=bt(n.value);continue}l[n.key]=n.value}}return be(t)}const gt=ke(u),wt={running:!1,runMode:"",loopRunning:!1,loopNextCheckInSeconds:null,phase:"idle",message:"等待任务启动",availableCandidates:null,availableCandidatesError:"",completed:2,total:20,percent:10,stats:[{label:"成功",value:2,icon:"☑",tone:"success"},{label:"失败",value:0,icon:"✕",tone:"danger"},{label:"剩余",value:18,icon:"⏳",tone:"pending"}],singleAccountTiming:{latestRegSeconds:15.4,latestOauthSeconds:56.8,latestTotalSeconds:72.2,recentAvgRegSeconds:16.1,recentAvgOauthSeconds:54.3,recentAvgTotalSeconds:70.4,recentSlowCount:1,sampleSize:20,windowSize:20},logs:[{id:"1",prefix:"[00:28:38] [任务3]",timestamp:"[00:28:38]",message:"提交密码状态: 200",tone:"info"},{id:"2",prefix:"[00:28:38] [任务3]",timestamp:"[00:28:38]",message:"9. 发送验证码...",tone:"info"},{id:"3",prefix:"[00:28:39] [任务3]",timestamp:"[00:28:39]",message:"验证码发送状态: 200",tone:"info"},{id:"4",prefix:"[00:28:39] [任务3]",timestamp:"[00:28:39]",message:"10. 
等待验证码...",tone:"info"},{id:"5",prefix:"[00:28:39] [任务3]",timestamp:"[00:28:39]",message:"正在等待邮箱 dictman3eb8a4@whf.hush2u.com 的验证码...",tone:"info"},{id:"6",prefix:"[00:28:39] [任务3]",timestamp:"[00:28:39]",message:"成功获取验证码: 963817",tone:"success"},{id:"7",prefix:"[00:28:40] [任务3]",timestamp:"[00:28:40]",message:"生成用户信息: Charlotte,生日: 1996-01-27",tone:"info"},{id:"8",prefix:"[00:28:41] [任务3]",timestamp:"[00:28:41]",message:"Sentinel token 获取成功",tone:"success"},{id:"9",prefix:"[00:28:44] [任务3]",timestamp:"[00:28:44]",message:"OAuth 登录链路进入 consent 阶段",tone:"success"}]},ge="apm_admin_token";function pe(){return{token:window.sessionStorage.getItem(ge)??""}}function St(e){window.sessionStorage.setItem(ge,e)}function O(){window.sessionStorage.removeItem(ge)}class Ze extends Error{constructor(a,o){super(a);we(this,"status");this.name="ApiRequestError",this.status=o}}function z(e){return e instanceof Ze&&(e.status===401||e.status===403)}async function D(e,t,a){const o=a??pe().token,l=await fetch(e,{...t,headers:{"Content-Type":"application/json",...o?{"X-Admin-Token":o}:{},...(t==null?void 0:t.headers)??{}}});if(!l.ok)throw new Ze(`${(t==null?void 0:t.method)??"GET"} ${e} failed: ${l.status}`,l.status);return await l.json()}async function xt(e){await D("/api/health",void 0,e),await D("/api/config",void 0,e)}async function Ie(){const e=await D("/api/config");return ke(be(e))}async function ie(e){const t=await D("/api/config",{method:"POST",body:JSON.stringify(kt(e))});return ke(be(t))}async function Fe(){const e=await D("/api/runtime/status"),t=e.single_account_timing;return{running:e.running,runMode:e.run_mode??"",loopRunning:e.loop_running??!1,loopNextCheckInSeconds:e.loop_next_check_in_seconds??null,phase:e.phase,message:e.message,availableCandidates:e.available_candidates,availableCandidatesError:e.available_candidates_error,completed:e.completed,total:e.total,percent:e.percent,stats:e.stats,singleAccountTiming:{latestRegSeconds:(t==null?void 
0:t.latest_reg_seconds)??null,latestOauthSeconds:(t==null?void 0:t.latest_oauth_seconds)??null,latestTotalSeconds:(t==null?void 0:t.latest_total_seconds)??null,recentAvgRegSeconds:(t==null?void 0:t.recent_avg_reg_seconds)??null,recentAvgOauthSeconds:(t==null?void 0:t.recent_avg_oauth_seconds)??null,recentAvgTotalSeconds:(t==null?void 0:t.recent_avg_total_seconds)??null,recentSlowCount:(t==null?void 0:t.recent_slow_count)??0,sampleSize:(t==null?void 0:t.sample_size)??0,windowSize:(t==null?void 0:t.window_size)??20},logs:e.logs}}async function Ct(){return D("/api/runtime/start",{method:"POST",body:"{}"})}async function Tt(){return D("/api/runtime/start-loop",{method:"POST",body:"{}"})}async function At(){return D("/api/runtime/stop",{method:"POST",body:"{}"})}function Pt(){const[e,t]=L(gt),[a,o]=L(wt),[l,n]=L(!1),[c,_]=L(""),[p,i]=L(!!pe().token),[m,r]=L(""),[f,d]=L(!!pe().token),g=async()=>{const h=await Fe();o(h)};G(()=>{let h=!0;return Ie().then(C=>{h&&(t(C),i(!0))}).catch(C=>{h&&(z(C)&&(O(),d(!1),r("登录已失效,请重新输入管理令牌")),i(!1))}),g().then(()=>{h&&_("")}).catch(C=>{h&&z(C)&&(O(),d(!1),i(!1),r("登录已失效,请重新输入管理令牌"))}),()=>{h=!1}},[]),G(()=>{if(!p)return;let h=!0;const C=window.setInterval(()=>{g().then(()=>{h&&_(A=>A)}).catch(A=>{h&&z(A)&&(O(),d(!1),i(!1),r("登录已失效,请重新输入管理令牌"))})},5e3);return()=>{h=!1,window.clearInterval(C)}},[p]);const S=async h=>{n(!0),r("");try{await xt(h),St(h),i(!0),d(!0),_("登录成功");const[C,A]=await Promise.all([Ie(),Fe()]);t(C),o(A)}catch(C){console.error("登录失败",C),O(),i(!1),d(!1),r("管理令牌无效或服务暂不可用")}finally{n(!1)}},T=()=>{O(),i(!1),d(!1),_("已退出登录"),r("")},k=(h,C,A)=>{t(N=>N.map(j=>j.key!==h?j:{...j,fields:j.fields.map(B=>B.key===C?{...B,value:A}:B)}))},y=()=>{o(h=>({...h,logs:[{id:"cleared",prefix:"[系统] [00:00:00]",timestamp:"[00:00:00]",message:"日志已清空,等待任务输出...",tone:"muted"}]}))};return 
s("div",{class:"page-shell",children:[p?null:s(yt,{busy:l,error:m,onSubmit:S}),p?s(K,{children:[c?s("div",{class:"page-notice",children:c}):null,s("div",{class:"page-grid",children:[s(ft,{sections:e,onValueChange:k,onSave:async()=>{n(!0);try{const h=await ie(e);t(h),_("配置已保存")}catch(h){console.error("保存配置失败",h),z(h)?(O(),d(!1),i(!1),r("登录已失效,请重新输入管理令牌")):_("保存配置失败")}finally{n(!1)}},onStart:async()=>{n(!0);try{const h=await ie(e);t(h);const C=await Ct();_(`配置已保存,${C.message}`),await g()}catch(h){console.error("保存配置或启动维护任务失败",h),z(h)?(O(),d(!1),i(!1),r("登录已失效,请重新输入管理令牌")):_("保存配置或启动维护任务失败")}finally{n(!1)}},onStartLoop:async()=>{n(!0);try{const h=await ie(e);t(h);const C=await Tt();_(`配置已保存,${C.message}`),await g()}catch(h){console.error("保存配置或启动循环补号任务失败",h),z(h)?(O(),d(!1),i(!1),r("登录已失效,请重新输入管理令牌")):_("保存配置或启动循环补号任务失败")}finally{n(!1)}},onStop:async()=>{n(!0);try{const h=await At();_(h.message),await g()}catch(h){console.error("停止维护任务失败",h),z(h)?(O(),d(!1),i(!1),r("登录已失效,请重新输入管理令牌")):_("停止维护任务失败")}finally{n(!1)}},onLogout:T,busy:l,running:a.running,loopRunning:!!a.loopRunning,hasStoredToken:f}),s("div",{class:"main-stack",children:s(vt,{monitor:a,onClearLogs:y})})]})]}):null]})}it(s(Pt,{}),document.getElementById("app"));
diff --git a/frontend/dist/index.html b/frontend/dist/index.html
new file mode 100644
index 0000000..ec60d50
--- /dev/null
+++ b/frontend/dist/index.html
@@ -0,0 +1,13 @@
+
+
+
+
+
+ Auto Pool Maintainer
+
+
+
+
+
+
+
diff --git a/frontend/index.html b/frontend/index.html
new file mode 100644
index 0000000..95a2d95
--- /dev/null
+++ b/frontend/index.html
@@ -0,0 +1,12 @@
+
+
+
+
+
+ Auto Pool Maintainer
+
+
+
+
+
+
diff --git a/frontend/nginx.conf b/frontend/nginx.conf
new file mode 100644
index 0000000..ffc0851
--- /dev/null
+++ b/frontend/nginx.conf
@@ -0,0 +1,20 @@
+server {
+ listen 80;
+ server_name _;
+
+ root /usr/share/nginx/html;
+ index index.html;
+
+ location /api/ {
+ proxy_pass http://backend:8318;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+
+ location / {
+ try_files $uri $uri/ /index.html;
+ }
+}
diff --git a/frontend/package.json b/frontend/package.json
new file mode 100644
index 0000000..2f0f9c6
--- /dev/null
+++ b/frontend/package.json
@@ -0,0 +1,19 @@
+{
+ "name": "auto-pool-maintainer-frontend",
+ "private": true,
+ "version": "0.1.0",
+ "type": "module",
+ "scripts": {
+ "dev": "vite",
+ "build": "vite build",
+ "preview": "vite preview"
+ },
+ "dependencies": {
+ "preact": "^10.26.4"
+ },
+ "devDependencies": {
+ "@preact/preset-vite": "^2.10.2",
+ "typescript": "^5.8.2",
+ "vite": "^6.2.0"
+ }
+}
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
new file mode 100644
index 0000000..8aeea57
--- /dev/null
+++ b/frontend/pnpm-lock.yaml
@@ -0,0 +1,1291 @@
+lockfileVersion: '9.0'
+
+settings:
+ autoInstallPeers: true
+ excludeLinksFromLockfile: false
+
+importers:
+
+ .:
+ dependencies:
+ preact:
+ specifier: ^10.26.4
+ version: 10.29.0
+ devDependencies:
+ '@preact/preset-vite':
+ specifier: ^2.10.2
+ version: 2.10.5(@babel/core@7.29.0)(preact@10.29.0)(rollup@4.60.0)(vite@6.4.1)
+ typescript:
+ specifier: ^5.8.2
+ version: 5.9.3
+ vite:
+ specifier: ^6.2.0
+ version: 6.4.1
+
+packages:
+
+ '@babel/code-frame@7.29.0':
+ resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/compat-data@7.29.0':
+ resolution: {integrity: sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/core@7.29.0':
+ resolution: {integrity: sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/generator@7.29.1':
+ resolution: {integrity: sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-annotate-as-pure@7.27.3':
+ resolution: {integrity: sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-compilation-targets@7.28.6':
+ resolution: {integrity: sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-globals@7.28.0':
+ resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-module-imports@7.28.6':
+ resolution: {integrity: sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-module-transforms@7.28.6':
+ resolution: {integrity: sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0
+
+ '@babel/helper-plugin-utils@7.28.6':
+ resolution: {integrity: sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-string-parser@7.27.1':
+ resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-validator-identifier@7.28.5':
+ resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helper-validator-option@7.27.1':
+ resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/helpers@7.29.2':
+ resolution: {integrity: sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/parser@7.29.2':
+ resolution: {integrity: sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==}
+ engines: {node: '>=6.0.0'}
+ hasBin: true
+
+ '@babel/plugin-syntax-jsx@7.28.6':
+ resolution: {integrity: sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+
+ '@babel/plugin-transform-react-jsx-development@7.27.1':
+ resolution: {integrity: sha512-ykDdF5yI4f1WrAolLqeF3hmYU12j9ntLQl/AOG1HAS21jxyg1Q0/J/tpREuYLfatGdGmXp/3yS0ZA76kOlVq9Q==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+
+ '@babel/plugin-transform-react-jsx@7.28.6':
+ resolution: {integrity: sha512-61bxqhiRfAACulXSLd/GxqmAedUSrRZIu/cbaT18T1CetkTmtDN15it7i80ru4DVqRK1WMxQhXs+Lf9kajm5Ow==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+
+ '@babel/template@7.28.6':
+ resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/traverse@7.29.0':
+ resolution: {integrity: sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==}
+ engines: {node: '>=6.9.0'}
+
+ '@babel/types@7.29.0':
+ resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==}
+ engines: {node: '>=6.9.0'}
+
+ '@esbuild/aix-ppc64@0.25.12':
+ resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==}
+ engines: {node: '>=18'}
+ cpu: [ppc64]
+ os: [aix]
+
+ '@esbuild/android-arm64@0.25.12':
+ resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [android]
+
+ '@esbuild/android-arm@0.25.12':
+ resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==}
+ engines: {node: '>=18'}
+ cpu: [arm]
+ os: [android]
+
+ '@esbuild/android-x64@0.25.12':
+ resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [android]
+
+ '@esbuild/darwin-arm64@0.25.12':
+ resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [darwin]
+
+ '@esbuild/darwin-x64@0.25.12':
+ resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [darwin]
+
+ '@esbuild/freebsd-arm64@0.25.12':
+ resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [freebsd]
+
+ '@esbuild/freebsd-x64@0.25.12':
+ resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [freebsd]
+
+ '@esbuild/linux-arm64@0.25.12':
+ resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [linux]
+
+ '@esbuild/linux-arm@0.25.12':
+ resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==}
+ engines: {node: '>=18'}
+ cpu: [arm]
+ os: [linux]
+
+ '@esbuild/linux-ia32@0.25.12':
+ resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==}
+ engines: {node: '>=18'}
+ cpu: [ia32]
+ os: [linux]
+
+ '@esbuild/linux-loong64@0.25.12':
+ resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==}
+ engines: {node: '>=18'}
+ cpu: [loong64]
+ os: [linux]
+
+ '@esbuild/linux-mips64el@0.25.12':
+ resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==}
+ engines: {node: '>=18'}
+ cpu: [mips64el]
+ os: [linux]
+
+ '@esbuild/linux-ppc64@0.25.12':
+ resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==}
+ engines: {node: '>=18'}
+ cpu: [ppc64]
+ os: [linux]
+
+ '@esbuild/linux-riscv64@0.25.12':
+ resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==}
+ engines: {node: '>=18'}
+ cpu: [riscv64]
+ os: [linux]
+
+ '@esbuild/linux-s390x@0.25.12':
+ resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==}
+ engines: {node: '>=18'}
+ cpu: [s390x]
+ os: [linux]
+
+ '@esbuild/linux-x64@0.25.12':
+ resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [linux]
+
+ '@esbuild/netbsd-arm64@0.25.12':
+ resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [netbsd]
+
+ '@esbuild/netbsd-x64@0.25.12':
+ resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [netbsd]
+
+ '@esbuild/openbsd-arm64@0.25.12':
+ resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [openbsd]
+
+ '@esbuild/openbsd-x64@0.25.12':
+ resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [openbsd]
+
+ '@esbuild/openharmony-arm64@0.25.12':
+ resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [openharmony]
+
+ '@esbuild/sunos-x64@0.25.12':
+ resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [sunos]
+
+ '@esbuild/win32-arm64@0.25.12':
+ resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [win32]
+
+ '@esbuild/win32-ia32@0.25.12':
+ resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==}
+ engines: {node: '>=18'}
+ cpu: [ia32]
+ os: [win32]
+
+ '@esbuild/win32-x64@0.25.12':
+ resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [win32]
+
+ '@jridgewell/gen-mapping@0.3.13':
+ resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}
+
+ '@jridgewell/remapping@2.3.5':
+ resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==}
+
+ '@jridgewell/resolve-uri@3.1.2':
+ resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
+ engines: {node: '>=6.0.0'}
+
+ '@jridgewell/sourcemap-codec@1.5.5':
+ resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==}
+
+ '@jridgewell/trace-mapping@0.3.31':
+ resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
+
+ '@preact/preset-vite@2.10.5':
+ resolution: {integrity: sha512-p0vJpxiVO7KWWazWny3LUZ+saXyZKWv6Ju0bYMWNJRp2YveufRPgSUB1C4MTqGJfz07EehMgfN+AJNwQy+w6Iw==}
+ peerDependencies:
+ '@babel/core': 7.x
+ vite: 2.x || 3.x || 4.x || 5.x || 6.x || 7.x || 8.x
+
+ '@prefresh/babel-plugin@0.5.3':
+ resolution: {integrity: sha512-57LX2SHs4BX2s1IwCjNzTE2OJeEepRCNf1VTEpbNcUyHfMO68eeOWGDIt4ob9aYlW6PEWZ1SuwNikuoIXANDtQ==}
+
+ '@prefresh/core@1.5.9':
+ resolution: {integrity: sha512-IKBKCPaz34OFVC+adiQ2qaTF5qdztO2/4ZPf4KsRTgjKosWqxVXmEbxCiUydYZRY8GVie+DQlKzQr9gt6HQ+EQ==}
+ peerDependencies:
+ preact: ^10.0.0 || ^11.0.0-0
+
+ '@prefresh/utils@1.2.1':
+ resolution: {integrity: sha512-vq/sIuN5nYfYzvyayXI4C2QkprfNaHUQ9ZX+3xLD8nL3rWyzpxOm1+K7RtMbhd+66QcaISViK7amjnheQ/4WZw==}
+
+ '@prefresh/vite@2.4.12':
+ resolution: {integrity: sha512-FY1fzXpUjiuosznMV0YM7XAOPZjB5FIdWS0W24+XnlxYkt9hNAwwsiKYn+cuTEoMtD/ZVazS5QVssBr9YhpCQA==}
+ peerDependencies:
+ preact: ^10.4.0 || ^11.0.0-0
+ vite: '>=2.0.0'
+
+ '@rollup/pluginutils@4.2.1':
+ resolution: {integrity: sha512-iKnFXr7NkdZAIHiIWE+BX5ULi/ucVFYWD6TbAV+rZctiRTY2PL6tsIKhoIOaoskiWAkgu+VsbXgUVDNLHf+InQ==}
+ engines: {node: '>= 8.0.0'}
+
+ '@rollup/pluginutils@5.3.0':
+ resolution: {integrity: sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==}
+ engines: {node: '>=14.0.0'}
+ peerDependencies:
+ rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0
+ peerDependenciesMeta:
+ rollup:
+ optional: true
+
+ '@rollup/rollup-android-arm-eabi@4.60.0':
+ resolution: {integrity: sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A==}
+ cpu: [arm]
+ os: [android]
+
+ '@rollup/rollup-android-arm64@4.60.0':
+ resolution: {integrity: sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw==}
+ cpu: [arm64]
+ os: [android]
+
+ '@rollup/rollup-darwin-arm64@4.60.0':
+ resolution: {integrity: sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA==}
+ cpu: [arm64]
+ os: [darwin]
+
+ '@rollup/rollup-darwin-x64@4.60.0':
+ resolution: {integrity: sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw==}
+ cpu: [x64]
+ os: [darwin]
+
+ '@rollup/rollup-freebsd-arm64@4.60.0':
+ resolution: {integrity: sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw==}
+ cpu: [arm64]
+ os: [freebsd]
+
+ '@rollup/rollup-freebsd-x64@4.60.0':
+ resolution: {integrity: sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA==}
+ cpu: [x64]
+ os: [freebsd]
+
+ '@rollup/rollup-linux-arm-gnueabihf@4.60.0':
+ resolution: {integrity: sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g==}
+ cpu: [arm]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-arm-musleabihf@4.60.0':
+ resolution: {integrity: sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ==}
+ cpu: [arm]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-arm64-gnu@4.60.0':
+ resolution: {integrity: sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A==}
+ cpu: [arm64]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-arm64-musl@4.60.0':
+ resolution: {integrity: sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ==}
+ cpu: [arm64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-loong64-gnu@4.60.0':
+ resolution: {integrity: sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw==}
+ cpu: [loong64]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-loong64-musl@4.60.0':
+ resolution: {integrity: sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog==}
+ cpu: [loong64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-ppc64-gnu@4.60.0':
+ resolution: {integrity: sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ==}
+ cpu: [ppc64]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-ppc64-musl@4.60.0':
+ resolution: {integrity: sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg==}
+ cpu: [ppc64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-riscv64-gnu@4.60.0':
+ resolution: {integrity: sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA==}
+ cpu: [riscv64]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-riscv64-musl@4.60.0':
+ resolution: {integrity: sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ==}
+ cpu: [riscv64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-s390x-gnu@4.60.0':
+ resolution: {integrity: sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ==}
+ cpu: [s390x]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-x64-gnu@4.60.0':
+ resolution: {integrity: sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg==}
+ cpu: [x64]
+ os: [linux]
+ libc: [glibc]
+
+ '@rollup/rollup-linux-x64-musl@4.60.0':
+ resolution: {integrity: sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw==}
+ cpu: [x64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-openbsd-x64@4.60.0':
+ resolution: {integrity: sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw==}
+ cpu: [x64]
+ os: [openbsd]
+
+ '@rollup/rollup-openharmony-arm64@4.60.0':
+ resolution: {integrity: sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA==}
+ cpu: [arm64]
+ os: [openharmony]
+
+ '@rollup/rollup-win32-arm64-msvc@4.60.0':
+ resolution: {integrity: sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ==}
+ cpu: [arm64]
+ os: [win32]
+
+ '@rollup/rollup-win32-ia32-msvc@4.60.0':
+ resolution: {integrity: sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w==}
+ cpu: [ia32]
+ os: [win32]
+
+ '@rollup/rollup-win32-x64-gnu@4.60.0':
+ resolution: {integrity: sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA==}
+ cpu: [x64]
+ os: [win32]
+
+ '@rollup/rollup-win32-x64-msvc@4.60.0':
+ resolution: {integrity: sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w==}
+ cpu: [x64]
+ os: [win32]
+
+ '@types/estree@1.0.8':
+ resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
+
+ babel-plugin-transform-hook-names@1.0.2:
+ resolution: {integrity: sha512-5gafyjyyBTTdX/tQQ0hRgu4AhNHG/hqWi0ZZmg2xvs2FgRkJXzDNKBZCyoYqgFkovfDrgM8OoKg8karoUvWeCw==}
+ peerDependencies:
+ '@babel/core': ^7.12.10
+
+ baseline-browser-mapping@2.10.11:
+ resolution: {integrity: sha512-DAKrHphkJyiGuau/cFieRYhcTFeK/lBuD++C7cZ6KZHbMhBrisoi+EvhQ5RZrIfV5qwsW8kgQ07JIC+MDJRAhg==}
+ engines: {node: '>=6.0.0'}
+ hasBin: true
+
+ boolbase@1.0.0:
+ resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==}
+
+ browserslist@4.28.1:
+ resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==}
+ engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
+ hasBin: true
+
+ caniuse-lite@1.0.30001781:
+ resolution: {integrity: sha512-RdwNCyMsNBftLjW6w01z8bKEvT6e/5tpPVEgtn22TiLGlstHOVecsX2KHFkD5e/vRnIE4EGzpuIODb3mtswtkw==}
+
+ convert-source-map@2.0.0:
+ resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==}
+
+ css-select@5.2.2:
+ resolution: {integrity: sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==}
+
+ css-what@6.2.2:
+ resolution: {integrity: sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==}
+ engines: {node: '>= 6'}
+
+ debug@4.4.3:
+ resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
+ engines: {node: '>=6.0'}
+ peerDependencies:
+ supports-color: '*'
+ peerDependenciesMeta:
+ supports-color:
+ optional: true
+
+ dom-serializer@2.0.0:
+ resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==}
+
+ domelementtype@2.3.0:
+ resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==}
+
+ domhandler@5.0.3:
+ resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==}
+ engines: {node: '>= 4'}
+
+ domutils@3.2.2:
+ resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==}
+
+ electron-to-chromium@1.5.327:
+ resolution: {integrity: sha512-hLxLdIJDf8zIzKoH2TPCs+Botc+wUmj9sp4jVMwklY/sKleM8xxxOExRX3Gxj73nCXmJe3anhG7SvsDDPDvmuQ==}
+
+ entities@4.5.0:
+ resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
+ engines: {node: '>=0.12'}
+
+ esbuild@0.25.12:
+ resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==}
+ engines: {node: '>=18'}
+ hasBin: true
+
+ escalade@3.2.0:
+ resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
+ engines: {node: '>=6'}
+
+ estree-walker@2.0.2:
+ resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
+
+ fdir@6.5.0:
+ resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==}
+ engines: {node: '>=12.0.0'}
+ peerDependencies:
+ picomatch: ^3 || ^4
+ peerDependenciesMeta:
+ picomatch:
+ optional: true
+
+ fsevents@2.3.3:
+ resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
+ engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
+ os: [darwin]
+
+ gensync@1.0.0-beta.2:
+ resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==}
+ engines: {node: '>=6.9.0'}
+
+ he@1.2.0:
+ resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==}
+ hasBin: true
+
+ js-tokens@4.0.0:
+ resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
+
+ jsesc@3.1.0:
+ resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==}
+ engines: {node: '>=6'}
+ hasBin: true
+
+ json5@2.2.3:
+ resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==}
+ engines: {node: '>=6'}
+ hasBin: true
+
+ kolorist@1.8.0:
+ resolution: {integrity: sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==}
+
+ lru-cache@5.1.1:
+ resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==}
+
+ magic-string@0.30.21:
+ resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
+
+ ms@2.1.3:
+ resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
+
+ nanoid@3.3.11:
+ resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
+ engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
+ hasBin: true
+
+ node-html-parser@6.1.13:
+ resolution: {integrity: sha512-qIsTMOY4C/dAa5Q5vsobRpOOvPfC4pB61UVW2uSwZNUp0QU/jCekTal1vMmbO0DgdHeLUJpv/ARmDqErVxA3Sg==}
+
+ node-releases@2.0.36:
+ resolution: {integrity: sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==}
+
+ nth-check@2.1.1:
+ resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==}
+
+ picocolors@1.1.1:
+ resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
+
+ picomatch@2.3.2:
+ resolution: {integrity: sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==}
+ engines: {node: '>=8.6'}
+
+ picomatch@4.0.4:
+ resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==}
+ engines: {node: '>=12'}
+
+ postcss@8.5.8:
+ resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==}
+ engines: {node: ^10 || ^12 || >=14}
+
+ preact@10.29.0:
+ resolution: {integrity: sha512-wSAGyk2bYR1c7t3SZ3jHcM6xy0lcBcDel6lODcs9ME6Th++Dx2KU+6D3HD8wMMKGA8Wpw7OMd3/4RGzYRpzwRg==}
+
+ rollup@4.60.0:
+ resolution: {integrity: sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ==}
+ engines: {node: '>=18.0.0', npm: '>=8.0.0'}
+ hasBin: true
+
+ semver@6.3.1:
+ resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
+ hasBin: true
+
+ simple-code-frame@1.3.0:
+ resolution: {integrity: sha512-MB4pQmETUBlNs62BBeRjIFGeuy/x6gGKh7+eRUemn1rCFhqo7K+4slPqsyizCbcbYLnaYqaoZ2FWsZ/jN06D8w==}
+
+ source-map-js@1.2.1:
+ resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
+ engines: {node: '>=0.10.0'}
+
+ source-map@0.7.6:
+ resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==}
+ engines: {node: '>= 12'}
+
+ stack-trace@1.0.0-pre2:
+ resolution: {integrity: sha512-2ztBJRek8IVofG9DBJqdy2N5kulaacX30Nz7xmkYF6ale9WBVmIy6mFBchvGX7Vx/MyjBhx+Rcxqrj+dbOnQ6A==}
+ engines: {node: '>=16'}
+
+ tinyglobby@0.2.15:
+ resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==}
+ engines: {node: '>=12.0.0'}
+
+ typescript@5.9.3:
+ resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
+ engines: {node: '>=14.17'}
+ hasBin: true
+
+ update-browserslist-db@1.2.3:
+ resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==}
+ hasBin: true
+ peerDependencies:
+ browserslist: '>= 4.21.0'
+
+ vite-prerender-plugin@0.5.13:
+ resolution: {integrity: sha512-IKSpYkzDBsKAxa05naRbj7GvNVMSdww/Z/E89oO3xndz+gWnOBOKOAbEXv7qDhktY/j3vHgJmoV1pPzqU2tx9g==}
+ peerDependencies:
+ vite: 5.x || 6.x || 7.x || 8.x
+
+ vite@6.4.1:
+ resolution: {integrity: sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==}
+ engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0}
+ hasBin: true
+ peerDependencies:
+ '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0
+ jiti: '>=1.21.0'
+ less: '*'
+ lightningcss: ^1.21.0
+ sass: '*'
+ sass-embedded: '*'
+ stylus: '*'
+ sugarss: '*'
+ terser: ^5.16.0
+ tsx: ^4.8.1
+ yaml: ^2.4.2
+ peerDependenciesMeta:
+ '@types/node':
+ optional: true
+ jiti:
+ optional: true
+ less:
+ optional: true
+ lightningcss:
+ optional: true
+ sass:
+ optional: true
+ sass-embedded:
+ optional: true
+ stylus:
+ optional: true
+ sugarss:
+ optional: true
+ terser:
+ optional: true
+ tsx:
+ optional: true
+ yaml:
+ optional: true
+
+ yallist@3.1.1:
+ resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==}
+
+ zimmerframe@1.1.4:
+ resolution: {integrity: sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==}
+
+snapshots:
+
+ '@babel/code-frame@7.29.0':
+ dependencies:
+ '@babel/helper-validator-identifier': 7.28.5
+ js-tokens: 4.0.0
+ picocolors: 1.1.1
+
+ '@babel/compat-data@7.29.0': {}
+
+ '@babel/core@7.29.0':
+ dependencies:
+ '@babel/code-frame': 7.29.0
+ '@babel/generator': 7.29.1
+ '@babel/helper-compilation-targets': 7.28.6
+ '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0)
+ '@babel/helpers': 7.29.2
+ '@babel/parser': 7.29.2
+ '@babel/template': 7.28.6
+ '@babel/traverse': 7.29.0
+ '@babel/types': 7.29.0
+ '@jridgewell/remapping': 2.3.5
+ convert-source-map: 2.0.0
+ debug: 4.4.3
+ gensync: 1.0.0-beta.2
+ json5: 2.2.3
+ semver: 6.3.1
+ transitivePeerDependencies:
+ - supports-color
+
+ '@babel/generator@7.29.1':
+ dependencies:
+ '@babel/parser': 7.29.2
+ '@babel/types': 7.29.0
+ '@jridgewell/gen-mapping': 0.3.13
+ '@jridgewell/trace-mapping': 0.3.31
+ jsesc: 3.1.0
+
+ '@babel/helper-annotate-as-pure@7.27.3':
+ dependencies:
+ '@babel/types': 7.29.0
+
+ '@babel/helper-compilation-targets@7.28.6':
+ dependencies:
+ '@babel/compat-data': 7.29.0
+ '@babel/helper-validator-option': 7.27.1
+ browserslist: 4.28.1
+ lru-cache: 5.1.1
+ semver: 6.3.1
+
+ '@babel/helper-globals@7.28.0': {}
+
+ '@babel/helper-module-imports@7.28.6':
+ dependencies:
+ '@babel/traverse': 7.29.0
+ '@babel/types': 7.29.0
+ transitivePeerDependencies:
+ - supports-color
+
+ '@babel/helper-module-transforms@7.28.6(@babel/core@7.29.0)':
+ dependencies:
+ '@babel/core': 7.29.0
+ '@babel/helper-module-imports': 7.28.6
+ '@babel/helper-validator-identifier': 7.28.5
+ '@babel/traverse': 7.29.0
+ transitivePeerDependencies:
+ - supports-color
+
+ '@babel/helper-plugin-utils@7.28.6': {}
+
+ '@babel/helper-string-parser@7.27.1': {}
+
+ '@babel/helper-validator-identifier@7.28.5': {}
+
+ '@babel/helper-validator-option@7.27.1': {}
+
+ '@babel/helpers@7.29.2':
+ dependencies:
+ '@babel/template': 7.28.6
+ '@babel/types': 7.29.0
+
+ '@babel/parser@7.29.2':
+ dependencies:
+ '@babel/types': 7.29.0
+
+ '@babel/plugin-syntax-jsx@7.28.6(@babel/core@7.29.0)':
+ dependencies:
+ '@babel/core': 7.29.0
+ '@babel/helper-plugin-utils': 7.28.6
+
+ '@babel/plugin-transform-react-jsx-development@7.27.1(@babel/core@7.29.0)':
+ dependencies:
+ '@babel/core': 7.29.0
+ '@babel/plugin-transform-react-jsx': 7.28.6(@babel/core@7.29.0)
+ transitivePeerDependencies:
+ - supports-color
+
+ '@babel/plugin-transform-react-jsx@7.28.6(@babel/core@7.29.0)':
+ dependencies:
+ '@babel/core': 7.29.0
+ '@babel/helper-annotate-as-pure': 7.27.3
+ '@babel/helper-module-imports': 7.28.6
+ '@babel/helper-plugin-utils': 7.28.6
+ '@babel/plugin-syntax-jsx': 7.28.6(@babel/core@7.29.0)
+ '@babel/types': 7.29.0
+ transitivePeerDependencies:
+ - supports-color
+
+ '@babel/template@7.28.6':
+ dependencies:
+ '@babel/code-frame': 7.29.0
+ '@babel/parser': 7.29.2
+ '@babel/types': 7.29.0
+
+ '@babel/traverse@7.29.0':
+ dependencies:
+ '@babel/code-frame': 7.29.0
+ '@babel/generator': 7.29.1
+ '@babel/helper-globals': 7.28.0
+ '@babel/parser': 7.29.2
+ '@babel/template': 7.28.6
+ '@babel/types': 7.29.0
+ debug: 4.4.3
+ transitivePeerDependencies:
+ - supports-color
+
+ '@babel/types@7.29.0':
+ dependencies:
+ '@babel/helper-string-parser': 7.27.1
+ '@babel/helper-validator-identifier': 7.28.5
+
+ '@esbuild/aix-ppc64@0.25.12':
+ optional: true
+
+ '@esbuild/android-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/android-arm@0.25.12':
+ optional: true
+
+ '@esbuild/android-x64@0.25.12':
+ optional: true
+
+ '@esbuild/darwin-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/darwin-x64@0.25.12':
+ optional: true
+
+ '@esbuild/freebsd-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/freebsd-x64@0.25.12':
+ optional: true
+
+ '@esbuild/linux-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/linux-arm@0.25.12':
+ optional: true
+
+ '@esbuild/linux-ia32@0.25.12':
+ optional: true
+
+ '@esbuild/linux-loong64@0.25.12':
+ optional: true
+
+ '@esbuild/linux-mips64el@0.25.12':
+ optional: true
+
+ '@esbuild/linux-ppc64@0.25.12':
+ optional: true
+
+ '@esbuild/linux-riscv64@0.25.12':
+ optional: true
+
+ '@esbuild/linux-s390x@0.25.12':
+ optional: true
+
+ '@esbuild/linux-x64@0.25.12':
+ optional: true
+
+ '@esbuild/netbsd-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/netbsd-x64@0.25.12':
+ optional: true
+
+ '@esbuild/openbsd-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/openbsd-x64@0.25.12':
+ optional: true
+
+ '@esbuild/openharmony-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/sunos-x64@0.25.12':
+ optional: true
+
+ '@esbuild/win32-arm64@0.25.12':
+ optional: true
+
+ '@esbuild/win32-ia32@0.25.12':
+ optional: true
+
+ '@esbuild/win32-x64@0.25.12':
+ optional: true
+
+ '@jridgewell/gen-mapping@0.3.13':
+ dependencies:
+ '@jridgewell/sourcemap-codec': 1.5.5
+ '@jridgewell/trace-mapping': 0.3.31
+
+ '@jridgewell/remapping@2.3.5':
+ dependencies:
+ '@jridgewell/gen-mapping': 0.3.13
+ '@jridgewell/trace-mapping': 0.3.31
+
+ '@jridgewell/resolve-uri@3.1.2': {}
+
+ '@jridgewell/sourcemap-codec@1.5.5': {}
+
+ '@jridgewell/trace-mapping@0.3.31':
+ dependencies:
+ '@jridgewell/resolve-uri': 3.1.2
+ '@jridgewell/sourcemap-codec': 1.5.5
+
+ '@preact/preset-vite@2.10.5(@babel/core@7.29.0)(preact@10.29.0)(rollup@4.60.0)(vite@6.4.1)':
+ dependencies:
+ '@babel/core': 7.29.0
+ '@babel/plugin-transform-react-jsx': 7.28.6(@babel/core@7.29.0)
+ '@babel/plugin-transform-react-jsx-development': 7.27.1(@babel/core@7.29.0)
+ '@prefresh/vite': 2.4.12(preact@10.29.0)(vite@6.4.1)
+ '@rollup/pluginutils': 5.3.0(rollup@4.60.0)
+ babel-plugin-transform-hook-names: 1.0.2(@babel/core@7.29.0)
+ debug: 4.4.3
+ magic-string: 0.30.21
+ picocolors: 1.1.1
+ vite: 6.4.1
+ vite-prerender-plugin: 0.5.13(vite@6.4.1)
+ zimmerframe: 1.1.4
+ transitivePeerDependencies:
+ - preact
+ - rollup
+ - supports-color
+
+ '@prefresh/babel-plugin@0.5.3': {}
+
+ '@prefresh/core@1.5.9(preact@10.29.0)':
+ dependencies:
+ preact: 10.29.0
+
+ '@prefresh/utils@1.2.1': {}
+
+ '@prefresh/vite@2.4.12(preact@10.29.0)(vite@6.4.1)':
+ dependencies:
+ '@babel/core': 7.29.0
+ '@prefresh/babel-plugin': 0.5.3
+ '@prefresh/core': 1.5.9(preact@10.29.0)
+ '@prefresh/utils': 1.2.1
+ '@rollup/pluginutils': 4.2.1
+ preact: 10.29.0
+ vite: 6.4.1
+ transitivePeerDependencies:
+ - supports-color
+
+ '@rollup/pluginutils@4.2.1':
+ dependencies:
+ estree-walker: 2.0.2
+ picomatch: 2.3.2
+
+ '@rollup/pluginutils@5.3.0(rollup@4.60.0)':
+ dependencies:
+ '@types/estree': 1.0.8
+ estree-walker: 2.0.2
+ picomatch: 4.0.4
+ optionalDependencies:
+ rollup: 4.60.0
+
+ '@rollup/rollup-android-arm-eabi@4.60.0':
+ optional: true
+
+ '@rollup/rollup-android-arm64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-darwin-arm64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-darwin-x64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-freebsd-arm64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-freebsd-x64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-arm-gnueabihf@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-arm-musleabihf@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-arm64-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-arm64-musl@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-loong64-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-loong64-musl@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-ppc64-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-ppc64-musl@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-riscv64-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-riscv64-musl@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-s390x-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-x64-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-linux-x64-musl@4.60.0':
+ optional: true
+
+ '@rollup/rollup-openbsd-x64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-openharmony-arm64@4.60.0':
+ optional: true
+
+ '@rollup/rollup-win32-arm64-msvc@4.60.0':
+ optional: true
+
+ '@rollup/rollup-win32-ia32-msvc@4.60.0':
+ optional: true
+
+ '@rollup/rollup-win32-x64-gnu@4.60.0':
+ optional: true
+
+ '@rollup/rollup-win32-x64-msvc@4.60.0':
+ optional: true
+
+ '@types/estree@1.0.8': {}
+
+ babel-plugin-transform-hook-names@1.0.2(@babel/core@7.29.0):
+ dependencies:
+ '@babel/core': 7.29.0
+
+ baseline-browser-mapping@2.10.11: {}
+
+ boolbase@1.0.0: {}
+
+ browserslist@4.28.1:
+ dependencies:
+ baseline-browser-mapping: 2.10.11
+ caniuse-lite: 1.0.30001781
+ electron-to-chromium: 1.5.327
+ node-releases: 2.0.36
+ update-browserslist-db: 1.2.3(browserslist@4.28.1)
+
+ caniuse-lite@1.0.30001781: {}
+
+ convert-source-map@2.0.0: {}
+
+ css-select@5.2.2:
+ dependencies:
+ boolbase: 1.0.0
+ css-what: 6.2.2
+ domhandler: 5.0.3
+ domutils: 3.2.2
+ nth-check: 2.1.1
+
+ css-what@6.2.2: {}
+
+ debug@4.4.3:
+ dependencies:
+ ms: 2.1.3
+
+ dom-serializer@2.0.0:
+ dependencies:
+ domelementtype: 2.3.0
+ domhandler: 5.0.3
+ entities: 4.5.0
+
+ domelementtype@2.3.0: {}
+
+ domhandler@5.0.3:
+ dependencies:
+ domelementtype: 2.3.0
+
+ domutils@3.2.2:
+ dependencies:
+ dom-serializer: 2.0.0
+ domelementtype: 2.3.0
+ domhandler: 5.0.3
+
+ electron-to-chromium@1.5.327: {}
+
+ entities@4.5.0: {}
+
+ esbuild@0.25.12:
+ optionalDependencies:
+ '@esbuild/aix-ppc64': 0.25.12
+ '@esbuild/android-arm': 0.25.12
+ '@esbuild/android-arm64': 0.25.12
+ '@esbuild/android-x64': 0.25.12
+ '@esbuild/darwin-arm64': 0.25.12
+ '@esbuild/darwin-x64': 0.25.12
+ '@esbuild/freebsd-arm64': 0.25.12
+ '@esbuild/freebsd-x64': 0.25.12
+ '@esbuild/linux-arm': 0.25.12
+ '@esbuild/linux-arm64': 0.25.12
+ '@esbuild/linux-ia32': 0.25.12
+ '@esbuild/linux-loong64': 0.25.12
+ '@esbuild/linux-mips64el': 0.25.12
+ '@esbuild/linux-ppc64': 0.25.12
+ '@esbuild/linux-riscv64': 0.25.12
+ '@esbuild/linux-s390x': 0.25.12
+ '@esbuild/linux-x64': 0.25.12
+ '@esbuild/netbsd-arm64': 0.25.12
+ '@esbuild/netbsd-x64': 0.25.12
+ '@esbuild/openbsd-arm64': 0.25.12
+ '@esbuild/openbsd-x64': 0.25.12
+ '@esbuild/openharmony-arm64': 0.25.12
+ '@esbuild/sunos-x64': 0.25.12
+ '@esbuild/win32-arm64': 0.25.12
+ '@esbuild/win32-ia32': 0.25.12
+ '@esbuild/win32-x64': 0.25.12
+
+ escalade@3.2.0: {}
+
+ estree-walker@2.0.2: {}
+
+ fdir@6.5.0(picomatch@4.0.4):
+ optionalDependencies:
+ picomatch: 4.0.4
+
+ fsevents@2.3.3:
+ optional: true
+
+ gensync@1.0.0-beta.2: {}
+
+ he@1.2.0: {}
+
+ js-tokens@4.0.0: {}
+
+ jsesc@3.1.0: {}
+
+ json5@2.2.3: {}
+
+ kolorist@1.8.0: {}
+
+ lru-cache@5.1.1:
+ dependencies:
+ yallist: 3.1.1
+
+ magic-string@0.30.21:
+ dependencies:
+ '@jridgewell/sourcemap-codec': 1.5.5
+
+ ms@2.1.3: {}
+
+ nanoid@3.3.11: {}
+
+ node-html-parser@6.1.13:
+ dependencies:
+ css-select: 5.2.2
+ he: 1.2.0
+
+ node-releases@2.0.36: {}
+
+ nth-check@2.1.1:
+ dependencies:
+ boolbase: 1.0.0
+
+ picocolors@1.1.1: {}
+
+ picomatch@2.3.2: {}
+
+ picomatch@4.0.4: {}
+
+ postcss@8.5.8:
+ dependencies:
+ nanoid: 3.3.11
+ picocolors: 1.1.1
+ source-map-js: 1.2.1
+
+ preact@10.29.0: {}
+
+ rollup@4.60.0:
+ dependencies:
+ '@types/estree': 1.0.8
+ optionalDependencies:
+ '@rollup/rollup-android-arm-eabi': 4.60.0
+ '@rollup/rollup-android-arm64': 4.60.0
+ '@rollup/rollup-darwin-arm64': 4.60.0
+ '@rollup/rollup-darwin-x64': 4.60.0
+ '@rollup/rollup-freebsd-arm64': 4.60.0
+ '@rollup/rollup-freebsd-x64': 4.60.0
+ '@rollup/rollup-linux-arm-gnueabihf': 4.60.0
+ '@rollup/rollup-linux-arm-musleabihf': 4.60.0
+ '@rollup/rollup-linux-arm64-gnu': 4.60.0
+ '@rollup/rollup-linux-arm64-musl': 4.60.0
+ '@rollup/rollup-linux-loong64-gnu': 4.60.0
+ '@rollup/rollup-linux-loong64-musl': 4.60.0
+ '@rollup/rollup-linux-ppc64-gnu': 4.60.0
+ '@rollup/rollup-linux-ppc64-musl': 4.60.0
+ '@rollup/rollup-linux-riscv64-gnu': 4.60.0
+ '@rollup/rollup-linux-riscv64-musl': 4.60.0
+ '@rollup/rollup-linux-s390x-gnu': 4.60.0
+ '@rollup/rollup-linux-x64-gnu': 4.60.0
+ '@rollup/rollup-linux-x64-musl': 4.60.0
+ '@rollup/rollup-openbsd-x64': 4.60.0
+ '@rollup/rollup-openharmony-arm64': 4.60.0
+ '@rollup/rollup-win32-arm64-msvc': 4.60.0
+ '@rollup/rollup-win32-ia32-msvc': 4.60.0
+ '@rollup/rollup-win32-x64-gnu': 4.60.0
+ '@rollup/rollup-win32-x64-msvc': 4.60.0
+ fsevents: 2.3.3
+
+ semver@6.3.1: {}
+
+ simple-code-frame@1.3.0:
+ dependencies:
+ kolorist: 1.8.0
+
+ source-map-js@1.2.1: {}
+
+ source-map@0.7.6: {}
+
+ stack-trace@1.0.0-pre2: {}
+
+ tinyglobby@0.2.15:
+ dependencies:
+ fdir: 6.5.0(picomatch@4.0.4)
+ picomatch: 4.0.4
+
+ typescript@5.9.3: {}
+
+ update-browserslist-db@1.2.3(browserslist@4.28.1):
+ dependencies:
+ browserslist: 4.28.1
+ escalade: 3.2.0
+ picocolors: 1.1.1
+
+ vite-prerender-plugin@0.5.13(vite@6.4.1):
+ dependencies:
+ kolorist: 1.8.0
+ magic-string: 0.30.21
+ node-html-parser: 6.1.13
+ simple-code-frame: 1.3.0
+ source-map: 0.7.6
+ stack-trace: 1.0.0-pre2
+ vite: 6.4.1
+
+ vite@6.4.1:
+ dependencies:
+ esbuild: 0.25.12
+ fdir: 6.5.0(picomatch@4.0.4)
+ picomatch: 4.0.4
+ postcss: 8.5.8
+ rollup: 4.60.0
+ tinyglobby: 0.2.15
+ optionalDependencies:
+ fsevents: 2.3.3
+
+ yallist@3.1.1: {}
+
+ zimmerframe@1.1.4: {}
diff --git a/frontend/pnpm-workspace.yaml b/frontend/pnpm-workspace.yaml
new file mode 100644
index 0000000..5ed0b5a
--- /dev/null
+++ b/frontend/pnpm-workspace.yaml
@@ -0,0 +1,2 @@
+allowBuilds:
+ esbuild: true
diff --git a/frontend/src/.DS_Store b/frontend/src/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..17fde03d91bf3d71497c40b4dc14b303466f1c4b
GIT binary patch
literal 6148
zcmeHLK~BRk5M0xMs&c7%;^-&PD;Gi)UeFJewg?anjz~d5;*jU?2Oa=#K)i$#v$hq*
zIF>jeRP9RkxVvlbWUAP8iAc>J)>EP}5e?B8vk|%;<2bj-_Ppl^sO%bs?&uP2OLdej
z4pBf9_-_jEyE~#487iql-@d<_tLc1Y%E}QweNW|;rYM#hvqYRXiR$z9?JS+U{&jA3
z-K}qF3}{0)v<#VDyr7zoB)GKE-s(QuQ!j%i2HM<)6^9?F*v1kyJMrGCZ9J^Y`!IB@
zv5Vf*Bc67JQ61JFKlkM%ctmTJrk78Y7xFr0UI*Z{r6+s~5vP-{pYa5rV2uQqASdLp
zU>^IHMHRD{Ou}e_LuY=1K_?@uuetuN7|v|A;h{t6MFCMj6lfLT{XwHK<{le|cI!ZA
zuK>UdZfh9ZGYqKN0n9x%4&i|*Hx=lnDu2aLZaVzh=j9$7hi*D4TN&&4%F5qRl&ucG
zw&|pDhti7zqClv?zC6!ZTI))3P?0OlSWhw#AUhk%tqI#J+H75E0utH2Tf
literal 0
HcmV?d00001
diff --git a/frontend/src/app.tsx b/frontend/src/app.tsx
new file mode 100644
index 0000000..efff52f
--- /dev/null
+++ b/frontend/src/app.tsx
@@ -0,0 +1,284 @@
+import { useEffect, useState } from "preact/hooks";
+import { ConfigPanel } from "./components/config-panel";
+import { LoginGate } from "./components/login-gate";
+import { MonitorPanel } from "./components/monitor-panel";
+import { initialConfigSections, initialMonitorState } from "./mock/data";
+import {
+ clearAuthToken,
+ fetchConfig,
+ fetchMonitorState,
+ getStoredAuth,
+ isAuthError,
+ saveConfig,
+ startRuntime,
+ startRuntimeLoop,
+ stopRuntime,
+ storeAuthToken,
+ verifyAuthToken,
+} from "./services/api";
+import type { ConfigSection, MonitorState } from "./types/runtime";
+
+export function App() {
+ const [sections, setSections] = useState(initialConfigSections);
+ const [monitor, setMonitor] = useState(initialMonitorState);
+ const [busy, setBusy] = useState(false);
+ const [notice, setNotice] = useState("");
+ const [authenticated, setAuthenticated] = useState(Boolean(getStoredAuth().token));
+ const [authError, setAuthError] = useState("");
+ const [hasStoredToken, setHasStoredToken] = useState(Boolean(getStoredAuth().token));
+
+ const refreshMonitor = async () => {
+ const nextMonitor = await fetchMonitorState();
+ setMonitor(nextMonitor);
+ };
+
+ useEffect(() => {
+ let active = true;
+
+ fetchConfig()
+ .then((nextSections) => {
+ if (active) {
+ setSections(nextSections);
+ setAuthenticated(true);
+ }
+ })
+ .catch((error) => {
+ if (active) {
+ if (isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ }
+ setAuthenticated(false);
+ }
+ });
+
+ refreshMonitor()
+ .then(() => {
+ if (active) {
+ setNotice("");
+ }
+ })
+ .catch((error) => {
+ if (active && isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthenticated(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ }
+ });
+
+ return () => {
+ active = false;
+ };
+ }, []);
+
+ useEffect(() => {
+ if (!authenticated) {
+ return;
+ }
+
+ let active = true;
+
+ const timer = window.setInterval(() => {
+ refreshMonitor()
+ .then(() => {
+ if (active) {
+ setNotice((current) => current);
+ }
+ })
+ .catch((error) => {
+ if (active && isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthenticated(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ }
+ });
+ }, 5000);
+
+ return () => {
+ active = false;
+ window.clearInterval(timer);
+ };
+ }, [authenticated]);
+
+ const handleLogin = async (token: string) => {
+ setBusy(true);
+ setAuthError("");
+ try {
+ await verifyAuthToken(token);
+ storeAuthToken(token);
+ setAuthenticated(true);
+ setHasStoredToken(true);
+ setNotice("登录成功");
+ const [nextSections, nextMonitor] = await Promise.all([fetchConfig(), fetchMonitorState()]);
+ setSections(nextSections);
+ setMonitor(nextMonitor);
+ } catch (error) {
+ console.error("登录失败", error);
+ clearAuthToken();
+ setAuthenticated(false);
+ setHasStoredToken(false);
+ setAuthError("管理令牌无效或服务暂不可用");
+ } finally {
+ setBusy(false);
+ }
+ };
+
+ const handleLogout = () => {
+ clearAuthToken();
+ setAuthenticated(false);
+ setHasStoredToken(false);
+ setNotice("已退出登录");
+ setAuthError("");
+ };
+
+ const updateFieldValue = (sectionKey: string, fieldKey: string, nextValue: string | number | boolean) => {
+ setSections((current) =>
+ current.map((section) => {
+ if (section.key !== sectionKey) {
+ return section;
+ }
+ return {
+ ...section,
+ fields: section.fields.map((field) =>
+ field.key === fieldKey ? { ...field, value: nextValue } : field,
+ ),
+ };
+ }),
+ );
+ };
+
+ const handleClearLogs = () => {
+ setMonitor((current) => ({
+ ...current,
+ logs: [
+ {
+ id: "cleared",
+ prefix: "[系统] [00:00:00]",
+ timestamp: "[00:00:00]",
+ message: "日志已清空,等待任务输出...",
+ tone: "muted",
+ },
+ ],
+ }));
+ };
+
+ const handleSaveConfig = async () => {
+ setBusy(true);
+ try {
+ const savedSections = await saveConfig(sections);
+ setSections(savedSections);
+ setNotice("配置已保存");
+ } catch (error) {
+ console.error("保存配置失败", error);
+ if (isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthenticated(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ } else {
+ setNotice("保存配置失败");
+ }
+ } finally {
+ setBusy(false);
+ }
+ };
+
+ const handleStartRuntime = async () => {
+ setBusy(true);
+ try {
+ const savedSections = await saveConfig(sections);
+ setSections(savedSections);
+ const result = await startRuntime();
+ setNotice(`配置已保存,${result.message}`);
+ await refreshMonitor();
+ } catch (error) {
+ console.error("保存配置或启动维护任务失败", error);
+ if (isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthenticated(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ } else {
+ setNotice("保存配置或启动维护任务失败");
+ }
+ } finally {
+ setBusy(false);
+ }
+ };
+
+ const handleStopRuntime = async () => {
+ setBusy(true);
+ try {
+ const result = await stopRuntime();
+ setNotice(result.message);
+ await refreshMonitor();
+ } catch (error) {
+ console.error("停止维护任务失败", error);
+ if (isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthenticated(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ } else {
+ setNotice("停止维护任务失败");
+ }
+ } finally {
+ setBusy(false);
+ }
+ };
+
+ const handleStartRuntimeLoop = async () => {
+ setBusy(true);
+ try {
+ const savedSections = await saveConfig(sections);
+ setSections(savedSections);
+ const result = await startRuntimeLoop();
+ setNotice(`配置已保存,${result.message}`);
+ await refreshMonitor();
+ } catch (error) {
+ console.error("保存配置或启动循环补号任务失败", error);
+ if (isAuthError(error)) {
+ clearAuthToken();
+ setHasStoredToken(false);
+ setAuthenticated(false);
+ setAuthError("登录已失效,请重新输入管理令牌");
+ } else {
+ setNotice("保存配置或启动循环补号任务失败");
+ }
+ } finally {
+ setBusy(false);
+ }
+ };
+
+ return (
+
+ {!authenticated ?
: null}
+ {authenticated ? (
+ <>
+ {notice ?
{notice}
: null}
+
+ >
+ ) : null}
+
+ );
+}
diff --git a/frontend/src/components/config-panel.tsx b/frontend/src/components/config-panel.tsx
new file mode 100644
index 0000000..8b1bea7
--- /dev/null
+++ b/frontend/src/components/config-panel.tsx
@@ -0,0 +1,322 @@
+import { useEffect, useState } from "preact/hooks";
+import type { ConfigField, ConfigSection } from "../types/runtime";
+
+// Props for the configuration editor panel: the config form data plus the
+// callbacks the parent (App) supplies for edits, persistence, runtime
+// control, and session logout. All action props are fire-and-forget voids;
+// async handling/error reporting lives in the parent.
+type ConfigPanelProps = {
+  sections: ConfigSection[]; // config sections rendered as form groups
+  onValueChange: (sectionKey: string, fieldKey: string, nextValue: string | number | boolean) => void; // update a single field's value, addressed by section key + field key
+  onSave: () => void; // persist the current config
+  onStart: () => void; // start the runtime task
+  onStartLoop: () => void; // start the looping runtime task
+  onStop: () => void; // stop the running task
+  onLogout: () => void; // end the admin session
+  busy?: boolean; // presumably disables actions while a request is in flight — consumer JSX not visible here, confirm
+  running?: boolean; // NOTE(review): usage not visible in this chunk — presumably whether a one-shot run is active
+  loopRunning?: boolean; // NOTE(review): usage not visible in this chunk — presumably whether the loop run is active
+  hasStoredToken?: boolean; // whether a previously stored admin token exists
+};
+
+type ConfigCategory = "common" | "mail" | "advanced";
+
+function FieldControl(props: {
+ sectionKey: string;
+ field: ConfigField;
+ onValueChange: ConfigPanelProps["onValueChange"];
+}) {
+ const { sectionKey, field, onValueChange } = props;
+
+ if (field.type === "select") {
+ return (
+
+ );
+ }
+
+ if (field.type === "checkbox") {
+ return (
+
+ );
+ }
+
+ if (field.type === "textarea") {
+ return (
+