feat: Implement Telegram bot with Claude authentication, mail service, and identity spoofing, refactoring core logic into new modules. #1

Open
mygo-kyx wants to merge 13 commits from mygo-kyx/autoClaude-TGbot:main into Feature-FromFork-Kunkun
17 changed files with 2007 additions and 180 deletions

1223
bot.py

File diff suppressed because it is too large Load Diff

View File

@@ -29,5 +29,48 @@ PRODUCT_ID: str = _cfg["stripe"]["product_id"]
TG_BOT_TOKEN: str = _cfg["telegram"]["bot_token"]
TG_ALLOWED_USERS: list[int] = _cfg["telegram"].get("allowed_users", [])
# --- 角色权限 ---
# 静态权限来自 config.toml,运行时权限来自 permissions.json
# 每次调用 get_merged_permissions() 合并两者
_roles: list[dict] = _cfg["telegram"].get("roles", [])
# 静态权限映射(来自 config.toml)
_STATIC_PERMISSIONS: dict[int, set[str]] = {}
# 检查 roles 是否实际配置了用户(至少一个 role 的 users 非空)
_roles_active = _roles and any(role.get("users") for role in _roles)
if _roles_active:
for role in _roles:
cmds = set(role.get("commands", []))
for uid in role.get("users", []):
_STATIC_PERMISSIONS.setdefault(uid, set()).update(cmds)
else:
# roles 未配置或 users 全为空 → 回退到 allowed_users 全量放行
for uid in TG_ALLOWED_USERS:
_STATIC_PERMISSIONS[uid] = {"*"}
# config.toml 中拥有 "*" 权限的用户 = 超级管理员
ADMIN_USERS: set[int] = {
uid for uid, cmds in _STATIC_PERMISSIONS.items() if "*" in cmds
}
def get_merged_permissions() -> dict[int, set[str]]:
    """Merge static config.toml permissions with runtime permissions.json ones.

    Returns:
        Mapping of user_id -> set of allowed command names.
    """
    # Local import avoids a circular import at module load time.
    from core import permissions as perm_mod
    # Bug fix: copy each set. dict(_STATIC_PERMISSIONS) alone would alias
    # the set objects, so the .update() below would permanently pollute the
    # static permission map with runtime entries.
    merged = {uid: set(cmds) for uid, cmds in _STATIC_PERMISSIONS.items()}
    for uid, cmds in perm_mod.get_permissions_map().items():
        merged.setdefault(uid, set()).update(cmds)
    return merged
# 向后兼容:初始静态权限
TG_USER_PERMISSIONS = _STATIC_PERMISSIONS
# --- 邮箱系统 ---
MAIL_SYSTEMS: list[dict] = _cfg.get("mail", [])
# --- 代理池 ---
# 代理逻辑统一由 proxy_pool.py 管理,这里只做 re-export 保持兼容
from core.proxy_pool import get_proxy, get_proxy_count # noqa: E402, F401

View File

@@ -17,17 +17,34 @@ product_id = "prod_TXU4hGh2EDxASl"
bot_token = "your_bot_token_here" # @BotFather 获取
allowed_users = [] # 允许使用的用户ID列表(空=不限制)
# --- 角色权限控制 ---
# 每个 [[telegram.roles]] 定义一个角色,包含用户列表和允许的命令
# commands = ["*"] 表示全部命令
# 如果不配置 roles,所有 allowed_users 拥有全部权限
[[telegram.roles]]
name = "admin"
users = [] # 管理员用户 ID
commands = ["*"] # 全部命令
[[telegram.roles]]
name = "user"
users = [] # 普通用户 ID
commands = ["accounts", "verify", "stats", "status", "help", "start"]
# --- 邮箱系统轮询使用API 接口相同)---
# 可添加多个 [[mail]] 块
# api_token: 直接配置 API Token无需管理员账号密码
[[mail]]
base_url = "https://mail.example.com/"
admin_email = "admin@example.com"
admin_pass = "your_password"
api_token = "your_api_token_here"
domains = ["example.com"]
# [[mail]]
# base_url = "https://mail2.example.com/"
# admin_email = "admin@mail2.example.com"
# admin_pass = "pass2"
# api_token = "your_api_token_here"
# domains = ["domain2.com", "domain3.com"]
# --- 代理配置 ---
# 代理从 proxy.txt 文件加载,格式: host:port:user:pass,每行一个

5
core/__init__.py Normal file
View File

@@ -0,0 +1,5 @@
"""
autoClaude 核心模块包
包含认证、检查、邮件、代理、账号存储等业务逻辑。
"""

268
core/account_store.py Normal file
View File

@@ -0,0 +1,268 @@
"""
线程安全的账号存储模块
统一管理 accounts.txt 的读写操作,避免并发冲突。
支持删除、统计等功能。
"""
import json
import threading
from pathlib import Path

# Paths are resolved relative to the project root (one level above core/).
_PROJECT_ROOT = Path(__file__).parent.parent
_ACCOUNTS_FILE = _PROJECT_ROOT / "accounts.txt"
_STATS_FILE = _PROJECT_ROOT / "stats.json"

# One lock guards both the files and the in-memory busy set.
_lock = threading.Lock()

# ====== Account pool (concurrent scheduling) ======
# _busy holds the raw account lines currently checked out by a worker.
_busy: set[str] = set()


def acquire(n: int = 0) -> list[str]:
    """Check out up to *n* idle account lines, marking them busy.

    Args:
        n: How many accounts to take; 0 (or a negative value) takes every
           idle line.

    Returns:
        The raw lines that were acquired — possibly fewer than requested,
        and empty when nothing is idle or accounts.txt does not exist.
    """
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                stored = [entry.strip() for entry in f if entry.strip()]
        except FileNotFoundError:
            return []
        idle = [entry for entry in stored if entry not in _busy]
        if not idle:
            return []
        taken = idle if n <= 0 else idle[:n]
        _busy.update(taken)
        return taken
def release(lines: list[str]) -> None:
    """Check the given account lines back in (mark them idle again)."""
    with _lock:
        for entry in lines:
            _busy.discard(entry)


def pool_status() -> dict:
    """Report pool occupancy as {"total", "busy", "free"} line counts."""
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                stored = [entry.strip() for entry in f if entry.strip()]
        except FileNotFoundError:
            stored = []
        busy_count = sum(1 for entry in stored if entry in _busy)
    return {"total": len(stored), "busy": busy_count, "free": len(stored) - busy_count}
# ====== Account operations ======
def append(email: str, session_key: str, org_uuid: str) -> None:
    """Append one account as an 'email|session_key|org_uuid' line."""
    record = f"{email}|{session_key}|{org_uuid}\n"
    with _lock:
        with open(_ACCOUNTS_FILE, "a", encoding="utf-8") as f:
            f.write(record)


def read_all() -> list[dict]:
    """Read every account as a dict; absent fields default to ""."""
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                raw = [entry.strip() for entry in f if entry.strip()]
        except FileNotFoundError:
            return []
        accounts = []
        for entry in raw:
            # Pad so short lines still yield three fields.
            fields = entry.split("|") + ["", "", ""]
            accounts.append({
                "email": fields[0],
                "session_key": fields[1],
                "org_uuid": fields[2],
            })
        return accounts
def read_lines() -> list[str]:
    """Read all raw (stripped, non-empty) account lines."""
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                return [line.strip() for line in f if line.strip()]
        except FileNotFoundError:
            return []


def get_last() -> dict | None:
    """Return the most recently appended account, or None when empty."""
    accounts = read_all()
    return accounts[-1] if accounts else None


def get_last_line() -> str | None:
    """Return the most recently appended raw line, or None when empty."""
    lines = read_lines()
    return lines[-1] if lines else None


def count() -> int:
    """Total number of stored accounts.

    Counts raw lines via read_lines() rather than read_all(): same line
    count, but without building a throwaway dict per account.
    """
    return len(read_lines())
def delete_by_index(index: int) -> dict | None:
    """Delete the account at *index* (1-based).

    Returns:
        A dict describing the removed account, or None when the index is
        out of range or the accounts file does not exist.
    """
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                lines = [line.strip() for line in f if line.strip()]
        except FileNotFoundError:
            return None
        if index < 1 or index > len(lines):
            return None
        removed_line = lines.pop(index - 1)
        with open(_ACCOUNTS_FILE, "w", encoding="utf-8") as f:
            for line in lines:
                f.write(line + "\n")
        # Bug fix: also free the slot in the busy set, otherwise a deleted
        # account could stay "occupied" forever (delete_by_emails already
        # does this).
        _busy.discard(removed_line)
        parts = removed_line.split("|")
        return {
            "email": parts[0] if len(parts) > 0 else "",
            "session_key": parts[1] if len(parts) > 1 else "",
            "org_uuid": parts[2] if len(parts) > 2 else "",
        }
def delete_by_email(email: str) -> dict | None:
    """Delete the first account whose email matches; return its info or None."""
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                lines = [line.strip() for line in f if line.strip()]
        except FileNotFoundError:
            return None
        removed = None
        removed_line = None
        remaining = []
        for line in lines:
            parts = line.split("|")
            # Only the first match is removed; later duplicates are kept.
            if parts[0] == email and removed is None:
                removed_line = line
                removed = {
                    "email": parts[0] if len(parts) > 0 else "",
                    "session_key": parts[1] if len(parts) > 1 else "",
                    "org_uuid": parts[2] if len(parts) > 2 else "",
                }
            else:
                remaining.append(line)
        if removed:
            with open(_ACCOUNTS_FILE, "w", encoding="utf-8") as f:
                for line in remaining:
                    f.write(line + "\n")
            # Bug fix: keep the busy set consistent with the file — a deleted
            # account must not linger as "occupied" (delete_by_emails already
            # does this).
            _busy.discard(removed_line)
        return removed
def delete_by_emails(emails: list[str]) -> int:
    """Delete every account whose email appears in *emails*; return the count."""
    if not emails:
        return 0
    targets = set(emails)
    with _lock:
        try:
            with open(_ACCOUNTS_FILE, "r", encoding="utf-8") as f:
                stored = [entry.strip() for entry in f if entry.strip()]
        except FileNotFoundError:
            return 0
        kept = []
        removed = 0
        for entry in stored:
            if entry.split("|")[0] in targets:
                removed += 1
                # A deleted account must not linger in the busy set.
                _busy.discard(entry)
            else:
                kept.append(entry)
        if removed:
            with open(_ACCOUNTS_FILE, "w", encoding="utf-8") as f:
                for entry in kept:
                    f.write(entry + "\n")
        return removed
# ====== Statistics ======
def _load_stats() -> dict:
    """Load stats.json; fall back to a zeroed template when missing or corrupt."""
    fallback = {
        "register_total": 0,
        "register_success": 0,
        "register_fail": 0,
        "register_fail_reasons": {},
        "cc_total": 0,
        "cc_pass": 0,
        "cc_fail": 0,
    }
    try:
        with open(_STATS_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        return fallback


def _save_stats(stats: dict):
    """Persist *stats* to stats.json (UTF-8, human-readable)."""
    serialized = json.dumps(stats, ensure_ascii=False, indent=2)
    with open(_STATS_FILE, "w", encoding="utf-8") as f:
        f.write(serialized)
def record_register(success: bool, fail_reason: str = ""):
    """Record one registration attempt (and its failure reason, if any)."""
    with _lock:
        stats = _load_stats()
        stats["register_total"] += 1
        if success:
            stats["register_success"] += 1
        else:
            stats["register_fail"] += 1
            if fail_reason:
                reasons = stats.setdefault("register_fail_reasons", {})
                reasons[fail_reason] = reasons.get(fail_reason, 0) + 1
        _save_stats(stats)


def record_cc(passed: bool):
    """Record one CC-check result."""
    with _lock:
        stats = _load_stats()
        stats["cc_total"] += 1
        stats["cc_pass" if passed else "cc_fail"] += 1
        _save_stats(stats)


def get_stats() -> dict:
    """Return a snapshot of the persisted statistics."""
    with _lock:
        snapshot = _load_stats()
    return snapshot

View File

@@ -2,9 +2,9 @@ import uuid
import base64
from curl_cffi import requests # 用于模拟指纹
from config import CLAUDE_URL
from models import ClaudeAccount
from identity import random_ua
from config import CLAUDE_URL, get_proxy
from core.models import ClaudeAccount
from core.identity import random_ua
def attack_claude(target_email):
@@ -48,7 +48,8 @@ def attack_claude(target_email):
CLAUDE_URL,
json=payload,
headers=headers,
impersonate="chrome124"
impersonate="chrome124",
proxies=get_proxy(),
)
if response.status_code == 200:
@@ -132,7 +133,8 @@ def finalize_login(magic_link_fragment):
verify_url,
json=payload,
headers=headers,
impersonate="chrome124"
impersonate="chrome124",
proxies=get_proxy(),
)
if response.status_code == 200:

View File

@@ -1,8 +1,8 @@
from curl_cffi import requests # 用于模拟指纹
from config import PRODUCT_ID
from models import ClaudeAccount
from identity import random_address
from config import PRODUCT_ID, get_proxy
from core.models import ClaudeAccount
from core.identity import random_address
class GiftChecker:
@@ -42,7 +42,7 @@ class GiftChecker:
try:
print(f"[*] 正在尝试扣款 (Gift Purchase)...")
resp = requests.post(url, json=payload, headers=headers, impersonate="chrome124")
resp = requests.post(url, json=payload, headers=headers, impersonate="chrome124", proxies=get_proxy())
resp_json = {}
try:

View File

@@ -2,33 +2,21 @@ import time
import re
import random
import threading
import requests as standard_requests # 用于普通API交互
import requests as standard_requests # 用于普通API交互(不走代理,直连邮件服务器)
class MailSystem:
"""单个邮箱系统实例,支持多域名"""
def __init__(self, base_url, admin_email, admin_password, domains):
def __init__(self, base_url, api_token, domains):
self.base_url = base_url
self.domains = domains # 该系统支持的域名列表
self.token = self._get_token(admin_email, admin_password)
self.token = api_token
self.headers = {"Authorization": self.token}
def _get_token(self, email, password):
"""获取身份令牌,这是我们的通行证"""
url = f"{self.base_url}/api/public/genToken"
payload = {"email": email, "password": password}
try:
resp = standard_requests.post(url, json=payload)
data = resp.json()
if data['code'] == 200:
print(f"[+] 令牌获取成功 ({self.base_url}): {data['data']['token'][:10]}...")
return data['data']['token']
if self.token:
print(f"[+] 邮箱系统已连接 ({self.base_url}), Token: {self.token[:10]}...")
else:
raise Exception(f"获取Token失败: {data}")
except Exception as e:
print(f"[-] 连接邮件系统失败 ({self.base_url}): {e}")
return None
print(f"[-] 邮箱系统 Token 为空 ({self.base_url})")
def create_user(self, email_prefix, domain=None):
"""在系统里注册一个新邮箱用户"""
@@ -44,16 +32,23 @@ class MailSystem:
}
]
}
resp = standard_requests.post(url, json=payload, headers=self.headers)
try:
resp = standard_requests.post(url, json=payload, headers=self.headers, timeout=15)
if resp.json().get('code') == 200:
print(f"[+] 邮箱用户创建成功: {full_email}")
return full_email
elif resp.status_code in (401, 403):
print(f"[-] 邮箱 API Token 无效或已过期! HTTP {resp.status_code}")
return None
else:
print(f"[-] 创建邮箱失败: {resp.text}")
return None
except Exception as e:
print(f"[-] 创建邮箱请求异常: {e}")
return None
def wait_for_email(self, to_email, retry_count=20, sleep_time=3):
"""像猎人一样耐心等待猎物出现"""
def wait_for_email(self, to_email, retry_count=20, sleep_time=3, stop_check=None):
"""像猎人一样耐心等待猎物出现,支持外部中断"""
url = f"{self.base_url}/api/public/emailList"
payload = {
"toEmail": to_email,
@@ -66,10 +61,19 @@ class MailSystem:
print(f"[*] 开始轮询邮件,目标: {to_email}...")
for i in range(retry_count):
# 检查外部中断信号
if stop_check and stop_check():
print("[!] 收到停止信号,中断邮件轮询")
return None
try:
resp = standard_requests.post(url, json=payload, headers=self.headers)
resp = standard_requests.post(url, json=payload, headers=self.headers, timeout=15)
data = resp.json()
if resp.status_code in (401, 403):
print(f"[-] 邮箱 API Token 无效或已过期! HTTP {resp.status_code}")
return None
if data.get('code') == 200 and data.get('data'):
emails = data['data']
for email in emails:
@@ -85,6 +89,27 @@ class MailSystem:
print("[-] 等待超时,未收到邮件。")
return None
def check_health(self) -> dict:
"""检查该邮箱系统的连通性和 Token 有效性"""
if not self.token:
return {"ok": False, "message": "Token 未配置"}
try:
url = f"{self.base_url}/api/public/emailList"
payload = {"toEmail": "health@check.test", "sendName": "", "num": 1, "size": 1}
resp = standard_requests.post(url, json=payload, headers=self.headers, timeout=10)
if resp.status_code == 200:
return {"ok": True, "message": "连接正常"}
elif resp.status_code in (401, 403):
return {"ok": False, "message": f"Token 无效 (HTTP {resp.status_code})"}
else:
return {"ok": False, "message": f"异常响应 (HTTP {resp.status_code})"}
except standard_requests.exceptions.ConnectTimeout:
return {"ok": False, "message": "连接超时"}
except standard_requests.exceptions.ConnectionError:
return {"ok": False, "message": "无法连接"}
except Exception as e:
return {"ok": False, "message": f"异常: {e}"}
def __repr__(self):
return f"MailSystem({self.base_url}, domains={self.domains})"
@@ -103,8 +128,7 @@ class MailPool:
for cfg in mail_configs:
ms = MailSystem(
base_url=cfg["base_url"],
admin_email=cfg["admin_email"],
admin_password=cfg["admin_pass"],
api_token=cfg.get("api_token", ""),
domains=cfg["domains"],
)
if ms.token: # 只添加连接成功的系统

98
core/permissions.py Normal file
View File

@@ -0,0 +1,98 @@
"""
运行时权限管理模块
管理员可通过 Bot 命令动态添加/删除用户和设置权限。
持久化存储在 permissions.json 中。
"""
import json
import threading
from pathlib import Path
_PROJECT_ROOT = Path(__file__).parent.parent
_PERM_FILE = _PROJECT_ROOT / "permissions.json"
_lock = threading.Lock()
# Every command the bot recognizes — used to validate operator input.
ALL_COMMANDS: set[str] = {
    "start", "help", "register", "stop", "check",
    "accounts", "delete", "verify", "stats", "status",
    "mailstatus", "proxy", "proxytest", "proxystatus",
    "document",  # file upload
    "adduser", "removeuser", "setperm", "users",  # admin commands
}
def _load() -> dict:
    """Read permissions.json; default to an empty user table when missing/corrupt."""
    try:
        text = _PERM_FILE.read_text(encoding="utf-8")
        return json.loads(text)
    except (FileNotFoundError, json.JSONDecodeError):
        return {"users": {}}


def _save(data: dict):
    """Write the permission table back to permissions.json."""
    serialized = json.dumps(data, ensure_ascii=False, indent=2)
    with open(_PERM_FILE, "w", encoding="utf-8") as f:
        f.write(serialized)
def add_user(user_id: int, commands: list[str]) -> None:
    """Create a runtime user (or overwrite an existing one) with *commands*."""
    with _lock:
        data = _load()
        data["users"][str(user_id)] = {"commands": commands}
        _save(data)


def remove_user(user_id: int) -> bool:
    """Delete a runtime user; True when an entry was actually removed."""
    key = str(user_id)
    with _lock:
        data = _load()
        if key not in data["users"]:
            return False
        del data["users"][key]
        _save(data)
        return True
def set_commands(user_id: int, commands: list[str]) -> bool:
    """Replace an existing user's command list; False when the user is unknown."""
    key = str(user_id)
    with _lock:
        data = _load()
        if key not in data["users"]:
            return False
        data["users"][key]["commands"] = commands
        _save(data)
        return True


def get_user(user_id: int) -> dict | None:
    """Look up one runtime user's record by id; None when absent."""
    with _lock:
        users = _load()["users"]
    return users.get(str(user_id))
def list_users() -> dict[int, dict]:
    """All runtime users keyed by integer user id."""
    with _lock:
        users = _load()["users"]
    return {int(uid): info for uid, info in users.items()}


def get_permissions_map() -> dict[int, set[str]]:
    """Runtime permission mapping: user_id -> set of allowed command names.

    Merged by the config layer with the static permissions from config.toml.
    """
    with _lock:
        users = _load()["users"]
    return {int(uid): set(info.get("commands", [])) for uid, info in users.items()}

327
core/proxy_pool.py Normal file
View File

@@ -0,0 +1,327 @@
"""
代理池管理模块
功能:
- 从 proxy.txt 加载代理(支持 host:port:user:pass 格式)
- 基于优先级的智能选取(优先使用表现好的代理)
- 自动测试连通性和延迟
- 测试失败降低优先级,过低则淘汰
- 线程安全
"""
import logging
import random
import threading
import time
from dataclasses import dataclass, field
from pathlib import Path
from typing import Callable, Optional
import requests as std_requests
logger = logging.getLogger(__name__)
# --- 配置常量 ---
_PROJECT_ROOT = Path(__file__).parent.parent
_PROXY_FILE = _PROJECT_ROOT / "proxy.txt"
_TEST_URL = "https://claude.ai" # 测试目标
_TEST_TIMEOUT = 10 # 测试超时秒数
_INITIAL_PRIORITY = 100 # 初始优先级
_FAIL_PENALTY = 30 # 每次失败扣分
_SUCCESS_BONUS = 10 # 每次成功加分
_MAX_PRIORITY = 100 # 最高优先级
_REMOVE_THRESHOLD = 0 # 优先级低于此值则淘汰
@dataclass
class Proxy:
"""代理实例"""
raw: str # 原始行
url: str # 解析后的 URL (http://user:pass@host:port)
host: str
port: str
priority: int = _INITIAL_PRIORITY
latency: float = 0.0 # 最近一次测试延迟 (ms)
fail_count: int = 0
success_count: int = 0
last_test_time: float = 0.0
last_test_ok: bool = True
@property
def masked_url(self) -> str:
"""脱敏显示"""
if "@" in self.url:
prefix = self.url.split("@")[0]
suffix = self.url.split("@")[1]
# 隐藏密码
if ":" in prefix.replace("http://", "").replace("https://", ""):
user_part = prefix.split(":")[-2].split("/")[-1]
return f"{self.host}:{self.port} ({user_part[:8]}...)"
return f"{self.host}:{self.port}"
def _parse_line(line: str) -> Proxy | None:
"""解析一行代理配置"""
line = line.strip()
if not line or line.startswith("#"):
return None
# 优先检查完整 URL 格式(必须在 colon-split 之前,否则会被错误匹配)
if line.startswith(("http://", "https://", "socks5://")):
try:
from urllib.parse import urlparse
parsed = urlparse(line)
return Proxy(raw=line, url=line, host=parsed.hostname or "?", port=str(parsed.port or "?"))
except Exception:
return None
parts = line.split(":")
if len(parts) == 4:
host, port, user, passwd = parts
url = f"http://{user}:{passwd}@{host}:{port}"
return Proxy(raw=line, url=url, host=host, port=port)
elif len(parts) == 2:
host, port = parts
url = f"http://{host}:{port}"
return Proxy(raw=line, url=url, host=host, port=port)
return None
class ProxyPool:
    """Thread-safe proxy pool.

    Proxies come from proxy.txt. Selection is priority-weighted random;
    callers (or the built-in tests) report successes/failures, which raise
    or lower each proxy's priority. Entries whose priority falls to the
    removal threshold are skipped by get() and physically evicted by
    _cleanup() after a full test pass.
    """

    def __init__(self):
        # Full list, including deprioritized entries; filtering happens
        # at selection/cleanup time.
        self._proxies: list[Proxy] = []
        self._lock = threading.Lock()
        self.enabled = True  # master switch; False forces direct connections
        self._load()

    def _load(self):
        """Populate the pool from proxy.txt (one proxy per line)."""
        if not _PROXY_FILE.exists():
            print("[*] 未找到 proxy.txt不使用代理")
            return
        with open(_PROXY_FILE, "r", encoding="utf-8") as f:
            for line in f:
                proxy = _parse_line(line)
                if proxy:
                    self._proxies.append(proxy)
        if self._proxies:
            print(f"[+] 代理池: 已加载 {len(self._proxies)} 个代理")
        else:
            print("[!] proxy.txt 存在但没有有效代理")

    def reload(self):
        """Re-read proxy.txt, discarding all current proxies and their stats."""
        with self._lock:
            self._proxies.clear()
            self._load()

    @property
    def count(self) -> int:
        # Total loaded proxies, including deprioritized ones.
        return len(self._proxies)

    @property
    def active_count(self) -> int:
        """Number of proxies still above the removal threshold."""
        return sum(1 for p in self._proxies if p.priority > _REMOVE_THRESHOLD)

    def get(self) -> dict:
        """Pick one live proxy by priority-weighted random choice.

        Returns a requests-style proxies dict, or an empty dict (meaning
        "connect directly") when the pool is disabled or exhausted.
        """
        if not self.enabled:
            return {}
        with self._lock:
            alive = [p for p in self._proxies if p.priority > _REMOVE_THRESHOLD]
            if not alive:
                return {}
            # Weighted random: higher priority => more likely to be picked.
            weights = [p.priority for p in alive]
            chosen = random.choices(alive, weights=weights, k=1)[0]
            return {"http": chosen.url, "https": chosen.url}

    def report_success(self, proxies: dict):
        """Caller feedback: a request through *proxies* succeeded."""
        if not proxies:
            return
        # The proxy is identified by the URL that get() handed out.
        url = proxies.get("https", "")
        with self._lock:
            for p in self._proxies:
                if p.url == url:
                    p.success_count += 1
                    p.priority = min(p.priority + _SUCCESS_BONUS, _MAX_PRIORITY)
                    break

    def report_failure(self, proxies: dict):
        """Caller feedback: a request through *proxies* failed; lower its priority."""
        if not proxies:
            return
        url = proxies.get("https", "")
        with self._lock:
            for p in self._proxies:
                if p.url == url:
                    p.fail_count += 1
                    p.priority -= _FAIL_PENALTY
                    if p.priority <= _REMOVE_THRESHOLD:
                        # Not removed here — get() already skips it; actual
                        # eviction happens in _cleanup() after a test pass.
                        print(f"[!] 代理已淘汰 (优先级归零): {p.masked_url}")
                    break

    def _cleanup(self):
        """Evict proxies at/below the removal threshold and persist survivors.

        NOTE(review): callers are expected to hold self._lock (test_all does).
        """
        before = len(self._proxies)
        self._proxies = [p for p in self._proxies if p.priority > _REMOVE_THRESHOLD]
        removed = before - len(self._proxies)
        if removed:
            print(f"[!] 清理了 {removed} 个失效代理,剩余 {len(self._proxies)}")
            self._save()

    def _save(self):
        """Rewrite proxy.txt with the surviving proxies' original raw lines."""
        with open(_PROXY_FILE, "w", encoding="utf-8") as f:
            for p in self._proxies:
                f.write(p.raw + "\n")

    def test_one(self, proxy: Proxy) -> dict:
        """Probe one proxy against _TEST_URL and adjust its score in place.

        Any HTTP status below 500 counts as a pass (a 4xx still proves the
        proxy relays traffic). Returns a dict with "ok", "latency_ms"
        (-1 when no response arrived) and either "status" or "error".

        NOTE(review): mutates the Proxy without holding self._lock —
        presumably tolerated because score races are benign; confirm if
        stricter accounting is ever required.
        """
        logger.info(f"🔍 测试代理: {proxy.masked_url}")
        proxies = {"http": proxy.url, "https": proxy.url}
        try:
            start = time.time()
            resp = std_requests.get(
                _TEST_URL,
                proxies=proxies,
                timeout=_TEST_TIMEOUT,
                allow_redirects=True,
            )
            latency = (time.time() - start) * 1000  # ms
            proxy.latency = latency
            proxy.last_test_time = time.time()
            if resp.status_code < 500:
                proxy.last_test_ok = True
                proxy.success_count += 1
                proxy.priority = min(proxy.priority + _SUCCESS_BONUS, _MAX_PRIORITY)
                logger.info(f" ✅ 通过 {proxy.masked_url} | {round(latency)}ms | HTTP {resp.status_code}")
                return {"ok": True, "latency_ms": round(latency), "status": resp.status_code}
            else:
                proxy.last_test_ok = False
                proxy.fail_count += 1
                proxy.priority -= _FAIL_PENALTY
                logger.warning(f" ❌ 失败 {proxy.masked_url} | HTTP {resp.status_code}")
                return {"ok": False, "latency_ms": round(latency), "error": f"HTTP {resp.status_code}"}
        except std_requests.exceptions.ConnectTimeout:
            proxy.last_test_ok = False
            proxy.fail_count += 1
            proxy.priority -= _FAIL_PENALTY
            proxy.last_test_time = time.time()
            logger.warning(f" ❌ 超时 {proxy.masked_url}")
            return {"ok": False, "latency_ms": -1, "error": "连接超时"}
        except std_requests.exceptions.ProxyError as e:
            proxy.last_test_ok = False
            proxy.fail_count += 1
            proxy.priority -= _FAIL_PENALTY
            proxy.last_test_time = time.time()
            logger.warning(f" ❌ 代理错误 {proxy.masked_url}: {e}")
            return {"ok": False, "latency_ms": -1, "error": f"代理错误: {e}"}
        except Exception as e:
            proxy.last_test_ok = False
            proxy.fail_count += 1
            proxy.priority -= _FAIL_PENALTY
            proxy.last_test_time = time.time()
            logger.warning(f" ❌ 异常 {proxy.masked_url}: {e}")
            return {"ok": False, "latency_ms": -1, "error": str(e)}

    def test_all(self, progress_callback: Optional[Callable] = None, max_workers: int = 5) -> list[dict]:
        """Concurrently probe every proxy, then evict the dead ones.

        Args:
            progress_callback: optional callable (current, total, result_dict),
                invoked after each proxy finishes; its exceptions are swallowed
                so reporting can never abort the run.
            max_workers: number of concurrent test threads.

        Returns:
            Per-proxy result dicts, in the pool's original order.
        """
        from concurrent.futures import ThreadPoolExecutor, as_completed
        with self._lock:
            proxies_snapshot = list(self._proxies)
        total = len(proxies_snapshot)
        logger.info(f"📡 开始并发测试 {total} 个代理({max_workers} 并发)...")
        results = [None] * total  # indexed by submission order to keep results stable
        completed = [0]  # one-element list so the closure below can mutate it
        results_lock = threading.Lock()
        def _test_proxy(index, proxy):
            # Worker-thread body: test and tag the result with its slot index.
            result = self.test_one(proxy)
            result["proxy"] = proxy.masked_url
            result["priority"] = proxy.priority
            return index, result
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {
                executor.submit(_test_proxy, i, proxy): i
                for i, proxy in enumerate(proxies_snapshot)
            }
            for future in as_completed(futures):
                idx, result = future.result()
                results[idx] = result
                with results_lock:
                    completed[0] += 1
                    current = completed[0]
                    if progress_callback:
                        try:
                            progress_callback(current, total, result)
                        except Exception:
                            # Progress reporting must never abort the test run.
                            pass
        ok_count = sum(1 for r in results if r and r["ok"])
        fail_count = total - ok_count
        logger.info(
            f"📡 代理测试完成: ✅ 通过 {ok_count} | ❌ 失败 {fail_count} | "
            f"剩余可用 {self.active_count}"
        )
        with self._lock:
            self._cleanup()
        return results

    def status_list(self) -> list[dict]:
        """Snapshot of every proxy's display name, score and test history."""
        with self._lock:
            return [
                {
                    "proxy": p.masked_url,
                    "priority": p.priority,
                    "latency_ms": round(p.latency) if p.latency else "-",
                    "success": p.success_count,
                    "fail": p.fail_count,
                    "last_ok": p.last_test_ok,
                }
                for p in self._proxies
            ]
# --- Module-level singleton ---
pool = ProxyPool()


def get_proxy() -> dict:
    """External entry point: one weighted-random proxy as a requests-style dict."""
    return pool.get()


def get_proxy_count() -> int:
    """Total number of loaded proxies (including deprioritized ones)."""
    return pool.count

View File

@@ -2,8 +2,8 @@ import uuid
import random
from curl_cffi import requests # 用于模拟指纹
from config import STRIPE_PK
from identity import random_address, random_name
from config import STRIPE_PK, get_proxy
from core.identity import random_address, random_name
class StripeTokenizer:
@@ -77,7 +77,7 @@ class StripeTokenizer:
try:
print(f"[*] 正在向 Stripe 请求 Token: {cc_num[:4]}******{cc_num[-4:]}")
resp = requests.post(url, data=data, headers=headers, impersonate="chrome124")
resp = requests.post(url, data=data, headers=headers, impersonate="chrome124", proxies=get_proxy())
if resp.status_code == 200:
pm_id = resp.json().get("id")

View File

@@ -44,11 +44,13 @@ echo ""
# ============================================================
info "检查系统依赖..."
# 确保 uv 路径在 PATH 中
export PATH="$HOME/.local/bin:/root/.local/bin:$PATH"
# 安装 uv如果不存在
if ! command -v uv &> /dev/null; then
info "安装 uv..."
curl -LsSf https://astral.sh/uv/install.sh | sh
export PATH="$HOME/.local/bin:$PATH"
ok "uv 已安装"
else
ok "uv 已存在 ($(uv --version))"
@@ -59,8 +61,16 @@ fi
# ============================================================
info "安装 Python 依赖..."
cd "$APP_DIR"
sudo -u "$RUN_USER" uv sync 2>/dev/null || sudo -u "$RUN_USER" uv pip install -r pyproject.toml 2>/dev/null || true
ok "依赖安装完成"
if uv sync; then
ok "依赖安装完成"
else
warn "uv sync 失败,尝试 uv pip install..."
if uv pip install -r pyproject.toml; then
ok "依赖安装完成 (pip fallback)"
else
err "依赖安装失败,请手动检查"
fi
fi
# ============================================================
# 3. 检查配置文件
@@ -83,7 +93,16 @@ fi
# ============================================================
# 4. 获取 uv 和 python 路径
# ============================================================
UV_PATH="$(sudo -u "$RUN_USER" bash -c 'which uv')"
UV_PATH="$(which uv 2>/dev/null || echo '')"
if [ -z "$UV_PATH" ]; then
# 尝试常见路径
for p in "$HOME/.local/bin/uv" "/root/.local/bin/uv" "/usr/local/bin/uv"; do
if [ -x "$p" ]; then UV_PATH="$p"; break; fi
done
fi
if [ -z "$UV_PATH" ]; then
err "找不到 uv请检查安装是否成功"
fi
info "uv 路径: ${UV_PATH}"
# ============================================================
@@ -119,7 +138,6 @@ SyslogIdentifier=${APP_NAME}
# 安全加固
NoNewPrivileges=true
ProtectSystem=strict
ProtectHome=read-only
ReadWritePaths=${APP_DIR}
PrivateTmp=true

View File

@@ -3,10 +3,10 @@ import random
import string
from config import MAIL_SYSTEMS
from mail_service import MailPool, extract_magic_link
from stripe_token import StripeTokenizer
from gift_checker import GiftChecker
from claude_auth import attack_claude, finalize_login
from core.mail_service import MailPool, extract_magic_link
from core.stripe_token import StripeTokenizer
from core.gift_checker import GiftChecker
from core.claude_auth import attack_claude, finalize_login
# --- 主流程 (The Ritual) ---

View File

@@ -6,6 +6,7 @@ readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"curl-cffi>=0.14.0",
"faker>=36.0.0",
"python-telegram-bot>=21.0",
"requests>=2.32.5",
]

23
uv.lock generated
View File

@@ -25,6 +25,7 @@ version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "curl-cffi" },
{ name = "faker" },
{ name = "python-telegram-bot" },
{ name = "requests" },
]
@@ -32,6 +33,7 @@ dependencies = [
[package.metadata]
requires-dist = [
{ name = "curl-cffi", specifier = ">=0.14.0" },
{ name = "faker", specifier = ">=36.0.0" },
{ name = "python-telegram-bot", specifier = ">=21.0" },
{ name = "requests", specifier = ">=2.32.5" },
]
@@ -182,6 +184,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/7c/d2ba86b0b3e1e2830bd94163d047de122c69a8df03c5c7c36326c456ad82/curl_cffi-0.14.0-cp39-abi3-win_arm64.whl", hash = "sha256:2eed50a969201605c863c4c31269dfc3e0da52916086ac54553cfa353022425c", size = 1425067, upload-time = "2025-12-16T03:25:06.454Z" },
]
[[package]]
name = "faker"
version = "40.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fc/7e/dccb7013c9f3d66f2e379383600629fec75e4da2698548bdbf2041ea4b51/faker-40.4.0.tar.gz", hash = "sha256:76f8e74a3df28c3e2ec2caafa956e19e37a132fdc7ea067bc41783affcfee364", size = 1952221, upload-time = "2026-02-06T23:30:15.515Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ac/63/58efa67c10fb27810d34351b7a10f85f109a7f7e2a07dc3773952459c47b/faker-40.4.0-py3-none-any.whl", hash = "sha256:486d43c67ebbb136bc932406418744f9a0bdf2c07f77703ea78b58b77e9aa443", size = 1987060, upload-time = "2026-02-06T23:30:13.44Z" },
]
[[package]]
name = "h11"
version = "0.16.0"
@@ -274,6 +288,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
[[package]]
name = "tzdata"
version = "2025.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" },
]
[[package]]
name = "urllib3"
version = "2.6.3"