# Source stats (from the original paste): 1073 lines, 51 KiB, Python
import re
|
||
import json
|
||
import base64
|
||
import time
|
||
import random
|
||
import uuid
|
||
import datetime
|
||
from zoneinfo import ZoneInfo
|
||
from typing import Optional
|
||
|
||
try:
|
||
from curl_cffi import requests
|
||
import esprima
|
||
except ImportError:
|
||
print("Please install dependencies: pip install curl_cffi esprima")
|
||
exit(1)
|
||
|
||
|
||
# ==========================================
|
||
# Utils
|
||
# ==========================================
|
||
|
||
class Utils:
    """Small string helpers shared by the generator."""

    @staticmethod
    def between(main_text: Optional[str], value_1: Optional[str], value_2: Optional[str]) -> str:
        """Return the substring of *main_text* between the first occurrence of
        *value_1* and the next occurrence of *value_2*.

        Best-effort: returns "" when *main_text* is None, when either
        delimiter is missing from the text, or when a delimiter has an
        unusable type — instead of raising.
        """
        try:
            return main_text.split(value_1)[1].split(value_2)[0]
        except (AttributeError, IndexError, TypeError):
            # AttributeError: main_text is None (no .split);
            # IndexError: value_1 not found, so [1] is out of range;
            # TypeError: delimiter of an unsupported type.
            # Anything else is a real bug and should propagate.
            return ""
|
||
|
||
# ==========================================
|
||
# PoW
|
||
# ==========================================
|
||
|
||
class Challenges:
    """Proof-of-work helpers: base64 config encoding, the 32-bit hash used
    for difficulty checks, and the brute-force nonce search."""

    @staticmethod
    def encode(e):
        """Base64-encode *e*; dicts and lists are compact-JSON serialized first."""
        payload = json.dumps(e, separators=(",", ":")) if isinstance(e, (dict, list)) else e
        return base64.b64encode(str(payload).encode("utf-8")).decode()

    @staticmethod
    def generate_token(config):
        """Produce a no-challenge answer token (difficulty slot forced to 1).

        On any failure, returns "error_" plus the base64-encoded error text.
        """
        fallback = "e"
        started = time.time() * 1000
        try:
            config[3] = 1
            # Elapsed "solve" time in ms (effectively ~0 here).
            config[9] = round(time.time() * 1000 - started)
            return "gAAAAAC" + Challenges.encode(config)
        except Exception as exc:
            fallback = Challenges.encode(str(exc))
            return "error_" + fallback

    @staticmethod
    def mod(e: str) -> str:
        """32-bit FNV-1a hash of *e* with an avalanche finalizer, rendered as
        8 lowercase hex digits."""
        acc = 2166136261
        for character in e:
            acc = ((acc ^ ord(character)) * 16777619) & 0xFFFFFFFF
        acc ^= acc >> 16
        acc = (acc * 2246822507) & 0xFFFFFFFF
        acc ^= acc >> 13
        acc = (acc * 3266489909) & 0xFFFFFFFF
        acc ^= acc >> 16
        return format(acc, "08x")

    @staticmethod
    def _runCheck(t0, n, r, o, config):
        """Test nonce *o*: stamp it into the config, encode, and accept when
        the hash prefix is lexicographically <= the target *r*."""
        config[3] = o
        config[9] = round(time.time() * 1000 - t0)
        encoded = Challenges.encode(config)
        digest_prefix = Challenges.mod(n + encoded)[:len(r)]
        return f"{encoded}~S" if digest_prefix <= r else None

    @staticmethod
    def solve_pow(t, n, config):
        """Brute-force up to 500000 nonces; return the prefixed answer or None."""
        started = int(time.time() * 1000)
        for nonce in range(500000):
            answer = Challenges._runCheck(started, t, n, nonce, config)
            if answer:
                return "gAAAAAB" + answer
        return None
|
||
|
||
# ==========================================
|
||
# AST for VM
|
||
# ==========================================
|
||
|
||
class Parser:
    """Static analysis of the decompiled VM JavaScript.

    Uses esprima to resolve variable definitions, locate the payload-object
    assignments, extract the XOR key, and classify each payload key into a
    synthesis instruction consumed by VM.get_turnstile.
    """

    @staticmethod
    def find_var_definition(var_name, start_line, code):
        """Resolve the JS expression assigned to *var_name* before *start_line*.

        Parses everything above *start_line*, collects every
        VariableDeclarator, then recursively substitutes identifiers until a
        literal-ish expression remains.  The result may be wrapped in
        doublexor(...) / singlebtoa(...) markers depending on how the
        variable is used near *start_line*.  Returns None when the code
        prefix cannot be parsed, or the unresolved name on failure paths.
        """
        code_lines = code.splitlines()
        relevant_code = '\n'.join(code_lines[:start_line - 1])
        try:
            sub_ast = esprima.parseScript(relevant_code, {'loc': True, 'range': True, 'tolerant': True})
        except Exception:
            # Unparseable prefix: nothing to resolve.
            return None

        # var name -> list of {'line': int, 'value': initializer source text}
        var_defs = {}

        def collect_var_defs(node, var_defs):
            # Record every `var x = <init>` that appears before start_line.
            if (node.type == 'VariableDeclarator' and
                    hasattr(node, 'id') and node.id and
                    hasattr(node, 'init') and node.init and
                    hasattr(node, 'loc') and node.loc):
                id_name = node.id.name if hasattr(node.id, 'name') else None
                if not id_name:
                    return
                abs_line = node.loc.start.line if hasattr(node.loc.start, 'line') else None
                if abs_line is None or abs_line >= start_line:
                    return
                if hasattr(node.init, 'range'):
                    # Slice the raw initializer text straight out of the source.
                    value = relevant_code[node.init.range[0]:node.init.range[1]].strip()
                else:
                    value = str(node.init).strip() if node.init else ''
                if id_name not in var_defs:
                    var_defs[id_name] = []
                var_defs[id_name].append({'line': abs_line, 'value': value})

        def iterative_traverse(ast, visitor):
            # Explicit-stack AST walk (avoids recursion limits on deep ASTs).
            if not ast:
                return
            stack = [ast]
            visited = set()
            max_stack_size = 10000  # runaway guard; walk is abandoned past this
            while stack:
                if len(stack) > max_stack_size:
                    break
                node = stack.pop()
                node_id = id(node)
                if node_id in visited:
                    continue
                visited.add(node_id)
                visitor(node)
                for key in reversed(node.__dict__.keys()):
                    value = getattr(node, key, None)
                    if isinstance(value, list):
                        for item in reversed(value):
                            if isinstance(item, esprima.nodes.Node) and id(item) not in visited:
                                item._parent = node  # backlink consumed by visitors
                                stack.append(item)
                    elif isinstance(value, esprima.nodes.Node) and id(value) not in visited:
                        value._parent = node
                        stack.append(value)

        iterative_traverse(sub_ast, lambda n: collect_var_defs(n, var_defs))

        last_resolved = None
        def_line = None

        if var_name in var_defs:
            # Prefer the latest definition that is a plain value (skip
            # btoa/XOR-wrapped reassignments).
            var_defs[var_name].sort(key=lambda x: x['line'], reverse=True)
            for defn in var_defs[var_name]:
                if 'btoa' not in defn['value'] and 'XOR_STR' not in defn['value'] and \
                        'doubleXOR' not in defn['value'] and 'singlebtoa' not in defn['value']:
                    last_resolved = defn['value']
                    def_line = defn['line']
                    break

        if last_resolved:
            resolved_vars_cache = {}

            def resolve_var_recursive(expr, var_line):
                # Substitute free identifiers in *expr* with their own resolved
                # definitions, recursively, via whole-word regex replacement.
                try:
                    expr_ast = esprima.parseScript(expr, {'loc': True, 'range': True, 'tolerant': True})
                except Exception:
                    return expr
                vars_set = set()

                def collect_identifiers(node):
                    # Gather free identifiers; skip non-computed member
                    # property names (obj.prop) and the global `window`.
                    if (hasattr(node, 'type') and node.type == 'Identifier' and hasattr(node, 'name')):
                        parent = getattr(node, '_parent', None)
                        if parent:
                            parent_type = parent.type if hasattr(parent, 'type') else None
                            if ((parent_type == 'MemberExpression' and hasattr(parent, 'property') and parent.property == node and not (hasattr(parent, 'computed') and parent.computed)) or node.name == 'window'):
                                return
                        vars_set.add(node.name)

                def iterative_traverse_safe(ast, visitor):
                    # Same explicit-stack walk as above, without the size cap.
                    if not ast:
                        return
                    stack = [ast]
                    visited = set()
                    while stack:
                        node = stack.pop()
                        node_id = id(node)
                        if node_id in visited:
                            continue
                        visited.add(node_id)
                        visitor(node)
                        for key in reversed(node.__dict__.keys()):
                            value = getattr(node, key, None)
                            if isinstance(value, list):
                                for item in reversed(value):
                                    if isinstance(item, esprima.nodes.Node) and id(item) not in visited:
                                        item._parent = node
                                        stack.append(item)
                            elif isinstance(value, esprima.nodes.Node) and id(value) not in visited:
                                value._parent = node
                                stack.append(value)

                iterative_traverse_safe(expr_ast, collect_identifiers)
                if not vars_set:
                    return expr

                for v in vars_set:
                    if v in resolved_vars_cache:
                        continue
                    def_value = v
                    if v in var_defs:
                        # Latest non-wrapped definition strictly above var_line.
                        for defn in sorted(var_defs[v], key=lambda x: x['line'], reverse=True):
                            if defn['line'] < var_line and 'btoa' not in defn['value'] and 'XOR_STR' not in defn['value']:
                                def_value = defn['value']
                                break
                    # Seed the cache first so recursive cycles terminate,
                    # then refine with the fully-resolved expression.
                    resolved_vars_cache[v] = def_value
                    resolved_vars_cache[v] = resolve_var_recursive(def_value, var_line)

                final_expr = expr
                for k, v in resolved_vars_cache.items():
                    final_expr = re.sub(r'\b' + re.escape(k) + r'\b', str(v), final_expr)
                return final_expr

            last_resolved = resolve_var_recursive(last_resolved, def_line)
            if last_resolved:
                escaped_var_name = re.escape(var_name)
                # XOR_STR(x, x) appearing at least twice => the value is
                # double-XORed downstream; tag it for VM.get_turnstile.
                double_xor_pattern = re.compile(rf'XOR_STR\s*\(\s*{escaped_var_name}\s*,\s*{escaped_var_name}\s*\)')
                xor_matches = double_xor_pattern.findall(code)
                if xor_matches and len(xor_matches) >= 2:
                    last_resolved = f'doublexor({last_resolved})'
                else:
                    # Otherwise look within the 10 lines above the usage site
                    # for a lone btoa("" + x) with no XOR_STR(x, ...) nearby.
                    usage_line_index = start_line - 1
                    search_start = max(0, usage_line_index - 10)
                    relevant_lines = '\n'.join(code_lines[search_start:usage_line_index + 1])
                    btoa_pattern = re.compile(rf'btoa\s*\(\s*""\s*\+\s*{escaped_var_name}\s*\)')
                    xor_var_pattern = re.compile(rf'XOR_STR\s*\(\s*{escaped_var_name}\s*,')
                    btoa_matches = btoa_pattern.findall(relevant_lines)
                    has_xor_var = bool(xor_var_pattern.search(relevant_lines))
                    if btoa_matches and len(btoa_matches) == 1 and not has_xor_var:
                        last_resolved = f'singlebtoa({last_resolved})'
        return last_resolved

    @staticmethod
    def parse_assigments(code):
        """Map payload-object keys to resolved value expressions.

        Finds the identifier passed to the *last* JSON.stringify(...) call,
        then collects every `obj.prop = someVar` assignment onto that object,
        resolving the property-name variable to its literal (via var_values)
        and the value variable via find_var_definition.  Returns {} when the
        code does not parse or no stringify call is found.

        NOTE(review): "assigments" is a long-standing typo kept for
        interface compatibility with external callers.
        """
        try:
            ast = esprima.parseScript(code, loc=True, jsx=True)
        except Exception:
            return {}
        stringify_calls = []

        def traverse_node(node):
            # Collect the identifier argument of each JSON.stringify(<Identifier>).
            if isinstance(node, dict):
                if node.get('type') == 'CallExpression':
                    callee = node.get('callee', {})
                    if (callee.get('type') == 'MemberExpression' and callee.get('object', {}).get('name') == 'JSON' and callee.get('property', {}).get('name') == 'stringify' and node.get('arguments') and node['arguments'][0]['type'] == 'Identifier'):
                        stringify_calls.append(node['arguments'][0]['name'])
                for v in node.values():
                    traverse_node(v)
            elif isinstance(node, list):
                for item in node:
                    traverse_node(item)

        traverse_node(ast.toDict())
        last_stringify_arg = stringify_calls[-1] if stringify_calls else None
        if not last_stringify_arg:
            return {}

        # Literal variable values; used to turn key variables into key names.
        var_values = {}

        def traverse_vars(node):
            if isinstance(node, dict):
                if node.get('type') == 'VariableDeclarator':
                    id_node = node.get('id', {})
                    init_node = node.get('init', {})
                    if (id_node.get('type') == 'Identifier' and init_node and init_node.get('type') in ('Literal', 'NumericLiteral', 'StringLiteral')):
                        var_values[id_node['name']] = init_node.get('value')
                for v in node.values():
                    traverse_vars(v)
            elif isinstance(node, list):
                for item in node:
                    traverse_vars(item)

        traverse_vars(ast.toDict())

        assignments = {}

        def traverse_assignments(node):
            # `target.prop = someVar` where target is the stringified object.
            if isinstance(node, dict):
                if node.get('type') == 'AssignmentExpression':
                    left = node.get('left', {})
                    right = node.get('right', {})
                    if (left.get('type') == 'MemberExpression' and left.get('object', {}).get('name') == last_stringify_arg and left.get('property', {}).get('type') == 'Identifier' and right.get('type') == 'Identifier' and node.get('loc')):
                        key_var = left['property']['name']
                        value = right['name']
                        # Fall back to the raw variable name when no literal is known.
                        key = var_values.get(key_var, key_var)
                        resolved_value = Parser.find_var_definition(value, node['loc']['start']['line'], code) or value
                        assignments[key] = resolved_value
                for v in node.values():
                    traverse_assignments(v)
            elif isinstance(node, list):
                for item in node:
                    traverse_assignments(item)

        traverse_assignments(ast.toDict())
        return assignments

    @staticmethod
    def get_xor_key(js_code: str):
        """Extract the second argument of the *last* top-level XOR_STR(...) call.

        Returns its literal value directly, or — when it is an identifier —
        the literal from a matching top-level declaration/assignment.
        Returns None when no suitable call or value is found.
        """
        try:
            parsed = esprima.parseScript(js_code, tolerant=True)
        except Exception:
            return None
        last_xor_call = None
        second_arg_node = None
        # Scan only top-level declarations; the last match wins.
        for node in parsed.body:
            if node.type == 'VariableDeclaration':
                for decl in node.declarations:
                    if decl.init and decl.init.type == 'CallExpression':
                        call = decl.init
                        if call.callee.type == 'Identifier' and call.callee.name == 'XOR_STR':
                            last_xor_call = call
                            second_arg_node = call.arguments[1]
        if not last_xor_call:
            return None
        if second_arg_node.type == 'Identifier':
            var_name = second_arg_node.name
        elif second_arg_node.type == 'Literal':
            return second_arg_node.value
        else:
            return None

        def find_value(nodes, name):
            # Resolve *name* via a literal declaration or literal assignment.
            for node in nodes:
                if node.type == 'VariableDeclaration':
                    for decl in node.declarations:
                        if decl.id.name == name and decl.init.type == 'Literal':
                            return decl.init.value
                elif node.type == 'ExpressionStatement' and node.expression.type == 'AssignmentExpression':
                    expr = node.expression
                    if expr.left.type == 'Identifier' and expr.left.name == name and expr.right.type == 'Literal':
                        return expr.right.value
            return None

        return find_value(parsed.body, var_name)

    @staticmethod
    def parse_keys(decompiled_code: str) -> tuple[Optional[str], dict]:
        """Classify each payload assignment into a synthesis instruction.

        Returns (xor_key, parsed_keys).  xor_key may be None when extraction
        fails.  parsed_keys maps payload key -> either a numeric string or a
        tag understood by VM.get_turnstile: "location", "ipinfo", "vendor",
        "history", "localstorage", "element", "random_N", or a
        doublexor(...) / singlebtoa(...) wrapped expression.
        Unclassified values are silently dropped.
        """
        assignments: dict = Parser.parse_assigments(decompiled_code)
        xor_key: Optional[str] = Parser.get_xor_key(decompiled_code)
        parsed_keys: dict = {}
        randomindex = 1
        for key, value in assignments.items():
            key = str(key)
            if value.startswith("Array") and "location" not in value:
                try:
                    # `Array(...) ? ... : a + b` ternary — evaluate the sum branch.
                    numbers = value.split(') : ')[1].split(" + ")
                    num1 = float(numbers[0])
                    num2 = float(numbers[1])
                    parsed_keys[key] = str(float(num1 + num2))
                except Exception:
                    # Unparseable arithmetic: leave the key out entirely.
                    pass
            elif "location" in value:
                parsed_keys[key] = "location"
            elif "cfIpLongitude" in value:
                parsed_keys[key] = "ipinfo"
            elif "maxTouchPoints" in value:
                parsed_keys[key] = "vendor"
            elif "history" in value:
                parsed_keys[key] = "history"
            elif 'window["Object"]["keys"]' in value:
                parsed_keys[key] = "localstorage"
            elif 'createElement' in value:
                parsed_keys[key] = "element"
            elif value.isdigit():
                parsed_keys[key] = value
            elif "random" in value:
                # Number random slots in encounter order: random_1, random_2, ...
                parsed_keys[key] = "random_" + str(randomindex)
                randomindex += 1
            elif "doublexor" in value:
                parsed_keys[key] = value
            elif "singlebtoa" in value:
                parsed_keys[key] = value
        return xor_key, parsed_keys
|
||
|
||
# ==========================================
|
||
# VM Bytecode
|
||
# ==========================================
|
||
|
||
class Decompiler:
    """Translates the obfuscated VM bytecode back into readable JavaScript.

    All state lives on the class itself and is reset by start():
    `decompiled` accumulates the emitted JS source, `array_dict` caches
    known literal values per VM slot, `mapping` is mutated at runtime by
    COPY instructions, and `xorkey`/`xorkey2` record the discovered keys.
    """

    # opcode id (as string) -> operation name; mutated at runtime by COPY
    mapping: dict = {
        "1": "XOR_STR", "2": "SET_VALUE", "3": "BTOA", "4": "BTOA_2", "5": "ADD_OR_PUSH",
        "6": "ARRAY_ACCESS", "7": "CALL", "8": "COPY", "10": "window", "11": "GET_SCRIPT_SRC",
        "12": "GET_MAP", "13": "TRY_CALL", "14": "JSON_PARSE", "15": "JSON_STRINGIFY", "17": "CALL_AND_SET",
        "18": "ATOB", "19": "BTOA_3", "20": "IF_EQUAL_CALL", "21": "IF_DIFF_CALL", "22": "TEMP_STACK_CALL",
        "23": "IF_DEFINED_CALL", "24": "BIND_METHOD", "27": "REMOVE_OR_SUBTRACT", "28": "undefined",
        "25": "undefined", "26": "undefined", "29": "LESS_THAN", "31": "INCREMENT", "32": "DECREMENT_AND_EXEC",
        "33": "MULTIPLY", "34": "MOVE"
    }
    # JS helper sources emitted into the decompiled output on first use
    functions: dict = {
        "XOR_STR": """function XOR_STR(e, t) {
    e = String(e);
    t = String(t);
    let n = "";
    for (let r = 0; r < e.length; r++)
        n += String.fromCharCode(e.charCodeAt(r) ^ t.charCodeAt(r % t.length));
    return n;
}
"""
    }

    @staticmethod
    def start():
        """Reset all class-level decompiler state for a fresh run."""
        Decompiler.xorkey = ""
        Decompiler.xorkey2 = ""
        Decompiler.decompiled = "var mem = {};\n"
        Decompiler.array_dict = {}
        Decompiler.vg = 0          # instruction counter
        Decompiler.round1 = 0      # nested-payload round (0, 1, then 2)
        Decompiler.found = False   # True once the second XOR key is located
        Decompiler.potential = []  # candidate {var, key} pairs from XOR_STR ops

    @staticmethod
    def xS(e, t):
        """Repeating-key XOR of string *e* with key *t* (mirrors JS XOR_STR)."""
        n = ""
        for r in range(len(e)):
            n += chr(ord(e[r]) ^ ord(t[r % len(t)]))
        return n

    @staticmethod
    def handle_operation(operation, args):
        """Emit JavaScript for one decoded VM instruction (dispatch on name)."""
        if operation == "COPY":
            # Alias one opcode slot to another's operation; emit the helper
            # function source (once) or a `window` alias as a side effect.
            Decompiler.mapping[args[0]] = Decompiler.mapping[args[1]]
            if Decompiler.mapping[args[1]] != "window":
                if Decompiler.mapping[args[1]] in Decompiler.functions and f"function {Decompiler.mapping[args[1]]}" not in Decompiler.decompiled:
                    Decompiler.decompiled += Decompiler.functions[Decompiler.mapping[args[1]]] + "\n"
            else:
                var_name = str(args[1]).replace(".", "_")
                Decompiler.decompiled += f"var var_{var_name} = window;\n"
                Decompiler.array_dict[args[1]] = "window"
        elif operation == "SET_VALUE":
            var_name = str(args[0]).replace(".", "_")
            value = args[1]
            try:
                # Numeric literal: emit integers without a trailing .0.
                num = float(value)
                if num.is_integer():
                    Decompiler.decompiled += f"var var_{var_name} = {int(num)};\n"
                    Decompiler.array_dict[args[0]] = str(int(num))
                else:
                    Decompiler.decompiled += f"var var_{var_name} = {num};\n"
                    Decompiler.array_dict[args[0]] = str(num)
            except (ValueError, TypeError):
                # Non-numeric: map "[]", "None"/None, strings, and lists to JS.
                if isinstance(value, str):
                    if value == "[]":
                        Decompiler.decompiled += f"var var_{var_name} = [];\n"
                        Decompiler.array_dict[args[0]] = []
                    elif value == "None":
                        Decompiler.decompiled += f"var var_{var_name} = null;\n"
                        Decompiler.array_dict[args[0]] = "null"
                    else:
                        Decompiler.decompiled += f"var var_{var_name} = \"{value}\";\n"
                        Decompiler.array_dict[args[0]] = f"\"{value}\""
                elif isinstance(value, list):
                    Decompiler.decompiled += f"var var_{var_name} = [];\n"
                    Decompiler.array_dict[args[0]] = []
                elif value is None:
                    Decompiler.decompiled += f"var var_{var_name} = null;\n"
                    Decompiler.array_dict[args[0]] = "null"
                else:
                    Decompiler.decompiled += f"var var_{var_name} = {value};\n"
                    Decompiler.array_dict[args[0]] = str(value)
        elif operation == "ARRAY_ACCESS":
            Decompiler.handle_array_access(args)
        elif operation == "BIND_METHOD":
            Decompiler.handle_bind_method(args)
        elif operation == "XOR_STR":
            # During round 1, remember the first two XOR targets as
            # candidates for the second XOR key (resolved later in decompile).
            if Decompiler.round1 == 1 and len(Decompiler.potential) < 2:
                Decompiler.potential.append({"var": args[0], "key": args[1]})
            var_name = str(args[0]).replace(".", "_")
            key_name = str(args[1]).replace(".", "_")
            Decompiler.decompiled += f"var var_{var_name} = XOR_STR(var_{var_name}, var_{key_name});\n"
        elif operation == "BTOA_3":
            var_name = str(args[0]).replace(".", "_")
            Decompiler.decompiled += f"var var_{var_name} = btoa(\"\" + var_{var_name});\n"
        elif operation == "CALL_AND_SET":
            var_name = str(args[0]).replace(".", "_")
            func_name = str(args[1]).replace(".", "_")
            args_str = ", ".join(f"var_{arg.replace('.', '_')}" for arg in args[2:])
            Decompiler.decompiled += f"var var_{var_name} = var_{func_name}({args_str});\n"
        elif operation == "IF_DEFINED_CALL":
            Decompiler.handle_if_defined_call(args)
        elif operation == "CALL":
            Decompiler.handle_call_operation(args)
        elif operation == "ADD_OR_PUSH":
            # Array push when the target is an array, numeric/string add otherwise.
            var_name = str(args[0]).replace(".", "_")
            arg_name = str(args[1]).replace(".", "_")
            Decompiler.decompiled += (f"var var_{var_name} = Array.isArray(var_{var_name}) ? " f"(var_{var_name}.push(var_{arg_name}), var_{var_name}) : var_{var_name} + var_{arg_name};\n")
        elif operation == "IF_DIFF_CALL":
            # |a - b| > c guard; body is either an assignment (COPY) or a call.
            var_0 = str(args[0]).replace(".", "_")
            var_1 = str(args[1]).replace(".", "_")
            var_2 = str(args[2]).replace(".", "_")
            if Decompiler.mapping.get(args[3]) == "COPY":
                var_4 = str(args[4]).replace(".", "_")
                var_5 = str(args[5]).replace(".", "_")
                Decompiler.decompiled += (f"Math.abs(var_{var_0} - var_{var_1}) > var_{var_2} ? var_{var_4} = var_{var_5} : null;\n")
            else:
                args_str = ", ".join(f"var_{arg.replace('.', '_')}" for arg in args[4:])
                Decompiler.decompiled += (f"Math.abs(var_{var_0} - var_{var_1}) > var_{var_2} ? {Decompiler.mapping[args[3]]}({args_str}) : null;\n")
        elif operation == "TRY_CALL":
            Decompiler.handle_try_call(args)
        elif operation == "JSON_STRINGIFY":
            var_name = str(args[0]).replace(".", "_")
            Decompiler.decompiled += f"var var_{var_name} = JSON.stringify(var_{var_name});\n"
        elif operation == "MOVE":
            # Not translated; dumped raw (note: no trailing newline).
            Decompiler.decompiled += f"MOVE {args}"
        else:
            # Unknown operation: dump mapped names plus raw args as a comment.
            mapped = [Decompiler.mapping[key] for key in args[1:] if key in Decompiler.mapping]
            unlabeled = [str(key) for key in args[1:] if key not in Decompiler.mapping]
            all_values = " ".join(mapped + unlabeled)
            Decompiler.decompiled += f"// UNKNOWN: {operation} -> {args[0]} {all_values};\n"

    @staticmethod
    def handle_try_call(args):
        """Emit a try/catch; on failure the target variable receives the
        stringified error (mirrors the VM's error-probing behaviour)."""
        target_var = f"var_{str(args[0]).replace('.', '_')}"
        fn = Decompiler.mapping.get(args[1], "")
        rest_args = [f"var_{str(a).replace('.', '_')}" for a in args[2:]]
        if fn == "ARRAY_ACCESS":
            Decompiler.decompiled += (f"try {{ mem[{rest_args[0]}] = {rest_args[1]}[{rest_args[0]}]; }} catch(r) {{ {target_var} = \"\" + r; }}\n")
        else:
            args_str = ", ".join(rest_args)
            Decompiler.decompiled += (f"try {{ {fn}({args_str}); }} catch(r) {{ {target_var} = \"\" + r; }}\n")

    @staticmethod
    def handle_array_access(args):
        """Emit `a = b[c]`, inlining known literal values from array_dict."""
        var_0 = str(args[0]).replace(".", "_")
        var_1 = str(args[1]).replace(".", "_")
        var_2 = str(args[2]).replace(".", "_")
        if f"var var_{var_1} =" in Decompiler.decompiled:
            if args[1] in Decompiler.array_dict or args[2] in Decompiler.array_dict:
                if args[2] in Decompiler.array_dict and args[1] not in Decompiler.array_dict:
                    Decompiler.decompiled += f"var var_{var_0} = var_{var_1}[{Decompiler.array_dict[args[2]]}];\n"
                elif args[1] in Decompiler.array_dict and args[2] not in Decompiler.array_dict:
                    Decompiler.decompiled += f"var var_{var_0} = {Decompiler.array_dict[args[1]]}[var_{var_2}];\n"
                else:
                    # Keep the variable form when var_1 was produced by a call
                    # (its cached literal would be stale).
                    if re.search(rf"var\s+var_{var_1}\s*=\s*\w+\([^)]*\)", Decompiler.decompiled):
                        Decompiler.decompiled += f"var var_{var_0} = var_{var_1}[{Decompiler.array_dict[args[2]]}];\n"
                        Decompiler.array_dict[args[0]] = f"var_{var_1}[{Decompiler.array_dict[args[2]]}]"
                    else:
                        Decompiler.decompiled += f"var var_{var_0} = {Decompiler.array_dict[args[1]]}[{Decompiler.array_dict[args[2]]}];\n"
                        Decompiler.array_dict[args[0]] = f"{Decompiler.array_dict[args[1]]}[{Decompiler.array_dict[args[2]]}]"
            else:
                Decompiler.decompiled += f"var var_{var_0} = var_{var_1}[var_{var_2}];\n"
        else:
            # Base object never declared: assume a global (window) lookup.
            Decompiler.decompiled += f"var var_{var_0} = window[var_{var_2}];\n"

    @staticmethod
    def handle_bind_method(args):
        """Emit `a = b[c].bind(b)`, inlining literals like handle_array_access."""
        var_0 = str(args[0]).replace(".", "_")
        var_1 = str(args[1]).replace(".", "_")
        var_2 = str(args[2]).replace(".", "_")
        if f"var var_{var_1} =" in Decompiler.decompiled:
            if args[1] in Decompiler.array_dict or args[2] in Decompiler.array_dict:
                if args[1] in Decompiler.array_dict and args[2] not in Decompiler.array_dict:
                    Decompiler.decompiled += (f"var var_{var_0} = {Decompiler.array_dict[args[1]]}[var_{var_2}].bind({Decompiler.array_dict[args[1]]});\n")
                else:
                    # Same call-producer heuristic as handle_array_access.
                    if re.search(rf"var\s+var_{var_1}\s*=\s*\w+\([^)]*\)", Decompiler.decompiled):
                        Decompiler.decompiled += (f"var var_{var_0} = var_{var_1}[{Decompiler.array_dict[args[2]]}].bind(var_{var_1});\n")
                        Decompiler.array_dict[args[0]] = f"var_{var_1}[{Decompiler.array_dict[args[2]]}]"
                    else:
                        Decompiler.decompiled += (f"var var_{var_0} = {Decompiler.array_dict[args[1]]}[{Decompiler.array_dict[args[2]]}].bind({Decompiler.array_dict[args[1]]});\n")
                        Decompiler.array_dict[args[0]] = f"{Decompiler.array_dict[args[1]]}[{Decompiler.array_dict[args[2]]}]"
            else:
                Decompiler.decompiled += (f"var var_{var_0} = var_{var_1}[var_{var_2}].bind(var_{var_1});\n")
        else:
            Decompiler.decompiled += (f"var var_{var_0} = window[var_{var_2}].bind(var_{var_1});\n")

    @staticmethod
    def handle_if_defined_call(args):
        """Emit `x !== void 0 ? (op(...) || x) : x` style guards.

        The VM encodes the inner operation as an alias opcode; `result`
        re-derives, for each argument, another mapping key that shares the
        same operation name (a two-hop alias resolution).
        """
        result = []
        for item in args:
            if item in Decompiler.mapping:
                keys = [k for k, v in Decompiler.mapping.items() if v == Decompiler.mapping[item] and k != item]
                result.append(keys[0] if keys else None)
            else:
                result.append(None)
        # Second hop: map each alias back again.
        result = [None if key is None else ([k for k, v in Decompiler.mapping.items() if v == Decompiler.mapping[key] and k != key] or [None])[0] for key in result]
        if len(args) == 4:
            target = str(args[3]).replace(".", "_")
            # How often the target value already appears in the output.
            count = len(re.findall(target, Decompiler.decompiled))
            if count <= 1 and f"var var_{str(args[2]).replace('.', '_')}" not in Decompiler.decompiled:
                # First sighting of this value: record it as the first XOR key.
                if not Decompiler.xorkey:
                    Decompiler.xorkey = str(args[3])
                var_0 = str(args[0]).replace(".", "_")
                arg_2 = str(args[2]).replace(".", "_")
                arg_3 = str(args[3]).replace(".", "_")
                if Decompiler.mapping.get(result[1]) == "SET_VALUE":
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? (mem[\"{args[2]}\"] = \"{args[3]}\", var_{var_0}) : var_{var_0};\n")
                else:
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? ({Decompiler.mapping[result[1]]}(\"{args[2]}\", \"{args[3]}\") || var_{var_0}) : var_{var_0};\n")
            elif count <= 3:
                var_0 = str(args[0]).replace(".", "_")
                arg_2 = str(args[2]).replace(".", "_")
                arg_3 = str(args[3]).replace(".", "_")
                if Decompiler.mapping.get(result[1]) == "SET_VALUE":
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? ((mem[\"{args[2]}\"] = \"{args[3]}\") || var_{var_0}) : var_{var_0};\n")
                else:
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? ({Decompiler.mapping[result[1]]}(var_{arg_2}, mem[\"{args[3]}\"]) || var_{var_0}) : var_{var_0};\n")
            elif Decompiler.mapping.get(result[1]) == "JSON_PARSE":
                var_0 = str(args[0]).replace(".", "_")
                arg_3 = str(args[3]).replace(".", "_")
                Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? (JSON.parse(var_{arg_3}) || var_{var_0}) : var_{var_0};\n")
            else:
                var_0 = str(args[0]).replace(".", "_")
                args_str = ", ".join(f"var_{arg.replace('.', '_')}" for arg in args[2:])
                Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? ({Decompiler.mapping[result[1]]}({args_str}) || var_{var_0}) : var_{var_0};\n")
        else:
            var_0 = str(args[0]).replace(".", "_")
            if len(args) > 4 and f"mem[\"{args[4]}\"] =" in Decompiler.decompiled:
                # Third argument (index 1 of args[2:]) reads from mem[...].
                args_str = ", ".join(f"mem[\"{arg}\"]" if i + 2 == 3 else f"var_{str(arg).replace('.', '_')}" for i, arg in enumerate(args[2:]))
                if Decompiler.mapping.get(result[1]) == "CALL":
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? (var_{str(args[2]).replace('.', '_')}({args_str}) || var_{var_0}) : var_{var_0};\n")
                else:
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? ({Decompiler.mapping[result[1]]}({args_str}) || var_{var_0}) : var_{var_0};\n")
            else:
                args_str = ", ".join(f"var_{arg.replace('.', '_')}" for arg in args[2:])
                if Decompiler.mapping.get(result[1]) == "ATOB":
                    arg_2 = str(args[2]).replace(".", "_")
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? (atob(\"\" + var_{arg_2}) || var_{var_0}) : var_{var_0};\n")
                elif len(args) >= 3 and result[1] in Decompiler.mapping:
                    Decompiler.decompiled += (f"var var_{var_0} = var_{var_0} !== void 0 ? ({Decompiler.mapping[result[1]]}({args_str}) || var_{var_0}) : var_{var_0};\n")
                else:
                    Decompiler.decompiled += f"// ERROR: Invalid IF_DEFINED_CALL with args {args};\n"

    @staticmethod
    def handle_call_operation(args):
        """Emit a plain call; a BTOA call becomes the final console.log output."""
        if args[0] in Decompiler.mapping:
            if Decompiler.mapping[args[0]] == "BTOA":
                arg_1 = str(args[1]).replace(".", "_")
                Decompiler.decompiled += f"console.log(btoa(\"\" + var_{arg_1}));\n"
            else:
                args_str = ", ".join(f"var_{arg.replace('.', '_')}" for arg in args)
                Decompiler.decompiled += f"{Decompiler.mapping[args[0]]}({args_str});\n"
        else:
            # A variable holding the string "set" means property assignment.
            if f"var var_{str(args[0]).replace('.', '_')} = \"set\";" in Decompiler.decompiled:
                arg_1 = str(args[1]).replace(".", "_")
                arg_2 = str(args[2]).replace(".", "_")
                arg_3 = str(args[3]).replace(".", "_")
                Decompiler.decompiled += f"var_{arg_1}[var_{arg_2}] = var_{arg_3};\n"
            else:
                args_str = ", ".join(f"var_{arg.replace('.', '_')}" for arg in args[1:])
                Decompiler.decompiled += f"var_{str(args[0]).replace('.', '_')}({args_str});\n"

    @staticmethod
    def remove_unused_variables():
        """Drop `var var_X = ...` declaration lines whose variable name never
        appears on any other line of the decompiled output."""
        lines = Decompiler.decompiled.split("\n")
        used_vars = set()
        var_decl_lines = []
        for i, line in enumerate(lines):
            match = re.match(r"^var\s+var_([\w_]+)\s*=", line)
            if match:
                var_decl_lines.append({"name": match.group(1), "index": i})
        for var in var_decl_lines:
            name = var["name"]
            # Used when the name occurs on a line other than its own declaration.
            is_used = any(name in line and not line.startswith(f"var var_{name} =") for line in lines)
            if is_used:
                used_vars.add(name)
        Decompiler.decompiled = "\n".join(
            line for line in lines
            if not re.match(r"^var\s+var_([\w_]+)\s*=", line) or re.match(r"^var\s+var_([\w_]+)\s*=", line).group(1) in used_vars
        )

    @staticmethod
    def decompile(bytecode):
        """Consume (and destroy) the decoded instruction list, emitting JS.

        After the outer round finishes, chains into decompile_2() for the
        nested payload.  Also watches for the first CALL touching one of the
        remembered XOR targets to capture the second XOR key.
        """
        while len(bytecode) > 0:
            e = str(bytecode[0][0])                       # opcode id
            t = [str(item) for item in bytecode[0][1:]]   # operands
            bytecode.pop(0)
            Decompiler.vg += 1
            if e in Decompiler.mapping:
                Decompiler.handle_operation(Decompiler.mapping[e], t)
            else:
                Decompiler.decompiled += f"// UNKNOWN_OPCODE {e} -> {', '.join(t)};\n"
            if Decompiler.mapping.get(e) == "CALL" and not Decompiler.found:
                for entry in Decompiler.potential:
                    if len(t) > 3 and entry["var"] == t[3]:
                        # Pull the key's literal out of its emitted declaration.
                        key_str = str(entry["key"]).replace(".", "_")
                        regex = rf"var var_{key_str} = (.*);"
                        match = re.search(regex, Decompiler.decompiled)
                        if match:
                            Decompiler.xorkey2 = match.group(1).replace(";", "")
                            Decompiler.found = True
                            break
        if Decompiler.round1 == 0:
            Decompiler.round1 += 1
            Decompiler.decompile_2()

    @staticmethod
    def decompile_2():
        """Round 2: the longest quoted string in the output is the nested
        payload — base64-decode, XOR with the first key, and decompile."""
        matches = [m.group(2) for m in re.finditer(r"var\s+\w+\s*=\s*(['\"`])([\s\S]*?)\1", Decompiler.decompiled)]
        bytecode = max(matches, key=len, default="")
        if bytecode:
            decoded = json.loads(Decompiler.xS(base64.b64decode(bytecode).decode(), str(Decompiler.xorkey)))
            Decompiler.decompile(decoded)
        if Decompiler.round1 == 1:
            Decompiler.round1 += 1
            Decompiler.decompile_3()

    @staticmethod
    def decompile_3():
        """Round 3: decode the mid-sized (60-200 char) embedded string, then
        strip unused variable declarations from the final output."""
        matches = [m.group(2) for m in re.finditer(r"var\s+\w+\s*=\s*(['\"`])([\s\S]*?)\1", Decompiler.decompiled)]
        bytecode = next((s for s in matches if 60 <= len(s) <= 200), "")
        if bytecode:
            decoded = json.loads(Decompiler.xS(base64.b64decode(bytecode).decode(), str(Decompiler.xorkey)))
            Decompiler.decompile(decoded)
        Decompiler.remove_unused_variables()

    @staticmethod
    def decompile_vm(turnstile, token):
        """Entry point: reset state, seed a JSDOM prelude, decode the outer
        payload with *token* as the XOR key, and return the decompiled JS."""
        Decompiler.start()
        Decompiler.decompiled = (
            "const { JSDOM } = require(\"jsdom\");\n"
            "const dom = new JSDOM(\"<!DOCTYPE html><p>Hello world</p>\", { url: \"https://chatgpt.com/\" });\n"
            "const window = dom.window;\n"
            "var mem = {};\n"
        )
        Decompiler.decompile(json.loads(Decompiler.xS(base64.b64decode(turnstile).decode(), str(token))))
        return Decompiler.decompiled
|
||
|
||
# ==========================================
|
||
# VM
|
||
# ==========================================
|
||
|
||
class VM:
    """Builds the final turnstile answer token from the decompiled VM's keys."""

    # Canned DOMRect JSON reported for the probe-element fingerprint.
    html_object: str = json.dumps({"x":0,"y":1219,"width":37.8125,"height":30,"top":1219,"right":37.8125,"bottom":1249,"left":0}, separators=(',', ':'))

    @staticmethod
    def xor(e, t):
        """Repeating-key XOR of str(e) with key str(t); returns a str."""
        t = str(t)
        e = str(e)
        n = ""
        for r in range(len(e)):
            n += chr(ord(e[r]) ^ ord(t[r % len(t)]))
        return n

    @staticmethod
    def get_turnstile(turnstile: str, token: str, ip_info: str) -> str:
        """Decompile the turnstile VM payload and synthesize its answer token.

        Each key from Parser.parse_keys carries a tag chosen there; this
        method encodes the corresponding value (XOR with the extracted key,
        then base64) and finally base64(xor(JSON(payload))) as the token.
        Branch order matters: float literals are checked before the string
        tags, since `in` would fail on a float.
        """
        decompiled: str = Decompiler.decompile_vm(turnstile, token)
        xor_key, keys = Parser.parse_keys(decompiled)
        payload: dict = {}

        for key, value in keys.items():
            try:
                # Numeric strings become floats and take the first branch.
                value = float(value)
            except Exception:
                pass

            if isinstance(value, float):
                payload[key] = base64.b64encode(VM.xor(str(value), xor_key).encode("utf-8")).decode("utf-8")
            elif "singlebtoa" in value:
                # Plain base64 of the wrapped expression, no XOR.
                payload[key] = base64.b64encode(value.split("singlebtoa(")[1].split(")")[0].encode("utf-8")).decode("utf-8")
            elif "doublexor" in value:
                # Self-XOR twice (each pass base64-wrapped), then base64 again.
                number: str = value.split("doublexor(")[1].split(")")[0]
                value_1: str = base64.b64encode(VM.xor(number, number).encode("utf-8")).decode("utf-8")
                value_2: str = base64.b64encode(VM.xor(value_1, value_1).encode("utf-8")).decode("utf-8")
                payload[key] = base64.b64encode(value_2.encode("utf-8")).decode("utf-8")
            elif "ipinfo" in value:
                payload[key] = base64.b64encode(VM.xor(ip_info, xor_key).encode("utf-8")).decode("utf-8")
            elif "element" in value:
                # Canned DOMRect fingerprint defined on the class.
                payload[key] = base64.b64encode(VM.xor(VM.html_object, xor_key).encode()).decode()
            elif "location" in value:
                location: str = 'https://chatgpt.com/'
                payload[key] = base64.b64encode(VM.xor(location, xor_key).encode("utf-8")).decode("utf-8")
            elif "random_1" in value:
                # First random slot: value XORed with itself as the key.
                random_value: float = random.random()
                payload[key] = base64.b64encode(VM.xor(str(random_value), str(random_value)).encode("utf-8")).decode("utf-8")
            elif "random_2" in value:
                # Second random slot is sent raw (no encoding).
                payload[key] = random.random()
            elif "vendor" in value:
                # Canned navigator fingerprint: vendor, platform,
                # hardwareConcurrency, maxTouchPoints.
                vendor_info: str = '["Google Inc.","Win32",8,0]'
                payload[key] = base64.b64encode(VM.xor(vendor_info, xor_key).encode("utf-8")).decode("utf-8")
            elif "localstorage" in value:
                # Canned list of localStorage key names a real client would have.
                payload[key] = base64.b64encode(VM.xor('oai/apps/hasDismissedTeamsNoAuthUpsell,oai/apps/lastSeenNoAuthTrialsBannerAt,oai-did,oai/apps/noAuthGoUpsellModalDismissed,oai/apps/hasDismissedBusinessFreeTrialUpsellModal,oai/apps/capExpiresAt,statsig.session_id.1792610830,oai/apps/hasSeenNoAuthImagegenNux,oai/apps/lastPageLoadDate,client-correlated-secret,statsig.stable_id.1792610830,oai/apps/debugSettings,oai/apps/hasDismissedPlusFreeTrialUpsellModal,oai/apps/tatertotInContextUpsellBannerV2,search.attributions-settings', xor_key).encode("utf-8")).decode("utf-8")
            elif "history" in value:
                # Plausible history.length in [1, 5].
                payload[key] = base64.b64encode(VM.xor(str(random.randint(1, 5)), xor_key).encode()).decode()
            else:
                # Unrecognized tag: omit the key entirely.
                pass

        # Token = base64(xor(compact-JSON(payload), xor_key)).
        turnstile_token: str = base64.b64encode(VM.xor(json.dumps(payload, separators=(',', ':')), xor_key).encode("utf-8")).decode("utf-8")
        return turnstile_token
|
||
|
||
# ==========================================
|
||
# Main Generator Logic
|
||
# ==========================================
|
||
|
||
class SentinelGenerator:
    """Generates the three sentinel tokens ChatGPT's backend expects on a
    ``/backend-api/f/conversation`` request:

      1. ``openai-sentinel-chat-requirements-token`` — issued by the server,
      2. ``openai-sentinel-proof-token``             — proof-of-work answer,
      3. ``openai-sentinel-turnstile-token``         — turnstile VM answer.
    """

    def __init__(self, access_token: str, account_id: Optional[str] = None):
        """
        :param access_token: bearer token placed in the Authorization header.
        :param account_id: optional workspace id; when given it is sent as
            the ``chatgpt-account-id`` header.
        """
        # Impersonate a recent Chrome TLS fingerprint so the CDN accepts us.
        self.session = requests.Session(impersonate="chrome133a")
        self.access_token = access_token
        self.account_id = account_id

        # Baseline browser-like headers. 'oai-client-version' and
        # 'oai-device-id' start empty and are filled in by generate() once
        # the home page has been fetched.
        self.headers = {
            'accept': '*/*',
            'accept-language': 'en-US,en;q=0.9',
            'authorization': f'Bearer {self.access_token}',
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            'oai-client-version': '',
            'oai-device-id': '',
            'oai-language': 'en-US',
            'origin': 'https://chatgpt.com',
            'pragma': 'no-cache',
            'priority': 'u=1, i',
            'referer': 'https://chatgpt.com/',
            'sec-ch-ua': '"Google Chrome";v="143", "Chromium";v="143", "Not A(Brand";v="24"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36',
        }
        if account_id:
            self.headers['chatgpt-account-id'] = account_id

    def generate(self):
        """Run the full token flow.

        :return: ``(chat_requirements_token, proof_token, turnstile_token)``
            on success (either of the last two may be ``None`` when the
            server did not issue that challenge), or ``None`` when the
            chat-requirements request fails.
        """
        print("[*] Initializing session...")
        # 1. Fetch the home page for cookies and the build ("prod") version.
        resp = self.session.get("https://chatgpt.com", headers=self.headers)
        # BUG FIX: split(...)[1] raised IndexError when the 'data-build'
        # marker was missing; Utils.between degrades to "" instead.
        prod = Utils.between(resp.text, 'data-build="', '"')
        device_id = self.session.cookies.get("oai-did")
        print(f"[+] Prod Version: {prod}")
        print(f"[+] Device ID: {device_id}")

        self.headers.update({
            'oai-client-version': prod,
            'oai-device-id': device_id
        })

        # 2. Fetch IP info (timezone + fingerprint fields for the VM).
        print("[*] Fetching IP info...")
        ip_info = self._fetch_ip_info()
        timezone = ip_info[5]
        print(f"[+] IP: {ip_info[0]}, Timezone: {timezone}")

        # 3. Build the browser-fingerprint config array.
        start_time = int(time.time() * 1000)
        config = self._create_config(ip_info, prod, start_time)

        # 4. Generate the initial 'p' parameter sent with the
        #    chat-requirements request.
        p_value = Challenges.generate_token(config)
        print(f"[+] Generated p_value: {p_value[:20]}...")

        # 5. Ask the backend which challenges it requires.
        print("[*] Requesting chat requirements...")
        req_resp = self.session.post(
            'https://chatgpt.com/backend-api/sentinel/chat-requirements',
            json={'p': p_value},
            headers=self.headers
        )
        if req_resp.status_code != 200:
            print(f"[-] Failed to get requirements: {req_resp.status_code}")
            return None

        data = req_resp.json()
        chat_requirements_token = data.get("token")
        pow_data = data.get("proofofwork")
        turnstile_data = data.get("turnstile", {})
        bytecode = turnstile_data.get("dx") if turnstile_data else None

        print("\n[SUCCESS] Token 1: openai-sentinel-chat-requirements-token")
        print(f"Value: {chat_requirements_token[:50]}...")

        # 6. Solve the proof-of-work challenge, if one was issued.
        # BUG FIX: pow_data["seed"] previously raised TypeError when the
        # server omitted the 'proofofwork' section entirely.
        proof_token = None
        if pow_data:
            print("\n[*] Solving Proof of Work...")
            proof_token = Challenges.solve_pow(pow_data["seed"], pow_data["difficulty"], config)
            print("[SUCCESS] Token 2: openai-sentinel-proof-token")
            print(f"Value: {proof_token}")

        # 7. Solve the Turnstile VM bytecode, if any.
        # BUG FIX: turnstile_token was previously unbound when no bytecode
        # was returned, so the return below raised NameError.
        turnstile_token = None
        if bytecode:
            print("\n[*] Solving Turnstile VM...")
            turnstile_token = VM.get_turnstile(bytecode, p_value, str(ip_info[:-1]))
            print("[SUCCESS] Token 3: openai-sentinel-turnstile-token")
            print(f"Value: {turnstile_token[:50]}...")
        else:
            print("[-] No turnstile bytecode returned (cached?)")

        return chat_requirements_token, proof_token, turnstile_token

    def _fetch_ip_info(self):
        """Best-effort scrape of public IP geodata.

        :return: ``[ip, city, region, lat, lng, timezone]`` as strings; a
            neutral placeholder list on any failure (network error, page
            layout change) so the caller never crashes here.
        """
        try:
            r1 = self.session.get("https://iplocation.com/", headers={"User-Agent": self.headers['user-agent']})
            ip = Utils.between(r1.text, '<td><b class="ip">', "<")
            city = Utils.between(r1.text, '<td class="city">', "<")
            region = Utils.between(r1.text, '<td class="region_name">', "<")
            lat = Utils.between(r1.text, '<td class="lat">', "<")
            lng = Utils.between(r1.text, '<td class="lng">', "<")

            r2 = self.session.get("https://ipaddresslocation.net/ip-to-timezone", headers={"User-Agent": self.headers['user-agent']})
            tz = Utils.between(r2.text, "Time Zone:</strong> ", " ")

            return [ip, city, region, lat, lng, tz]
        except Exception as e:
            # Deliberate broad catch: geodata is non-essential, fall back.
            print(f"Error fetching IP: {e}")
            return ["0.0.0.0", "Unknown", "Unknown", "0", "0", "UTC"]

    def _create_config(self, ip_info, prod, start_time):
        """Build the 18-element browser-fingerprint array consumed by
        :meth:`Challenges.generate_token` / the PoW solver.

        :param ip_info: list from :meth:`_fetch_ip_info`; index 5 is the
            IANA timezone name.
        :param prod: the 'data-build' version scraped from the home page.
        :param start_time: epoch milliseconds when generation started.
        """
        # Randomized React-internal-looking keys, as a real page would have.
        reacts = ["location", "__reactContainer$" + self._rand_str(), "_reactListening" + self._rand_str()]
        window_keys = ["0", "window", "self", "document", "name", "location", "customElements", "history", "navigation", "locationbar", "menubar", "personalbar", "scrollbars", "statusbar", "toolbar", "status", "closed", "frames", "length", "top", "opener", "parent", "frameElement", "navigator", "origin", "external", "screen", "innerWidth", "innerHeight", "scrollX", "pageXOffset", "scrollY", "pageYOffset", "visualViewport", "screenX", "screenY", "outerWidth", "outerHeight", "devicePixelRatio", "event", "clientInformation", "screenLeft", "screenTop", "styleMedia", "onsearch", "trustedTypes", "performance", "onappinstalled", "onbeforeinstallprompt", "crypto", "indexedDB", "sessionStorage", "localStorage", "chrome","__oai_SSR_HTML", "__reactRouterContext", "$RC", "__oai_SSR_TTI", "__reactRouterManifest", "__reactRouterVersion", "DD_RUM", "__REACT_INTL_CONTEXT__", "regeneratorRuntime", "DD_LOGS", "__STATSIG__", "__mobxInstanceCount", "__mobxGlobals", "_g", "__reactRouterRouteModules", "__SEGMENT_INSPECTOR__", "__reactRouterDataRouter", "MotionIsMounted", "_oaiHandleSessionExpired"]

        # Unknown/invalid timezone names fall back to UTC.
        try:
            tz_info = ZoneInfo(ip_info[5])
        except Exception:
            tz_info = datetime.timezone.utc

        # Hoisted: the original called datetime.datetime.now(tz_info) twice,
        # which could straddle a second boundary.
        now = datetime.datetime.now(tz_info)
        return [
            4880,
            # JS-style Date string, e.g. "Mon Jan 01 2024 12:00:00 GMT+0000 (UTC)"
            now.strftime(f"%a %b %d %Y %H:%M:%S GMT%z ({now.tzname()})"),
            4294705152,
            random.random(),
            self.headers['user-agent'],
            None,
            prod,
            "en-US",
            "en-US,en",
            random.random(),
            "webkitGetUserMedia−function webkitGetUserMedia() { [native code] }",
            random.choice(reacts),
            random.choice(window_keys),
            random.randint(800, 1400) + random.random(),
            str(uuid.uuid4()),
            "",
            20,
            start_time
        ]

    def _rand_str(self):
        """Return a 10-character base-36 string, mimicking the browser idiom
        ``Math.random().toString(36)`` used for React internal keys."""
        n = random.random()
        base36 = ''
        chars = '0123456789abcdefghijklmnopqrstuvwxyz'
        x = int(n * 36**10)
        for _ in range(10):
            x, r = divmod(x, 36)
            base36 = chars[r] + base36
        return base36
||
if __name__ == "__main__":
    # Credentials: a ChatGPT bearer token and (optionally) a workspace id.
    # Fill these in before running.
    ACCESS_TOKEN = ""
    ACCOUNT_ID = ""

    # Run the three-token sentinel flow; generate() returns None on failure.
    gen = SentinelGenerator(ACCESS_TOKEN, ACCOUNT_ID)
    tokens = gen.generate()

    if tokens:
        chat_req_token, proof_token, turnstile_token = tokens

        print("\n[*] Sending verification request...")

        url = "https://chatgpt.com/backend-api/f/conversation"

        # Browser-equivalent headers; the three sentinel tokens generated
        # above are attached here, plus the client version / device id the
        # generator scraped during its flow.
        headers = {
            'accept': 'text/event-stream',
            'accept-language': 'en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,ja;q=0.6',
            'authorization': f'Bearer {ACCESS_TOKEN}',
            'chatgpt-account-id': ACCOUNT_ID,
            'content-type': 'application/json',
            'dnt': '1',
            'oai-client-build-number': '4254641',
            'oai-client-version': gen.headers.get('oai-client-version', ''),
            'oai-device-id': gen.headers.get('oai-device-id', ''),
            'oai-echo-logs': '0,2858,1,5120,0,42900,1,44934,0,48738,1,64103,0,68122,1,68125,0,76938,1,77534',
            'oai-language': 'en-US',
            'openai-sentinel-chat-requirements-token': chat_req_token,
            'openai-sentinel-proof-token': proof_token,
            'openai-sentinel-turnstile-token': turnstile_token,
            'origin': 'https://chatgpt.com',
            'priority': 'u=1, i',
            'referer': 'https://chatgpt.com/',
            'sec-ch-ua': '"Google Chrome";v="143", "Chromium";v="143", "Not A(Brand";v="24"',
            'sec-ch-ua-arch': '"x86"',
            'sec-ch-ua-bitness': '"64"',
            'sec-ch-ua-full-version': '"143.0.7499.194"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-model': '""',
            'sec-ch-ua-platform': '"Windows"',
            'sec-ch-ua-platform-version': '"19.0.0"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'sec-gpc': '1',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36',
        }

        # Minimal /f/conversation payload carrying one user message.
        payload = {
            "action": "next",
            "messages": [
                {
                    "id": str(uuid.uuid4()),
                    "author": {"role": "user"},
                    "create_time": time.time(),
                    "content": {
                        "content_type": "text",
                        "parts": ["你的模型型号是什么?"]
                    },
                    "metadata": {
                        "developer_mode_connector_ids": [],
                        "selected_connector_ids": [],
                        "selected_sync_knowledge_store_ids": [],
                        "selected_sources": [],
                        "selected_github_repos": [],
                        "selected_all_github_repos": False,
                        "serialization_metadata": {"custom_symbol_offsets": []}
                    }
                }
            ],
            "parent_message_id": "client-created-root",
            "model": "gpt-5-2",
            "timezone_offset_min": -480,
            "timezone": "Asia/Shanghai",
            "conversation_mode": {"kind": "primary_assistant"},
            "enable_message_followups": True,
            "system_hints": [],
            "supports_buffering": True,
            "supported_encodings": ["v1"],
            # Viewport/environment info a real browser client would report.
            "client_contextual_info": {
                "is_dark_mode": True,
                "time_since_loaded": 3786,
                "page_height": 695,
                "page_width": 687,
                "pixel_ratio": 1.25,
                "screen_height": 864,
                "screen_width": 1536,
                "app_name": "chatgpt.com"
            },
            "paragen_cot_summary_display_override": "allow",
            "force_parallel_switch": "auto"
        }

        try:
            # The endpoint streams server-sent events; keep the connection
            # open and consume it line by line.
            response = gen.session.post(url, headers=headers, json=payload, stream=True)
            print(f"[+] Response Status: {response.status_code}")

            print("[+] Assistant Response:")
            for line in response.iter_lines():
                if not line:
                    continue

                decoded_line = line.decode('utf-8')
                # SSE data frames look like "data: {...}"; the stream ends
                # with the literal "data: [DONE]" marker.
                if decoded_line.startswith("data: "):
                    data_str = decoded_line[6:]
                    if data_str == "[DONE]":
                        break

                    try:
                        data_json = json.loads(data_str)
                        if not isinstance(data_json, dict):
                            continue

                        target_value = None

                        # "v" carries the incremental assistant text when it
                        # is a plain string; list-typed values (patch-style
                        # deltas) are deliberately ignored here.
                        if "v" in data_json:
                            v_val = data_json["v"]
                            if isinstance(v_val, str):
                                target_value = v_val
                            elif isinstance(v_val, list):
                                pass

                        if target_value:
                            print(target_value, end="", flush=True)

                    except json.JSONDecodeError:
                        # Non-JSON keep-alive/control frames: skip silently.
                        pass
            print()
        except Exception as e:
            print(f"[-] Request failed: {e}")