
referer = ''
headers = {
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0"
}
url = ...
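The preview cuts off before the target URL, so here is a minimal sketch, assuming a placeholder endpoint, of how this referer/user-agent header set would typically be passed to requests. Only the headers come from the snippet above; the URL is hypothetical.

import requests

referer = ''
headers = {
    "referer": referer,
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0",
}
# Hypothetical target; the real URL is truncated in the snippet above.
url = "https://example.com/api"
resp = requests.get(url, headers=headers, timeout=10)
print(resp.status_code, resp.text[:200])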
import requests
import json

bx_et = requests.get('http://localhost:3000/getbxet').text
print(bx_et)

headers = {
    "accept": "application/json, text/plain, */*",
    "accept-language": ...
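A hedged sketch of the pattern this snippet suggests: a local Node.js service (assumed here to expose /getbxet on port 3000) runs the extracted JS and returns the bx_et token, which Python then attaches to the real request. The parameter name, target endpoint and remaining headers are assumptions for illustration, not confirmed API details.

import requests

# Assumption: a local Node service reproducing the site's JS generates the token.
bx_et = requests.get('http://localhost:3000/getbxet').text.strip()

headers = {
    "accept": "application/json, text/plain, */*",
    "accept-language": "zh-CN,zh;q=0.9",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
}
# Hypothetical endpoint; the real one is truncated in the preview above.
params = {"bx_et": bx_et}
resp = requests.get("https://example.com/api/data", headers=headers, params=params, timeout=10)
print(resp.status_code)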
The new version is a bit over the top. I worked around the AST-obfuscated mess with other methods; if you know AST deobfuscation, get in touch so I can learn it from you. I can share my code for you to study, and you give me the AST side in return. That said, you can handle it without AST: environment patching (补环境) is actually the easiest way to deal with this, but since I have always used the code-extraction (扣代码) approach, I was too lazy to rebuild a patched environment from scratch, so I kept asking AI how to handle the new v2 situation. As for the detection points added after the upgrade, I had already spotted them in version 2.0 and wondered why they were loaded but never checked; in 3.0 they were all switched on, which I had more or less predicted, so after some fiddling I got it working. Part of the code is shown below.
import requests
import ddddocr

session = requests.session()
det = ddddocr.DdddOcr(det=False, ocr=False, show_ad=False)
cookies = {}
url = "api/captcha/get"
params = {
    "aid": "CHANPASSPORT",
    "ua": ...
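For context, a minimal sketch of how a DdddOcr instance created with det=False, ocr=False is typically used for slider captchas: it matches the gap position between the slider piece and the background image. The image URLs and result handling below are illustrative placeholders, not the actual captcha/get response format.

import requests
import ddddocr

session = requests.session()
slide = ddddocr.DdddOcr(det=False, ocr=False, show_ad=False)

# Hypothetical image sources; in practice these come from the captcha/get response.
target_bytes = session.get("https://example.com/captcha/slider_piece.png").content
background_bytes = session.get("https://example.com/captcha/background.png").content

# slide_match returns the matched gap rectangle, e.g. {'target': [x1, y1, x2, y2]}
res = slide.slide_match(target_bytes, background_bytes, simple_target=True)
offset_x = res["target"][0]
print("gap x offset:", offset_x)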
Part of the Python code:
headers = {
    "accept": "application/json",
    "accept-language": "zh-CN,zh;q=0.9",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "cross-site",
    "user-agent": "Mozilla/5.0 (Wind...
import urllib.parse
import requests

headers = {
    "accept": "application/json, text/plain, */*",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "priority": "u=1, i",
    ...
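The snippet imports urllib.parse alongside requests, which usually means a signed value has to be percent-encoded before it goes into the query string. A small sketch of that pattern, assuming a hypothetical token and endpoint; neither is taken from the original preview.

import urllib.parse
import requests

headers = {
    "accept": "application/json, text/plain, */*",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
}

# Hypothetical signed token; in the snippets above it would come from a JS call.
token = "abc+def/ghi=="
# quote() percent-encodes characters such as '+', '/' and '=' that would otherwise
# be mangled if placed directly into the query string.
encoded = urllib.parse.quote(token, safe="")
url = "https://example.com/api/list?token=" + encoded  # placeholder URL
resp = requests.get(url, headers=headers, timeout=10)
print(resp.status_code)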
import execjs
import requests
import json

cp = execjs.compile(open('pdd.js', 'r', encoding='utf-8').read())
anti_content = cp.call('getToken')
print(anti_content)

headers = {
    "accept": "application/json, text/...
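A hedged sketch of the full pattern implied by this snippet: execjs compiles the locally extracted pdd.js, getToken produces the anti_content value, and that value is sent along with the request. The endpoint and the other parameters below are illustrative placeholders, not the real API contract; only anti_content comes from the snippet above.

import json
import execjs
import requests

# pdd.js is the extracted JS; getToken is the entry point shown above.
cp = execjs.compile(open('pdd.js', 'r', encoding='utf-8').read())
anti_content = cp.call('getToken')

headers = {
    "accept": "application/json, text/plain, */*",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
}
# Hypothetical endpoint and extra params for illustration only.
params = {
    "anti_content": anti_content,
    "page": 1,
}
resp = requests.get("https://example.com/proxy/api/search", headers=headers, params=params, timeout=10)
print(resp.status_code)
print(json.dumps(resp.json(), ensure_ascii=False)[:300])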
import execjs

cp = execjs.compile(open('vm.js', 'r', encoding='utf-8').read())
result = cp.call('get_result', data1, WEBDFPID)
print(result)
mtgsig = result['headers']['mtgsig']
print(mtgsig)

# Set the request headers
headers = {'Accept': 'ap...
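A hedged sketch of how the computed mtgsig is typically attached: get_result in vm.js signs the request data together with the WEBDFPID value, and the resulting signature goes into the mtgsig request header. data1, the WEBDFPID value and the endpoint below are placeholders standing in for the values produced earlier in the post.

import json
import execjs
import requests

cp = execjs.compile(open('vm.js', 'r', encoding='utf-8').read())

# Placeholders: in the original flow these come from earlier steps.
WEBDFPID = "example_webdfpid_cookie_value"
data1 = json.dumps({"keyword": "test", "page": 1})

result = cp.call('get_result', data1, WEBDFPID)
mtgsig = result['headers']['mtgsig']

headers = {
    'Accept': 'application/json, text/plain, */*',
    'mtgsig': mtgsig,                  # signature computed by the extracted JS
    'Cookie': f'WEBDFPID={WEBDFPID}',  # fingerprint value the signature is bound to
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
}
# Hypothetical endpoint; the real one is truncated in the snippet above.
resp = requests.post("https://example.com/api/search", headers=headers, data=data1, timeout=10)
print(resp.status_code)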