This commit is contained in:
yumoqing 2025-05-28 17:31:38 +08:00
parent c638340cd6
commit 982ea828fd


@@ -1,7 +1,10 @@
import os
from traceback import format_exc
import asyncio
import aiohttp
from aiohttp import FormData
import ssl
import certifi
import json
from appPublic.myTE import MyTemplateEngine
import re
@@ -87,19 +90,37 @@ class HttpClient:
self.session = aiohttp.ClientSession(cookie_jar=jar)
return self.session
async def handleResp(self,url,resp,resp_type, stream_func=None):
async def response_generator(self, url, resp, resp_type=None, stream=False):
if resp.cookies is not None:
self.setCookie(url,resp.cookies)
if stream:
async for chunk in resp.content.iter_chunked(1024):
yield chunk
else:
if resp_type == RESPONSE_BIN:
yield await resp.read()
if resp_type == RESPONSE_JSON:
yield await resp.json()
if resp_type == RESPONSE_TEXT:
yield await resp.text(self.coding)
async def response_handle(self,url, resp, resp_type=None, stream_func=None):
if resp.cookies is not None:
self.setCookie(url,resp.cookies)
if stream_func:
async for chunk in resp.content.iter_chunked(1024):
if stream_func:
await stream_func(chunk)
return None
if resp_type == RESPONSE_BIN:
return await resp.read()
if resp_type == RESPONSE_JSON:
return await resp.json()
if resp_type == RESPONSE_TEXT:
return await resp.text(self.coding)
async for chunk in resp.content.iter_chunked(1024):
if stream_func:
await stream_func(chunk)
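For readers comparing the two new entry points: response_generator() yields data back to the caller (async-generator style), while response_handle() pushes chunks into a caller-supplied coroutine (callback style). A minimal, self-contained sketch of the two patterns written directly against aiohttp follows; the URL and the 1024-byte chunk size are placeholders, not values mandated by this module.

import asyncio
import aiohttp

async def stream_with_callback(url, stream_func):
    # Callback style, like response_handle(stream_func=...): push each chunk
    # into a caller-supplied coroutine.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            async for chunk in resp.content.iter_chunked(1024):
                await stream_func(chunk)

async def stream_with_generator(url):
    # Generator style, like response_generator(stream=True): yield chunks
    # back to the caller as they arrive.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            async for chunk in resp.content.iter_chunked(1024):
                yield chunk

async def main():
    async def on_chunk(chunk):
        print('callback got', len(chunk), 'bytes')
    await stream_with_callback('https://example.com', on_chunk)
    async for chunk in stream_with_generator('https://example.com'):
        print('generator got', len(chunk), 'bytes')

if __name__ == '__main__':
    asyncio.run(main())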
def grapCookie(self,url):
session = self.getsession(url)
@@ -108,15 +129,12 @@ class HttpClient:
return filtered
async def make_request(self, url, method='GET',
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
stream_func=None,
headers=None,
use_proxy=False
):
connector = None
if use_proxy:
connector = ProxyConnector.from_url(self.socks5_proxy_url)
reco = aiohttp.ClientSession(connector=connector)
@@ -132,18 +150,47 @@ class HttpClient:
if headers == {}:
headers = None
debug(f'{method=}, {url=}, {headers=}, {data=}')
resp = await session.request(method, url,
ssl_ctx = ssl.create_default_context(cafile=certifi.where())
return await session.request(method, url,
params=params,
data=data,
json=jd,
headers=headers)
if resp.status==200:
return await self.handleResp(url, resp, response_type, stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=})'
exception(msg)
raise HttpError(resp.status, msg)
headers=headers,
ssl=ssl_ctx
)
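The ssl= argument added above pins certificate verification to certifi's CA bundle. A minimal standalone sketch of that aiohttp pattern, with a placeholder URL:

import asyncio
import ssl

import aiohttp
import certifi

async def fetch(url):
    # Verify TLS against certifi's CA bundle, passed per request via ssl=.
    ssl_ctx = ssl.create_default_context(cafile=certifi.where())
    async with aiohttp.ClientSession() as session:
        async with session.get(url, ssl=ssl_ctx) as resp:
            return await resp.text()

if __name__ == '__main__':
    print(asyncio.run(fetch('https://example.com'))[:200])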
async def get_request_response(self, url, method='GET',
params=None,
data=None,
jd=None,
headers=None,
**kw
):
try:
return await self.make_request(url, method=method,
params=params,
data=data,
jd=jd,
use_proxy=False,
headers=headers
)
except Exception:
e = Exception('make_request error')
exception(f'{e=}, {format_exc()}')
if self.socks5_proxy_url is None:
raise e
domain = get_domain(url)
debug(f'{self.socks5_proxy_url=}, {self.blocked_domains=}, {domain=}')
if domain not in self.blocked_domains:
self.blocked_domains.add(domain)
self.save_cache()
return await self.make_request(url, method=method,
params=params,
data=data,
jd=jd,
use_proxy=True,
headers=headers
)
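get_request_response() tries a direct connection first and only falls back to the SOCKS5 proxy after a failure, remembering the blocked domain. Below is an illustrative standalone sketch of that strategy, not this module's API; it assumes ProxyConnector comes from the aiohttp_socks package, and the proxy URL and target are placeholders.

import asyncio
from urllib.parse import urlparse

import aiohttp
from aiohttp_socks import ProxyConnector  # assumed source of ProxyConnector

BLOCKED_DOMAINS = set()
SOCKS5_PROXY_URL = 'socks5://127.0.0.1:1086'  # placeholder proxy address

async def fetch_with_fallback(url):
    domain = urlparse(url).netloc
    try:
        # First attempt: direct connection.
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                return await resp.text()
    except Exception:
        if SOCKS5_PROXY_URL is None:
            raise
        # Remember the domain that needed the proxy, then retry through it.
        BLOCKED_DOMAINS.add(domain)
    connector = ProxyConnector.from_url(SOCKS5_PROXY_URL)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return await resp.text()

if __name__ == '__main__':
    print(asyncio.run(fetch_with_fallback('https://example.com'))[:100])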
async def request(self, url, method='GET',
response_type=RESPONSE_TEXT,
@@ -154,64 +201,125 @@ class HttpClient:
headers=None,
**kw
):
if self.socks5_proxy_url is None:
resp = await self.make_request(url, method=method,
response_type=response_type,
params=params,
data=data,
jd=jd,
use_proxy=False,
stream_func=stream_func,
headers=headers
)
return resp
domain = get_domain(url)
if domain not in self.blocked_domains:
try:
resp = await self.make_request(url, method=method,
response_type=response_type,
params=params,
data=data,
jd=jd,
use_proxy=False,
stream_func=stream_func,
headers=headers
)
return resp
except:
if domain not in self.blocked_domains:
self.blocked_domains.add(domain)
self.save_cache()
resp = await self.make_request(url, method=method,
response_type=response_type,
params=params,
data=data,
jd=jd,
use_proxy=True,
stream_func=stream_func,
headers=headers
)
return resp
resp = await self.get_request_response(url, method=method,
params=params,
data=data,
jd=jd,
headers=headers,
**kw
)
if resp.status==200:
return await self.response_handle(url, resp,
resp_type=response_type,
stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=})'
exception(msg)
raise HttpError(resp.status, msg)
async def __call__(self, url, method='GET',
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
headers=None,
stream=False,
use_proxy=False,
**kw
):
resp = await self.get_request_response(url, method=method,
params=params,
data=data,
jd=jd,
headers=headers,
**kw)
if resp.status==200:
async for d in self.response_generator(url, resp, resp_type=response_type, stream=stream):
yield d
return
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=})'
exception(msg)
raise HttpError(resp.status, msg)
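Because __call__ is now an async generator, callers consume it with async for even for non-streaming responses. A hedged usage sketch, assuming the class is importable as appPublic.httpclient, that RESPONSE_TEXT is exported from the same module, and that socks5_proxy_url defaults to None (adjust the import to the real path):

import asyncio
from appPublic.httpclient import HttpClient, RESPONSE_TEXT  # assumed import path

async def main():
    hc = HttpClient()
    # Non-streaming: the generator yields a single decoded body and finishes.
    async for body in hc('https://example.com', response_type=RESPONSE_TEXT):
        print(body[:200])
    # Streaming: raw chunks are yielded as they arrive.
    async for chunk in hc('https://example.com', stream=True):
        print(len(chunk), 'bytes')
    await hc.close()

if __name__ == '__main__':
    asyncio.run(main())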
async def get(self,url,**kw):
return await self.request(url, 'GET', **kw)
async def post(self,url, **kw):
return await self.request(url, 'POST', **kw)
session = self.getsession(url)
class JsonHttpAPI:
def __init__(self, env={}, socks5_proxy_url=None):
self.env = env
self.te = MyTemplateEngine([], env=env)
self.hc = HttpClient(socks5_proxy_url=socks5_proxy_url)
async def stream_func(self, chunk):
debug(f'{chunk=}')
# accumulate decoded text until at least one full line is buffered
if isinstance(chunk, bytes):
chunk = chunk.decode('utf-8')
d = self.chunk_buffer + chunk
if '\n' not in d:
self.chunk_buffer = d
return
a, b = d.split('\n', 1)
self.chunk_buffer = b
if self.resptmpl:
ns1 = json.loads(a)
a = self.te.renders(self.resptmpl, ns1)
if self.user_stream_func:
jd = json.loads(a)
await self.user_stream_func(jd)
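stream_func() above buffers incoming chunks and emits one complete newline-terminated record at a time. A tiny standalone sketch of that buffering technique, with made-up sample chunks:

def feed(buffer, chunk):
    # Accumulate decoded text; return (remaining_partial_line, complete_lines).
    buffer += chunk.decode('utf-8')
    *lines, buffer = buffer.split('\n')
    return buffer, lines

buf = ''
for piece in (b'{"a": 1}\n{"b"', b': 2}\n{"c": 3}\n'):
    buf, lines = feed(buf, piece)
    for line in lines:
        print(line)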
async def __call__(self, url, method='GET', ns={},
stream=False,
headerstmpl=None,
paramstmpl=None,
datatmpl=None,
chunk_leading=None,
chunk_end="[done]",
resptmpl=None):
headers = None
ns1 = self.env.copy()
ns1.update(ns)
if headerstmpl:
headers = json.loads(self.te.renders(headerstmpl, ns1))
info(f'{headers=},{ns=}, {headerstmpl=}')
params = None
if paramstmpl:
params = json.loads(self.te.renders(paramstmpl, ns1))
data = None
if datatmpl:
datadic = json.loads(self.te.renders(datatmpl, ns1))
data = json.dumps(datadic, ensure_ascii=False)
"""
data = FormData()
for k,v in datadic.items():
data.add_field(k, v)
headers['Content-Type'] = 'multipart/form-data'
"""
info(f'{data=},{ns=}, {headers=}')
async for d in self.hc(url, method=method,
stream=stream,
headers=headers,
params=params,
data=data):
if stream:
d = self.chunk_handle(d, chunk_leading, chunk_end)
if isinstance(d, dict) and resptmpl:
ds = self.te.renders(resptmpl, d)
yield json.loads(ds)
else:
yield d
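A hedged usage sketch for the template-driven __call__: the import path is assumed, and the header/data templates below use Jinja2-style {{ }} placeholders on the assumption that MyTemplateEngine accepts that syntax; adapt them to whatever appPublic.myTE actually expects. The URL, keys, and values are placeholders.

import asyncio
from appPublic.httpclient import JsonHttpAPI  # assumed import path

HEADERS_TMPL = '{"Authorization": "Bearer {{apikey}}", "Content-Type": "application/json"}'
DATA_TMPL = '{"model": "{{model}}", "prompt": "{{prompt}}"}'

async def main():
    api = JsonHttpAPI(env={'apikey': 'sk-demo', 'model': 'demo-model'})
    async for d in api('https://example.com/v1/complete',
                       method='POST',
                       ns={'prompt': 'hello'},
                       headerstmpl=HEADERS_TMPL,
                       datatmpl=DATA_TMPL):
        print(d)

if __name__ == '__main__':
    asyncio.run(main())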
async def call(self, url, method='GET', ns={},
stream_func=None,
headerstmpl=None,
paramstmpl=None,
datatmpl=None,
chunk_leading=None,
chunk_end="[done]",
resptmpl=None):
self.user_stream_func = stream_func
self.chunk_leading = chunk_leading
self.chunk_end = chunk_end
self.chunk_buffer = ''
self.resptmpl =resptmpl
headers = None
ns1 = self.env.copy()
ns1.update(ns)
@@ -253,11 +361,11 @@ class JsonHttpAPI:
if __name__ == '__main__':
async def main():
hc = HttpClient(socks5_proxy_url='socks5://localhost:1086')
r = await hc.request('https://www.baidu.com')
print(r)
async for d in hc('https://www.baidu.com'):
print(d)
r = await hc.request('https://www.google.com')
print(r)
await hc.close()
loop = asyncio.get_event_loop()
loop = asyncio.new_event_loop()
loop.run_until_complete(main())