bugfix

parent fededc673a
commit 20b8db7abd
@@ -90,20 +90,12 @@ class HttpClient:
         self.session = aiohttp.ClientSession(cookie_jar=jar)
         return self.session

-    async def response_generator(self, url, resp, resp_type=None, stream=False):
+    async def response_generator(self, url, resp):
         if resp.cookies is not None:
             self.setCookie(url,resp.cookies)

-        if stream:
-            async for chunk in resp.content.iter_chunked(1024):
-                yield chunk
-        else:
-            if resp_type == RESPONSE_BIN:
-                yield await resp.read()
-            if resp_type == RESPONSE_JSON:
-                yield await resp.json()
-            if resp_type == RESPONSE_TEXT:
-                yield await resp.text(self.coding)
+        async for chunk in resp.content.iter_chunked(1024):
+            yield chunk

     async def response_handle(self,url, resp, resp_type=None, stream_func=None):
         if resp.cookies is not None:

@@ -236,7 +228,7 @@ class HttpClient:
                 headers=headers,
                 **kw)
             if resp.status==200:
-                async for d in self.response_generator(url, resp, resp_type=response_type, stream=stream):
+                async for d in self.response_generator(url, resp):
                     yield d
                 return
             msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=})'
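With resp_type and stream gone, decoding moves to the call site. A minimal sketch (not in the diff), assuming an HttpClient instance client, an open aiohttp response resp, and a hypothetical helper name read_json:

    import json

    async def read_json(client, url, resp):
        # the generator now always yields raw byte chunks; collect and decode here
        chunks = [chunk async for chunk in client.response_generator(url, resp)]
        return json.loads(b''.join(chunks))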
appPublic/streamhttpclient.py (new executable file, 132 lines added)
@@ -0,0 +1,132 @@
#!/Users/ymq/p3.12/bin/python

from traceback import format_exc
import aiohttp
import asyncio
from aiohttp_socks import ProxyConnector
from pathlib import Path
import certifi
import ssl
import os

class StreamHttpClient:
    def __init__(self, socks5_url="socks5://127.0.0.1:1080"):
        home = os.path.expanduser("~")
        self.socks_urls_file = Path(f'{home}/.socksurls.txt')
        self.socks5_url = socks5_url
        self.socks_urls = set(self._load_socks_urls())
        self.ssl_context = ssl.create_default_context(cafile=certifi.where())

    # URLs that needed the SOCKS5 proxy are persisted in ~/.socksurls.txt
    def _load_socks_urls(self):
        if self.socks_urls_file.exists():
            return [line.strip() for line in self.socks_urls_file.read_text().splitlines() if line.strip()]
        return []

    def _save_socks_url(self, url):
        if url not in self.socks_urls:
            self.socks_urls.add(url)
            with self.socks_urls_file.open("a") as f:
                f.write(url + "\n")

    async def __call__(self, method, url, *,
                       headers=None,
                       params=None,
                       data=None,
                       json=None,
                       files=None,
                       chunk_size=1024):
        """
        Makes an HTTP request and yields the response body in streamed chunks.
        Tries a direct connection first; on failure it retries through the
        SOCKS5 proxy and remembers the URL for next time.
        """
        use_socks = url in self.socks_urls
        try:
            if use_socks:
                print(f"🔁 Using SOCKS5 directly for: {url}")
                async for chunk in self._request_with_connector(
                        method, url,
                        headers=headers, params=params, data=data,
                        json=json, files=files,
                        use_socks=True, chunk_size=chunk_size
                ):
                    yield chunk
            else:
                print(f"🌐 Trying direct request: {url}")
                async for chunk in self._request_with_connector(
                        method, url,
                        headers=headers, params=params, data=data,
                        json=json, files=files,
                        use_socks=False, chunk_size=chunk_size
                ):
                    yield chunk
        except Exception as e:
            if use_socks:
                print(f"❌ SOCKS5 request failed: {e},{format_exc()}")
                return
            print(f"❌ Direct request failed: {e}")
            print("🧦 Retrying with SOCKS5 proxy...")
            try:
                async for chunk in self._request_with_connector(
                        method, url,
                        headers=headers, params=params, data=data,
                        json=json, files=files,
                        use_socks=True, chunk_size=chunk_size
                ):
                    self._save_socks_url(url)
                    yield chunk
            except Exception as e2:
                print(f"❌ SOCKS5 request also failed: {e2},{format_exc()}")

    async def _request_with_connector(self, method, url,
                                      headers=None, params=None, data=None,
                                      json=None, files=None,
                                      use_socks=False, chunk_size=1024):
        connector = ProxyConnector.from_url(self.socks5_url) if use_socks else None

        async with aiohttp.ClientSession(connector=connector) as session:
            req_args = {
                "headers": headers,
                "params": params,
                "timeout": 30,
                "ssl": self.ssl_context,
            }

            if files:
                # multipart upload: plain fields from data, then each file entry
                # unpacked into FormData.add_field()
                form = aiohttp.FormData()

                if isinstance(data, dict):
                    for k, v in data.items():
                        form.add_field(k, str(v))

                for name, file_info in files.items():
                    form.add_field(name, *file_info)

                req_args["data"] = form
            else:
                if json is not None:
                    req_args["json"] = json
                else:
                    req_args["data"] = data

            async with session.request(method, url, **req_args) as response:
                response.raise_for_status()
                async for chunk in response.content.iter_chunked(chunk_size):
                    yield chunk

if __name__ == '__main__':
    import asyncio
    import sys

    async def main():
        if len(sys.argv) > 1:
            prompt = sys.argv[1]
        else:
            prompt = 'who are you'
        hc = StreamHttpClient()
        url = 'http://devops.opencomputing.ai/v1/chat/completions'
        headers = {'Content-Type': 'application/json'}
        data = '{ "model": "devstral", "stream":true, "messages":[ { "role":"user", "content":"' + prompt + '" } ] }'
        async for chunk in hc('POST', url, data=data, headers=headers):
            print(chunk)

    asyncio.new_event_loop().run_until_complete(main())
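A minimal usage sketch (not in the diff; the URL and file name are placeholders): stream a download to disk with StreamHttpClient. If the direct connection fails, the client retries through the SOCKS5 proxy and records the URL in ~/.socksurls.txt, so later requests use the proxy immediately.

    import asyncio

    async def download(url, dest):
        hc = StreamHttpClient()
        with open(dest, 'wb') as f:  # plain blocking writes, fine for a sketch
            async for chunk in hc('GET', url):
                f.write(chunk)

    asyncio.run(download('https://example.com/big.bin', 'big.bin'))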