def get_domain(url):
    """Return the host name (without port) of *url*.

    Accepts URLs with or without a scheme: a scheme-less value such as
    'example.com/path' is given an 'http://' prefix first, because
    urlparse() puts a scheme-less host into .path, not .netloc.

    Args:
        url: absolute URL or bare 'host[:port][/path]' string.
    Returns:
        The host part, ports stripped, e.g. 'example.com'.
    """
    # Bug fix: the original tested startswith(('http://', 'https://')),
    # so any other or differently-cased scheme ('ws://', 'HTTP://', 'ftp://')
    # got a second 'http://' prepended and the "domain" came back as the
    # scheme name.  Detect a scheme generically instead.
    if '://' not in url:
        url = 'http://' + url
    netloc = urlparse(url).netloc
    # Drop an explicit port, if any ('host:8080' -> 'host').
    return netloc.split(':')[0]
self.session = None - def url2domain(self,url): - parts = url.split('/')[:3] - pre = '/'.join(parts) - return pre - def setCookie(self,url,cookies): - name = self.url2domain(url) + name = get_domain(url) self.cookies[name] = cookies def getCookies(self,url): - name = url2domain(url) + name = get_domain(url) return self.cookies.get(name,None) def getsession(self,url): @@ -69,39 +103,92 @@ class HttpClient: def grapCookie(self,url): session = self.getsession(url) - domain = self.url2domain(url) + domain = get_domain(url) filtered = session.cookie_jar.filter_cookies(domain) return filtered - async def request(self, url, method, + async def make_request(self, url, method, response_type=RESPONSE_TEXT, params=None, data=None, jd=None, stream_func=None, headers=None, + use_proxy=False, **kw): - session = self.getsession(url) - if params == {}: - params = None - if data == {}: - data = None - if jd == {}: - jd = None - if headers == {}: - headers = None + async with aiohttp.ClientSession() as session: + if params == {}: + params = None + if data == {}: + data = None + if jd == {}: + jd = None + proxy = None + if use_proxy and self.socks5_proxy_url: + proxy = self.socks5_proxy_url - resp = await session.request(method, url, - params=params, - data=data, - json=jd, - headers=headers, **kw) - if resp.status==200: - return await self.handleResp(url, resp, response_type, stream_func=stream_func) - msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})' - exception(msg) - raise HttpError(resp.status, msg) + if headers == {}: + headers = None + resp = await session.request(method, url, + params=params, + data=data, + json=jd, + proxy=proxy, + headers=headers, **kw) + if resp.status==200: + return await self.handleResp(url, resp, response_type, stream_func=stream_func) + msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})' + exception(msg) + raise HttpError(resp.status, msg) + + async def request(self, url, method, 
+ response_type=RESPONSE_TEXT, + params=None, + data=None, + jd=None, + stream_func=None, + headers=None, + **kw + ): + if self.socks5_proxy_url is None: + resp = await self.make_request(url, method, + response_type=response_type, + params=params, + data=data, + jd=jd, + proxy=False, + stream_func=stream_func, + headers=headers, + **kw) + return resp + domain = get_domain(url) + if domain not in self.blocked_domains: + try: + resp = await self.make_request(url, method, + response_type=response_type, + params=params, + data=data, + jd=jd, + proxy=False, + stream_func=stream_func, + headers=headers, + **kw) + return resp + except aiohttp.ClientError: + if domain not in self.blocked_domains: + self.save_cache() + resp = await self.make_request(url, method, + response_type=response_type, + params=params, + data=data, + jd=jd, + proxy=True, + stream_func=stream_func, + headers=headers, + **kw) + return resp + async def get(self,url,**kw): return self.request(url, 'GET', **kw) From ec090a205ec64fe3d06d68a9c24c7e3168a51d9b Mon Sep 17 00:00:00 2001 From: yumoqing Date: Thu, 6 Mar 2025 01:02:28 +0800 Subject: [PATCH 2/2] bugfix --- appPublic/httpclient.py | 226 ++++++++++++++++++++-------------------- 1 file changed, 114 insertions(+), 112 deletions(-) diff --git a/appPublic/httpclient.py b/appPublic/httpclient.py index dec29ef..08e4eec 100755 --- a/appPublic/httpclient.py +++ b/appPublic/httpclient.py @@ -1,3 +1,4 @@ +import os import asyncio import aiohttp from aiohttp import FormData @@ -9,13 +10,13 @@ from urllib.parse import urlparse from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector def get_domain(url): - # Prepend 'http://' if the URL lacks a scheme - if not url.startswith(('http://', 'https://')): - url = 'http://' + url - parsed_url = urlparse(url) - netloc = parsed_url.netloc - domain = netloc.split(':')[0] - return domain + # Prepend 'http://' if the URL lacks a scheme + if not url.startswith(('http://', 'https://')): + url = 'http://' + url 
+ parsed_url = urlparse(url) + netloc = parsed_url.netloc + domain = netloc.split(':')[0] + return domain RESPONSE_BIN = 0 RESPONSE_TEXT = 1 @@ -39,34 +40,33 @@ class HttpClient: self.coding = coding self.session = None self.cookies = {} - self.socks5proxy = socks5proxy - self.proxy_connector = None - self.socks5_proxy_url = socks5_proxy_url - self.blocked_domains = set() - self.load_cache() + self.proxy_connector = None + self.socks5_proxy_url = socks5_proxy_url + self.blocked_domains = set() + self.load_cache() - def save_cache(self): - home_dir = os.path.expanduser('~') - cache_file = os.path.join(home_dir, '.proxytarget') - with open(cache_file, 'w') as f: - for d in self.blocked_domains: - f.write(f'{d}\n') + def save_cache(self): + home_dir = os.path.expanduser('~') + cache_file = os.path.join(home_dir, '.proxytarget') + with open(cache_file, 'w') as f: + for d in self.blocked_domains: + f.write(f'{d}\n') - def load_cache(self): - # 初始化缓存文件 - home_dir = os.path.expanduser('~') - cache_file = os.path.join(home_dir, '.proxytarget') - - try: - with open(cache_file, 'r') as f: - for line in f: - domain = line.strip() - if domain: - self.blocked_domains.add(domain) - except FileNotFoundError: - # 创建空文件 - with open(cache_file, 'w') as f: - pass + def load_cache(self): + # 初始化缓存文件 + home_dir = os.path.expanduser('~') + cache_file = os.path.join(home_dir, '.proxytarget') + + try: + with open(cache_file, 'r') as f: + for line in f: + domain = line.strip() + if domain: + self.blocked_domains.add(domain) + except FileNotFoundError: + # 创建空文件 + with open(cache_file, 'w') as f: + pass async def close(self): if self.session: @@ -107,88 +107,88 @@ class HttpClient: filtered = session.cookie_jar.filter_cookies(domain) return filtered - async def make_request(self, url, method, + async def make_request(self, url, method='GET', response_type=RESPONSE_TEXT, params=None, data=None, jd=None, stream_func=None, headers=None, - use_proxy=False, - **kw): - async with 
aiohttp.ClientSession() as session: - if params == {}: - params = None - if data == {}: - data = None - if jd == {}: - jd = None - proxy = None - if use_proxy and self.socks5_proxy_url: - proxy = self.socks5_proxy_url + use_proxy=False + ): + connector = None + if use_proxy: + connector = ProxyConnector.from_url(self.socks5_proxy_url) + async with aiohttp.ClientSession(connector=connector) as session: + if params == {}: + params = None + if data == {}: + data = None + if jd == {}: + jd = None - if headers == {}: - headers = None + if headers == {}: + headers = None - resp = await session.request(method, url, - params=params, - data=data, - json=jd, - proxy=proxy, - headers=headers, **kw) - if resp.status==200: - return await self.handleResp(url, resp, response_type, stream_func=stream_func) - msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})' - exception(msg) - raise HttpError(resp.status, msg) + resp = await session.request(method, url, + params=params, + data=data, + json=jd, + headers=headers) + if resp.status==200: + return await self.handleResp(url, resp, response_type, stream_func=stream_func) + msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})' + exception(msg) + raise HttpError(resp.status, msg) - async def request(self, url, method, - response_type=RESPONSE_TEXT, - params=None, - data=None, - jd=None, - stream_func=None, - headers=None, - **kw - ): - if self.socks5_proxy_url is None: - resp = await self.make_request(url, method, - response_type=response_type, - params=params, - data=data, - jd=jd, - proxy=False, - stream_func=stream_func, - headers=headers, - **kw) - return resp - domain = get_domain(url) - if domain not in self.blocked_domains: - try: - resp = await self.make_request(url, method, - response_type=response_type, - params=params, - data=data, - jd=jd, - proxy=False, - stream_func=stream_func, - headers=headers, - **kw) - return resp - except aiohttp.ClientError: - if 
domain not in self.blocked_domains: - self.save_cache() - resp = await self.make_request(url, method, - response_type=response_type, - params=params, - data=data, - jd=jd, - proxy=True, - stream_func=stream_func, - headers=headers, - **kw) - return resp - + async def request(self, url, method='GET', + response_type=RESPONSE_TEXT, + params=None, + data=None, + jd=None, + stream_func=None, + headers=None, + **kw + ): + if self.socks5_proxy_url is None: + resp = await self.make_request(url, method=method, + response_type=response_type, + params=params, + data=data, + jd=jd, + use_proxy=False, + stream_func=stream_func, + headers=headers + ) + return resp + domain = get_domain(url) + if domain not in self.blocked_domains: + try: + resp = await self.make_request(url, method=method, + response_type=response_type, + params=params, + data=data, + jd=jd, + use_proxy=False, + stream_func=stream_func, + headers=headers + ) + return resp + except: + if domain not in self.blocked_domains: + self.blocked_domains.add(domain) + self.save_cache() + resp = await self.make_request(url, method=method, + response_type=response_type, + params=params, + data=data, + jd=jd, + use_proxy=True, + stream_func=stream_func, + headers=headers + ) + return resp + async def get(self,url,**kw): return self.request(url, 'GET', **kw) @@ -197,10 +197,10 @@ class HttpClient: session = self.getsession(url) class JsonHttpAPI: - def __init__(self, env={}): + def __init__(self, env={}, socks5_proxy_url=None): self.env = env self.te = MyTemplateEngine([], env=env) - self.hc = HttpClient() + self.hc = HttpClient(socks5_proxy_url=socks5_proxy_url) async def call(self, url, method='GET', ns={}, stream_func=None, @@ -234,11 +234,13 @@ class JsonHttpAPI: return ret if __name__ == '__main__': - async def gbaidu(hc): - r = await hc.get('https://www.baidu.com') + async def main(): + hc = HttpClient(socks5_proxy_url='socks5://localhost:1086') + r = await hc.request('https://www.baidu.com') + print(r) + r = await 
hc.request('https://www.google.com') print(r) await hc.close() loop = asyncio.get_event_loop() - hc = HttpClient() - loop.run_until_complete(gbaidu(hc)) + loop.run_until_complete(main())