This commit is contained in:
yumoqing 2025-03-06 00:30:44 +08:00
parent f605e8ad11
commit e5d72b67bf

View File

@ -5,6 +5,17 @@ import json
import re
from urllib.parse import urlparse

from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector

from appPublic.myTE import MyTemplateEngine
from appPublic.log import info, debug, warning, error, exception, critical
def get_domain(url):
    """Return the bare host name of *url*, with any port stripped.

    A scheme is prepended when missing, because urlparse() only
    recognises the authority part of a URL that has one.
    """
    if not url.startswith(('http://', 'https://')):
        url = f'http://{url}'
    netloc = urlparse(url).netloc
    host, _, _port = netloc.partition(':')
    return host
# Response-body handling modes understood by HttpClient.handleResp().
RESPONSE_BIN = 0
RESPONSE_TEXT = 1
@ -24,27 +35,50 @@ class HttpError(Exception):
return str(self) return str(self)
class HttpClient: class HttpClient:
def __init__(self, coding='utf-8', socks5_proxy_url=None):
    """Create an HTTP client.

    Args:
        coding: default character encoding for response bodies.
        socks5_proxy_url: optional SOCKS5 proxy URL used as a fallback
            for domains that cannot be reached directly.
    """
    self.coding = coding
    self.session = None
    self.cookies = {}
    self.proxy_connector = None
    self.socks5_proxy_url = socks5_proxy_url
    # Domains known to be unreachable directly; persisted in ~/.proxytarget.
    self.blocked_domains = set()
    # NOTE: the original also did `self.socks5proxy = socks5proxy`, but
    # `socks5proxy` is undefined (the parameter is socks5_proxy_url),
    # which raised NameError on every construction; removed.
    self.load_cache()
def save_cache(self):
    """Persist self.blocked_domains to ~/.proxytarget, one domain per line."""
    cache_path = os.path.join(os.path.expanduser('~'), '.proxytarget')
    lines = [f'{domain}\n' for domain in self.blocked_domains]
    with open(cache_path, 'w') as f:
        f.writelines(lines)
def load_cache(self):
    """Load blocked domains from ~/.proxytarget into self.blocked_domains.

    When the cache file does not exist yet, an empty one is created so
    subsequent loads and saves find it in place.
    """
    cache_path = os.path.join(os.path.expanduser('~'), '.proxytarget')
    try:
        with open(cache_path, 'r') as f:
            stripped = (line.strip() for line in f)
            self.blocked_domains.update(d for d in stripped if d)
    except FileNotFoundError:
        # First run: create an empty cache file.
        with open(cache_path, 'w') as f:
            pass
async def close(self):
    """Close the cached aiohttp session, if one exists, and forget it."""
    if self.session:
        await self.session.close()
        self.session = None
def url2domain(self, url):
    """Return the scheme+authority prefix of *url* (everything before the path)."""
    scheme_and_host = url.split('/')[:3]
    return '/'.join(scheme_and_host)
def setCookie(self, url, cookies):
    """Remember *cookies* under the domain extracted from *url*."""
    name = get_domain(url)
    self.cookies[name] = cookies

def getCookies(self, url):
    """Return cookies previously stored for *url*'s domain, or None."""
    name = get_domain(url)
    return self.cookies.get(name, None)
def getsession(self,url): def getsession(self,url):
@ -69,10 +103,45 @@ class HttpClient:
def grapCookie(self, url):
    """Return the cookies the session's jar holds for *url*'s domain."""
    session = self.getsession(url)
    domain = get_domain(url)
    filtered = session.cookie_jar.filter_cookies(domain)
    return filtered
async def make_request(self, url, method,
        response_type=RESPONSE_TEXT,
        params=None,
        data=None,
        jd=None,
        stream_func=None,
        headers=None,
        use_proxy=False,
        **kw):
    """Perform one *method* request on *url* in a throw-away ClientSession.

    Args:
        response_type: one of the RESPONSE_* codes handleResp() understands.
        params / data / jd: query parameters, form body and JSON body;
            empty dicts are normalized to None so nothing is sent.
        stream_func: optional callback forwarded to handleResp().
        use_proxy: when true and self.socks5_proxy_url is set, route the
            request through that proxy.

    Raises:
        HttpError: when the server answers with a status other than 200.
    """
    # request() passes this flag as `proxy=`; accept that spelling and
    # pop it out of **kw so it cannot collide with aiohttp's own `proxy`
    # keyword in session.request() below (which raised TypeError before).
    if 'proxy' in kw:
        use_proxy = bool(kw.pop('proxy'))
    if params == {}:
        params = None
    if data == {}:
        data = None
    if jd == {}:
        jd = None
    if headers == {}:
        headers = None
    proxy = self.socks5_proxy_url if (use_proxy and self.socks5_proxy_url) else None
    async with aiohttp.ClientSession() as session:
        # Context-manage the response so the connection is released on
        # every path, including the error branch below.
        async with session.request(method, url,
                params=params,
                data=data,
                json=jd,
                proxy=proxy,
                headers=headers, **kw) as resp:
            if resp.status == 200:
                return await self.handleResp(url, resp, response_type, stream_func=stream_func)
            msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
            exception(msg)
            raise HttpError(resp.status, msg)
async def request(self, url, method,
        response_type=RESPONSE_TEXT,
        params=None,
        data=None,
        jd=None,
        stream_func=None,
        headers=None,
        **kw):
    """Request *url*, falling back to the SOCKS5 proxy for blocked domains.

    Without a configured proxy every request goes direct.  With one, a
    direct attempt is made first; on aiohttp.ClientError the domain is
    added to self.blocked_domains, the set is persisted via save_cache(),
    and the request is retried through the proxy.  Domains already known
    to be blocked skip the direct attempt entirely.

    Raises:
        HttpError: propagated from make_request() on non-200 responses.
    """
    if self.socks5_proxy_url is None:
        return await self.make_request(url, method,
            response_type=response_type,
            params=params,
            data=data,
            jd=jd,
            use_proxy=False,
            stream_func=stream_func,
            headers=headers,
            **kw)
    domain = get_domain(url)
    if domain not in self.blocked_domains:
        try:
            return await self.make_request(url, method,
                response_type=response_type,
                params=params,
                data=data,
                jd=jd,
                use_proxy=False,
                stream_func=stream_func,
                headers=headers,
                **kw)
        except aiohttp.ClientError:
            # Direct access failed: remember the domain (the original
            # called save_cache() without adding it, so nothing was
            # ever blocked or persisted) and fall through to the proxy.
            self.blocked_domains.add(domain)
            self.save_cache()
    return await self.make_request(url, method,
        response_type=response_type,
        params=params,
        data=data,
        jd=jd,
        use_proxy=True,
        stream_func=stream_func,
        headers=headers,
        **kw)
async def get(self, url, **kw):
    """HTTP GET convenience wrapper around request().

    The original returned the un-awaited coroutine from an async def,
    so `await client.get(url)` yielded a coroutine instead of the
    response; await it here.
    """
    return await self.request(url, 'GET', **kw)