yumoqing 2025-03-06 00:30:44 +08:00
parent f605e8ad11
commit e5d72b67bf


@@ -5,6 +5,17 @@ import json
from appPublic.myTE import MyTemplateEngine
import re
from appPublic.log import info, debug, warning, error, exception, critical
from urllib.parse import urlparse
from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector
def get_domain(url):
# Prepend 'http://' if the URL lacks a scheme
if not url.startswith(('http://', 'https://')):
url = 'http://' + url
parsed_url = urlparse(url)
netloc = parsed_url.netloc
domain = netloc.split(':')[0]
return domain
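# Illustrative examples (not part of the commit), following the logic above:
#   get_domain('https://example.com:8443/path') -> 'example.com'
#   get_domain('example.com/path')              -> 'example.com'  (scheme is prepended first)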
RESPONSE_BIN = 0
RESPONSE_TEXT = 1
@@ -24,27 +35,50 @@ class HttpError(Exception):
return str(self)
class HttpClient:
def __init__(self,coding='utf-8'):
def __init__(self,coding='utf-8', socks5_proxy_url=None):
self.coding = coding
self.session = None
self.cookies = {}
self.proxy_connector = None
self.socks5_proxy_url = socks5_proxy_url
self.blocked_domains = set()
self.load_cache()
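# The blocked-domain cache lives in ~/.proxytarget, one domain per line:
# load_cache() reads it when the client is created and save_cache() rewrites
# it whenever a new unreachable domain is recorded.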
def save_cache(self):
home_dir = os.path.expanduser('~')
cache_file = os.path.join(home_dir, '.proxytarget')
with open(cache_file, 'w') as f:
for d in self.blocked_domains:
f.write(f'{d}\n')
def load_cache(self):
# Initialize the cache file
home_dir = os.path.expanduser('~')
cache_file = os.path.join(home_dir, '.proxytarget')
try:
with open(cache_file, 'r') as f:
for line in f:
domain = line.strip()
if domain:
self.blocked_domains.add(domain)
except FileNotFoundError:
# Create an empty cache file
with open(cache_file, 'w') as f:
pass
async def close(self):
if self.session:
await self.session.close()
self.session = None
def url2domain(self,url):
parts = url.split('/')[:3]
pre = '/'.join(parts)
return pre
def setCookie(self,url,cookies):
name = self.url2domain(url)
name = get_domain(url)
self.cookies[name] = cookies
def getCookies(self,url):
name = url2domain(url)
name = get_domain(url)
return self.cookies.get(name,None)
def getsession(self,url):
@@ -69,39 +103,92 @@ class HttpClient:
def grapCookie(self,url):
session = self.getsession(url)
domain = self.url2domain(url)
domain = get_domain(url)
filtered = session.cookie_jar.filter_cookies(domain)
return filtered
async def request(self, url, method,
async def make_request(self, url, method,
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
stream_func=None,
headers=None,
use_proxy=False,
**kw):
session = self.getsession(url)
if params == {}:
params = None
if data == {}:
data = None
if jd == {}:
jd = None
if headers == {}:
headers = None
connector = None
if use_proxy and self.socks5_proxy_url:
# aiohttp's proxy= argument only supports HTTP proxies, so a SOCKS5
# URL has to go through an aiohttp_socks ProxyConnector instead.
connector = ProxyConnector.from_url(self.socks5_proxy_url)
async with aiohttp.ClientSession(connector=connector) as session:
if params == {}:
params = None
if data == {}:
data = None
if jd == {}:
jd = None
resp = await session.request(method, url,
params=params,
data=data,
json=jd,
headers=headers, **kw)
if resp.status==200:
return await self.handleResp(url, resp, response_type, stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
exception(msg)
raise HttpError(resp.status, msg)
if headers == {}:
headers = None
resp = await session.request(method, url,
params=params,
data=data,
json=jd,
headers=headers, **kw)
if resp.status==200:
return await self.handleResp(url, resp, response_type, stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
exception(msg)
raise HttpError(resp.status, msg)
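# Fallback strategy (summary of the code below): try a direct request first;
# if it fails with aiohttp.ClientError, record the domain in blocked_domains,
# persist the cache, and retry the same request through the SOCKS5 proxy.
# Domains already known to be blocked skip the direct attempt entirely.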
async def request(self, url, method,
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
stream_func=None,
headers=None,
**kw
):
if self.socks5_proxy_url is None:
resp = await self.make_request(url, method,
response_type=response_type,
params=params,
data=data,
jd=jd,
use_proxy=False,
stream_func=stream_func,
headers=headers,
**kw)
return resp
domain = get_domain(url)
if domain not in self.blocked_domains:
try:
resp = await self.make_request(url, method,
response_type=response_type,
params=params,
data=data,
jd=jd,
use_proxy=False,
stream_func=stream_func,
headers=headers,
**kw)
return resp
except aiohttp.ClientError:
if domain not in self.blocked_domains:
self.blocked_domains.add(domain)
self.save_cache()
resp = await self.make_request(url, method,
response_type=response_type,
params=params,
data=data,
jd=jd,
use_proxy=True,
stream_func=stream_func,
headers=headers,
**kw)
return resp
async def get(self,url,**kw):
return await self.request(url, 'GET', **kw)
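# Usage sketch (illustrative, not part of the commit); assumes a SOCKS5 proxy
# is reachable at socks5://127.0.0.1:1080 and that this module is importable:
#
#   import asyncio
#
#   async def main():
#       hc = HttpClient(socks5_proxy_url='socks5://127.0.0.1:1080')
#       text = await hc.request('https://example.com', 'GET')
#       print(text[:200])
#       await hc.close()
#
#   asyncio.run(main())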