Merge branch 'master' of git.kaiyuancloud.cn:yumoqing/appPublic

This commit is contained in:
yumoqing 2025-03-13 21:01:34 +08:00
commit dbfe00df59

View File

@@ -1,3 +1,4 @@
import os
import asyncio import asyncio
import aiohttp import aiohttp
from aiohttp import FormData from aiohttp import FormData
@@ -5,6 +6,17 @@ import json
from appPublic.myTE import MyTemplateEngine from appPublic.myTE import MyTemplateEngine
import re import re
from appPublic.log import info, debug, warning, error, exception, critical from appPublic.log import info, debug, warning, error, exception, critical
from urllib.parse import urlparse
from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector
def get_domain(url):
    """Return the hostname (no port, no scheme) of *url*.

    Accepts bare host names ('www.baidu.com'), full URLs, and URLs with
    ports or paths.  A scheme is prepended only when none is present, so
    non-http schemes (ftp://, socks5://, ...) are parsed correctly too —
    the old startswith(('http://', 'https://')) test mangled those.
    """
    # urlparse only fills netloc when a scheme is present; add a dummy
    # one for scheme-less inputs like 'example.com/path'.
    if '://' not in url:
        url = 'http://' + url
    parsed_url = urlparse(url)
    netloc = parsed_url.netloc
    # Strip an optional ':port' suffix to keep just the host name.
    domain = netloc.split(':')[0]
    return domain
RESPONSE_BIN = 0 RESPONSE_BIN = 0
RESPONSE_TEXT = 1 RESPONSE_TEXT = 1
@@ -24,27 +36,49 @@ class HttpError(Exception):
return str(self) return str(self)
class HttpClient: class HttpClient:
def __init__(self,coding='utf-8'): def __init__(self,coding='utf-8', socks5_proxy_url=None):
self.coding = coding self.coding = coding
self.session = None self.session = None
self.cookies = {} self.cookies = {}
self.proxy_connector = None
self.socks5_proxy_url = socks5_proxy_url
self.blocked_domains = set()
self.load_cache()
def save_cache(self):
	"""Persist self.blocked_domains to ~/.proxytarget, one domain per line."""
	cache_path = os.path.join(os.path.expanduser('~'), '.proxytarget')
	lines = [f'{domain}\n' for domain in self.blocked_domains]
	with open(cache_path, 'w') as f:
		f.writelines(lines)
def load_cache(self):
	"""Populate self.blocked_domains from the ~/.proxytarget cache file.

	Each non-blank line of the file is one domain that must be reached
	through the proxy.  When the file does not exist yet it is created
	empty so later save/load cycles work.
	"""
	cache_path = os.path.join(os.path.expanduser('~'), '.proxytarget')
	if not os.path.exists(cache_path):
		# First run: leave the set empty and create the cache file.
		open(cache_path, 'w').close()
		return
	with open(cache_path, 'r') as f:
		for raw_line in f:
			name = raw_line.strip()
			if name:
				self.blocked_domains.add(name)
async def close(self): async def close(self):
if self.session: if self.session:
await self.session.close() await self.session.close()
self.session = None self.session = None
def url2domain(self,url):
parts = url.split('/')[:3]
pre = '/'.join(parts)
return pre
def setCookie(self,url,cookies): def setCookie(self,url,cookies):
name = self.url2domain(url) name = get_domain(url)
self.cookies[name] = cookies self.cookies[name] = cookies
def getCookies(self,url): def getCookies(self,url):
name = url2domain(url) name = get_domain(url)
return self.cookies.get(name,None) return self.cookies.get(name,None)
def getsession(self,url): def getsession(self,url):
@@ -69,25 +103,30 @@ class HttpClient:
def grapCookie(self,url): def grapCookie(self,url):
session = self.getsession(url) session = self.getsession(url)
domain = self.url2domain(url) domain = get_domain(url)
filtered = session.cookie_jar.filter_cookies(domain) filtered = session.cookie_jar.filter_cookies(domain)
return filtered return filtered
async def request(self, url, method, async def make_request(self, url, method='GET',
response_type=RESPONSE_TEXT, response_type=RESPONSE_TEXT,
params=None, params=None,
data=None, data=None,
jd=None, jd=None,
stream_func=None, stream_func=None,
headers=None, headers=None,
**kw): use_proxy=False
session = self.getsession(url) ):
connector = None
if use_proxy:
connector = ProxyConnector.from_url(self.socks5_proxy_url)
async with aiohttp.ClientSession(connector=connector) as session:
if params == {}: if params == {}:
params = None params = None
if data == {}: if data == {}:
data = None data = None
if jd == {}: if jd == {}:
jd = None jd = None
if headers == {}: if headers == {}:
headers = None headers = None
@@ -95,13 +134,61 @@ class HttpClient:
params=params, params=params,
data=data, data=data,
json=jd, json=jd,
headers=headers, **kw) headers=headers)
if resp.status==200: if resp.status==200:
return await self.handleResp(url, resp, response_type, stream_func=stream_func) return await self.handleResp(url, resp, response_type, stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})' msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
exception(msg) exception(msg)
raise HttpError(resp.status, msg) raise HttpError(resp.status, msg)
async def request(self, url, method='GET',
			response_type=RESPONSE_TEXT,
			params=None,
			data=None,
			jd=None,
			stream_func=None,
			headers=None,
			**kw
			):
	"""Fetch *url*, transparently falling back to the SOCKS5 proxy.

	Strategy:
	  * no proxy configured  -> always request directly;
	  * domain not known bad -> try directly first; on failure remember
	    the domain (persisted via save_cache) and retry via the proxy;
	  * domain already known bad -> go straight through the proxy.

	Propagates HttpError from make_request when the final attempt fails.
	"""
	# Build the shared keyword set once instead of repeating it per call.
	kwargs = dict(method=method,
			response_type=response_type,
			params=params,
			data=data,
			jd=jd,
			stream_func=stream_func,
			headers=headers)
	if self.socks5_proxy_url is None:
		# No proxy available: a direct request is the only option.
		return await self.make_request(url, use_proxy=False, **kwargs)
	domain = get_domain(url)
	if domain not in self.blocked_domains:
		try:
			return await self.make_request(url, use_proxy=False, **kwargs)
		except Exception as e:
			# NOTE: was a bare `except:`, which also swallowed
			# CancelledError/KeyboardInterrupt.  Direct access failed:
			# remember the domain so future requests skip the direct
			# attempt, then fall through to the proxied retry.
			warning(f'direct request to {domain} failed ({e}), retrying via proxy')
			self.blocked_domains.add(domain)
			self.save_cache()
	return await self.make_request(url, use_proxy=True, **kwargs)
async def get(self,url,**kw): async def get(self,url,**kw):
return self.request(url, 'GET', **kw) return self.request(url, 'GET', **kw)
@@ -110,10 +197,10 @@ class HttpClient:
session = self.getsession(url) session = self.getsession(url)
class JsonHttpAPI: class JsonHttpAPI:
def __init__(self, env={}): def __init__(self, env={}, socks5_proxy_url=None):
self.env = env self.env = env
self.te = MyTemplateEngine([], env=env) self.te = MyTemplateEngine([], env=env)
self.hc = HttpClient() self.hc = HttpClient(socks5_proxy_url=socks5_proxy_url)
async def call(self, url, method='GET', ns={}, async def call(self, url, method='GET', ns={},
stream_func=None, stream_func=None,
@@ -147,11 +234,13 @@ class JsonHttpAPI:
return ret return ret
if __name__ == '__main__': if __name__ == '__main__':
async def gbaidu(hc): async def main():
r = await hc.get('https://www.baidu.com') hc = HttpClient(socks5_proxy_url='socks5://localhost:1086')
r = await hc.request('https://www.baidu.com')
print(r)
r = await hc.request('https://www.google.com')
print(r) print(r)
await hc.close() await hc.close()
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
hc = HttpClient() loop.run_until_complete(main())
loop.run_until_complete(gbaidu(hc))