bugfix: route proxied requests through a SOCKS5 ProxyConnector and add blocked domains to the cache before saving
commit ec090a205e
parent e5d72b67bf
@@ -1,3 +1,4 @@
+import os
 import asyncio
 import aiohttp
 from aiohttp import FormData
@@ -9,13 +10,13 @@ from urllib.parse import urlparse
 from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector

 def get_domain(url):
     # Prepend 'http://' if the URL lacks a scheme
     if not url.startswith(('http://', 'https://')):
         url = 'http://' + url
     parsed_url = urlparse(url)
     netloc = parsed_url.netloc
     domain = netloc.split(':')[0]
     return domain

 RESPONSE_BIN = 0
 RESPONSE_TEXT = 1
@@ -39,34 +40,33 @@ class HttpClient:
         self.coding = coding
         self.session = None
         self.cookies = {}
-        self.socks5proxy = socks5proxy
         self.proxy_connector = None
         self.socks5_proxy_url = socks5_proxy_url
         self.blocked_domains = set()
         self.load_cache()

     def save_cache(self):
         home_dir = os.path.expanduser('~')
         cache_file = os.path.join(home_dir, '.proxytarget')
         with open(cache_file, 'w') as f:
             for d in self.blocked_domains:
                 f.write(f'{d}\n')

     def load_cache(self):
         # Initialize the cache file
         home_dir = os.path.expanduser('~')
         cache_file = os.path.join(home_dir, '.proxytarget')

         try:
             with open(cache_file, 'r') as f:
                 for line in f:
                     domain = line.strip()
                     if domain:
                         self.blocked_domains.add(domain)
         except FileNotFoundError:
             # Create an empty file
             with open(cache_file, 'w') as f:
                 pass

     async def close(self):
         if self.session:
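
Note on save_cache/load_cache above: the blocked-domain cache is a plain text file, one domain per line, in the user's home directory. A minimal round trip of that format, written against a temporary directory so the real ~/.proxytarget is untouched (the domains are illustrative, not from the commit):

import os
import tempfile

with tempfile.TemporaryDirectory() as home:
    cache_file = os.path.join(home, '.proxytarget')
    blocked = {'example.com', 'example.org'}   # illustrative domains

    # mirror save_cache(): one domain per line
    with open(cache_file, 'w') as f:
        for d in blocked:
            f.write(f'{d}\n')

    # mirror load_cache(): skip blank lines, collect into a set
    restored = set()
    with open(cache_file) as f:
        for line in f:
            if line.strip():
                restored.add(line.strip())

    assert restored == blocked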
@@ -107,88 +107,88 @@ class HttpClient:
         filtered = session.cookie_jar.filter_cookies(domain)
         return filtered

-    async def make_request(self, url, method,
+    async def make_request(self, url, method='GET',
                            response_type=RESPONSE_TEXT,
                            params=None,
                            data=None,
                            jd=None,
                            stream_func=None,
                            headers=None,
-                           use_proxy=False,
-                           **kw):
-        async with aiohttp.ClientSession() as session:
+                           use_proxy=False
+                           ):
+        connector = None
+        if use_proxy:
+            connector = ProxyConnector.from_url(self.socks5_proxy_url)
+        async with aiohttp.ClientSession(connector=connector) as session:
             if params == {}:
                 params = None
             if data == {}:
                 data = None
             if jd == {}:
                 jd = None
-            proxy = None
-            if use_proxy and self.socks5_proxy_url:
-                proxy = self.socks5_proxy_url

             if headers == {}:
                 headers = None

             resp = await session.request(method, url,
                                          params=params,
                                          data=data,
                                          json=jd,
-                                         proxy=proxy,
-                                         headers=headers, **kw)
+                                         headers=headers)
             if resp.status==200:
                 return await self.handleResp(url, resp, response_type, stream_func=stream_func)
             msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
             exception(msg)
             raise HttpError(resp.status, msg)

-    async def request(self, url, method,
+    async def request(self, url, method='GET',
                       response_type=RESPONSE_TEXT,
                       params=None,
                       data=None,
                       jd=None,
                       stream_func=None,
                       headers=None,
                       **kw
                       ):
         if self.socks5_proxy_url is None:
-            resp = await self.make_request(url, method,
+            resp = await self.make_request(url, method=method,
                                            response_type=response_type,
                                            params=params,
                                            data=data,
                                            jd=jd,
-                                           proxy=False,
+                                           use_proxy=False,
                                            stream_func=stream_func,
-                                           headers=headers,
-                                           **kw)
+                                           headers=headers
+                                           )
             return resp
         domain = get_domain(url)
         if domain not in self.blocked_domains:
             try:
-                resp = await self.make_request(url, method,
+                resp = await self.make_request(url, method=method,
                                                response_type=response_type,
                                                params=params,
                                                data=data,
                                                jd=jd,
-                                               proxy=False,
+                                               use_proxy=False,
                                                stream_func=stream_func,
-                                               headers=headers,
-                                               **kw)
+                                               headers=headers
+                                               )
                 return resp
-            except aiohttp.ClientError:
+            except:
                 if domain not in self.blocked_domains:
-                    self.save_cache()
-                resp = await self.make_request(url, method,
+                    self.blocked_domains.add(domain)
+                    self.save_cache()
+                resp = await self.make_request(url, method=method,
                                                response_type=response_type,
                                                params=params,
                                                data=data,
                                                jd=jd,
-                                               proxy=True,
+                                               use_proxy=True,
                                                stream_func=stream_func,
-                                               headers=headers,
-                                               **kw)
+                                               headers=headers
+                                               )
                 return resp

     async def get(self,url,**kw):
         return self.request(url, 'GET', **kw)

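
Note on the hunk above: make_request now builds the SOCKS5 connector itself instead of passing proxy= to session.request. That connector pattern can be exercised in isolation; a minimal sketch, assuming aiohttp and aiohttp_socks are installed (the URLs and the helper name are illustrative, not from the commit):

import asyncio
import aiohttp
from aiohttp_socks import ProxyConnector

async def fetch_via_socks5(url, proxy_url=None):
    # Same shape as the new make_request: build a connector only when a
    # proxy is wanted; connector=None means a direct connection.
    connector = ProxyConnector.from_url(proxy_url) if proxy_url else None
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return resp.status, await resp.text()

# e.g. asyncio.run(fetch_via_socks5('https://example.com', 'socks5://localhost:1086'))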
@@ -197,10 +197,10 @@ class HttpClient:
         session = self.getsession(url)

 class JsonHttpAPI:
-    def __init__(self, env={}):
+    def __init__(self, env={}, socks5_proxy_url=None):
         self.env = env
         self.te = MyTemplateEngine([], env=env)
-        self.hc = HttpClient()
+        self.hc = HttpClient(socks5_proxy_url=socks5_proxy_url)

     async def call(self, url, method='GET', ns={},
                    stream_func=None,
@@ -234,11 +234,13 @@ class JsonHttpAPI:
         return ret

 if __name__ == '__main__':
-    async def gbaidu(hc):
-        r = await hc.get('https://www.baidu.com')
+    async def main():
+        hc = HttpClient(socks5_proxy_url='socks5://localhost:1086')
+        r = await hc.request('https://www.baidu.com')
+        print(r)
+        r = await hc.request('https://www.google.com')
         print(r)
         await hc.close()
     loop = asyncio.get_event_loop()
-    hc = HttpClient()
-    loop.run_until_complete(gbaidu(hc))
+    loop.run_until_complete(main())

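
The demo still drives the coroutine with asyncio.get_event_loop().run_until_complete; on current Python the equivalent entry point is usually asyncio.run. A sketch of that variant (not part of the commit):

if __name__ == '__main__':
    # asyncio.run creates, runs and closes the event loop itself
    asyncio.run(main())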