import os
import re
import json  # needed for json.dumps() below; missing in the original imports
import asyncio

from yarl import URL
from functools import partial

from aiohttp_auth import auth
from aiohttp.web_urldispatcher import StaticResource, _WebHandler, PathLike
from aiohttp.web_urldispatcher import Optional, _ExpectHandler
from aiohttp.web_urldispatcher import Path
from aiohttp.web_response import Response, StreamResponse
from aiohttp.web_exceptions import (
    HTTPException,
    HTTPExpectationFailed,
    HTTPForbidden,
    HTTPMethodNotAllowed,
    HTTPNotFound,
)
from aiohttp.web_fileresponse import FileResponse
from aiohttp.web_request import Request
from aiohttp.web_routedef import AbstractRouteDef

from appPublic.jsonConfig import getConfig
from appPublic.MiniI18N import getI18N
from appPublic.dictObject import DictObject, multiDict2Dict
from appPublic.timecost import TimeCost
from appPublic.timeUtils import timestampstr
from appPublic.http_client import Http_Client

from .baseProcessor import getProcessor
from .xlsxdsProcessor import XLSXDataSourceProcessor
from .sqldsProcessor import SQLDataSourceProcessor
from .functionProcessor import FunctionProcessor
from .serverenv import ServerEnv
from .url2file import Url2File
from .filestorage import FileStorage
from .restful import DBCrud
from .dbadmin import DBAdmin
from .filedownload import file_download, path_decode

def getHeaderLang(request):
    # Pick the client's preferred language from the Accept-Language header,
    # falling back to English when the header is absent.
    al = request.headers.get('Accept-Language')
    if al is None:
        return 'en'
    return al.split(',')[0]


def i18nDICT(request):
    # Return the full translation dictionary for the client's language,
    # JSON-encoded with the website's configured coding.
    c = getConfig()
    i18n = getI18N()
    lang = getHeaderLang(request)
    l = c.langMapping.get(lang, lang)
    return json.dumps(i18n.getLangDict(l)).encode(c.website.coding)
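
# Illustration (hypothetical header value): 'zh-CN,zh;q=0.9,en;q=0.8' yields
# 'zh-CN' from getHeaderLang(); config.langMapping may then map it to another
# dictionary key (for example 'zh-cn') before the language dictionary lookup.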


class ProcessorResource(StaticResource, Url2File):
    # A StaticResource that can also route matched urls to dynamic processors.
    def __init__(self, prefix: str, directory: PathLike,
                 *, name: Optional[str] = None,
                 expect_handler: Optional[_ExpectHandler] = None,
                 chunk_size: int = 256 * 1024,
                 show_index: bool = False, follow_symlinks: bool = False,
                 append_version: bool = False,
                 indexes: list = [],
                 processors: dict = {}) -> None:
        StaticResource.__init__(self, prefix, directory,
                                name=name,
                                expect_handler=expect_handler,
                                chunk_size=chunk_size,
                                show_index=show_index,
                                follow_symlinks=follow_symlinks,
                                append_version=append_version)
        Url2File.__init__(self, directory, prefix, indexes, inherit=True)
        # StaticResource only registers GET/HEAD routes; reuse the GET route
        # for the other verbs so dynamic processors can answer them too.
        gr = self._routes.get('GET')
        self._routes.update({'POST': gr})
        self._routes.update({'PUT': gr})
        self._routes.update({'OPTIONS': gr})
        self._routes.update({'DELETE': gr})
        self._routes.update({'TRACE': gr})
        self.y_processors = processors
        self.y_prefix = prefix
        self.y_directory = directory
        self.y_indexes = indexes
        self.y_env = DictObject()
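
    # Usage sketch (illustrative only; the suffix mapping and paths below are
    # assumptions, not defined by this module). The resource is registered on
    # an aiohttp router much like a plain static route:
    #
    #   res = ProcessorResource('/', './wwwroot',
    #                           indexes=['index.html'],
    #                           processors=[('.dspy', 'dspy')])
    #   app.router.register_resource(res)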

    def setProcessors(self, processors):
        self.y_processors = processors

    def setIndexes(self, indexes):
        self.y_indexes = indexes

    def abspath(self, path: str):
        # Map a request path under this resource's prefix to an absolute
        # filesystem path below the configured directory.
        path = path[len(self.y_prefix):]
        if len(path) > 0 and path[0] == '/':
            path = path[1:]
        rp = os.path.join(self.y_directory, path)
        real_path = os.path.abspath(rp)
        return real_path
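
    # Example (hypothetical values): with prefix '/' and directory './wwwroot',
    # a request path of '/docs/a.html' resolves to the absolute form of
    # './wwwroot/docs/a.html'.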

    async def getPostData(self, request: Request) -> dict:
        # Collect POST data into a plain dict: multipart file fields are saved
        # through FileStorage and replaced by the value it returns, and
        # repeated field names are folded into a list.
        reader = await request.multipart()
        if reader is None:
            md = await request.post()
            ns = multiDict2Dict(md)
            return ns
        ns = {}
        while 1:
            field = await reader.next()
            if not field:
                break
            value = ''
            if hasattr(field, 'filename'):
                saver = FileStorage()
                value = await saver.save(field.filename, field.read_chunk)
            else:
                value = await field.read(decode=True)
            ov = ns.get(field.name)
            if ov:
                if type(ov) == type([]):
                    ov.append(value)
                else:
                    ov = [ov, value]
            else:
                ov = value
            ns.update({field.name: ov})
        return ns
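
    # Shape sketch (hypothetical field names): a form posting name=a, name=b
    # plus a file field 'doc' comes back roughly as
    # {'name': ['a', 'b'], 'doc': <value returned by FileStorage.save()>}.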

    async def _handle(self, request: Request) -> StreamResponse:
        # Wrap the real handler with a simple per-request time-cost log.
        name = str(request.url)
        t = TimeCost(name)
        with t:
            x = await self._handle1(request)
        print(timestampstr(), ':', name, ':', 'time cost=', t.end_time - t.begin_time)
        return x

    async def _handle1(self, request: Request) -> StreamResponse:
        # Regular expressions matched against User-Agent, mapped to the
        # terminal type name exposed to processors.
        clientkeys = {
            "iPhone": "iphone",
            "iPad": "ipad",
            "Android": "androidpad",
            "Windows Phone": "winphone",
            "Windows NT[.]*Win64; x64": "pc",
        }

        def i18nDICT():
            # Like the module-level i18nDICT(), but the I18N object is cached
            # on the shared ServerEnv and the request is taken from the closure.
            c = getConfig()
            g = ServerEnv()
            if not g.get('myi18n', False):
                g.myi18n = getI18N()
            lang = getHeaderLang(request)
            l = c.langMapping.get(lang, lang)
            return json.dumps(g.myi18n.getLangDict(l))

        def getClientType(request):
            agent = request.headers.get('user-agent')
            if type(agent) != type('') and type(agent) != type(b''):
                return 'pc'
            for k in clientkeys.keys():
                m = re.findall(k, agent)
                if len(m) > 0:
                    return clientkeys[k]
            return 'pc'

        def serveri18n(s):
            # Translate a single string into the requesting client's language.
            lang = getHeaderLang(request)
            c = getConfig()
            g = ServerEnv()
            if not g.get('myi18n', False):
                g.myi18n = getI18N()
            l = c.langMapping.get(lang, lang)
            return g.myi18n(s, l)

        def gethost():
            # scheme://host[:port] part of the current request url.
            return '/'.join(str(request.url).split('/')[:3])

        async def getArgs():
            # Normalize request arguments: POST bodies go through getPostData(),
            # GET query strings are flattened to a plain dict.
            ns = DictObject()
            if request.method == 'POST':
                return await self.getPostData(request)
            ns = multiDict2Dict(request.query)
            return ns

        # Expose the helpers to the processor execution environment.
        self.y_env.i18n = serveri18n
        self.y_env.i18nDict = i18nDICT
        self.y_env.terminalType = getClientType(request)
        self.y_env.absurl = self.absUrl
        self.y_env.entire_url = partial(self.entireUrl, request)
        self.y_env.abspath = self.abspath
        self.y_env.request2ns = getArgs
        self.y_env.resource = self
        self.y_env.gethost = gethost
        self.y_env.path_call = partial(self.path_call, request)
        self.user = await auth.get_auth(request)

        path = request.path
        config = getConfig()
        # Built-in database admin endpoint: /<dbadm-prefix>/<dbname>/<table>/<action>
        if config.website.dbadm and path.startswith(config.website.dbadm):
            pp = path.split('/')[2:]
            if len(pp) < 3:
                raise HTTPNotFound
            dbname = pp[0]
            tablename = pp[1]
            action = pp[2]
            adm = DBAdmin(request, dbname, tablename, action)
            return await adm.render()

        # Built-in RESTful CRUD endpoint: /<dbrest-prefix>/<dbname>/<table>[/<id>]
        if config.website.dbrest and path.startswith(config.website.dbrest):
            pp = path.split('/')[2:]
            if len(pp) < 2:
                raise HTTPNotFound
            dbname = pp[0]
            tablename = pp[1]
            id = None
            if len(pp) > 2:
                id = pp[2]
            crud = DBCrud(request, dbname, tablename, id=id)
            return await crud.dispatch()

        # File download endpoint: the rest of the path is a path_decode()-encoded
        # file location.
        if config.website.download and path.startswith(config.website.download):
            pp = path.split('/')[2:]
            if len(pp) < 1:
                raise HTTPNotFound
            dp = '/'.join(pp)
            path = path_decode(dp)
            return await file_download(request, path)

        # Dynamic content: let a matching processor render the response.
        processor = self.url2processor(request, str(request.url))
        if processor:
            return await processor.handle(request)

        # Fall back to static file serving.
        print(f'path={path} handled by StaticResource..')
        if self.isFolder(path):
            config = getConfig()
            if not config.website.allowListFolder:
                raise HTTPNotFound
        return await super()._handle(request)
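
    # Request handling order in _handle1(): dbadm admin pages, dbrest CRUD,
    # file download, then suffix/prefix-matched processors, and finally plain
    # static file serving.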

    def url2processor(self, request, url):
        # Decide how a url should be rendered: a config.website.startswiths
        # prefix maps to a FunctionProcessor, otherwise the path suffix is
        # matched against the registered (suffix, processor-name) pairs.
        config = getConfig()
        url = self.entireUrl(request, url)
        host = '/'.join(str(request.url).split('/')[:3])
        path = url[len(host):].split('?')[0]
        if config.website.startswiths:
            for a in config.website.startswiths:
                if path.startswith(a.leading):
                    processor = FunctionProcessor(self.abspath(path), self, a)
                    return processor

        for word, handlername in self.y_processors:
            if path.endswith(word):
                Klass = getProcessor(handlername)
                processor = Klass(self.abspath(path), self)
                return processor
        return None
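
    # Dispatch sketch (hypothetical config): with a startswiths entry whose
    # 'leading' is '/api', any path under '/api' goes to a FunctionProcessor;
    # with a processors pair such as ('.dspy', 'dspy'), a request for
    # '/page.dspy' would be rendered by the class getProcessor('dspy') returns.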

    def entireUrl(self, request, url):
        # Expand a possibly relative url into an absolute one, using the
        # current request's scheme://host as the base.
        if url.startswith('http://') or url.startswith('https://'):
            return url
        h = '/'.join(str(request.url).split('/')[:3])
        if url.startswith('/'):
            return '%s%s' % (h, url)
        path = request.path
        p = self.relatedurl(path, url)
        return '%s%s' % (h, p)
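
    # Resolution sketch (hypothetical request on https://example.com/a/b.html):
    #   entireUrl(request, 'https://x/y')    -> returned unchanged
    #   entireUrl(request, '/img/logo.png')  -> 'https://example.com/img/logo.png'
    #   a bare relative url is resolved against request.path via relatedurl().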

    async def path_call(self, request, path, params={}):
        # Render a path through its processor directly, without another HTTP round trip.
        processor = self.url2processor(request, path)
        real_path = self.url2file(path)
        print('processorResource.py:real_path=', real_path)
        return await processor.path_call(request, real_path)

    def url_call(self, request, url, params={}):
        # Render a url locally when a processor matches it; otherwise fetch it
        # over HTTP with Http_Client.
        processor = self.url2processor(request, url)
        if processor:
            # self.y_env.update(params)
            loop = asyncio.get_event_loop()
            loop.run_until_complete(processor.execute(request))
            return processor.content
        long_url = self.entireUrl(request, url)
        hc = Http_Client()
        # assumption: reuse the incoming request's method; the original code
        # referenced an undefined 'method' name here
        method = request.method
        print('url_call() called, long_url=', long_url)
        x = hc(long_url, method=method, params=params)
        print('url_call() call finished')
        return x

    def absUrl(self, request, url):
        # Resolve a relative url against the current request path; absolute
        # urls are returned unchanged.
        http = 'http://'
        https = 'https://'
        if url.startswith('https://') or url.startswith('http://'):
            return url
        path = request.path
        return self.relatedurl(path, url)