bugfix: route diagnostics through AppLogger (appPublic.app_logger) instead of print(), bump version to 0.3.0, and remove the doodah scripts and build specs
This commit is contained in:
parent 963602feb8
commit 14da6d0f98
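The pattern applied throughout the hunks below, shown as a minimal hedged sketch: each class gains AppLogger (from appPublic.app_logger) as a base and its print() diagnostics become self.debug()/self.info()/self.error() calls. The AhExample class and its handle() method are hypothetical illustrations, not code from this repository; only the AppLogger import, the super().__init__() call, and the debug/info/error method names are taken from the diff itself.

# Hypothetical illustration of the change pattern in this commit.
# Assumes AppLogger (appPublic.app_logger) provides debug()/info()/error(),
# as the replaced print() calls in the hunks below suggest.
from appPublic.app_logger import AppLogger

# before: plain class, diagnostics printed to stdout
# class AhExample:
#     def handle(self, path):
#         print('handling', path)

# after: inherit AppLogger and route diagnostics through its log methods
class AhExample(AppLogger):
    def __init__(self):
        super().__init__()

    def handle(self, path):
        self.debug('handling %s' % path)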
@@ -8,7 +8,10 @@ from aiohttp_session.cookie_storage import EncryptedCookieStorage

 from appPublic.jsonConfig import getConfig
 from appPublic.rsa import RSA
-class AuthAPI:
+from appPublic.app_logger import AppLogger
+class AuthAPI(AppLogger):
+    def __init__(self):
+        super().__init__()

     def getPrivateKey(self):
         if not hasattr(self,'rsaEngine'):
@@ -45,14 +48,14 @@ class AuthAPI:
         """
         authinfo = request.headers.get('authorization')
         if authinfo is None:
-            print('header not include "authorization" info', request.headers)
+            self.debug('header not include "authorization" info %s' % request.headers)
             raise web.HTTPUnauthorized()

         authdata = self.rsaDecode(authinfo)
         # print('authdata=',authdata)
         alist = authdata.split('::')
         if len(alist) != 3:
-            print('auth data format error')
+            self.debug('auth data format error %s' % authdata)
             raise web.HTTPUnauthorized()

         login_method=alist[0]
@@ -8,6 +8,7 @@ from aiohttp.web_response import Response, StreamResponse
 from appPublic.jsonConfig import getConfig
 from appPublic.dictObject import DictObject
 from appPublic.folderUtils import listFile
+from appPublic.app_logger import AppLogger

 from .utils import unicode_escape
 from .serverenv import ServerEnv
@@ -38,7 +39,10 @@ class ObjectCache:



-class BaseProcessor:
+class BaseProcessor(AppLogger):
+    def __init__(self):
+        super().__init__()
+
     @classmethod
     def isMe(self,name):
         return name=='base'
@@ -91,7 +95,7 @@ class BaseProcessor:
         return Response(text=self.content,headers=self.headers)

     async def datahandle(self,request):
-        print('*******Error*************')
+        self.debug('*******Error*************')
         self.content=''

     def setheaders(self):
@@ -8,6 +8,7 @@ from appPublic.folderUtils import ProgramPath
 from appPublic.background import Background
 from appPublic.jsonConfig import getConfig
 from appPublic.i18n import getI18N
+from appPublic.app_logger import AppLogger

 from sqlor.dbpools import DBPools

@@ -20,8 +21,9 @@ try:
 except:
     pmp = None

-class ConfiguredServer:
+class ConfiguredServer(AppLogger):
     def __init__(self, auth_klass=AuthAPI, workdir=None):
+        super().__init__()
         pp = ProgramPath()
         if workdir is None:
             self.natpmp_loop = True
@@ -56,14 +58,14 @@ class ConfiguredServer:
         t = config.natpmp.portmap_period or 3600
         while self.natpmp_loop:
             gateway = pmp.get_gateway_addr()
-            print('gateway=', gateway)
+            self.debug('gateway=%s' % gateway)
             try:
                 x = pmp.map_port(pmp.NATPMP_PROTOCOL_TCP,
                     config.natpmp.public_port, config.website.port,
                     t, gateway_ip=gateway)
-                print('gateway=', gateway, 'map_port()=', x)
+                self.debug('gateway=%s map_port=%s' %( gateway, x))
             except Exception as e:
-                print('mat_pmp():Exception:',e)
+                self.debug('mat_pmp():Exception:%s' % e)
             time.sleep(t - 1)


@@ -20,6 +20,7 @@ from sqlor.crud import CRUD

 from appPublic.dictObject import multiDict2Dict
 from appPublic.jsonConfig import getConfig
+from appPublic.app_logger import AppLogger

 from .error import Error,Success
 actions = [
@@ -29,19 +30,19 @@ actions = [
     "filter"
 ]

-class DBAdmin:
+class DBAdmin(AppLogger):
     def __init__(self, request,dbname,tablename, action):
         self.dbname = dbname
         self.tablename = tablename
         self.request = request
         self.action = action
         if action not in actions:
-            print('action not defined',action)
+            self.debug('action not defined:%s' % action)
             raise HTTPNotFound
         try:
             self.crud = CRUD(dbname,tablename)
         except Exception as e:
-            print('e=',e)
+            self.info('e= %s' % e)
             traceback.print_exc()
             raise HTTPNotFound

@@ -50,7 +51,7 @@ class DBAdmin:
             d = await self.crud.I()
             return json_response(Success(d))
         except Exception as e:
-            print(e)
+            self.debug('except=%s' % e)
             traceback.print_exc()
             return json_response(Error(errno='metaerror',msg='get metadata error'))

@@ -60,7 +60,7 @@ class FileOutZone(Exception):
 def openfile(url,m):
     fp = abspath(url)
     if fp is None:
-        print('openfile(',url,m,'),url is not match a file')
+        print(f'openfile({url},{m}),url is not match a file')
         raise Exception('url can not mathc a file')
     config = getConfig()
     paths = [ os.path.abspath(p) for p in config.website.paths ]
@@ -31,6 +31,7 @@ from appPublic.i18n import getI18N
 from appPublic.dictObject import DictObject, multiDict2Dict
 from appPublic.timecost import TimeCost
 from appPublic.timeUtils import timestampstr
+from appPublic.app_logger import AppLogger

 from .baseProcessor import getProcessor
 from .xlsxdsProcessor import XLSXDataSourceProcessor
@@ -59,7 +60,7 @@ def i18nDICT(request):
     return json.dumps(i18n.getLangDict(l)).encode(c.website.coding)


-class ProcessorResource(StaticResource,Url2File):
+class ProcessorResource(AppLogger, StaticResource,Url2File):
     def __init__(self, prefix: str, directory: PathLike,
                 *, name: Optional[str]=None,
                 expect_handler: Optional[_ExpectHandler]=None,
@@ -68,6 +69,7 @@ class ProcessorResource(StaticResource,Url2File):
                 append_version: bool=False,
                 indexes:list=[],
                 processors:dict={}) -> None:
+        AppLogger.__init__(self)
         StaticResource.__init__(self,prefix, directory,
                 name=name,
                 expect_handler=expect_handler,
@@ -130,8 +132,11 @@ class ProcessorResource(StaticResource,Url2File):
         name = str(request.url)
         t = TimeCost(name)
         with t:
+            try:
                 x = await self._handle1(request)
-                print(timestampstr(),':',name,':', 'time cost=', t.end_time - t.begin_time)
+            except:
+                return None
+        self.info(f'{name}:time cost={t.end_time - t.begin_time}')
         return x

     async def _handle1(self,request:Request) -> StreamResponse:
@@ -196,7 +201,7 @@ class ProcessorResource(StaticResource,Url2File):
         if config.website.dbadm and path.startswith(config.website.dbadm):
             pp = path.split('/')[2:]
             if len(pp)<3:
-                print(str(request.url), 'not found')
+                self.error('%s:not found' % str(request.url))
                 raise HTTPNotFound
             dbname = pp[0]
             tablename = pp[1]
@@ -206,7 +211,7 @@ class ProcessorResource(StaticResource,Url2File):
         if config.website.dbrest and path.startswith(config.website.dbrest):
             pp = path.split('/')[2:]
             if len(pp)<2:
-                print(str(request.url), 'not found')
+                self.error('%s:not found' % str(request.url))
                 raise HTTPNotFound
             dbname = pp[0]
             tablename = pp[1]
@@ -218,7 +223,7 @@ class ProcessorResource(StaticResource,Url2File):
         if config.website.download and path.startswith(config.website.download):
             pp = path.split('/')[2:]
             if len(pp)<1:
-                print(str(request.url), 'not found')
+                self.error('%s:not found' % str(request.url))
                 raise HTTPNotFound
             dp = '/'.join(pp)
             path = path_decode(dp)
@@ -235,7 +240,7 @@ class ProcessorResource(StaticResource,Url2File):
         if self.request_filename and os.path.isdir(self.request_filename):
             config = getConfig()
             if not config.website.allowListFolder:
-                print(str(request.url), 'not found')
+                self.error('%s:not found' % str(request.url))
                 raise HTTPNotFound
         return await super()._handle(request)

@@ -287,7 +292,7 @@ class ProcessorResource(StaticResource,Url2File):
             return processor

         if self.request_filename is None:
-            print(url, 'not found')
+            self.error('%s:not found' % str(request.url))
             raise HTTPNotFound

         for word, handlername in self.y_processors:
@@ -17,7 +17,7 @@ class ProxyProcessor(BaseProcessor):
         te = self.run_ns['tmpl_engine']
         txt = await te.render(url,**ns)
         data = json.loads(txt)
-        print('proxyProcessor: data=', data)
+        self.debug('proxyProcessor: data=%s' % data)
         return data

     async def datahandle(self,request):
@@ -44,7 +44,7 @@ class ProxyProcessor(BaseProcessor):
         await self.retResponse.prepare(request)
         async for chunk in res.content.iter_chunked(chunk_size):
             await self.retResponse.write(chunk)
-        print('proxy: datahandle() finish', res)
+        self.debug('proxy: datahandle() finish')


     def setheaders(self):
@@ -1,6 +1,8 @@
-# fixed bug in restful.py delete import _WebHandler from aiohttp. web_urldispatcher
-__version__ = '0.2.13'
-# fixed bug in processorResource.py, delete import _WebHandler from aiohttp.web_urldispatcher
-# __version__ = '0.2.12'
 # apply a initial version for sqlor
 #__version__ = '0.2.11'
+# fixed bug in processorResource.py, delete import _WebHandler from aiohttp.web_urldispatcher
+# __version__ = '0.2.12'
+# fixed bug in restful.py delete import _WebHandler from aiohttp. web_urldispatcher
+# __version__ = '0.2.13'
+# use AppLogger in appPublic.app_logger to output log
+__version__ = '0.3.0'
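One detail worth noting from the ProcessorResource hunks above, shown as a hedged sketch: when AppLogger is mixed in next to bases whose __init__ takes arguments, the commit calls each base initializer explicitly instead of relying on a single super() chain. The Base and Mixed names below are hypothetical; only AppLogger and the explicit-init style come from the diff.

# Hypothetical sketch of the explicit-init style used for ProcessorResource.
# AppLogger is initialized first, then the other base with its own arguments.
from appPublic.app_logger import AppLogger

class Base:
    def __init__(self, prefix):
        self.prefix = prefix

class Mixed(AppLogger, Base):
    def __init__(self, prefix):
        AppLogger.__init__(self)       # logger setup, no arguments
        Base.__init__(self, prefix)    # base that needs its own arguments

    def show(self):
        self.info('prefix=%s' % self.prefix)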
doodahs/dir.spec (deleted, 100 lines)
@@ -1,100 +0,0 @@
# -*- mode: python ; coding: utf-8 -*-


block_cipher = None


a = Analysis(['doodah_s.py'],
    pathex=['/Volumes/home/ymq/pydev/github/ahserver/doodah'],
    binaries=[],
    datas=[],
    hiddenimports=[],
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes= [
        "AppKit",
        "FLAC",
        "Ogg",
        "Opus",
        "OpusFile",
        "SDL2",
        "SDL2_image",
        "SDL2_mixer",
        "SDL2_ttf",
        "caio",
        "cchardet",
        "cv2",
        "certifi",
        "ffpyplayer",
        "importlib_metadata-4.11.2.dist-info",
        "kivy",
        "kiwisolver",
        "libSDL2-2.0.0.dylib",
        "libSDL2_mixer-2.0.0.dylib",
        "libass.9.dylib",
        "libavcodec.58.54.100.dylib",
        "libavcodec.58.91.100.dylib",
        "libavdevice.58.10.100.dylib",
        "libavfilter.7.85.100.dylib",
        "libavformat.58.29.100.dylib",
        "libavformat.58.45.100.dylib",
        "libavresample.4.0.0.dylib",
        "libavutil.56.31.100.dylib",
        "libavutil.56.51.100.dylib",
        "libbluray.2.dylib",
        "libfreetype.6.dylib",
        "libfribidi.0.dylib",
        "libmp3lame.0.dylib",
        "libmpdec.3.dylib",
        "libogg.0.dylib",
        "libopenblas.0.dylib",
        "libopus.0.dylib",
        "libtiff.5.dylib",
        "libvorbis.0.dylib",
        "libvorbisenc.2.dylib",
        "libvorbisfile.3.dylib",
        "libwebp.7.dylib",
        "libwebpdemux.2.dylib",
        "libwebpmux.3.dylib",
        "libx264.160.dylib",
        "libx265.192.dylib",
        "matplotlib",
        "mpg123",
        "multidict",
        "numba",
        "numpy",
        "objc",
        "pandas",
        "psutil",
        "scipy"
    ],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
    cipher=block_cipher)

exe = EXE(pyz,
    a.scripts,
    [],
    exclude_binaries=True,
    name='doodah_s',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    console=True,
    disable_windowed_traceback=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None )
coll = COLLECT(exe,
    a.binaries,
    a.zipfiles,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name='doodah_s')
@@ -1,43 +0,0 @@
import os
import sys
from ahserver.configuredServer import ConfiguredServer

from appPublic.registerfunction import RegisterFunction
from appPublic.objectAction import ObjectAction
from ahserver.filedownload import path_encode
from imgThumb import thumb
from idFile import idFileDownload
from myauth import MyAuthAPI
from rf import getPublicKey, getI18nMapping
from loadplugins import load_plugins

def encodeFilepath(id,event,d):
    if d is None:
        return d

    if type(d) == type([]):
        return ArrayEncodeFilepath(d)

    d['rows'] = ArrayEncodeFilepath(d['rows'])
    return d

def ArrayEncodeFilepath(d):
    ret = []
    for r in d:
        r['name'] = path_encode(r['name'])
        ret.append(r)
    return ret

rf = RegisterFunction()
rf.register('makeThumb',thumb)
rf.register('idFileDownload',idFileDownload)
rf.register('getPublicKey', getPublicKey)
rf.register('getI18nMapping', getI18nMapping)

p = os.getcwd()
if len(sys.argv) > 1:
    p = sys.argv[1]
print('p=', p)
server = ConfiguredServer(auth_klass=MyAuthAPI,workdir=p)
load_plugins(p)
server.run()
@@ -1,89 +0,0 @@
# -*- mode: python ; coding: utf-8 -*-

block_cipher = None


a = Analysis(['doodah_s.py'],
    pathex=['/home/ymq/pydev/github/ahserver/doodah'],
    binaries=[],
    datas=[],
    hiddenimports=[
        "aiomysql",
        "aiopg"
    ],
    hookspath=[],
    runtime_hooks=[],
    excludes=[
        "AppKit",
        "FLAC",
        "Ogg",
        "Opus",
        "OpusFile",
        "SDL2",
        "SDL2_image",
        "SDL2_mixer",
        "SDL2_ttf",
        "cv2",
        "ffpyplayer",
        "importlib_metadata-4.11.2.dist-info",
        "kivy",
        "kiwisolver",
        "libSDL2-2.0.0.dylib",
        "libSDL2_mixer-2.0.0.dylib",
        "libass.9.dylib",
        "libavcodec.58.54.100.dylib",
        "libavcodec.58.91.100.dylib",
        "libavdevice.58.10.100.dylib",
        "libavfilter.7.85.100.dylib",
        "libavformat.58.29.100.dylib",
        "libavformat.58.45.100.dylib",
        "libavresample.4.0.0.dylib",
        "libavutil.56.31.100.dylib",
        "libavutil.56.51.100.dylib",
        "libbluray.2.dylib",
        "libfreetype.6.dylib",
        "libfribidi.0.dylib",
        "libmp3lame.0.dylib",
        "libmpdec.3.dylib",
        "libogg.0.dylib",
        "libopenblas.0.dylib",
        "libopus.0.dylib",
        "libtiff.5.dylib",
        "libvorbis.0.dylib",
        "libvorbisenc.2.dylib",
        "libvorbisfile.3.dylib",
        "libwebp.7.dylib",
        "libwebpdemux.2.dylib",
        "libwebpmux.3.dylib",
        "libx264.160.dylib",
        "libx265.192.dylib",
        "matplotlib",
        "mpg123",
        "numba",
        "numpy",
        "objc",
        "pandas",
        "psutil",
        "scipy"

    ],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
    cipher=block_cipher)
exe = EXE(pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    [],
    name='doodahs',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=True )
@@ -1,12 +0,0 @@

from sqlor.dbpools import runSQL

async def getFilenameFromId(idstr:str) -> str:
    sql = "select * from kvobjects where id='%s'" % idstr
    recs = await runSQL('homedata',sql)
    if recs is None:
        return None
    if len(recs) == 0:
        return None
    return recs[0].name

@@ -1,21 +0,0 @@
import os
from PIL import Image, ExifTags
from io import BytesIO
from aiohttp.web_exceptions import (
    HTTPException,
    HTTPExpectationFailed,
    HTTPForbidden,
    HTTPMethodNotAllowed,
    HTTPNotFound,
)
from aiohttp.web_response import Response, StreamResponse

from appPublic.registerfunction import RegisterFunction
from ahserver.filedownload import file_download
from id2file import getFilenameFromId

async def idFileDownload(*args, **kw):
    id = args[0]
    fname = await getFilenameFromId(id)
    request = kw.get('request')
    return await file_download(request,fname)
@@ -1,82 +0,0 @@
import os
from PIL import Image, ExifTags
from io import BytesIO
from aiohttp.web_exceptions import (
    HTTPException,
    HTTPExpectationFailed,
    HTTPForbidden,
    HTTPMethodNotAllowed,
    HTTPNotFound,
)
from aiohttp.web_response import Response, StreamResponse
from id2file import getFilenameFromId


def imageUp(img):
    try:
        o = 'Orientation'
        exif=dict(img._getexif().items())
        if exif[o] == 3:
            img = img.rotate(180, expand=True)
        elif exif[o] == 6:
            img = img.rotate(270, expand=True)
        elif exif[o] == 8:
            img = img.rotate(90, expand=True)
        return img
    except (AttributeError, KeyError, IndexError):
        # cases: image don't have getexif
        return img

def imageThumb(imgfilepath,width=None,height=None):
    im = Image.open(imgfilepath)
    im = imageUp(im)
    mode = im.mode
    if mode not in ('L', 'RGB'):
        if mode == 'RGBA':
            alpha = im.split()[3]
            bgmask = alpha.point(lambda x: 255-x)
            im = im.convert('RGB')
            # paste(color, box, mask)
            im.paste((255,255,255), None, bgmask)
        else:
            im = im.convert('RGB')

    w, h = im.size
    if not width and not height:
        width = 256
    if width:
        width = int(width)
        height = int(float(width) * float(h) / float(w))
    else:
        height = int(height)
        width = int(float(height) * float(w) / float(h))
    thumb = im.resize((width,height),Image.ANTIALIAS)
    f = BytesIO()
    thumb.save(f,format='jpeg',quality=60)
    im.close()
    v = f.getvalue()
    return v

async def thumb(*args, **kw):
    id = args[0]
    request = kw.get('request')
    xpath = request.path[len(options.leading):]
    if xpath == '':
        raise HTTPNotFound
    id = xpath[1:]
    imgpath = await getFilenameFromId(id)
    v = imageThumb(imgpath,width=options.width,height=options.height)
    response = Response(
        status=200,
        headers = {
            'Content-Disposition': 'attrachment;filename={}'.format(os.path.basename(imgpath)),
            'Content-Length':str(len(v))
        }
    )
    await response.prepare(request)
    await response.write(v)
    await response.write_eof()
    return response

if __name__ == '__main__':
    imageThumb("/home/ymq/media/pictures/2019-08/IMG_20190804_113014.jpg", width=256)
@@ -1,17 +0,0 @@
import os
import sys

from appPublic.folderUtils import listFile
from appPublic.ExecFile import ExecFile

def load_plugins(p_dir):
    ef = ExecFile()
    pdir = os.path.join(p_dir, 'plugins')
    if not os.path.isdir(pdir):
        print('load_plugins:%s not exists' % pdir)
        return
    sys.path.append(pdir)
    for py in listFile(pdir, suffixs=['.py'], rescursive=True):
        ef.set('sys',sys)
        ef.run(py)

@@ -1,45 +0,0 @@
from ahserver.auth_api import AuthAPI
from appPublic.jsonConfig import getConfig
from appPublic.registerfunction import getRegisterFunctionByName

class MyAuthAPI(AuthAPI):
    async def needAuth(self,path):
        config = getConfig()
        if not config.website.authpaths:
            return False
        for p in config.website.authpaths:
            if path.startswith(p):
                if not config.website.whitelist:
                    return True
                for p1 in config.website.whitelist:
                    if path.startswith(p1):
                        return False
                return True
        return False

    async def checkUserPassword(self,user_id,password):
        config = getConfig()
        if config.users:
            for userid, passwd in config.users.items():
                if user_id == userid and password == passwd:
                    print('******user passwd check OK****************')
                    return True
        rf = getRegisterFunctionByName('user_password_check')
        if rf:
            return rf(user_id, password)

        return False

    async def getPermissionNeed(self,path):
        rf = getRegisterFunctionByName('get_need_permission')
        if rf:
            return rf(path)

        return 'ok'

    async def getUserPermissions(self,user):
        rf = getRegisterFunctionByName('get_user_permissions')
        if rf:
            return rf(user)

        return ['ok']
@@ -1,16 +0,0 @@
from appPublic.jsonConfig import getConfig
from appPublic.i18n import getI18N
from ahserver.filedownload import file_download

async def getPublicKey(*args, **kw):
    config = getConfig()
    request = options.request
    pf = config.website.rsakey.publickey
    return await file_download(request,pf)

async def getI18nMapping(*args, **kw):
    lang = args[0]
    i18n = getI18N()
    mapping = i18n.getLangDict(lang)
    return mapping
