first commit
This commit is contained in:
commit
16999f6b0b
5
ah.py
Normal file
5
ah.py
Normal file
@ -0,0 +1,5 @@
|
||||
from ahserver.configuredServer import ConfiguredServer


def main():
    """Build the configured server and serve until interrupted."""
    ConfiguredServer().run()


if __name__ == '__main__':
    main()
|
0
ahserver/__init__.py
Executable file
0
ahserver/__init__.py
Executable file
70
ahserver/auth_api.py
Normal file
70
ahserver/auth_api.py
Normal file
@ -0,0 +1,70 @@
|
||||
from aiohttp_auth import auth
|
||||
from os import urandom
|
||||
from aiohttp import web
|
||||
import aiohttp_session
|
||||
|
||||
from aiohttp_session import get_session, session_middleware
|
||||
from aiohttp_session.cookie_storage import EncryptedCookieStorage
|
||||
|
||||
class AuthAPI:
    """Session/ticket based authentication for an aiohttp application.

    Wires aiohttp_session + aiohttp_auth middleware into an app, adds
    /login and /logout routes, and enforces per-path permissions via
    its own checkAuth middleware.
    """

    def setupAuth(self, app):
        """Install session storage, auth policy, middlewares and routes on *app*."""
        # setup session middleware in aiohttp fashion
        storage = EncryptedCookieStorage(urandom(32))
        aiohttp_session.setup(app, storage)

        # Create an auth ticket mechanism that expires after 1 minute (60
        # seconds), and has a randomly generated secret. Also includes the
        # optional inclusion of the users IP address in the hash
        policy = auth.SessionTktAuthentication(urandom(32), 60,
                                               include_ip=True)

        # setup aiohttp_auth.auth middleware in aiohttp fashion
        auth.setup(app, policy)
        app.middlewares.append(self.checkAuth)
        app.router.add_route('POST', '/login', self.login)
        app.router.add_route('GET', '/logout', self.logout)

    async def login(self, request):
        """POST handler: validate credentials, remember the user.

        Redirects back to the submitted ``from_path`` on success,
        raises 401 on failure.
        """
        params = await request.post()
        user_id = params.get('user', None)
        password = params.get('password', None)
        from_path = params.get('from_path', None)
        # BUG FIX: checkUserPassword is a coroutine and must be awaited --
        # the bare call returned a coroutine object that is always truthy,
        # so any password was accepted.
        if await self.checkUserPassword(user_id, password):
            # BUG FIX: original referenced an undefined name ``user``
            await auth.remember(request, user_id)
            # BUG FIX: web.HpptFound was a typo for web.HTTPFound
            return web.HTTPFound(from_path)
        raise web.HTTPUnauthorized()

    async def logout(self, request):
        """GET handler: forget the current user's auth ticket."""
        await auth.forget(request)
        # BUG FIX: web.REsponse was a typo for web.Response
        return web.Response(body='OK'.encode('utf-8'))

    @web.middleware
    async def checkAuth(self, request, handler):
        """Middleware: allow, redirect to the login form, or forbid."""
        path = request.path
        print(f'*****{path} checkAuth called********')
        if not await self.needAuth(path):
            return await handler(request)
        user = await auth.get_auth(request)
        if user is None:
            # not authenticated: bounce to the login form, remembering
            # where the user wanted to go
            raise web.HTTPFound(f'/login_form?from_path={path}')
        # BUG FIX: original called self.getUserPermission (singular),
        # which does not exist on this class
        user_perms = await self.getUserPermissions(user)
        need_perm = await self.getPermissionNeed(path)
        if need_perm in user_perms:
            return await handler(request)
        print(f'**{path} forbidden**')
        raise web.HTTPForbidden()

    async def needAuth(self, path):
        """Return True when *path* requires an authenticated user."""
        if path in ['/', '/header.tmpl', 'footer.tmpl', '/login', '/login_form', '/index.tmpl', ]:
            return False
        # BUG FIX: the original returned False here as well, which made the
        # whitelist above dead code and disabled authentication everywhere.
        return True

    async def getPermissionNeed(self, path):
        """Permission required for *path*; placeholder always demands 'admin'."""
        return 'admin'

    async def checkUserPassword(self, user_id, password):
        """Validate credentials; placeholder accepts everything."""
        return True

    async def getUserPermissions(self, user):
        """Permissions held by *user*; placeholder grants admin+view."""
        return ['admin', 'view']
|
||||
|
191
ahserver/baseProcessor.py
Normal file
191
ahserver/baseProcessor.py
Normal file
@ -0,0 +1,191 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import codecs
|
||||
from aiohttp.web_request import Request
|
||||
from aiohttp.web_response import Response, StreamResponse
|
||||
|
||||
from jinja2 import Template,Environment,BaseLoader
|
||||
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from appPublic.dictObject import DictObject
|
||||
from appPublic.folderUtils import listFile
|
||||
|
||||
from .serverenv import ServerEnv
|
||||
|
||||
class ObjectCache:
    """Cache of per-file objects invalidated by file modification time."""

    def __init__(self):
        # path -> {'obj': cached object, 'mtime': file mtime at store time}
        self.cache = {}

    def store(self, path, obj):
        """Cache *obj* for *path*, stamped with the file's current mtime.

        Replacing the dict entry releases any previously cached object, so
        the original's explicit ``del`` guarded by a bare ``except`` (an
        anti-pattern that also swallowed unrelated errors) is unnecessary.
        """
        self.cache[path] = {'obj': obj, 'mtime': os.path.getmtime(path)}

    def get(self, path):
        """Return the cached object for *path*, or None when absent or stale.

        An entry is stale when the file on disk was modified after the
        entry was stored.
        """
        entry = self.cache.get(path)
        if entry is None:
            return None
        if os.path.getmtime(path) > entry['mtime']:
            return None
        return entry['obj']
|
||||
|
||||
|
||||
class BaseProcessor:
    """Base class for path handlers: subclasses fill self.content in datahandle()."""

    @classmethod
    def isMe(self, name):
        """Processor registry hook: True when *name* selects this class."""
        return name == 'base'

    def __init__(self, path, resource):
        # path: real filesystem path of the resource being served
        # resource: owning ProcessorResource (supplies env / url helpers)
        self.path = path
        self.resource = resource
        self.retResponse = None
        self.last_modified = os.path.getmtime(path)
        self.content_length = os.path.getsize(path)
        self.headers = {
            'Content-Type': 'text/html',
            'Content-Length': str(self.content_length),
            'Accept-Ranges': 'bytes'
        }
        self.content = ''

    async def handle(self, request):
        """Run datahandle() and wrap self.content in an aiohttp Response.

        dict/list content is serialized to JSON; a subclass may short-circuit
        by setting self.retResponse to a prebuilt response object.
        """
        config = getConfig()
        await self.datahandle(request)
        if self.retResponse is not None:
            return self.retResponse
        # idiom: isinstance replaces the two type(...) == type({}) / type([])
        # comparisons; dict and list both serialize the same way
        if isinstance(self.content, (dict, list)):
            self.content = json.dumps(self.content, indent=4)
        self.setheaders()
        return Response(text=self.content, headers=self.headers)

    async def datahandle(self, request):
        """Default handler; subclasses override to produce real content.

        BUG FIX: signature was (self, txt, request), but handle() and every
        subclass override call it as datahandle(request); the stray ``txt``
        parameter made this default implementation uncallable.
        """
        print('*******Error*************')
        self.content = ''

    def setheaders(self):
        """Refresh Content-Length to match the final rendered content."""
        self.headers['Content-Length'] = str(len(self.content))
|
||||
|
||||
class TemplateProcessor(BaseProcessor):
    """Renders *.tmpl files through the shared template engine."""

    @classmethod
    def isMe(self, name):
        """Selected by the processor registry under the name 'tmpl'."""
        return name == 'tmpl'

    async def datahandle(self, request):
        """Render the template at request.path into self.content."""
        env = ServerEnv()
        # rendering namespace: global env, then resource env, then
        # the per-request values
        namespace = DictObject()
        namespace.update(env)
        namespace.update(self.resource.y_env)
        namespace.request = request
        namespace.ref_real_path = self.path
        engine = env.tmpl_engine
        self.content = engine.render(request.path, **namespace)
        #self.content = await te.render_async(path,**ns)

    def setheaders(self):
        """Set Content-Type from the template suffix after base headers."""
        super(TemplateProcessor, self).setheaders()
        if self.path.endswith('.tmpl.css'):
            content_type = 'text/css; utf-8'
        elif self.path.endswith('.tmpl.js'):
            content_type = 'application/javascript ; utf-8'
        else:
            content_type = 'text/html; utf-8'
        self.headers['Content-Type'] = content_type
|
||||
|
||||
|
||||
class PythonScriptProcessor(BaseProcessor):
    """Executes *.dspy files: the file body becomes an async function's body."""

    @classmethod
    def isMe(self, name):
        """Selected by the processor registry under the name 'dspy'."""
        return name == 'dspy'

    def loadScript(self):
        """Read the script file and wrap it as the source of ``myfunc``.

        Every line is indented one tab so the whole file becomes the body
        of ``async def myfunc(request, **ns):``.
        """
        with codecs.open(self.path, 'rb', 'utf-8') as f:
            raw = f.read()
        # strip carriage returns, then indent each line one tab
        normalized = ''.join(raw.split('\r'))
        body = '\n'.join('\t' + line for line in normalized.split('\n'))
        return "async def myfunc(request,**ns):\n" + body

    async def datahandle(self, request):
        """Compile the script (with mtime-based caching) and run it."""
        g = ServerEnv()
        lenv = {}
        lenv.update(g)
        lenv.update(self.resource.y_env)
        if not g.get('dspy_cache', False):
            g.dspy_cache = ObjectCache()
        func = g.dspy_cache.get(self.path)
        if not func:
            # (re)compile: exec the wrapped source and pull out myfunc
            source = self.loadScript()
            exec(source, lenv, lenv)
            func = lenv['myfunc']
            print('func=', func)
            g.dspy_cache.store(self.path, func)
        self.content = await func(request, **lenv)
|
||||
|
||||
class MarkdownProcessor(BaseProcessor):
    """Serves *.md files as a JSON 'markdown' widget description."""

    @classmethod
    def isMe(self, name):
        """Selected by the processor registry under the name 'md'."""
        return name == 'md'

    async def datahandle(self, request: Request):
        """Read the markdown file, absolutize its links, emit widget JSON."""
        data = ''
        with codecs.open(self.path, 'rb', 'utf-8') as f:
            data = f.read()
        b = data
        b = self.urlreplace(b, request)
        ret = {
            "__widget__": "markdown",
            "data": {
                "md_text": b
            }
        }
        config = getConfig()
        self.content = json.dumps(ret, indent=4)

    def urlreplace(self, mdtxt, request):
        """Rewrite every markdown link target to an absolute URL.

        Splits the text on link patterns, maps each link's URL through
        resource.absUrl(), and stitches text and rewritten links back
        together in their original order.
        """
        def replaceURL(s):
            # FIX: regex patterns are now raw strings; the originals used
            # '\[' style escapes in plain str literals, which are deprecated
            # invalid escape sequences (same bytes, future-proof form)
            p1 = r'\[.*?\]\((.*?)\)'
            url = re.findall(p1, s)[0]
            txts = s.split(url)
            url = self.resource.absUrl(request, url)
            return url.join(txts)

        p = r'\[.*?\]\(.*?\)'
        textarray = re.split(p, mdtxt)
        links = re.findall(p, mdtxt)
        newlinks = [replaceURL(link) for link in links]
        if len(links) > 0:
            mdtxt = ''
            for i in range(len(newlinks)):
                mdtxt = mdtxt + textarray[i]
                mdtxt = mdtxt + newlinks[i]
                mdtxt = mdtxt + textarray[i + 1]
        return mdtxt
|
||||
|
||||
def getProcessor(name):
|
||||
return _getProcessor(BaseProcessor,name)
|
||||
|
||||
def _getProcessor(kclass,name):
|
||||
for k in kclass.__subclasses__():
|
||||
if not hasattr(k,'isMe'):
|
||||
continue
|
||||
if k.isMe(name):
|
||||
return k
|
||||
a = _getProcessor(k,name)
|
||||
if a is not None:
|
||||
return a
|
||||
return None
|
57
ahserver/configuredServer.py
Normal file
57
ahserver/configuredServer.py
Normal file
@ -0,0 +1,57 @@
|
||||
import os,sys
|
||||
import ssl
|
||||
from aiohttp import web
|
||||
|
||||
from appPublic.folderUtils import ProgramPath
|
||||
from appPublic.jsonConfig import getConfig
|
||||
|
||||
from sqlor.dbpools import DBPools
|
||||
|
||||
from .processorResource import ProcessorResource
|
||||
from .auth_api import AuthAPI
|
||||
from .myTE import setupTemplateEngine
|
||||
from .globalEnv import initEnv
|
||||
|
||||
class ConfiguredServer:
    """aiohttp web server assembled entirely from the json config file.

    Reads the config found in the work directory (argv[1] overrides the
    program path), initializes database pools, the global script
    environment, the template engine and authentication, then maps every
    configured (path, prefix) pair to a ProcessorResource.
    """
    def __init__(self):
        pp = ProgramPath()
        workdir = pp
        # first command line argument overrides the working directory
        if len(sys.argv) > 1:
            workdir = sys.argv[1]
        config = getConfig(workdir,{'workdir':workdir,'ProgramPath':pp})
        if config.databases:
            DBPools(config.databases)
        initEnv()
        setupTemplateEngine()
        self.app = web.Application()
        auth = AuthAPI()
        auth.setupAuth(self.app)
        self.configPath(config)

    def run(self):
        """Start serving; enables TLS when config.website.ssl is present."""
        config = getConfig()
        ssl_context = None
        if config.website.ssl:
            ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
            ssl_context.load_cert_chain(config.website.ssl.crtfile,
                    config.website.ssl.keyfile)
        web.run_app(self.app,host=config.website.host or '0.0.0.0',
                port=config.website.port or 8080,
                ssl_context=ssl_context)

    def configPath(self,config):
        """Register one ProcessorResource per configured (directory, prefix)."""
        for p,prefix in config.website.paths:
            res = ProcessorResource(prefix,p,show_index=True,
                    follow_symlinks=True)
            res.setProcessors(config.website.processors or {})
            res.setIndexes(config.website.indexes or [])
            self.app.router.register_resource(res)

    def addProcessors(self,config,resource):
        # NOTE(review): apparently superseded by setProcessors(); no caller
        # visible in this file
        for subfix,processorname in config.website.processors:
            resource.addProcessor(subfix,processorname)
        return resource

    def addIndexes(self,res,indexes):
        # NOTE(review): apparently superseded by setIndexes(); no caller
        # visible in this file
        res.indexes = indexes
        return res
|
67
ahserver/dsProcessor.py
Executable file
67
ahserver/dsProcessor.py
Executable file
@ -0,0 +1,67 @@
|
||||
import codecs
|
||||
import json
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from appPublic.dictObject import DictObject
|
||||
from .baseProcessor import BaseProcessor
|
||||
from .serverenv import ServerEnv
|
||||
|
||||
class DataSourceProcessor(BaseProcessor):
    """Base for *.ds data-source files: dispatches on the ?action= argument."""

    @classmethod
    def isMe(self, name):
        """Selected by the processor registry under the name 'ds'."""
        return name == 'ds'

    def __init__(self, filename, k):
        super(DataSourceProcessor, self).__init__(filename, k)
        # action name (from the query string) -> handler method
        self.actions = {
            'getdata': self.getData,
            'pagingdata': self.getPagingData,
            'arguments': self.getArgumentsDesc,
            'resultFields': self.getDataDesc,
            'gridlist': self.getGridlist,
        }
        self.g = ServerEnv()

    # The four primitives are abstract here; concrete subclasses
    # (e.g. the SQL data source) implement them.
    def getData(self, dict_data, ns, request): pass
    def getPagingData(self, dict_data, ns, request): pass
    def getArgumentsDesc(self, dict_data, ns, request): pass
    def getDataDesc(self, dict_data, ns, request): pass

    def getGridlist(self, dict_data, ns, request):
        """Build a datagrid widget description from the field metadata."""
        ret = self.getDataDesc(dict_data, ns, request)
        ffs = [f for f in ret if f.get('frozen', False)]
        # BUG FIX: the original indexed f['frozen'] here, raising KeyError
        # for any field without a 'frozen' key (the ffs branch already
        # used .get with a default)
        fs = [f for f in ret if not f.get('frozen', False)]
        # fields marked listhide are hidden in the grid
        [f.update({'hide': True}) for f in ffs if f.get('listhide', False)]
        [f.update({'hide': True}) for f in fs if f.get('listhide', False)]
        d = {
            "iconCls": "icon-search",
            "url": self.resource.absUrl(request, request.path + '?action=pagingdata'),
            "view": "bufferview",
            "options": {
                "pageSize": 50,
                "pagination": False
            }
        }
        d.update({'fields': fs})
        if len(ffs) > 0:
            d.update({'ffields': ffs})
        ret = {
            "__ctmpl__": "datagrid",
            "data": d
        }
        return ret

    async def datahandle(self, request):
        """Load the .ds JSON file and run the requested action handler."""
        dict_data = {}
        config = getConfig()
        with codecs.open(self.path, 'r', config.website.coding) as f:
            b = f.read()
            dict_data = json.loads(b)
        ns = DictObject()
        g = ServerEnv()
        ns.update(g)
        ns.update(self.resource.y_env)
        ns.update(self.resource.getGetArgs(request))
        act = ns.get('action', 'getdata')
        action = self.actions.get(act)
        self.content = action(dict_data, ns, request)
|
||||
|
||||
|
47
ahserver/filestorage.py
Executable file
47
ahserver/filestorage.py
Executable file
@ -0,0 +1,47 @@
|
||||
# fileUpload.py
|
||||
|
||||
import os
|
||||
import time
|
||||
import tempfile
|
||||
import aiofile
|
||||
|
||||
from appPublic.folderUtils import _mkdir
|
||||
from appPublic.jsonConfig import getConfig
|
||||
|
||||
class FileStorage:
    """Stores uploaded files under a configured root directory."""

    def __init__(self):
        config = getConfig()
        # fall back to the system temp dir when no filesroot is configured
        self.root = config.filesroot or tempfile.gettempdir()

    def realPath(self, path):
        """Map a storage-relative url path onto a filesystem path under root."""
        if path[0] == '/':
            path = path[1:]
        p = os.path.join(self.root, path)
        return p

    def _name2path(self, name):
        """Spread files over nested directories derived from a timestamp.

        The microsecond timestamp modulo a few primes yields five directory
        levels, keeping any single directory from growing huge.
        BUG FIX: the original method lacked ``self`` and passed int path
        components to os.path.join, which requires str.
        """
        name = os.path.basename(name)
        paths = [191, 193, 197, 199, 97]
        v = int(time.time() * 1000000)
        # b = name.encode('utf8') if not isinstance(name,bytes) else name
        # v = int.from_bytes(b,byteorder='big',signed=False)
        path = os.path.abspath(os.path.join(self.root,
                str(v % paths[0]),
                str(v % paths[1]),
                str(v % paths[2]),
                str(v % paths[3]),
                str(v % paths[4]),
                name))
        return path

    async def save(self, name, read_data):
        """Stream chunks from the *read_data* coroutine into a new file.

        Returns the stored path relative to the storage root.
        BUG FIX: the original lacked ``self``, called a nonexistent
        ``self.name2path`` and opened the target file in read mode ('rb');
        writing requires 'wb'.
        """
        p = self._name2path(name)
        _mkdir(os.path.dirname(p))
        # NOTE(review): current aiofile releases expose async_open rather
        # than aiofile.open -- confirm against the pinned aiofile version
        async with aiofile.open(p, mode='wb') as f:
            while 1:
                d = await read_data()
                if not d:
                    break
                await f.write(d)
        return p[len(self.root):]
|
173
ahserver/globalEnv.py
Executable file
173
ahserver/globalEnv.py
Executable file
@ -0,0 +1,173 @@
|
||||
# -*- coding:utf8 -*-
|
||||
import os
|
||||
import sys
|
||||
import codecs
|
||||
from urllib.parse import quote
|
||||
import json
|
||||
|
||||
import random
|
||||
import time
|
||||
import datetime
|
||||
from openpyxl import Workbook
|
||||
from tempfile import mktemp
|
||||
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from appPublic.Singleton import GlobalEnv
|
||||
from appPublic.argsConvert import ArgsConvert
|
||||
from appPublic.timeUtils import str2Date,str2Datetime,curDatetime,getCurrentTimeStamp
|
||||
from appPublic.folderUtils import folderInfo
|
||||
from appPublic.uniqueID import setNode,getID
|
||||
from appPublic.unicoding import unicoding,uDict,uObject
|
||||
from appPublic.Singleton import SingletonDecorator
|
||||
|
||||
from sql.crud import _CRUD,CRUD
|
||||
# from sql.sqlorAPI import DBPools,runSQL,runSQLPaging,runSQLIterator
|
||||
# from sql.sqlorAPI import getTables,getTableFields,getTablePrimaryKey
|
||||
# from sql.sqlorAPI import getTableForignKeys,runSQLResultFields
|
||||
from sqlor.dbpools import DBPools
|
||||
|
||||
|
||||
from .xlsxData import XLSXData
|
||||
from .uriop import URIOp
|
||||
|
||||
from .serverenv import ServerEnv
|
||||
|
||||
def data2xlsx(rows, headers=None):
    """Write *rows* to a temporary .xlsx file and return its path.

    rows: sequence of records. With *headers* given, each record is
    indexed by header name; without headers the record's own values are
    written in order.
    headers: optional field descriptors with .name and optionally .title.
    """
    wb = Workbook()
    ws = wb.active

    i = 1
    if headers is not None:
        for j in range(len(headers)):
            # prefer the display title, fall back to the column name
            v = headers[j].title if headers[j].get('title', False) else headers[j].name
            ws.cell(column=j + 1, row=i, value=v)
        i += 1
    for r in rows:
        if headers is not None:
            values = [r[h.name] for h in headers]
        else:
            # BUG FIX: the original unconditionally did r[headers[j].name],
            # which raised TypeError whenever headers was None
            values = list(r)
        for j, v in enumerate(values):
            ws.cell(column=j + 1, row=i, value=v)
        i += 1
    name = mktemp(suffix='.xlsx')
    wb.save(filename=name)
    wb.close()
    return name
|
||||
|
||||
class FileOutZone(Exception):
    """Raised when an open is attempted outside the allowed folders."""

    def __init__(self, fp, *args, **kwargs):
        super(FileOutZone, self).__init__(*args, **kwargs)
        # keep the offending path for the error message
        self.openfilename = fp

    def __str__(self):
        return f'{self.openfilename}: not allowed to open'
|
||||
|
||||
def openfile(url,m):
    """Open the file a url maps to, but only inside allowed folders.

    url: site path resolved via abspath(); m: mode passed to open().
    Raises FileOutZone when the resolved file lies outside
    config.website.paths plus config.allow_folders.
    """
    fp = abspath(url)
    if fp is None:
        print('openfile(',url,m,'),url is not match a file')
        raise Exception('url can not mathc a file')
    config = getConfig()
    # NOTE(review): config.website.paths is iterated as plain path strings
    # here, but unpacked as (path, prefix) pairs elsewhere (e.g.
    # setupTemplateEngine) -- confirm which shape the config really uses
    paths = [ os.path.abspath(p) for p in config.website.paths ]
    fs = config.get('allow_folders',[])
    fs = [ os.path.abspath(i) for i in fs + paths ]
    # the resolved file must live under at least one allowed folder
    r = False
    for f in fs:
        if fp.startswith(f):
            r = True
            break
    if not r:
        raise FileOutZone(fp)
    return open(fp,m)
|
||||
|
||||
def isNone(a):
    """True when *a* is exactly None (helper exposed to templates/scripts)."""
    return a is None
|
||||
|
||||
def abspath(path):
    """Resolve a site path against each configured document root.

    Returns the first root+path that exists on disk, else None.
    """
    config = getConfig()
    # NOTE(review): iterates config.website.paths as plain strings; other
    # code unpacks (p, prefix) pairs -- confirm the config shape
    paths = [ os.path.abspath(p) for p in config.website.paths ]
    for root in paths:
        p = root + path
        if os.path.exists(root+path):
            return p

    return None
|
||||
|
||||
def appname():
    """Return the licensed application name, or a default for dev setups."""
    config = getConfig()
    try:
        return config.license.app
    except Exception:
        # BUG FIX: was a bare ``except:``, which would even swallow
        # KeyboardInterrupt/SystemExit; missing license info is the only
        # condition meant to be tolerated here
        return "test app"
|
||||
|
||||
def request2ns(request):
    """Collapse request.args into an attribute-access object.

    Single-element list values are unwrapped to their one element.
    """
    ret = {}
    for key, value in request.args.items():
        single = type(value) == type([]) and len(value) == 1
        ret[key] = value[0] if single else value
    return uObject(ret)
|
||||
|
||||
def configValue(ks):
    """Evaluate a dotted config selector, e.g. ks='.website.port'.

    Returns None when any part of the chain is missing or the selector
    is malformed.
    """
    config = getConfig()
    try:
        # NOTE(review): eval of 'config'+ks executes arbitrary code if ks
        # is ever attacker-controlled; callers only pass literals today
        a = eval('config' + ks)
        return a
    except:
        return None
|
||||
|
||||
def visualcoding():
    """Shorthand for the config flag website.visualcoding."""
    return configValue('.website.visualcoding')
|
||||
|
||||
def file_download(request,path,name,coding='utf8'):
    """Send the file at *path* as an attachment named *name*.

    NOTE(review): uses setHeader/write/finish, a Twisted-style request
    API -- aiohttp requests have no such methods; confirm which framework
    object is actually passed here.
    """
    f = openfile(path,'rb')
    b = f.read()
    f.close()
    # percent-encode the download name so non-ascii survives the header
    fname = quote(name).encode(coding)
    hah = b"attachment; filename=" + fname
    # print('file head=',hah.decode(coding))
    request.setHeader(b'Content-Disposition',hah)
    request.setHeader(b'Expires',0)
    request.setHeader(b'Cache-Control',b'must-revalidate, post-check=0, pre-check=0')
    request.setHeader(b'Content-Transfer-Encoding',b'binary')
    request.setHeader(b'Pragma',b'public')
    request.setHeader(b'Content-Length',len(b))
    request.write(b)
    request.finish()
|
||||
|
||||
def initEnv():
    """Populate the ServerEnv singleton with every name exposed to
    server-side scripts and templates (dspy/tmpl files)."""
    pool = DBPools()
    g = ServerEnv()
    # config / environment helpers
    g.configValue = configValue
    g.visualcoding = visualcoding
    g.uriop = URIOp
    g.isNone = isNone
    # builtins and stdlib modules handed through to scripts
    g.json = json
    g.int = int
    g.str = str
    g.float = float
    g.type = type
    g.ArgsConvert = ArgsConvert
    g.time = time
    g.datetime = datetime
    g.random = random
    # date/time conversion helpers
    g.str2date = str2Date
    g.str2datetime = str2Datetime
    g.curDatetime = curDatetime
    g.uObject = uObject
    g.uuid = getID
    # database access via the sqlor pool wrappers
    g.runSQL = pool.runSQL
    g.runSQLPaging = pool.runSQLPaging
    # NOTE(review): runSQLIterator is bound to pool.runSQL, not an
    # iterator variant -- confirm this is intentional
    g.runSQLIterator = pool.runSQL
    g.runSQLResultFields = pool.runSQLResultFields
    g.getTables = pool.getTables
    g.getTableFields = pool.getTableFields
    g.getTablePrimaryKey = pool.getTablePrimaryKey
    g.getTableForignKeys = pool.getTableForignKeys
    # filesystem / request / export helpers
    g.folderInfo = folderInfo
    g.abspath = abspath
    g.request2ns = request2ns
    g.CRUD = CRUD
    g.data2xlsx = data2xlsx
    g.xlsxdata = XLSXData
    g.openfile = openfile
|
44
ahserver/mdProcessor.py
Executable file
44
ahserver/mdProcessor.py
Executable file
@ -0,0 +1,44 @@
|
||||
import re
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from WebServer.configuredResource import BaseProcessor
|
||||
from WebServer.globalEnv import request2ns,absUrl
|
||||
|
||||
class MarkDownProcessor(BaseProcessor):
    """Serves *.md files as a JSON 'markdown' widget description.

    NOTE(review): near-duplicate of MarkdownProcessor in baseProcessor.py --
    consider consolidating the two.
    """

    @classmethod
    def isMe(self, name):
        """Selected by the processor registry under the name 'md'."""
        return name == 'md'

    # MIME type announced for the rendered widget JSON
    content_type = 'webwidget/json'

    def fileHandle(self, f, request):
        """Read markdown from *f*, absolutize links, return the widget dict."""
        b = f.read()
        b = self.urlreplace(b, request)
        ret = {
            "__widget__": "markdown",
            "data": {
                "md_text": b
            }
        }
        self.file_data = ret
        return self.file_data

    def urlreplace(self, mdtxt, request):
        """Rewrite every markdown link target to an absolute URL."""
        def replaceURL(s):
            # FIX: regex patterns are now raw strings; the originals used
            # '\[' style escapes in plain str literals, which are
            # deprecated invalid escape sequences (same bytes)
            p1 = r'\[.*?\]\((.*?)\)'
            url = re.findall(p1, s)[0]
            txts = s.split(url)
            url = absUrl(request, url)
            return url.join(txts)

        p = r'\[.*?\]\(.*?\)'
        textarray = re.split(p, mdtxt)
        links = re.findall(p, mdtxt)
        newlinks = [replaceURL(link) for link in links]
        if len(links) > 0:
            mdtxt = ''
            for i in range(len(newlinks)):
                mdtxt = mdtxt + textarray[i]
                mdtxt = mdtxt + newlinks[i]
                mdtxt = mdtxt + textarray[i + 1]
        return mdtxt
||||
|
57
ahserver/myTE.py
Normal file
57
ahserver/myTE.py
Normal file
@ -0,0 +1,57 @@
|
||||
import os
|
||||
import codecs
|
||||
|
||||
from appPublic.Singleton import SingletonDecorator
|
||||
from appPublic.jsonConfig import getConfig
|
||||
|
||||
from jinja2 import Template,Environment, BaseLoader
|
||||
from .serverenv import ServerEnv
|
||||
from .url2file import Url2File, TmplUrl2File
|
||||
|
||||
class TmplLoader(BaseLoader, TmplUrl2File):
    """jinja2 loader that resolves template names through TmplUrl2File."""

    def __init__(self, paths, indexes, subffixes=['.tmpl'], inherit=False):
        # NOTE(review): mutable default subffixes=['.tmpl'] is shared
        # across calls; harmless while never mutated, but worth confirming
        BaseLoader.__init__(self)
        TmplUrl2File.__init__(self, paths, indexes=indexes, subffixes=subffixes, inherit=inherit)

    def get_source(self, env: Environment, template: str):
        """Return (source, filename, uptodate) per the jinja2 loader protocol."""
        # BUG FIX: TemplateNotFound was referenced without being imported
        # anywhere in this module, turning a missing template into a
        # NameError instead of jinja2's normal not-found handling
        from jinja2 import TemplateNotFound
        config = getConfig()
        coding = config.website.coding
        fp = self.url2file(template)
        if not os.path.isfile(fp):
            raise TemplateNotFound(template)
        mtime = os.path.getmtime(fp)
        with codecs.open(fp, 'r', coding) as f:
            source = f.read()
        # uptodate: template is fresh while the file's mtime is unchanged
        return source, fp, lambda: mtime == os.path.getmtime(fp)

    def join_path(self, name, parent):
        """Resolve a template referenced relative to its parent template."""
        return self.relatedurl(parent, name)

    def list_templates(self):
        # enumeration is not supported by this loader
        return []
|
||||
|
||||
|
||||
class TemplateEngine(Environment):
    """jinja2 Environment with parent-relative template name resolution."""

    def __init__(self, loader=None):
        Environment.__init__(self, loader=loader)
        self.urlpaths = {}
        self.loader = loader

    def join_path(self, template: str, parent: str):
        """Delegate relative-name resolution to the loader."""
        return self.loader.join_path(template, parent)

    def render(self, name: str, **globals):
        """Load template *name* and render it with the given globals."""
        tmpl = self.get_template(name, globals=globals)
        return tmpl.render(globals)
|
||||
|
||||
def setupTemplateEngine():
    """Build the template engine from config and publish it as g.tmpl_engine."""
    config = getConfig()
    # template suffixes are the processor entries mapped to 'tmpl'
    subffixes = [ i[0] for i in config.website.processors if i[1] == 'tmpl' ]
    print(subffixes)
    paths = [ os.path.abspath(p) for p,prefix in config.website.paths ]
    loader = TmplLoader(paths,config.website.indexes,subffixes,inherit=True)
    engine = TemplateEngine(loader)
    g = ServerEnv()
    g.tmpl_engine = engine
|
||||
|
193
ahserver/processorResource.py
Normal file
193
ahserver/processorResource.py
Normal file
@ -0,0 +1,193 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from aiohttp.web_urldispatcher import StaticResource, _WebHandler, PathLike
|
||||
from aiohttp.web_urldispatcher import Optional, _ExpectHandler
|
||||
from aiohttp.web_urldispatcher import Path
|
||||
from aiohttp.web_response import Response, StreamResponse
|
||||
from aiohttp.web_exceptions import (
|
||||
HTTPException,
|
||||
HTTPExpectationFailed,
|
||||
HTTPForbidden,
|
||||
HTTPMethodNotAllowed,
|
||||
HTTPNotFound,
|
||||
)
|
||||
from aiohttp.web_fileresponse import FileResponse
|
||||
from aiohttp.web_request import Request
|
||||
from aiohttp.web_response import Response, StreamResponse
|
||||
from aiohttp.web_routedef import AbstractRouteDef
|
||||
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from appPublic.MiniI18N import getI18N
|
||||
from appPublic.dictObject import DictObject
|
||||
|
||||
from .baseProcessor import getProcessor
|
||||
from .xlsxdsProcessor import XLSXDataSourceProcessor
|
||||
from .sqldsProcessor import SQLDataSourceProcessor
|
||||
from .serverenv import ServerEnv
|
||||
from .url2file import Url2File
|
||||
from .filestorage import FileStorage
|
||||
|
||||
def multiDict2Dict(md):
    """Flatten an aiohttp MultiDict into a plain dict.

    A key seen once maps to its value; a key seen repeatedly maps to the
    list of all its values, in order.
    """
    ns = {}
    for key, value in md.items():
        existing = ns.get(key, None)
        if existing is None:
            ns[key] = value
        elif type(existing) == list:
            existing.append(value)
            ns[key] = existing
        else:
            ns[key] = [existing, value]
    return ns
|
||||
|
||||
def getHeaderLang(request):
    """Primary language from the Accept-Language header; 'en' when absent."""
    accept = request.headers.get('Accept-Language')
    return 'en' if accept is None else accept.split(',')[0]
|
||||
|
||||
def i18nDICT(request):
    """JSON-encoded translation dict for the request's language, as bytes."""
    # BUG FIX: this module never imported json, so the dumps call below
    # raised NameError; import locally to keep the fix self-contained
    import json
    c = getConfig()
    i18n = getI18N()
    lang = getHeaderLang(request)
    # map browser language codes onto the site's supported languages
    l = c.langMapping.get(lang, lang)
    return json.dumps(i18n.getLangDict(l)).encode(c.website.coding)
|
||||
|
||||
class ProcessorResource(StaticResource):
    """StaticResource that routes matching paths through processors.

    Files whose path ends with a configured suffix (.tmpl, .dspy, .md, ...)
    are rendered by the matching processor class; everything else falls
    back to normal static file serving. POST is accepted on the same
    routes as GET.
    """
    def __init__(self, prefix: str, directory: PathLike,
            *, name: Optional[str]=None,
            expect_handler: Optional[_ExpectHandler]=None,
            chunk_size: int=256 * 1024,
            show_index: bool=False, follow_symlinks: bool=False,
            append_version: bool=False)-> None:
        super().__init__(prefix, directory,
                name=name,
                expect_handler=expect_handler,
                chunk_size=chunk_size,
                show_index=show_index,
                follow_symlinks=follow_symlinks,
                append_version=append_version)
        # reuse the GET route for POST so form posts also reach _handle
        gr = self._routes.get('GET')
        self._routes.update({'POST':gr})
        self.y_processors = []        # (suffix, processor-name) pairs
        self.y_prefix = prefix        # url prefix this resource serves
        self.y_directory = directory  # filesystem root backing the prefix
        self.y_indexes = []           # index file names for directories
        self.y_env = DictObject()     # helpers handed to processors

    def setProcessors(self, processors):
        # processors: iterable of (suffix, processor-name) pairs
        self.y_processors = processors

    def setIndexes(self, indexes):
        # indexes: directory index file names
        self.y_indexes = indexes

    def abspath(self,path:str):
        """Map a url path under the prefix to a real filesystem path."""
        path = path[len(self.y_prefix):]
        if len(path)>0 and path[0] == '/':
            path = path[1:]
        rp = os.path.join(self.y_directory , path)
        real_path = os.path.abspath(rp)
        return real_path

    async def getPostData(self,request):
        """Collect POST data; multipart file fields are saved via FileStorage.

        Returns a dict; repeated field names collect into lists. Uploaded
        files are represented by their storage-relative path.
        """
        reader = await request.multipart()
        if reader is None:
            # plain (non-multipart) form post
            md = await request.post()
            ns = multiDict2Dict(md)
            return ns
        ns = {}
        while 1:
            field = await reader.next()
            if not field:
                break
            value = ''
            if hasattr(field,'filename'):
                # file upload: stream it into storage, keep the path
                saver = FileStorage()
                value = await saver.save(field.filename,field.read_chunk)
            else:
                value = await field.read(decode=True)
            # repeated field names accumulate into a list
            ov = ns.get(field.name)
            if ov:
                if type(ov) == type([]):
                    ov.append(value)
                else:
                    ov = [ov,value]
            else:
                ov = value
            ns.update({field.name:ov})
        return ns

    async def _handle(self,request:Request) -> StreamResponse:
        """Dispatch *request* to a matching processor or serve statically."""
        # user-agent fragments (regex patterns) -> terminal type names
        clientkeys = {
            "iPhone":"iphone",
            "iPad":"ipad",
            "Android":"androidpad",
            "Windows Phone":"winphone",
            "Windows NT[.]*Win64; x64":"pc",
        }

        def i18nDICT():
            # JSON translation dict for the request's language
            # NOTE(review): relies on a module-level ``json`` name that is
            # not imported anywhere visible in this file -- confirm
            c = getConfig()
            g = ServerEnv()
            if not g.get('myi18n',False):
                g.myi18n = getI18N()
            lang = getHeaderLang(request)
            l = c.langMapping.get(lang,lang)
            return json.dumps(g.myi18n.getLangDict(l))

        def getClientType(request):
            # classify the client device from its user-agent header
            agent = request.headers.get('user-agent')
            if type(agent)!=type('') and type(agent)!=type(b''):
                return 'pc'
            for k in clientkeys.keys():
                m = re.findall(k,agent)
                if len(m)>0:
                    return clientkeys[k]
            return 'pc'

        def serveri18n(s):
            # translate string *s* for the request's language
            lang = getHeaderLang(request)
            c = getConfig()
            g = ServerEnv()
            if not g.get('myi18n',False):
                g.myi18n = getI18N()
            l = c.langMapping.get(lang,lang)
            return g.myi18n(s,l)

        async def getArgs():
            # request parameters as a dict, whatever the method
            ns = DictObject()
            if request.method == 'POST':
                return await self.getPostData(request)
            ns = multiDict2Dict(request.query)
            return ns
            # NOTE(review): dead code below -- unreachable after the return
            # above, and ``ret`` is undefined
            print('** ret=',ret,request.query)
            return ns

        # helpers exposed to processors via the resource environment
        self.y_env.i18n = serveri18n
        self.y_env.i18nDict = i18nDICT
        self.y_env.terminalType = getClientType(request)
        self.y_env.absurl = self.absUrl
        self.y_env.abspath = self.abspath
        self.y_env.request2ns = getArgs
        self.y_env.resource = self
        path = request.path
        # first matching suffix wins; processor renders the file
        for word, handlername in self.y_processors:
            if path.endswith(word):
                Klass = getProcessor(handlername)
                processor = Klass(self.abspath(path),self)
                return await processor.handle(request)
        print(f'path={path} handler by StaticResource..')
        return await super()._handle(request)

    def absUrl(self,request,url):
        """Return *url* absolutized against the current request path.

        Already-absolute http(s) urls are returned unchanged.
        NOTE(review): calls self.relatedurl(), which is defined on Url2File,
        not on this class or StaticResource -- confirm the intended mixin.
        """
        http='http://'
        https='https://'
        if url[:7] == http:
            return url
        if url[:8] == https:
            return url

        path = request.path
        return self.relatedurl(path,url)
|
||||
|
25
ahserver/serverenv.py
Normal file
25
ahserver/serverenv.py
Normal file
@ -0,0 +1,25 @@
|
||||
|
||||
from appPublic.Singleton import SingletonDecorator
|
||||
from appPublic.dictObject import DictObject
|
||||
|
||||
@SingletonDecorator
class ServerEnv(DictObject):
    """Process-wide singleton namespace (attribute-style access via DictObject)
    shared by processors, templates and server-side scripts."""
    pass
|
||||
|
||||
|
||||
# user-agent fragments (regex patterns) -> terminal type names
clientkeys = {
    "iPhone": "iphone",
    "iPad": "ipad",
    "Android": "androidpad",
    "Windows Phone": "winphone",
    "Windows NT[.]*Win64; x64": "pc",
}


def getClientType(request):
    """Classify the requesting device from its User-Agent header.

    Returns one of the clientkeys values, defaulting to 'pc'.
    """
    # BUG FIX: this module never imported re, so every call raised
    # NameError; imported locally to keep the fix self-contained
    import re
    agent = request.headers.get('user-agent')
    # robustness: a missing user-agent header used to crash re.findall(None);
    # mirrors the guard in ProcessorResource's copy of this helper
    if not isinstance(agent, str):
        return 'pc'
    for pattern, terminal in clientkeys.items():
        if re.findall(pattern, agent):
            return terminal
    return 'pc'
|
||||
|
75
ahserver/sqldsProcessor.py
Executable file
75
ahserver/sqldsProcessor.py
Executable file
@ -0,0 +1,75 @@
|
||||
import codecs
|
||||
from .dsProcessor import DataSourceProcessor
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from sqlor.dbpools import DBPools
|
||||
import json
|
||||
"""
|
||||
sqlds file format:
|
||||
{
|
||||
"sqldesc":{
|
||||
"sql_string":"select * from dbo.stock_daily_hist where stock_num=${stock_num}$ order by trade_date desc",
|
||||
"db":"mydb",
|
||||
"sortfield":"stock_date"
|
||||
}
|
||||
"arguments":[
|
||||
{
|
||||
"name":"stock_num",
|
||||
"type":"str",
|
||||
"iotype":"text",
|
||||
"default":"600804"
|
||||
}
|
||||
],
|
||||
"datadesc":[
|
||||
{
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
|
||||
class SQLDataSourceProcessor(DataSourceProcessor):
    """Data source whose records come from a configured SQL statement."""

    @classmethod
    def isMe(self,name):
        # processor registry hook for the 'sqlds' name
        return name=='sqlds'

    def getArgumentsDesc(self,dict_data,ns,request):
        """Argument descriptors as declared in the .sqlds file."""
        desc = dict_data.get('arguments',None)
        return desc

    def getDataDesc(self,dict_data,ns,request):
        """Field descriptors; derived from the SQL result and cached back
        into the .sqlds file when not declared."""
        pool = DBPools()
        # decorated helper: runSQLResultFields turns the returned sql
        # description into the statement's result field metadata
        @pool.runSQLResultFields
        def sql(dbname,NS):
            sqldesc = dict_data.get('sqldesc')
            # print('sql(),sqldesc=',sqldesc)
            return sqldesc
        rec = dict_data.get('datadesc',None)
        if rec is None:
            sqldesc = dict_data.get('sqldesc')
            ns = dict_data.get('arguments',{})
            # drop the synthetic _row_id column from the descriptors
            rec = [ r for r in sql(sqldesc['db'],ns) if r['name']!='_row_id' ]
            dict_data['datadesc'] = rec
            # NOTE(review): self.src_file and self.config are not set
            # anywhere visible (BaseProcessor sets self.path; config usually
            # comes from getConfig()) -- confirm these attributes exist
            f = codecs.open(self.src_file,'w',self.config.website.coding)
            b = json.dumps(dict_data,indent=4)
            f.write(b)
            f.close()
        return rec

    def getData(self,dict_data,ns,request):
        """All rows of the configured SQL statement."""
        pool = DBPools()
        @pool.runSQL
        def sql(dbname,NS):
            sqldesc = dict_data.get('sqldesc')
            return sqldesc
        db = dict_data['sqldesc']['db']
        ret = [ i for i in sql(db,ns) ]
        return ret

    def getPagingData(self,dict_data,ns,request):
        """One page of rows of the configured SQL statement."""
        pool = DBPools()
        @pool.runSQLPaging
        def sql(dbname,NS):
            sqldesc = dict_data.get('sqldesc')
            return sqldesc
        db = dict_data['sqldesc']['db']
        ret = sql(db,ns)
        return ret
|
83
ahserver/uriop.py
Executable file
83
ahserver/uriop.py
Executable file
@ -0,0 +1,83 @@
|
||||
#
|
||||
import os
|
||||
import codecs
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from appPublic.folderUtils import folderInfo
|
||||
|
||||
class URIopException(Exception):
	"""Raised when a URI file operation fails or escapes the website root.

	Attributes:
		errtype: short category of the failure (e.g. 'url scope error').
		errmsg:  detail text, typically the offending URI.
	"""
	def __init__(self,errtype,errmsg):
		self.errtype = errtype
		self.errmsg = errmsg
		# BUG FIX: was ``super(...).init(...)`` — a typo that raised
		# AttributeError instead of initializing the exception.
		super(URIopException,self).__init__('errtype=%s,errmsg=%s' % (errtype,errmsg))

	def __str__(self):
		return 'errtype=%s,errmsg=%s' % (self.errtype,self.errmsg)
|
||||
|
||||
class URIOp(object):
	"""File operations (list/mkdir/rename/delete/read/write) confined to the
	configured website root directory.

	Every public method takes site-relative URIs; abspath() maps them to
	filesystem paths and refuses any path that escapes the root.
	"""
	def __init__(self):
		self.conf = getConfig()
		# Canonical absolute root used as the containment boundary.
		self.realPath = os.path.abspath(self.conf.website.root)

	def abspath(self,uri=None):
		"""Map a site URI to an absolute path under the website root.

		Raises URIopException('url scope error', uri) when the resolved
		path would fall outside the root.
		"""
		p = self.conf.website.root
		if uri is not None and len(uri)>0:
			x = uri
			if x[0] == '/':
				x = x[1:]
			p = os.path.join(p,*x.split('/'))
		d = os.path.abspath(p)
		# SECURITY FIX: the old check compared raw string prefixes, which
		# accepted sibling directories sharing the root's prefix (e.g.
		# /srv/site2 when root is /srv/site). Require the resolved path to
		# be the root itself or to continue past a path-separator boundary.
		if d != self.realPath and not d.startswith(self.realPath + os.sep):
			raise URIopException('url scope error',uri)
		return d

	def fileList(self,uri=''):
		"""Return {'total': n, 'rows': [...]} describing the folder at uri,
		in the shape expected by the front-end tree widget."""
		r = [ i for i in folderInfo(self.realPath,uri) ]
		for i in r:
			if i['type']=='dir':
				# Folders start collapsed in the UI tree.
				i['state'] = 'closed'
			# Encode '/' so the id survives being used as a DOM id.
			i['id'] = '_#_'.join(i['id'].split('/'))

		ret={
			'total':len(r),
			'rows':r
		}
		return ret

	def mkdir(self,at_uri,name):
		"""Create directory *name* inside the folder at *at_uri*."""
		p = self.abspath(at_uri)
		p = os.path.join(p,name)
		os.mkdir(p)

	def rename(self,uri,newname):
		"""Rename the file or folder at *uri* to *newname* in place."""
		p = self.abspath(uri)
		parent = os.path.dirname(p)
		# BUG FIX: the new path was joined onto the path being renamed
		# (os.path.join(p, newname)), which tried to move the entry into
		# itself; join onto its parent directory instead.
		np = os.path.join(parent,newname)
		os.rename(p,np)

	def delete(self,uri):
		"""Delete the file at *uri* (files only; directories are not removed)."""
		p = self.abspath(uri)
		os.remove(p)

	def save(self,uri,data):
		"""Write *data* (text) to the file at *uri* using the site coding."""
		with codecs.open(self.abspath(uri),"w",self.conf.website.coding) as f:
			f.write(data)

	def read(self,uri):
		"""Return the text contents of the file at *uri*."""
		with codecs.open(self.abspath(uri),"r",self.conf.website.coding) as f:
			return f.read()

	def write(self,uri,data):
		"""Alias of save(): write *data* to the file at *uri*."""
		self.save(uri,data)
|
||||
|
||||
|
85
ahserver/url2file.py
Normal file
85
ahserver/url2file.py
Normal file
@ -0,0 +1,85 @@
|
||||
|
||||
|
||||
import os
|
||||
|
||||
class Url2File:
	"""Map request URLs to files under one of several filesystem root paths.

	paths   -- filesystem prefixes searched in order (string-concatenated
	           with the URL, so each should not end with '/').
	indexes -- index file names tried when a URL names a folder.
	inherit -- when True, a missing file is also searched for in parent
	           folders by dropping the second-to-last URL segment.
	"""
	def __init__(self,paths: list,indexes: list, inherit: bool=False):
		self.paths = paths
		self.indexes = indexes
		self.inherit = inherit

	def realurl(self,url):
		# Normalize '.' and '..' segments purely textually (no filesystem access).
		items = url.split('/')
		items = [ i for i in items if i != '.' ]
		while '..' in items:
			# NOTE(review): a '..' at index 0 never matches (i > 0), so a
			# leading '..' would loop forever — confirm inputs are rooted.
			for i,v in enumerate(items):
				if v=='..' and i > 0:
					del items[i]
					del items[i-1]
					break
		return '/'.join(items)

	def isFolder(self,url: str):
		# True when the URL resolves to a directory under any search path.
		for r in self.paths:
			rp = r + url
			real_path = os.path.abspath(rp)
			if os.path.isdir(real_path):
				return True
		return False

	def defaultIndex(self,url: str):
		# Try each configured index file inside the folder at *url*;
		# return the first one that resolves to a real file, else None.
		for p in self.indexes:
			rp = url + '/' + p
			r = self.url2file(rp)
			if r is not None:
				return r
		return None

	def url2file(self,url: str):
		"""Resolve *url* to a path (search-path prefix + url) or None.

		Folders resolve to their default index file.  With inherit=True,
		a miss retries after removing the second-to-last URL segment.
		"""
		if url[-1] == '/':
			url = url[:-1]

		if self.isFolder(url):
			return self.defaultIndex(url)

		for r in self.paths:
			f = r + url
			real_path = os.path.abspath(f)
			if os.path.isfile(real_path):
				return f
		if not self.inherit:
			return None
		items = url.split('/')
		if len(items) > 2:
			# Drop the second-to-last segment and retry in the parent scope.
			del items[-2]
			url = '/'.join(items)
			return self.url2file(url)
		return None

	def relatedurl(self,url: str, name: str):
		# Build the URL of *name* relative to *url*: when url is a file,
		# its last segment is first replaced by nothing (use its folder).
		if url[-1] == '/':
			url = url[:-1]

		if not self.isFolder(url):
			items = url.split('/')
			del items[-1]
			url = '/'.join(items)
		url = url + '/' + name
		return self.realurl(url)

	def relatedurl2file(self,url: str, name: str):
		# Convenience: resolve the related URL straight to a file path.
		url = self.relatedurl(url,name)
		return self.url2file(url)
|
||||
|
||||
class TmplUrl2File(Url2File):
	"""Url2File specialised for template resources (``*.tmpl`` files)."""
	def __init__(self,paths,indexes, subffixes=None,inherit=False):
		"""subffixes: template filename suffixes; defaults to ['.tmpl']."""
		Url2File.__init__(self,paths,indexes=indexes,inherit=inherit)
		# BUG FIX: the default was a mutable list literal shared between
		# all instances; use a None sentinel and build a fresh list.
		self.subffixes = ['.tmpl'] if subffixes is None else subffixes

	def list_tmpl(self):
		"""Return a sorted list of all template files under the search paths."""
		# NOTE(review): listFile is not imported in this module — presumably
		# appPublic.folderUtils.listFile; confirm before relying on this.
		ret = []
		for rp in self.paths:
			p = os.path.abspath(rp)
			# Was a side-effect list comprehension; extend() is the idiom.
			ret.extend(listFile(p,suffixs=self.subffixes,rescursive=True))
		return sorted(ret)
|
||||
|
11
ahserver/wwProcessor.py
Executable file
11
ahserver/wwProcessor.py
Executable file
@ -0,0 +1,11 @@
|
||||
from appPublic.jsonConfig import getConfig
|
||||
from WebServer.configuredResource import BaseProcessor
|
||||
from WebServer.globalEnv import request2ns,absUrl
|
||||
|
||||
class WebWidgetProcessor(BaseProcessor):
	"""Processor registered for the 'ww' (web widget) resource type."""
	@classmethod
	def isMe(self, name):
		"""Return True when *name* selects this processor."""
		return name == 'ww'
|
||||
|
||||
|
||||
|
128
ahserver/xlsxData.py
Executable file
128
ahserver/xlsxData.py
Executable file
@ -0,0 +1,128 @@
|
||||
from openpyxl import load_workbook
|
||||
import json
|
||||
|
||||
"""
|
||||
xlsxds file format:
|
||||
{
|
||||
"xlsxfile":"./data.xlsx",
|
||||
"data_from":7,
|
||||
"data_sheet":"Sheet1",
|
||||
"label_at",1,
|
||||
"name_at":null,
|
||||
"datatype_at":2,
|
||||
"ioattrs_at":3,
|
||||
"listhide_at":4,
|
||||
"inputhide_at":5,
|
||||
"frozen_at":6
|
||||
}
|
||||
"""
|
||||
|
||||
class XLSXData:
	"""Expose an xlsx worksheet as a tabular data source.

	*desc* is the parsed .xlsxds JSON (see module docstring): the ``*_at``
	keys give 1-based header-row numbers for field metadata, ``data_from``
	gives the first data row, ``data_sheet`` names the worksheet.
	"""
	def __init__(self,path,desc):
		self.desc = desc
		self.xlsxfile = path
		self.workbook = load_workbook(self.xlsxfile)
		self.ws = self.workbook[self.desc['data_sheet']]

	def getBaseFieldsInfo(self,ns=None):
		"""Build the per-column field-description list from the header rows.

		*ns* is accepted (and ignored) for compatibility with callers that
		pass the request namespace.
		"""
		ws = self.workbook[self.desc['data_sheet']]
		ret = []
		for y in range(1,ws.max_column+1):
			r = {
				'name':self._fieldName(ws,y),
				'label':self._fieldLabel(ws,y),
				'type':self._fieldType(ws,y),
				'listhide':self._isListHide(ws,y),
				'inputhide':self._isInputHide(ws,y),
				'frozen':self._isFrozen(ws,y)
			}
			r.update(self._fieldIOattrs(ws,y))
			ret.append(r)
		return ret

	def _fieldName(self,ws,i):
		# Name row when configured; otherwise generated names f1, f2, ...
		x = self.desc.get('name_at')
		if x is not None:
			return ws.cell(x,i).value
		return 'f' + str(i)

	def _fieldLabel(self,ws,i):
		# Label row defaults to row 1.
		x = self.desc.get('label_at',1)
		if x is not None:
			return ws.cell(x,i).value
		return 'f' + str(i)

	def _fieldType(self,ws,i):
		x = self.desc.get('datatype_at')
		if x is not None:
			return ws.cell(x,i).value
		return 'str'

	def _fieldIOattrs(self,ws,i):
		# Extra per-field attributes stored as a JSON object in the sheet.
		x = self.desc.get('ioattrs_at')
		if x is not None:
			t = ws.cell(x,i).value
			if t is not None:
				try:
					# BUG FIX: json.loads(t,'utf-8') passed a removed
					# positional 'encoding' argument (TypeError on 3.9+).
					return json.loads(t)
				except Exception as e:
					print('xlsxData.py:field=',i,'t=',t,'error')
		return {}

	def _flagAt(self,ws,i,key):
		# Shared helper for the Y/N flag rows: 'Y' or 'y' means True.
		x = self.desc.get(key)
		if x is not None:
			t = ws.cell(x,i).value
			if t == 'Y' or t == 'y':
				return True
		return False

	def _isFrozen(self,ws,i):
		# BUG FIX: previously read ws.cell(x,y) with an undefined name 'y'.
		return self._flagAt(ws,i,'frozen_at')

	def _isListHide(self,ws,i):
		return self._flagAt(ws,i,'listhide_at')

	def _isInputHide(self,ws,i):
		return self._flagAt(ws,i,'inputhide_at')

	def getPeriodData(self,min_r,max_r):
		"""Return worksheet rows min_r .. max_r-1 as dicts keyed by field name."""
		ws = self.ws
		rows = []
		assert(min_r >= self.desc.get('data_from',2))
		if max_r > ws.max_row:
			max_r = ws.max_row + 1

		if min_r <= max_r:
			x = min_r
			while x < max_r:
				d = {}
				for y in range(1,ws.max_column+1):
					name = self._fieldName(ws,y)
					d.update({name:ws.cell(column=y,row=x).value})
				rows.append(d)
				x = x + 1
		return rows

	def getArgumentsDesc(self,ns,request):
		# xlsx data sources take no user-supplied arguments.
		return None

	def getData(self,ns):
		"""Return every data row of the sheet."""
		ws = self.ws
		min_r = self.desc.get('data_from',2)
		return self.getPeriodData(min_r,ws.max_row + 1)

	def getPagingData(self,ns):
		"""Return one page of rows: {'total': n, 'rows': [...]}.

		Page size and number come from ns['rows'] (default 50) and
		ns['page'] (default 1).
		"""
		rows = int(ns.get('rows',50))
		page = int(ns.get('page',1))
		d1 = self.desc.get('data_from',2)
		min_r = (page - 1) * rows + d1
		# BUG FIX: the old bound (page*rows + d1 + 1) yielded 51 rows per
		# 50-row page and made consecutive pages overlap by one row.
		max_r = page * rows + d1
		rows = self.getPeriodData(min_r,max_r)
		ret = {
			# BUG FIX: total was off by one (data rows run d1..max_row inclusive).
			'total':self.ws.max_row - d1 + 1,
			'rows':rows
		}
		return ret
|
||||
|
51
ahserver/xlsxdsProcessor.py
Executable file
51
ahserver/xlsxdsProcessor.py
Executable file
@ -0,0 +1,51 @@
|
||||
import codecs
|
||||
|
||||
from openpyxl import load_workbook
|
||||
|
||||
from appPublic.jsonConfig import getConfig
|
||||
|
||||
from .dsProcessor import DataSourceProcessor
|
||||
from .xlsxData import XLSXData
|
||||
|
||||
"""
|
||||
xlsxds file format:
|
||||
{
|
||||
"xlsxfile":"./data.xlsx",
|
||||
"data_from":7,
|
||||
"data_sheet":"Sheet1",
|
||||
"label_at",1,
|
||||
"name_at":null,
|
||||
"datatype_at":2,
|
||||
"ioattrs":3,
|
||||
"listhide_at":4,
|
||||
"inputhide_at":5,
|
||||
"frozen_at":6
|
||||
}
|
||||
"""
|
||||
|
||||
class XLSXDataSourceProcessor(DataSourceProcessor):
	"""Data-source processor for ``*.xlsxds`` files (xlsx-backed data).

	Each handler resolves the configured 'xlsxfile' path relative to the
	request and delegates to an XLSXData instance.
	"""
	@classmethod
	def isMe(self,name):
		# Registered under the processor name 'xlsxds'.
		return name=='xlsxds'

	def getArgumentsDesc(self,dict_data,ns,request):
		# xlsx data sources take no user-supplied arguments.
		return None

	def getDataDesc(self,dict_data,ns,request):
		path = dict_data.get('xlsxfile',None)
		self.xlsxdata = XLSXData(self.g.abspath(self.g.absurl(request,path)),dict_data)
		# BUG FIX: getBaseFieldsInfo was called with ns although the
		# XLSXData method takes no namespace argument (TypeError).
		ret = self.xlsxdata.getBaseFieldsInfo()
		return ret

	def getData(self,dict_data,ns,request):
		path = dict_data.get('xlsxfile',None)
		self.xlsxdata = XLSXData(self.g.abspath(self.g.absurl(request,path)),dict_data)
		ret = self.xlsxdata.getData(ns)
		return ret

	def getPagingData(self,dict_data,ns,request):
		path = dict_data.get('xlsxfile',None)
		# BUG FIX: was ns.absurl(...) — absurl lives on self.g, as in the
		# other handlers above.
		self.xlsxdata = XLSXData(self.g.abspath(self.g.absurl(request,path)),dict_data)
		ret = self.xlsxdata.getPagingData(ns)
		return ret
|
||||
|
71
conf/config.json
Normal file
71
conf/config.json
Normal file
@ -0,0 +1,71 @@
|
||||
{
|
||||
"debug":true,
|
||||
"databases":{
|
||||
"aiocfae":{
|
||||
"driver":"aiomysql",
|
||||
"async_mode":true,
|
||||
"coding":"utf8",
|
||||
"dbname":"cfae",
|
||||
"kwargs":{
|
||||
"user":"test",
|
||||
"db":"cfae",
|
||||
"password":"test123",
|
||||
"host":"localhost"
|
||||
}
|
||||
},
|
||||
"cfae":{
|
||||
"driver":"mysql.connector",
|
||||
"coding":"utf8",
|
||||
"dbname":"cfae",
|
||||
"kwargs":{
|
||||
"user":"test",
|
||||
"db":"cfae",
|
||||
"password":"test123",
|
||||
"host":"localhost"
|
||||
}
|
||||
}
|
||||
},
|
||||
"website":{
|
||||
"paths":[
|
||||
["$[workdir]$/../usedpkgs/antd","/antd"],
|
||||
["$[workdir]$/../wolon",""]
|
||||
],
|
||||
"host":"0.0.0.0",
|
||||
"port":8080,
|
||||
"coding":"utf-8",
|
||||
"ssl_gg":{
|
||||
"crtfile":"$[workdir]$/conf/www.bsppo.com.pem",
|
||||
"keyfile":"$[workdir]$/conf/www.bsppo.com.key"
|
||||
},
|
||||
"indexes":[
|
||||
"index.html",
|
||||
"index.tmpl",
|
||||
"index.dspy",
|
||||
"index.md"
|
||||
],
|
||||
"visualcoding":{
|
||||
"default_root":"/samples/vc/test",
|
||||
"userroot":{
|
||||
"ymq":"/samples/vc/ymq",
|
||||
"root":"/samples/vc/root"
|
||||
},
|
||||
"jrjpath":"/samples/vc/default"
|
||||
},
|
||||
"processors":[
|
||||
[".xlsxds","xlsxds"],
|
||||
[".sqlds","sqlds"],
|
||||
[".tmpl.js","tmpl"],
|
||||
[".tmpl.css","tmpl"],
|
||||
[".html.tmpl","tmpl"],
|
||||
[".tmpl","tmpl"],
|
||||
[".dspy","dspy"],
|
||||
[".md","md"]
|
||||
]
|
||||
},
|
||||
"langMapping":{
|
||||
"zh-Hans-CN":"zh-cn",
|
||||
"zh-CN":"zh-cn",
|
||||
"en-us":"en",
|
||||
"en-US":"en"
|
||||
}
|
||||
}
|
0
i18n/en/msg.txt
Normal file
0
i18n/en/msg.txt
Normal file
0
i18n/zh-cn/msg.txt
Normal file
0
i18n/zh-cn/msg.txt
Normal file
Loading…
Reference in New Issue
Block a user