first commit

yumoqing 2025-06-24 11:50:40 +08:00
commit f1f480d429
19 changed files with 1572 additions and 0 deletions

59
README.md Executable file

@ -0,0 +1,59 @@
# xls2ddl
A tool to generate database schema DDL from xlsx files.
There is an xlsx template file called "model.xltx" in this repo.
## How to use it
1. Double click "model.xltx".
2. In the "summary" datasheet, write the table name, table label and primary key.
3. In the "fields" datasheet, define all the fields of the table.
4. In the "validation" datasheet, define the indexes:
   - first column: the index name
   - second column: "idx"
   - third column: "index:f1,f2,..." for a non-unique index, or "unique:f1,f2,..." for a unique index
5. Save it as an xlsx file named after the table.
6. Repeat steps 1 to 5 for every table.
7. In the folder holding all the xlsx files, translate them to DDL SQL:
For MySQL:
```
python path/to/xls2ddl.py mysql .
```
For SQLite3:
```
python path/to/xls2ddl.py sqlite3 .
```
For Oracle:
```
python path/to/xls2ddl.py oracle .
```
For PostgreSQL:
```
python path/to/xls2ddl.py postgresql .
```
For SQL Server:
```
python path/to/xls2ddl.py sqlserver .
```
## Example xlsx file
Open [examples.xlsx](./examples.xlsx) to see how a table's schema information is written.
To write your own table schema file (a sketch follows this list):
1. Copy examples.xlsx to 'your table name'.xlsx.
2. Open 'your table name'.xlsx and change the information in the summary, fields, validation and codes datasheets.
3. Save it.
4. Use xls2ddl to translate the xlsx file into a DDL file.
5. Use the DDL file to create your database tables.
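For example, a minimal flow for a table called `users` might look like this (the file names and the sqlite3 target are illustrative; xls2ddl writes the generated DDL to standard output):
```
cp examples.xlsx users.xlsx
# edit the summary, fields and validation datasheets in users.xlsx, then:
python path/to/xls2ddl.py sqlite3 . > users.sql
```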
# xls2crud
xls2crud is a tool that creates the user interface of a table data administration website.
It only supports the [ahserver](https://git.kaiyuancloud.cn/yumoqing/ahserver) backend server and the [bricks](https://git.kaiyuancloud.cn/yumoqing/bricks) front end.
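A minimal invocation sketch, based on the command-line options defined in xls2crud.py (the models directory, output directory and module name below are illustrative):
```
python path/to/xls2crud.py -m models -o wwwroot/_a sage examples/users.json
```
xls2ui.py takes the same options and additionally routes descriptions with `"uitype":"tree"` to the tree UI generator.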

14
examples/audit_log.json Normal file

@ -0,0 +1,14 @@
{
"tblname": "audit_log",
"title":"审计日志",
"params": {
"sortby":"userid",
"browserfields": {
"exclouded": ["id"],
"cwidth": {}
},
"editexclouded": [
"id"
]
}
}

32
examples/organization.json Normal file

@ -0,0 +1,32 @@
{
"models_dir": "${HOME}$/py/rbac/models",
"output_dir": "${HOME}$/py/sage/wwwroot/_a/organization",
"dbname": "sage",
"tblname": "organization",
"title":"Organization",
"params": {
"sortby":"orgname",
"browserfields": {
"exclouded": ["id"],
"cwidth": {}
},
"editexclouded": [
"id"
],
"subtables":[
{
"field":"orgid",
"title":"Org. type",
"url":"../orgtypes",
"subtable":"orgtypes"
},
{
"field":"orgid",
"title":"Users",
"url":"../users",
"subtable":"users"
}
],
"record_toolbar": null
}
}

18
examples/orgtypes.json Normal file

@ -0,0 +1,18 @@
{
"models_dir": "${HOME}$/py/rbac/models",
"output_dir": "${HOME}$/py/sage/wwwroot/_a/orgtypes",
"dbname": "sage",
"tblname": "orgtypes",
"title":"Org. type",
"params": {
"browserfields": {
"exclouded": ["id", "orgid"],
"cwidth": {}
},
"editexclouded": [
"id",
"orgid"
],
"record_toolbar": null
}
}

21
examples/permission.json Normal file

@ -0,0 +1,21 @@
{
"tblname": "permission",
"uitype":"tree",
"title":"权限",
"params":{
"idField":"id",
"textField":"path",
"sortby":"path",
"editable":true,
"browserfields":{
"alters":{}
},
"edit_exclouded_fields":[],
"parentField":"parentid",
"toolbar":{
},
"binds":[
]
}
}

31
examples/role.json Normal file

@ -0,0 +1,31 @@
{
"models_dir": "${HOME}$/py/rbac/models",
"output_dir": "${HOME}$/py/sage/wwwroot/_a/role",
"dbname": "sage",
"tblname": "role",
"title":"角色",
"params": {
"sortby":"name",
"browserfields": {
"exclouded": ["id"],
"cwidth": {}
},
"editexclouded": [
"id"
],
"subtables":[
{
"field":"roleid",
"title":"角色权限",
"url":"../rolepermission",
"subtable":"rolepermission"
},
{
"field":"roleid",
"title":"用户",
"url":"../users",
"subtable":"users"
}
]
}
}

26
examples/rolepermission.json Normal file

@ -0,0 +1,26 @@
{
"models_dir": "${HOME}$/py/rbac/models",
"output_dir": "${HOME}$/py/sage/wwwroot/_a/rolepermission",
"dbname": "sage",
"tblname": "rolepermission",
"title":"用户",
"params": {
"relation":{
"outter_field":"permid",
"param_field":"roleid"
},
"noedit":true,
"browserfields": {
"exclouded": ["id", "roleid"],
"alters":{
"permid":{
"cwidth":60
}
}
},
"editexclouded": [
"id", "roleid"
],
"record_toolbar": null
}
}

17
examples/userapp.json Normal file

@ -0,0 +1,17 @@
{
"models_dir": "${HOME}$/py/rbac/models",
"output_dir": "${HOME}$/py/sage/wwwroot/_a/userapp",
"dbname": "sage",
"tblname": "userapp",
"title":"用户",
"params": {
"browserfields": {
"exclouded": ["id", "userid"],
"cwidth": {}
},
"editexclouded": [
"id", "userid"
],
"record_toolbar": null
}
}

17
examples/userdepartment.json Normal file

@ -0,0 +1,17 @@
{
"models_dir": "${HOME}$/py/rbac/models",
"output_dir": "${HOME}$/py/sage/wwwroot/_a/userdepartment",
"dbname": "sage",
"tblname": "userdepartment",
"title":"用户",
"params": {
"browserfields": {
"exclouded": ["id", "userid"],
"cwidth": {}
},
"editexclouded": [
"id", "userid"
],
"record_toolbar": null
}
}

19
examples/userrole.json Normal file

@ -0,0 +1,19 @@
{
"tblname": "userrole",
"title":"用户角色",
"params": {
"relation":{
"param_field":"userid",
"outter_field":"roleid"
},
"browserfields": {
"exclouded": ["id", "userid"],
"alters": {}
},
"editexclouded": [
"id",
"userid"
],
"record_toolbar": null
}
}

23
examples/users.json Normal file

@ -0,0 +1,23 @@
{
"tblname": "users",
"title":"用户",
"params": {
"sortby":"username",
"confidential_fields":["password"],
"logined_userorgid":"orgid",
"browserfields": {
"exclouded": ["id", "password", "orgid", "nick_name" ],
"cwidth": {}
},
"editexclouded": [
"id", "nick_name", "orgid"
],
"subtables": [
{
"field":"userid",
"title":"用户角色",
"subtable":"userrole"
}
]
}
}

BIN
model.xltx Normal file

Binary file not shown.

4
requirements.txt Executable file

@ -0,0 +1,4 @@
xlrd
openpyxl
git+https://github.com/yumoqing/appPublic
git+https://github.com/yumoqing/sqlor

136
singletree.py Normal file

@ -0,0 +1,136 @@
import os
import sys
import codecs
import json
from appPublic.dictObject import DictObject
from xlsxData import xlsxFactory
from appPublic.folderUtils import listFile, _mkdir
from appPublic.myTE import MyTemplateEngine
from tmpls import data_browser_tmpl, get_data_tmpl, data_new_tmpl, data_update_tmpl, data_delete_tmpl
from xls2crud import build_dbdesc, field_list
ui_tmpl = """
{
"widgettype":"Tree",
"options":{
{% if not notitle %}
{% if title %}
"title":"{{title}}",
{% else %}
"title":"{{summary[0].title}}",
{% endif %}
{% endif %}
{% if description %}
"description":"{{description}}",
{% endif %}
{% if toolbar %}
"toolbar":{{json.dumps(toolbar, ensure_ascii=False)}},
{% endif %}
{% if editable %}
"editable":{
"fields":{{edit_fields_str}},
"add_url":{%- raw -%}"{{entire_url('./new_{%- endraw -%}{{tblname}}{%- raw -%}.dspy')}}",{%- endraw %}
"update_url":{%- raw -%}"{{entire_url('./update_{%- endraw -%}{{tblname}}{%- raw -%}.dspy')}}",{%- endraw %}
"delete_url":{%- raw -%}"{{entire_url('./delete_{%- endraw -%}{{tblname}}{%- raw -%}.dspy')}}"{%- endraw %}
},
{% endif %}
{% if checkField %}
"checkField":"{{checkField}}",
{% endif %}
"parentField":"{{parentField}}",
"idField":"{{idField}}",
"textField":"{{textField}}",
"dataurl":{%- raw -%}"{{entire_url('./get_{%- endraw -%}{{tblname}}{%- raw -%}.dspy')}}"{%- endraw %}
}
{% if binds %}
,"binds":{{json.dumps(binds, indent=4, ensure_ascii=False)}}
{% endif %}
}
"""
get_nodes_tmpl = """
ns = params_kw.copy()
sql = '''select * from {{tblname}} where 1 = 1'''
id = ns.get('{{idField}}')
if id:
sql += " and {{parentField}} = ${id}$"
else:
sql += " and {{parentField}} is null"
sql += " order by {{textField}} "
db = DBPools()
dbname = get_module_dbname('{{modulename or ''}}')
async with db.sqlorContext(dbname) as sor:
r = await sor.sqlExe(sql, ns)
return r
return []
"""
def gen_tree_ui(d, pat):
e = MyTemplateEngine([])
s = e.renders(ui_tmpl, d)
with open(os.path.join(pat, f'index.ui'), 'w') as f:
f.write(s)
def gen_delete_nodedata(d, pat):
e = MyTemplateEngine([])
s = e.renders(data_delete_tmpl, d)
with open(os.path.join(pat, f'delete_{d.tblname}.dspy'), 'w') as f:
f.write(s)
def gen_update_nodedata(d, pat):
e = MyTemplateEngine([])
s = e.renders(data_update_tmpl, d)
with open(os.path.join(pat, f'update_{d.tblname}.dspy'), 'w') as f:
f.write(s)
def gen_new_nodedata(d, pat):
e = MyTemplateEngine([])
s = e.renders(data_new_tmpl, d)
with open(os.path.join(pat, f'new_{d.tblname}.dspy'), 'w') as f:
f.write(s)
def gen_get_nodedata(d, pat):
e = MyTemplateEngine([])
s = e.renders(get_nodes_tmpl, d)
with open(os.path.join(pat, f'get_{d.tblname}.dspy'), 'w') as f:
f.write(s)
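# build_tree_ui below renders index.ui (a Tree widget description) plus the
# get_/new_/update_/delete_<tblname>.dspy server scripts into tree_data.output_dir.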
def build_tree_ui(tree_data, dbdesc):
outdir = tree_data.output_dir
_mkdir(outdir)
tbldesc = dbdesc[tree_data.tblname].copy()
tbldesc = DictObject(**tbldesc)
tbldesc.tblname = tree_data.tblname
tbldesc.update(tree_data.params)
exclouds = tbldesc.edit_exclouded_fields or []
if tbldesc.idField not in exclouds:
exclouds.append(tbldesc.idField)
if tbldesc.parentField not in exclouds:
exclouds.append(tbldesc.parentField)
tbldesc.edit_fields_str = json.dumps([ f for f in field_list(tbldesc) if f.name not in exclouds ],
indent=4, ensure_ascii=False)
gen_tree_ui(tbldesc, outdir)
gen_get_nodedata(tbldesc, outdir)
gen_new_nodedata(tbldesc, outdir)
gen_update_nodedata(tbldesc, outdir)
gen_delete_nodedata(tbldesc, outdir)
def main(dbdesc, outdir, modulename, fn):
    # load the tree description json, point its output into outdir and build
    # the tree ui (same handling as xls2ui.py)
    with codecs.open(fn, 'r', 'utf-8') as f:
        tree_data = DictObject(**json.load(f))
    tree_data.output_dir = os.path.join(outdir, tree_data.alias or tree_data.tblname)
    tree_data.params.modulename = modulename
    tree_data.params.tblname = tree_data.tblname
    build_tree_ui(tree_data, dbdesc)
if __name__ == '__main__':
if len(sys.argv) < 4:
print(f'{sys.argv[0]} model_path outpath modelname tree_desc_file ...')
sys.exit(1)
dbdesc = build_dbdesc(sys.argv[1])
outdir = sys.argv[2]
modulename = sys.argv[3]
for f in sys.argv[4:]:
main(dbdesc, outdir, modulename, f)

381
tmpls.py Normal file

@ -0,0 +1,381 @@
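# Jinja templates used by xls2crud.py and singletree.py to generate the bricks
# front end widget descriptions (*.ui) and the server-side data scripts (*.dspy)
# for browsing, adding, updating, deleting and fetching table records.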
data_browser_tmpl = """
{
"id":"{{tblname}}_tbl",
"widgettype":"Tabular",
"options":{
{% if not notitle %}
{% if title %}
"title":"{{title}}",
{% else %}
"title":"{{summary[0].title}}",
{% endif %}
{% endif %}
{% if description %}
"description":"{{description}}",
{% endif %}
{% if toolbar %}
"toolbar":{{json.dumps(toolbar, indent=4, ensure_ascii=False)}},
{% endif %}
"css":"card",
{% if not noedit %}
"editable":{
"new_data_url":{%- raw -%}"{{entire_url('add_{%- endraw -%}{{summary[0].name}}{%- raw -%}.dspy')}}",{%- endraw %}
"delete_data_url":{%- raw -%}"{{entire_url('delete_{%- endraw -%}{{summary[0].name}}{%- raw -%}.dspy')}}",{%- endraw %}
"update_data_url":{%- raw -%}"{{entire_url('update_{%- endraw -%}{{summary[0].name}}{%- raw -%}.dspy')}}"{%- endraw %}
},
{% endif %}
"data_url":"{%- raw -%}{{entire_url('./get_{%- endraw -%}{{summary[0].name}}{%- raw -%}.dspy')}}",{%- endraw %}
"data_method":"{{data_method or 'GET'}}",
"data_params":{%- raw -%}{{json.dumps(params_kw, indent=4, ensure_ascii=False)}},{%- endraw %}
"row_options":{
{% if idField %}
"idField":"{{idField}}",
{% endif %}
{% if checkField %}
"checkField":"{{checkField}}",
{% endif %}
{% if browserfields %}
"browserfields": {{json.dumps(browserfields, indent=4, ensure_ascii=Fasle)}},
{% endif %}
{% if editexclouded %}
"editexclouded":{{json.dumps(editexclouded, indent=4, ensure_ascii=False)}},
{% endif %}
"fields":{{fieldliststr}}
},
{% if subtables_condition %}
{%- raw -%}{% {%- endraw %}if {{subtables_condition}} {%- raw -%} %}{%- endraw -%}
{% endif %}
{% if content_view %}
"content_view":{{json.dumps(content_view, indent=4, ensure_ascii=False)}},
{% endif %}
{% if subtables_condition %}
{%- raw -%}{% endif %}{%- endraw %}
{% endif %}
"page_rows":160,
"cache_limit":5
}
{% if bindsstr %}
,"binds":{{bindsstr}}
{% endif %}
}
"""
get_data_tmpl = """
ns = params_kw.copy()
{% if logined_userid %}
userid = await get_user()
if not userid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userid}}'] = userid
ns['userid'] = userid
{% endif %}
{% if logined_userorgid %}
userorgid = await get_userorgid()
if not userorgid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userorgid}}'] = userorgid
ns['userorgid'] = userorgid
{% endif %}
debug(f'get_{{tblname}}.dspy:{ns=}')
if not ns.get('page'):
ns['page'] = 1
if not ns.get('sort'):
{% if sortby %}
{% if type(sortby) == type("") %}
ns['sort'] = '{{sortby}}'
{% else %}
ns['sort'] = {{json.dumps(sortby)}}
{% endif %}
{% else %}
ns['sort'] = 'id'
{% endif %}
{% if relation %}
ns['sort'] = '{{relation.outter_field}}_text'
{% endif %}
sql = '''{{sql}}'''
{% if not relation %}
filterjson = params_kw.get('data_filter')
if not filterjson:
fields = [ f['name'] for f in {{json.dumps(fields, indent=4, ensure_ascii=False)}} ]
filterjson = default_filterjson(fields, ns)
filterdic = ns.copy()
filterdic['filterstr'] = ''
filterdic['userorgid'] = '${userorgid}$'
filterdic['userid'] = '${userid}$'
if filterjson:
dbf = DBFilter(filterjson)
conds = dbf.gen(ns)
if conds:
ns.update(dbf.consts)
conds = f' and {conds}'
filterdic['filterstr'] = conds
ac = ArgsConvert('[[', ']]')
vars = ac.findAllVariables(sql)
NameSpace = {v:'${' + v + '}$' for v in vars if v != 'filterstr' }
filterdic.update(NameSpace)
sql = ac.convert(sql, filterdic)
{% endif %}
debug(f'{sql=}')
db = DBPools()
dbname = get_module_dbname('{{modulename}}')
async with db.sqlorContext(dbname) as sor:
r = await sor.sqlPaging(sql, ns)
return r
return {
"total":0,
"rows":[]
}
"""
data_new_tmpl = """
ns = params_kw.copy()
id = params_kw.id
if not id or len(id) > 32:
id = uuid()
ns['id'] = id
{% for f in confidential_fields or [] %}
if params_kw.get('{{f}}'):
ns['{{f}}'] = password_encode(params_kw.get('{{f}}'))
{% endfor %}
{% if logined_userid %}
userid = await get_user()
if not userid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userid}}'] = userid
{% endif %}
{% if logined_userorgid %}
userorgid = await get_userorgid()
if not userorgid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userorgid}}'] = userorgid
{% endif %}
db = DBPools()
dbname = get_module_dbname('{{modulename}}')
async with db.sqlorContext(dbname) as sor:
r = await sor.C('{{summary[0].name}}', ns.copy())
return {
"widgettype":"Message",
"options":{
"user_data":ns,
"cwidth":16,
"cheight":9,
"title":"Add Success",
"timeout":3,
"message":"ok"
}
}
return {
"widgettype":"Error",
"options":{
"title":"Add Error",
"cwidth":16,
"cheight":9,
"timeout":3,
"message":"failed"
}
}
"""
data_update_tmpl = """
ns = params_kw.copy()
{% if logined_userid %}
userid = await get_user()
if not userid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userid}}'] = userid
{% endif %}
{% if logined_userorgid %}
userorgid = await get_userorgid()
if not userorgid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userorgid}}'] = userorgid
{% endif %}
{% for f in confidential_fields or [] %}
if params_kw.get('{{f}}'):
ns['{{f}}'] = password_encode(params_kw.get('{{f}}'))
{% endfor %}
db = DBPools()
dbname = get_module_dbname('{{modulename}}')
async with db.sqlorContext(dbname) as sor:
r = await sor.U('{{summary[0].name}}', ns)
debug('update success');
return {
"widgettype":"Message",
"options":{
"title":"Update Success",
"cwidth":16,
"cheight":9,
"timeout":3,
"message":"ok"
}
}
return {
"widgettype":"Error",
"options":{
"title":"Update Error",
"cwidth":16,
"cheight":9,
"timeout":3,
"message":"failed"
}
}
"""
data_delete_tmpl = """
ns = {
'id':params_kw['id'],
}
{% if logined_userid %}
userid = await get_user()
if not userid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userid}}'] = userid
{% endif %}
{% if logined_userorgid %}
userorgid = await get_userorgid()
if not userorgid:
return {
"widgettype":"Error",
"options":{
"title":"Authorization Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"Please login"
}
}
ns['{{logined_userorgid}}'] = userorgid
{% endif %}
db = DBPools()
dbname = get_module_dbname('{{modulename}}')
async with db.sqlorContext(dbname) as sor:
r = await sor.D('{{summary[0].name}}', ns)
debug('delete success');
return {
"widgettype":"Message",
"options":{
"title":"Delete Success",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"ok"
}
}
debug('Delete failed');
return {
"widgettype":"Error",
"options":{
"title":"Delete Error",
"timeout":3,
"cwidth":16,
"cheight":9,
"message":"failed"
}
}
"""
check_changed_tmpls = """
is_checked = params_kw.get('has_{{relation.param_field}}')
debug(f'{params_kw=}, {is_checked=}')
dbname = get_module_dbname('{{modulename}}')
if is_checked == 'true':
ns = {
"id":uuid(),
"{{relation.param_field}}":params_kw.{{relation.param_field}},
"{{relation.outter_field}}":params_kw.{{relation.outter_field}}
}
db = DBPools();
async with db.sqlorContext(dbname) as sor:
await sor.C('{{tblname}}', ns)
return {
"widgettype":"Message",
"options":{
"title":"Success",
"message":"record add success",
"timeout":2
}
}
else:
ns = {
"{{relation.param_field}}":params_kw.{{relation.param_field}},
"{{relation.outter_field}}":params_kw.{{relation.outter_field}}
}
sql = "delete from {{tblname}} where {{relation.param_field}}=" + "${" + "{{relation.param_field}}" + "}$" + " and {{relation.outter_field}}=" + "${" + "{{relation.outter_field}}" + "}$"
db = DBPools()
async with db.sqlorContext(dbname) as sor:
await sor.sqlExe(sql, ns)
return {
"widgettype":"Message",
"options":{
"title":"Success",
"message":"delete record success",
"timeout":3
}
}
"""

304
xls2crud.py Normal file

@ -0,0 +1,304 @@
import os
import sys
import codecs
import json
import argparse
from appPublic.dictObject import DictObject
from xlsxData import xlsxFactory
from appPublic.folderUtils import listFile, _mkdir
from appPublic.myTE import MyTemplateEngine
from tmpls import data_browser_tmpl, get_data_tmpl, data_new_tmpl, data_update_tmpl, data_delete_tmpl, check_changed_tmpls
from appPublic.argsConvert import ArgsConvert
"""
usage:
xls2crud.py [-m models_dir] [-o output_dir] modulename json_file ...
"""
def build_dbdesc(models_dir: str) -> dict:
db_desc = {}
for f in listFile(models_dir, suffixs=['.xlsx']):
print(f'{f} handle ...')
x = xlsxFactory(f)
d = x.get_data()
tbname = d.summary[0].name
db_desc.update({tbname:d})
return db_desc
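# build_dbdesc returns a dict mapping each table name (summary[0].name) to the
# parsed xlsx model data, for every .xlsx file found under models_dir.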
def build_subtable(subtable):
t = subtable
url = f"../{t.subtable}"
if t.url:
url = t.url
params = t.params or {}
params[t.field] = "${id}"
return {
"widgettype":"urlwidget",
"options":{
"params":params,
"url":"{{entire_url('" + url + "')}}"
}
}
def build_crud_ui(crud_data: dict, dbdesc: dict):
uidir = crud_data.output_dir
tables = [ k for k in dbdesc.keys() ]
desc = dbdesc[crud_data.tblname]
desc.update(crud_data.params)
binds = desc.binds or []
if desc.relation:
desc.checkField = 'has_' + desc.relation.param_field
binds.append({
"wid":"self",
"event":"row_check_changed",
"actiontype":"urlwidget",
"target":"self",
"options":{
"params":{},
"url":"{{entire_url('check_changed.dspy')}}"
}
})
desc.bindsstr = json.dumps(binds, indent=4, ensure_ascii=False)
if desc.subtables:
if len(desc.subtables) == 1:
t = desc.subtables[0]
d = build_subtable(t)
content_view = DictObject(**d)
else:
items = []
for t in desc.subtables:
d = build_subtable(t)
item = {
"name":t.subtable,
"label":t.title or t.subtable,
"content":d
}
items.append(item)
content_view = DictObject(**{
"widgettype":"TabPanel",
"options":{
"tab_wide":"auto",
"height":"100%",
"width":"100%",
"tab_pos":"top",
"items":items
}
})
desc.content_view = content_view
desc.update({
"tblname":crud_data.tblname,
"dbname":crud_data.dbname
})
build_table_crud_ui(uidir, desc)
def build_table_crud_ui(uidir: str, desc: dict) -> None:
_mkdir(uidir)
build_data_browser(uidir, desc)
if desc.relation:
build_check_changed(uidir, desc)
else:
build_data_new(uidir, desc)
build_data_update(uidir, desc)
build_data_delete(uidir, desc)
build_get_data(uidir, desc)
def alter_field(field:dict, desc:DictObject) -> dict:
name = field['name']
ret = field.copy()
alters = desc.browserfields.alters
if alters:
[ ret.update(alters[k]) for k in alters.keys() if k == name ]
return ret
def field_list(desc: dict) -> list:
fs = []
for f in desc.fields:
if desc.codes and f.name in [c.field for c in desc.codes]:
d = get_code_desc(f, desc)
else:
d = setup_ui_info(f, confidential_fields=desc.confidential_fields or [])
"""
use alters to modify fields
"""
d = alter_field(d, desc)
fs.append(d)
return fs
def get_code_desc(field: dict, desc: dict) -> dict:
d = DictObject(**field.copy())
if not desc.codes:
return None
for c in desc.codes:
if d.name == c.field:
d.label = d.title or d.name
d.uitype = 'code'
d.valueField = d.name
d.textField = d.name + '_text'
d.params = {
'dbname':"{{get_module_dbname('" + desc.modulename + "')}}",
'table':c.table,
'tblvalue':c.valuefield,
'tbltext':c.textfield,
'valueField':d.valueField,
'textField':d.textField
}
if c.cond:
d.params['cond'] = c.cond
ac = ArgsConvert('[[', ']]')
vars = ac.findAllVariables(c.cond)
for v in vars:
d.params[v] = '{{params_kw.' + v + '}}'
d.dataurl = "{{entire_url('/appbase/get_code.dspy')}}"
return d
return None
def setup_ui_info(field:dict, confidential_fields=[]) ->dict:
d = DictObject(**field.copy())
if d.length:
d.cwidth = d.length if d.length < 18 else 18
if d.cwidth < 4:
d.cwidth = 4;
else:
d.length = 0
if d.type == 'date':
d.uitype = 'date'
d.length = 0
elif d.type == 'time':
d.uitype = 'time'
d.length = 0
elif d.type in ['int', 'short', 'long', 'longlong']:
d.uitype = 'int'
d.length = 0
elif d.type == 'text':
d.uitype = 'text'
elif d.type in ['float', 'double', 'decimal']:
d.uitype = 'float'
else:
if d.name in confidential_fields:
d.uitype = 'password'
elif d.name.endswith('_date') or d.name.endswith('_dat'):
d.uitype = 'date'
d.length = 0
else:
d.uitype = 'str'
d.datatype = d.type
d.label = d.title or d.name
return d
def construct_get_data_sql(desc: dict) -> str:
shortnames = [c for c in 'bcdefghjklmnopqrstuvwxyz']
infos = []
if desc.relation and desc.codes:
param_field = "${" + desc.relation.param_field + "}$"
for code in desc.codes:
if code.field == desc.relation.outter_field:
return f"""select '$[{desc.relation.param_field}]$' as {desc.relation.param_field},
case when b.{desc.relation.param_field} is NULL then 0 else 1 end has_{desc.relation.param_field},
a.{code.valuefield} as {code.field},
a.{code.textfield} as {code.field}_text
from {code.table} a left join
(select * from {desc.tblsql or desc.tblname} where {desc.relation.param_field} ={param_field}) b
on a.{code.valuefield} = b.{code.field}
"""
if not desc.codes or len(desc.codes) == 0:
return f"select * from {desc.tblsql or desc.tblname} where 1=1 " + ' [[filterstr]]'
for i, c in enumerate(desc.codes):
shortname = shortnames[i]
cond = '1 = 1'
if c.cond:
cond = c.cond
csql = f"""(select {c.valuefield} as {c.field},
{c.textfield} as {c.field}_text from {c.table} where {cond})"""
infos.append([f'{shortname}.{c.field}_text', f"{csql} {shortname} on a.{c.field} = {shortname}.{c.field}"])
bt = f'(select * from {desc.summary[0].name} where 1=1' + " [[filterstr]]) a"
infos.insert(0, ['a.*', bt])
fields = ', '.join([i[0] for i in infos])
tables = ' left join '.join([i[1] for i in infos])
return f"""select {fields}
from {tables}"""
def build_data_browser(pat: str, desc: dict):
desc = desc.copy()
desc.fieldliststr = json.dumps(field_list(desc), ensure_ascii=False, indent=4)
e = MyTemplateEngine([])
s = e.renders(data_browser_tmpl, desc)
with open(os.path.join(pat, f'index.ui'), 'w') as f:
f.write(s)
def build_data_new(pat: str, desc: dict):
e = MyTemplateEngine([])
desc = desc.copy()
s = e.renders(data_new_tmpl, desc)
with open(os.path.join(pat, f'add_{desc.tblname}.dspy'), 'w') as f:
f.write(s)
def build_data_update(pat: str, desc: dict):
e = MyTemplateEngine([])
desc = desc.copy()
s = e.renders(data_update_tmpl, desc)
with open(os.path.join(pat, f'update_{desc.tblname}.dspy'), 'w') as f:
f.write(s)
def build_data_delete(pat: str, desc: dict):
e = MyTemplateEngine([])
desc = desc.copy()
s = e.renders(data_delete_tmpl, desc)
with open(os.path.join(pat, f'delete_{desc.tblname}.dspy'), 'w') as f:
f.write(s)
def build_get_data(pat: str, desc: dict):
e = MyTemplateEngine([])
desc = desc.copy()
desc.sql = construct_get_data_sql(desc)
s = e.renders(get_data_tmpl, desc)
with open(os.path.join(pat, f'get_{desc.tblname}.dspy'), 'w') as f:
f.write(s)
def build_check_changed(pat:str, desc:dict):
e = MyTemplateEngine([])
desc = desc.copy()
s = e.renders(check_changed_tmpls, desc)
with open(os.path.join(pat, 'check_changed.dspy'), 'w') as f:
f.write(s)
if __name__ == '__main__':
"""
crud_json has following format
{
"tblname",
"params"
}
"""
parser = argparse.ArgumentParser('xls2crud')
parser.add_argument('-m', '--models_dir')
parser.add_argument('-o', '--output_dir')
parser.add_argument('modulename')
parser.add_argument('files', nargs='*')
args = parser.parse_args()
if len(args.files) < 1:
print(f'Usage:\n{sys.argv[0]} [-m models_dir] [-o output_dir] json_file ....\n')
sys.exit(1)
ns = {k:v for k, v in os.environ.items()}
for fn in args.files:
print(f'handle {fn}')
crud_data = {}
with codecs.open(fn, 'r', 'utf-8') as f:
a = json.load(f)
ac = ArgsConvert('${','}$')
a = ac.convert(a,ns)
crud_data = DictObject(**a)
if args.models_dir:
crud_data.models_dir = args.models_dir
models_dir = crud_data.models_dir
if args.output_dir:
tblname = crud_data.alias or crud_data.tblname
crud_data.output_dir = os.path.join(args.output_dir, tblname)
crud_data.params.modulename = args.modulename
dbdesc = build_dbdesc(models_dir)
build_crud_ui(crud_data, dbdesc)

94
xls2ddl.py Executable file

@ -0,0 +1,94 @@
# -*- coding:utf-8 -*-
import io
import sys
from traceback import print_exc
from xlsxData import CRUDData, xlsxFactory
import codecs
import json
from sqlor.ddl_template_sqlserver import sqlserver_ddl_tmpl
from sqlor.ddl_template_mysql import mysql_ddl_tmpl
from sqlor.ddl_template_oracle import oracle_ddl_tmpl
from sqlor.ddl_template_postgresql import postgresql_ddl_tmpl
from appPublic.myTE import MyTemplateEngine
from appPublic.folderUtils import listFile
tmpls = {
"sqlserver":sqlserver_ddl_tmpl,
"mysql":mysql_ddl_tmpl,
"oracle":oracle_ddl_tmpl,
"postgresql":postgresql_ddl_tmpl
}
def xls2ddl(xlsfile,dbtype):
data = None
if xlsfile.endswith('json'):
with codecs.open(xlsfile,'r','utf-8') as f:
data = json.load(f)
else:
d = xlsxFactory(xlsfile)
if d is None:
print(xlsfile, 'can not read data')
return
data = d.get_data()
if data is None:
print(xlsfile, 'no data returned from XLSX file')
return
tmpl = tmpls.get(dbtype.lower())
if tmpl is None:
raise Exception('%s database not implemented' % dbtype)
e = MyTemplateEngine([])
s = e.renders(tmpl,data)
# print(data.data)
if data.data:
ins = gen_insert(data)
s = f"{s}\n{ins}\n"
return s
def gen_insert(xls):
tbl = xls.summary[0].name
lines = []
for d in xls.data:
ks = []
vs = []
for k,v in d.items():
ks.append(k)
if isinstance(v, str):
vs.append(f"'{v}'")
else:
vs.append(str(v))
line = f"insert into {tbl} ({','.join(ks)}) values ({','.join(vs)});"
lines.append(line)
return "\n".join(lines)
def model2ddl(folder,dbtype):
ddl_str = ''
for f in listFile(folder, suffixs=['xlsx','json']):
try:
ddl_str += f'\n-- {f}\n'
s = xls2ddl(f,dbtype)
ddl_str = f"{ddl_str}\n{s}\n"
except Exception as e:
print('Exception:',e,'f=',f)
print_exc()
return ddl_str
if __name__ == '__main__':
# Work around printing Chinese text in a Windows terminal raising
# UnicodeEncodeError: 'gbk' codec can't encode character '\xa0' in position 20249
# BEGIN
sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf8')
# END
if len(sys.argv) < 3:
print('Usage:%s dbtype folder' % sys.argv[0])
sys.exit(1)
s = model2ddl(sys.argv[2], sys.argv[1])
print(s)
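# Example (illustrative folder): python xls2ddl.py mysql ./models > models.sql
# The DDL (plus INSERT statements for any 'data' sheet) is written to stdout.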

46
xls2ui.py Normal file

@ -0,0 +1,46 @@
import os
import codecs
import json
import sys
import argparse
from appPublic.argsConvert import ArgsConvert
from appPublic.dictObject import DictObject
from xls2crud import build_dbdesc, build_crud_ui
from singletree import build_tree_ui
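# Command-line front end: load each crud/tree json description, expand ${...}$
# environment references, then dispatch tree descriptions (uitype == 'tree') to
# singletree.build_tree_ui and everything else to xls2crud.build_crud_ui.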
if __name__ == '__main__':
"""
crud_json has following format
{
"tblname",
"params"
}
"""
parser = argparse.ArgumentParser('xls2ui')
parser.add_argument('-m', '--models_dir')
parser.add_argument('-o', '--output_dir')
parser.add_argument('modulename')
parser.add_argument('files', nargs='*')
args = parser.parse_args()
if len(args.files) < 1:
print(f'Usage:\n{sys.argv[0]} [-m models_dir] [-o output_dir] json_file ....\n')
sys.exit(1)
print(args)
ns = {k:v for k, v in os.environ.items()}
dbdesc = build_dbdesc(args.models_dir)
for fn in args.files:
print(f'handle {fn}')
crud_data = {}
with codecs.open(fn, 'r', 'utf-8') as f:
a = json.load(f)
ac = ArgsConvert('${','}$')
a = ac.convert(a,ns)
crud_data = DictObject(**a)
tblname = crud_data.alias or crud_data.tblname
crud_data.output_dir = os.path.join(args.output_dir, tblname)
crud_data.params.modulename = args.modulename
crud_data.params.tblname = crud_data.tblname
if crud_data.uitype == 'tree':
build_tree_ui(crud_data, dbdesc)
continue
build_crud_ui(crud_data, dbdesc)

330
xlsxData.py Executable file

@ -0,0 +1,330 @@
import os
import sys
from traceback import print_exc
from openpyxl import load_workbook
from appPublic.myjson import loadf,dumpf,dumps,loads
from appPublic.dictObject import DictObject
class TypeConvert:
def conv(self,typ,v):
if typ is None:
return v
f = getattr(self,'to_'+typ,None)
if f is None:
return v
return f(v)
def to_int(self,v):
try:
return int(v)
except:
return 0
def to_float(self,v):
try:
return float(v)
except:
return 0.0
def to_str(self,v):
try:
return str(v)
except:
return ''
def to_json(self,v):
if v == '':
return v
try:
return loads(v)
except:
return v
def to_date(self,v):
return v
def to_time(self,v):
return v
def to_timestamp(self,v):
return v
def to_cruddata(self,v):
vs = v.split('"',3)
if len(vs) < 3:
return v
fn = vs[1]
d = CRUDData(fn)
try:
data = d.get_data()
except Exception as e:
print_exc()
print(e)
return v
if vs[2] is None:
return data
cmd = "d%s" % vs[2]
ret=eval(cmd,{'d':data})
return ret
def to_xlsxdata(self,v):
vs = v.split('"',3)
if len(vs) < 3:
return v
fn = vs[1]
d = XLSXData(fn)
try:
data = d.get_data()
except Exception as e:
print_exc()
print(e)
return v
if vs[2] is None:
return data
cmd = "d%s" % vs[2]
ret=eval(cmd,{'d':data})
return ret
class CRUDException(Exception):
def __init__(self,xlsfile,errmsg,*args,**argv):
Exception.__init__(self,*args,**argv)
self.xlsfile = xlsfile
self.errmsg = errmsg
def __str__(self):
return 'filename:' + self.xlsfile+' error:' + self.errmsg
class XLSXData(object):
def __init__(self,xlsxfile, book=None):
self.xlsxfile = xlsxfile
if book is None:
self.book = load_workbook(filename=xlsxfile)
else:
self.book = book
self._read()
def get_data(self):
return self.data
def readRecords(self,name,sheet):
i = 1
recs = []
fields = []
tc = TypeConvert()
for i,row in enumerate(sheet.values):
if i==0:
fields = self.getFieldNames(row)
continue
rec = {}
for j, a in enumerate(row):
if a is None:
continue
k = fields[j][0]
v = tc.conv(fields[j][1],a)
rec[k] = v
if rec == {}:
continue
o = DictObject(**rec)
recs.append(o)
return {name:recs}
def _read(self):
ret = {}
for i,s in enumerate(self.book.worksheets):
ret.update(self.readRecords(self.book.sheetnames[i], s))
self.data = DictObject(**ret)
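# Header row convention: each cell is "name[:type]"; the optional type selects a
# TypeConvert.to_<type> converter. Empty header cells become F_<column index>,
# non-text headers are prefixed with "F_".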
def getFieldNames(self,row):
fs = []
for i,f in enumerate(row):
if f is None:
f = 'F_' + str(i)
else:
if type(f) != type(""):
f = 'F_' + str(f)
"""
else:
f = f.encode('utf-8')
"""
b=f.split(':')
if len(b) < 2:
b.append(None)
fs.append(b)
i+= 1
return fs
class CRUDData(XLSXData):
@classmethod
def isMe(self,book):
if book is None:
return False
names = book.sheetnames
if 'summary' not in names:
return False
if 'fields' not in names:
return False
if 'validation' not in names:
return False
return True
def _read(self):
super()._read()
d = self.data
if not 'summary' in d.keys():
raise CRUDException(self.xlsxfile,'summary sheet missing')
if not 'fields' in d.keys():
raise CRUDException(self.xlsxfile,'fields sheet missing')
if not 'validation' in d.keys():
raise CRUDException(self.xlsxfile,'validation sheet missing')
if len(d['summary']) != 1:
raise CRUDException(self.xlsxfile, 'Not summary or more than one summary')
self.convPrimary()
self.convForeignkey()
self.convIndex()
self.check_codes_fields()
def convPrimary(self):
d = self.data
v = d['summary'][0]['primary']
v = v.split(',')
self.data['summary'][0]['primary'] = v
self.check_primary_fields()
def check_primary_fields(self):
primarys = self.data['summary'][0]['primary']
if primarys is None:
raise CRUDException(self.xlsxfile, 'primary is None')
for p in primarys:
r = self.check_field(p)
if not r:
raise CRUDException(
self.xlsxfile, f'primary error({p})')
def check_codes_fields(self):
if 'codes' not in self.data.keys():
return
for f in self.data['codes']:
r = self.check_field(f['field'])
if not r:
raise CRUDException(
self.xlsxfile,
f'code definintion error({f["field"]})')
def check_field(self, fieldname):
return fieldname in [f['name'] for f in self.data['fields']]
def convForeignkey(self):
data = self.data
vs = data['validation']
nvs = []
for v in vs:
if v['oper'] == 'fk':
m = v['value']
des= m.split(':')
if len(des) != 3:
raise CRUDException(self.xlsxfile,'fk value error:%s' % m)
v['value'] = {'table':des[0],'value':des[1],'title':des[2]}
nvs.append(v)
data['validation'] = nvs
self.data = data
def getFieldByName(self,fields,name):
for f in fields:
if f['name'] == name:
return f
def getFKs(self,validation):
fks = []
for v in validation:
if v['oper'] == 'fk':
fks.append(v)
return fks
def getIDXs(self,validation):
idxs = []
for v in validation:
if v['oper'] == 'idx':
idxs.append(v)
return idxs
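# An 'idx' row in the validation sheet carries its definition in the value
# column as "<idxtype>:<field1>,<field2>,...", e.g. "index:f1,f2" or "unique:f1".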
def convIndex(self):
data = self.data
vs = data['validation']
nvs = []
for v in vs:
if v['oper'] == 'idx':
idx = {}
idx['name'] = v['name']
m = v['value']
des= m.split(':')
if len(des) != 2:
raise CRUDException(self.xlsxfile,'idx value format:idx_type:keylist:%s' % m)
idx['idxtype'] = des[0]
idx['idxfields'] = des[1].split(',')
for f in idx['idxfields']:
    if not self.check_field(f):
        raise CRUDException(self.xlsxfile, f'index field error({f})')
nvs.append(idx)
data['indexes'] = nvs
self.data = data
def xlsxFactory(xlsxfilename):
def findSubclass(name,klass):
for k in klass.__subclasses__():
if k.isMe(name):
return k
k1 = findSubclass(name,k)
if k1 is not None:
return k1
return None
try:
book = load_workbook(filename=xlsxfilename)
if book is None:
print(f'{xlsxfilename} read error')
return None
k = findSubclass(book, XLSXData)
if k is not None:
xlsx = k(xlsxfilename, book=book)
return xlsx
return XLSXData(xlsxfilename, book=book)
except Exception as e:
print_exc()
print(xlsxfilename, 'new class failed\n%s' % str(e))
print_exc()
return None
def ValueConvert(s):
if s[:9] == 'xlsfile::':
d = xlsxFactory(s[9:])
return d.get_data()
if s[:10] == 'jsonfile::':
return loadf(s[10:])
return s
def paramentHandle(ns):
for k,v in ns.items():
ns[k] = ValueConvert(v)
return ns
if __name__ == '__main__':
retData = {}
ns = {}
datafiles = []
for a in sys.argv[1:]:
m = a.split('=',1)
if len(m)>1:
ns[m[0]] = m[1]
else:
datafiles.append(a)
ns = paramentHandle(ns)
for f in datafiles:
ext = os.path.splitext(f)[-1]
if ext in ['.xlsx','.xls' ]:
d = xlsxFactory(f)
data = d.get_data()
retData.update(data)
retData.update(ns)
print( dumps(retData))