update

parent b71aea969d
commit 7d64d6ded4

setup.py: 22 changed lines
@@ -9,7 +9,8 @@ from setuptools import setup, find_packages
# python setup.py bdist_egg generate a egg file
# Release information about eway

version = "0.0.1"
version = "0.0.2"
name = "sqlor"
description = "sqlor"
author = "yumoqing"
email = "yumoqing@gmail.com"

@@ -17,26 +18,27 @@ email = "yumoqing@gmail.com"
packages=find_packages()
package_data = {}

with open("README.md", "r") as fh:
    long_description = fh.read()

setup(
    name="sqlor",
    name=name,
    description = description,
    version=version,

    # uncomment the following lines if you fill them out in release.py
    description=description,
    author=author,
    author_email=email,

    install_requires=[
    ],
    packages=packages,
    package_data=package_data,
    keywords = [
    ],
    url="https://github.com/yumoqing/sqlor",
    long_description=long_description,
    long_description_content_type="text/markdown",
    classifiers = [
        'Development Status :: 1 - Alpha',
        'Operating System :: OS Independent',
        'Programming Language :: Python3.5',
        'Topic :: SQL execute :: Libraries :: Python Modules',
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
    ],
    platforms= 'any'
)

@@ -1,14 +1,135 @@
Metadata-Version: 1.1
Metadata-Version: 2.1
Name: sqlor
Version: 0.0.1
Version: 0.0.2
Summary: sqlor
Home-page: UNKNOWN
Home-page: https://github.com/yumoqing/sqlor
Author: yumoqing
Author-email: yumoqing@gmail.com
License: UNKNOWN
Description: UNKNOWN
Platform: any
Classifier: Development Status :: 1 - Alpha
Description: # SQLOR

SQLOR is a database API for Python 3, built on top of Python's DBAPI 2 drivers.

## Features

* Multiple databases supported (Oracle, MySQL, PostgreSQL, SQL Server)
* Both asynchronous and synchronous APIs supported
* Connection pools
* Connection life-cycle management
* Easy-to-use API
* Resources (connection and cursor objects) are recycled automatically

## Requirements

* Python 3.5 or above
* asyncio
* Oracle DBAPI2 driver (cx_Oracle)
* MySQL DBAPI2 driver (mysql-connector)
* PostgreSQL DBAPI2 driver (psycopg2-binary)
* Asynchronous MySQL driver (aiomysql)
* Asynchronous PostgreSQL driver (aiopg)
* Other drivers can be integrated easily

## Usage

```
import asyncio

from sqlor.dbpools import DBPools

dbs={
    "aiocfae":{
        "driver":"aiomysql",
        "async_mode":True,
        "coding":"utf8",
        "dbname":"cfae",
        "kwargs":{
            "user":"test",
            "db":"cfae",
            "password":"test123",
            "host":"localhost"
        }
    },
    "cfae":{
        "driver":"mysql.connector",
        "coding":"utf8",
        "dbname":"cfae",
        "kwargs":{
            "user":"test",
            "db":"cfae",
            "password":"test123",
            "host":"localhost"
        }
    }
}

loop = asyncio.get_event_loop()
pool = DBPools(dbs,loop=loop)

async def testfunc():
    @pool.runSQL
    def sql(db,ns,callback):
        return {
            "sql_string":"select * from product",
        }
    x = await sql('cfae',{},print)
    x = await sql('aiocfae',{},print)

loop.run_until_complete(testfunc())
```

## API

### Databases description data (dbdesc)

sqlor uses dbdesc data (databases description data), which describes the databases to be used and their connection parameters; it is what you pass in to create a DBPools object.

The dbdesc data is a dict with the following format:
```
{
    "aiocfae":{                      # name to identify a database connection
        "driver":"aiomysql",         # database DBAPI2 driver package name
        "async_mode":True,           # indicates this connection is in asynchronous mode
        "coding":"utf8",             # charset coding
        "dbname":"cfae",             # real database name
        "kwargs":{                   # connection parameters
            "user":"test",
            "db":"cfae",
            "password":"test123",
            "host":"localhost"
        }
    },
    "cfae":{
        "driver":"mysql.connector",
        "coding":"utf8",
        "dbname":"cfae",
        "kwargs":{
            "user":"test",
            "db":"cfae",
            "password":"test123",
            "host":"localhost"
        }
    }
}
```

sqlor can use multiple databases, and different kinds of databases, by using different database drivers.

### sql description data

## Classes

### DBPools

### SQLor

Platform: UNKNOWN
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python3.5
Classifier: Topic :: SQL execute :: Libraries :: Python Modules
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Description-Content-Type: text/markdown
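The "### sql description data" section of the README is still left empty in this commit. As a hedged sketch only, pieced together from the patterns visible in the CRUD code further down, a sql description dict seems to carry a sql_string (with ${name}$ placeholders filled from the NS dict passed at call time) plus an optional default block:

```
# Sketch of a sql description dict, inferred from the CRUD code in this commit;
# the table and column names below ('product', 'name') are hypothetical.
sqldesc = {
    # SQL text; ${field}$ placeholders are bound from the NS dict at call time
    "sql_string": "select * from product where 1=1 and name = ${name}$",
    # defaults merged into NS, e.g. the page size used by paged queries
    "default": {"rows": 10},
}
```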
@@ -3,11 +3,13 @@ setup.py
sqlor/__init__.py
sqlor/aiomysqlor.py
sqlor/aiopostgresqlor.py
sqlor/crud.py
sqlor/dbpools.py
sqlor/ddl_template_mysql.py
sqlor/ddl_template_oracle.py
sqlor/ddl_template_postgresql.py
sqlor/ddl_template_sqlserver.py
sqlor/filter.py
sqlor/mssqlor.py
sqlor/mysqlor.py
sqlor/oracleor.py

@@ -4,6 +4,7 @@ from .filter import DBFilter
from appPublic.objectAction import ObjectAction
from appPublic.dictObject import DictObject
from appPublic.timeUtils import date2str,time2str,str2Date
from appPublic.uniqueID import getID
toStringFuncs={
    'char':None,
    'str':None,

@@ -21,17 +22,31 @@ fromStringFuncs={
    'time':str2Date
}

class DatabaseNotfound(Exception):
    def __init__(self,dbname):
        Exception.__init__(self)
        self.dbname = dbname

    def __str__(self):
        return f'{self.dbname} not found'

class CRUD(object):
    def __init__(self,dbname,tablename,rows=10):
        self.pool = DBPools()
        if dbname not in self.pool.databases.keys():
            raise DatabaseNotfound(dbname)
        self.dbname = dbname
        self.tablename = tablename
        self.rows = rows
        self.primary_data = None
        self.oa = ObjectAction()

    async def primaryKey(self):
        data = await self.pool.getTablePrimaryKey(self.dbname,self.tablename)
        return data
        if self.primary_data is None:
            self.primary_data = await self.pool.getTablePrimaryKey(self.dbname,
                self.tablename)

        return self.primary_data

    async def forignKeys(self):
        data = self.pool.getTableForignKeys(self.dbname,self.tablename)

@@ -78,7 +93,7 @@ class CRUD(object):
        fs = [ self.defaultIOField(f) for f in fields ]
        id = self.dbname+':'+ self.tablename
        pk = await self.primaryKey()
        idField = pk[0].field
        idField = pk[0]['field_name']
        data = {
            "tmplname":"widget_js.tmpl",
            "data":{

@@ -172,7 +187,7 @@ class CRUD(object):
        data = {}
        [ data.update({k.lower():v}) for k,v in rec.items() ]
        @self.pool.runSQL
        def addSQL(dbname,data):
        async def addSQL(dbname,data,callback=None):
            sqldesc={
                "sql_string" : """
                insert into %s (%s) values (%s)

@@ -180,72 +195,86 @@ class CRUD(object):
            }
            return sqldesc

        pk = await self.primaryKey()
        k = pk[0]['field_name']
        v = getID()
        data[k] = v
        data = self.oa.execute(self.dbname+'_'+self.tablename,'beforeAdd',data)
        await addSQL(self.dbname,data)
        data = self.oa.execute(self.dbname+'_'+self.tablename,'afterAdd',data)
        print('data=',data,'MMMMMMMMMMMMMM')
        return {k:v}
        return data

    async def defaultFilter(self,NS):
        fields = await self.pool.getTableFields(self.dbname,self.tablename)
        d = [ '%s = ${%s}$' % (f.name,f.name) for f in fields if f.name in NS.keys() ]
        d = [ '%s = ${%s}$' % (f['name'],f['name']) for f in fields if f['name'] in NS.keys() ]
        if len(d) == 0:
            return ''
        ret = ' and ' + ' and '.join(d)
        return ret

    def R(self,filters=None,NS={}):
    async def R(self,filters=None,NS={}):
        """
        retrieve data
        """
        @self.pool.runSQLIterator
        def retrieve(dbname,data):
        @self.pool.runSQL
        async def retrieve(dbname,data,callback=None):
            fstr = ''
            if filters is not None:
                fstr = ' and '
                dbf = DBFilter(filters)
                fstr = fstr + dbf.genFilterString()
            else:
                fstr = self.defaultFilter(NS)
                fstr = await self.defaultFilter(NS)
            sqldesc = {
                "sql_string":"""select * from %s where 1=1 %s""" % (self.tablename,fstr),
            }
            return sqldesc

        data = self.oa.execute(self.dbname+'_'+self.tablename,'beforeRetieve',NS)
        data = await retrieve(self.dbname,data,fstr)
        data = self.oa.execute(self.dbname+'_'+self.tablename,'afterRetieve',data)
        return data

    async def RP(self,filters=None,NS={}):
        @self.pool.runPaging
        def pagingdata(dbname,data,filterString):
        @self.pool.runSQLPaging
        async def pagingdata(dbname,data,filters=None):
            fstr = ""
            if filters is not None:
                fstr = ' and '
                dbf = DBFilter(filters)
                fstr = fstr + dbf.genFilterString()
            else:
                fstr = self.defaultFilter(NS)
                fstr = await self.defaultFilter(NS)

            sqldesc = {
                "sql_string":"""select * from %s where 1=1 %s""" % (self.tablename,filterString),
                "sql_string":"""select * from %s where 1=1 %s""" % (self.tablename,fstr),
                "default":{'rows':self.rows}
            }
            return sqldesc

        if not NS.get('sort',False):
            fields = await self.pool.getTableFields(self.dbname,self.tablename)
            NS['sort'] = fields[0]['name']
        d = await pagingdata(self.dbname,NS)
        return d

        p = await self.primaryKey()
        if NS.get('__id') is not None:
            NS[p[0]['field_name']] = NS['__id']
            del NS['__id']
            if NS.get('page'):
                del NS['page']

        if NS.get('page'):
            if NS.get('sort',None) is None:
                NS['sort'] = p[0]['field_name']

        data = self.oa.execute(self.dbname+'_'+self.tablename,'beforeRetieve',NS)
        if NS.get('page'):
            data = await pagingdata(self.dbname,data)
        else:
            data = await retrieve(self.dbname,data)
        data = self.oa.execute(self.dbname+'_'+self.tablename,'afterRetieve',data)
        return data

    async def U(self,data):
        """
        update data
        """
        @self.pool.runSQL
        def update(dbname,NS,condi,newData):
        async def update(dbname,NS,callback=None):
            condi = [ i['field_name'] for i in self.primary_data ]
            newData = [ i for i in NS.keys() if i not in condi ]
            c = [ '%s = ${%s}$' % (i,i) for i in condi ]
            u = [ '%s = ${%s}$' % (i,i) for i in newData ]
            cs = ' and '.join(c)

@@ -259,7 +288,7 @@ class CRUD(object):
        pkfields = [k.field_name for k in pk ]
        newData = [ k for k in data if k not in pkfields ]
        data = self.oa.execute(self.dbname+'_'+self.tablename,'beforeUpdate',data)
        await update(self.dbname,data,pkfields,newData)
        await update(self.dbname,data)
        data = self.oa.execute(self.dbname+'_'+self.tablename,'afterUpdate',data)
        return data

@@ -268,16 +297,15 @@ class CRUD(object):
        delete data
        """
        @self.pool.runSQL
        def delete(dbname,data,fields):
            c = [ '%s = ${%s}$' % (i,i) for i in fields ]
        def delete(dbname,data):
            pnames = [ i['field_name'] for i in self.primary_data ]
            c = [ '%s = ${%s}$' % (i,i) for i in pnames ]
            cs = ' and '.join(c)
            sqldesc = {
                "sql_string":"delete from %s where %s" % (self.tablename,cs)
            }
            return sqldesc

        pk = await self.primaryKey()
        pkfields = [k.field_name for k in pk ]
        data = self.oa.execute(self.dbname+'_'+self.tablename,'beforeDelete',data)
        await delete(self.dbname,data,pkfields)
        data = self.oa.execute(self.dbname+'_'+self.tablename,'afterDelete',data)

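With the changes above, R, RP and U are all coroutines built on the pool decorators. A minimal usage sketch, assuming the DBPools configuration is shared with the pool that CRUD constructs internally, and that a `product` table with the fields shown exists (both assumptions, not part of this commit):

```
import asyncio

from sqlor.dbpools import DBPools
from sqlor.crud import CRUD

dbs = {   # dbdesc dict, same shape as the README example
    "cfae": {
        "driver": "mysql.connector",
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {"user": "test", "db": "cfae", "password": "test123", "host": "localhost"},
    }
}

loop = asyncio.get_event_loop()
pool = DBPools(dbs, loop=loop)

async def demo():
    crud = CRUD('cfae', 'product', rows=20)       # raises DatabaseNotfound if 'cfae' is not configured
    all_rows = await crud.R()                     # retrieve with the default (empty) filter
    page = await crud.RP(NS={'page': 1})          # paged retrieve; a DictObject with 'total' and 'rows'
    await crud.U({'id': 'some-id', 'price': 12})  # update by primary key; field names are hypothetical

loop.run_until_complete(demo())
```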
@@ -209,11 +209,11 @@ class DBPools:

    def runSQL(self,func):
        @wraps(func)
        async def wrap_func(dbname,NS,callback=None,**kw):
        async def wrap_func(dbname,NS,*,callback=None,**kw):
            sor = await self.getSqlor(dbname)
            ret = None
            try:
                desc = func(dbname,NS,callback=callback,**kw)
                desc = await func(dbname,NS,callback=callback,**kw)
                ret = await sor.runSQL(desc,NS,callback,**kw)
                await sor.conn.commit()
                if NS.get('dummy'):

@@ -233,14 +233,14 @@ class DBPools:
        async def wrap_func(dbname,NS,**kw):
            sor = await self.getSqlor(dbname)
            try:
                desc = func(dbname,NS,**kw)
                desc = await func(dbname,NS,**kw)
                total = await sor.record_count(desc,NS)
                recs = await sor.pagingdata(desc,NS)
                data = {
                    "total":total,
                    "rows":recs
                }
                return data
                return DictObject(**data)
            except Exception as e:
                print('error',e)
                raise e

@@ -253,7 +253,7 @@ class DBPools:
        async def wrap_func(dbname,NS,**kw):
            sor = await self.getSqlor(dbname)
            try:
                desc = func(dbname,NS,**kw)
                desc = await func(dbname,NS,**kw)
                ret = await sor.resultFields(desc,NS)
                return ret
            except Exception as e:

@@ -295,6 +295,6 @@ class DBPools:
        @self.inSqlor
        async def _getTableForignKeys(sor,dbname,tblname):
            ret = await sor.fkeys(tblname)
            return ret
            return ret
        return await _getTableForignKeys(None,dbname,tblname)

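After this change, wrap_func awaits the decorated function and accepts callback only as a keyword argument, so functions decorated with pool.runSQL are expected to be coroutines and call sites have to pass the callback by keyword. A sketch of a call site under the new signature (the database and table names are placeholders, and `pool` is assumed to be a DBPools configured as in the README):

```
# pool = DBPools(dbs, loop=loop)   # configured with a dbdesc dict as in the README

@pool.runSQL
async def list_products(db, ns, callback=None):
    # the decorated coroutine just returns a sql description dict
    return {
        "sql_string": "select * from product",   # 'product' is a hypothetical table
    }

async def demo():
    # callback is keyword-only now; the positional style shown in the README would raise a TypeError
    await list_products('cfae', {}, callback=print)
```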
sqlor/sor.py: 10 changed lines
@@ -7,7 +7,7 @@ import json
from appPublic.myImport import myImport
from appPublic.dictObject import DictObject,dictObjectFactory
from appPublic.unicoding import uDict
from patterncoding.myTemplateEngine import MyTemplateEngine
from appPublic.myTE import MyTemplateEngine


from appPublic.argsConvert import ArgsConvert,ConditionConvert

@@ -202,6 +202,7 @@ class SQLor(object):
        await self.runVarSQL(cur,sql,value)
        if callback is not None:
            fields = [ i[0].lower() for i in cur.description ]
            rec = None
            if self.async_mode:
                rec = await cur.fetchone()
            else:

@@ -211,8 +212,7 @@ class SQLor(object):
                dic = {}
                for i in range(len(fields)):
                    dic.update({fields[i]:rec[i]})
                #dic = uDict(dic,coding='utf8')
                callback(dic,**kwargs)
                callback(DictObject(**dic),**kwargs)
                if self.async_mode:
                    rec = await cur.fetchone()
                else:

@@ -346,7 +346,7 @@ class SQLor(object):
    async def resultFields(self,desc,NS):
        NS.update(rows=1,page=1)
        r = await self.pagingdata(desc,NS)
        ret = [ {'name':i[0],'type':i[1]} for i in self.cur.description ]
        ret = [ DictObject(**{'name':i[0],'type':i[1]}) for i in self.cur.description ]
        return ret

    async def runSQL(self,desc,NS,callback,**kw):

@@ -357,7 +357,7 @@ class SQLor(object):
        self.ns[name] = []

        def handler(self,rec):
            obj = DictObject(rec)
            obj = DictObject(**rec)
            self.ns[self.name].append(obj)

        cur = self.cursor()

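The net effect of the sor.py changes is that row callbacks and resultFields now receive DictObject instances instead of plain dicts. Assuming DictObject (from appPublic.dictObject) exposes keys as attributes, which this diff does not show, a row callback could then read roughly like this:

```
def print_row(rec, **kwargs):
    # rec is now a DictObject built from the fetched row;
    # attribute access below relies on the assumption stated above
    print(rec.name, rec.price)        # 'name' and 'price' are hypothetical column names

# used as the keyword-only callback of a pool.runSQL-decorated coroutine:
#     await list_products('cfae', {}, callback=print_row)
```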