first commit
This commit is contained in:
commit
689cb7c016
121
README.md
Normal file
121
README.md
Normal file
@ -0,0 +1,121 @@
|
||||
# SQLOR
|
||||
|
||||
SQLOR is a database API for Python 3, based on Python's DBAPI 2.
|
||||
|
||||
## Feature
|
||||
|
||||
* Multiple databases supported (Oracle, MySQL, PostgreSQL, SQL Server)
|
||||
* Both asynchronous API & synchronous API supported
|
||||
* Connection pools
|
||||
* Connection life cycle managements
|
||||
* Easy using API
|
||||
* Resources(connection object, cursor object) automatic recycled
|
||||
|
||||
|
||||
## requirements
|
||||
|
||||
* python 3.5 or above
|
||||
* asyncio
|
||||
* Oracle DBAPI2 driver(cx_Oracle)
|
||||
* MySQL DBAPI2 driver(mysql-connector)
|
||||
* PostgreSQL DBAPI2 driver(psycopg2-binary)
|
||||
* Asynchronous MySQL driver(aiomysql)
|
||||
* Asynchronous Postgresql driver(aiopg)
|
||||
* Other drivers can be easily integrated
|
||||
|
||||
## Using
|
||||
|
||||
```
|
||||
import asyncio
|
||||
|
||||
from sqlor.dbpools import DBPools
|
||||
|
||||
dbs={
|
||||
"aiocfae":{
|
||||
"driver":"aiomysql",
|
||||
"async_mode":True,
|
||||
"coding":"utf8",
|
||||
"dbname":"cfae",
|
||||
"kwargs":{
|
||||
"user":"test",
|
||||
"db":"cfae",
|
||||
"password":"test123",
|
||||
"host":"localhost"
|
||||
}
|
||||
},
|
||||
"cfae":{
|
||||
"driver":"mysql.connector",
|
||||
"coding":"utf8",
|
||||
"dbname":"cfae",
|
||||
"kwargs":{
|
||||
"user":"test",
|
||||
"db":"cfae",
|
||||
"password":"test123",
|
||||
"host":"localhost"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
pool = DBPools(dbs,loop=loop)
|
||||
|
||||
async def testfunc():
|
||||
@pool.runSQL
|
||||
def sql(db,ns,callback):
|
||||
return {
|
||||
"sql_string":"select * from product",
|
||||
}
|
||||
x = await sql('cfae',{},print)
|
||||
x = await sql('aiocfae',{},print)
|
||||
|
||||
loop.run_until_complete(testfunc())
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
|
||||
### Databases description data(dbdesc)
|
||||
|
||||
sqlor uses dbdesc data (databases description data), which describes which databases will be used and their connection parameters, to create a DBPools object
|
||||
|
||||
dbdesc data is a dict data, format of the dbdesc as follow:
|
||||
```
|
||||
{
|
||||
"aiocfae":{ # name to identify a database connect
|
||||
"driver":"aiomysql", # database dbapi2 driver package name
|
||||
"async_mode":True, # indicte this connection is asynchronous mode
|
||||
"coding":"utf8", # charset coding
|
||||
"dbname":"cfae", # database real name
|
||||
"kwargs":{ # connection parameters
|
||||
"user":"test",
|
||||
"db":"cfae",
|
||||
"password":"test123",
|
||||
"host":"localhost"
|
||||
}
|
||||
},
|
||||
"cfae":{
|
||||
"driver":"mysql.connector",
|
||||
"coding":"utf8",
|
||||
"dbname":"cfae",
|
||||
"kwargs":{
|
||||
"user":"test",
|
||||
"db":"cfae",
|
||||
"password":"test123",
|
||||
"host":"localhost"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
sqlor can use multiple databases, including databases of different types, by configuring a different database driver for each
|
||||
|
||||
### sql description data
|
||||
|
||||
|
||||
## class
|
||||
|
||||
### DBPools
|
||||
|
||||
### SQLor
|
||||
|
0
build/lib/sqlor/__init__.py
Normal file
0
build/lib/sqlor/__init__.py
Normal file
7
build/lib/sqlor/aiomysqlor.py
Normal file
7
build/lib/sqlor/aiomysqlor.py
Normal file
@ -0,0 +1,7 @@
|
||||
from .mysqlor import MySqlor
|
||||
|
||||
class AioMysqlor(MySqlor):
    """MySqlor variant selected when the configured driver is 'aiomysql'.

    Inherits all SQL generation from MySqlor; only the driver-name
    dispatch differs.
    """

    @classmethod
    def isMe(cls, name):
        """Return True when *name* names the aiomysql driver.

        Idiom fix: the first parameter of a classmethod is conventionally
        ``cls`` (the original used ``self``); behavior is unchanged.
        """
        return name == 'aiomysql'
|
||||
|
8
build/lib/sqlor/aiopostgresqlor.py
Normal file
8
build/lib/sqlor/aiopostgresqlor.py
Normal file
@ -0,0 +1,8 @@
|
||||
|
||||
from .postgresqlor import PostgreSQLor
|
||||
class AioPostgresqlor(PostgreSQLor):
    """PostgreSQLor variant selected when the configured driver is 'aiopg'.

    Inherits all SQL generation from PostgreSQLor; only the driver-name
    dispatch differs.
    """

    @classmethod
    def isMe(cls, name):
        """Return True when *name* names the aiopg driver.

        Idiom fix: the first parameter of a classmethod is conventionally
        ``cls`` (the original used ``self``); behavior is unchanged.
        """
        return name == 'aiopg'
|
||||
|
||||
|
277
build/lib/sqlor/dbpools.py
Normal file
277
build/lib/sqlor/dbpools.py
Normal file
@ -0,0 +1,277 @@
|
||||
|
||||
import asyncio
|
||||
from queue import Queue
|
||||
from functools import wraps
|
||||
import codecs
|
||||
|
||||
from appPublic.myImport import myImport
|
||||
from appPublic.dictObject import DictObject
|
||||
from appPublic.Singleton import SingletonDecorator
|
||||
from appPublic.myjson import loadf
|
||||
from appPublic.jsonConfig import getConfig
|
||||
|
||||
from .sor import SQLor
|
||||
from .mssqlor import MsSqlor
|
||||
from .oracleor import Oracleor
|
||||
from .sqlite3or import SQLite3or
|
||||
from .mysqlor import MySqlor
|
||||
from .aiomysqlor import AioMysqlor
|
||||
from .aiopostgresqlor import AioPostgresqlor
|
||||
|
||||
|
||||
def sqlorFactory(dbdesc):
    """Build the SQLor subclass whose isMe() matches the configured driver.

    Walks the SQLor subclass tree depth-first; falls back to the plain
    SQLor base class when no subclass claims the driver name.
    """
    driver = dbdesc.get('driver', dbdesc)

    def locate(name, base):
        # Depth-first search over base.__subclasses__().
        for candidate in base.__subclasses__():
            if candidate.isMe(name):
                return candidate
            deeper = locate(name, candidate)
            if deeper is not None:
                return deeper
        return None

    klass = locate(driver, SQLor)
    if klass is None:
        return SQLor(dbdesc=dbdesc)
    return klass(dbdesc=dbdesc)
|
||||
|
||||
def sqlorFromFile(dbdef_file, coding='utf8'):
    """Load a dbdesc json file and build the matching SQLor from it.

    ``coding`` is kept for interface compatibility; loadf handles decoding.
    """
    return sqlorFactory(loadf(dbdef_file))
|
||||
|
||||
class LifeConnect:
    """Wrap one DBAPI connection and recycle it after a fixed number of uses.

    A class-level registry maps live connection objects back to their
    wrapper so ``free(conn)`` can find the bookkeeping for any connection.
    """

    # connection object -> its LifeConnect wrapper (shared across instances)
    __conndict = {}

    def __init__(self, connfunc, kw, use_max=1000, async_mode=False):
        self.connfunc = connfunc      # factory returning a new connection
        self.async_mode = async_mode  # True when connfunc is a coroutine function
        self.use_max = use_max        # recycle the connection after this many uses
        self.kw = kw                  # keyword arguments forwarded to connfunc
        self.conn = None              # created lazily by the first use()

    def print(self):
        # Debug helper kept for interface compatibility.
        print(self.use_max)
        print(self.conn)

    async def _mkconn(self):
        """Create the underlying connection and register it in the registry."""
        if self.async_mode:
            self.conn = await self.connfunc(**self.kw)
        else:
            self.conn = self.connfunc(**self.kw)
        self.use_cnt = 0
        self.__conndict[self.conn] = self

    async def use(self):
        """Return a healthy connection, reconnecting when the current one is dead."""
        if self.conn is None:
            await self._mkconn()
        conn = self.conn
        if await self.testok():
            return conn
        # Stale connection: drop the registry entry and build a fresh one.
        del self.__conndict[conn]
        await self._mkconn()
        # Fix: the original fell off the end here and returned None after a
        # reconnect, handing callers a dead value.
        return self.conn

    @classmethod
    async def free(cls, conn):
        """Account one use of *conn*; recycle it once use_max is reached.

        Returns the LifeConnect wrapper for *conn*.
        """
        lc = cls.__conndict[conn]
        lc.use_cnt = lc.use_cnt + 1
        if lc.use_cnt >= lc.use_max:
            # Fix: close according to driver mode (awaiting a synchronous
            # close() raised TypeError), drop the stale registry entry, and
            # rebuild via _mkconn (the original called a nonexistent _mkcomm).
            if lc.async_mode:
                await lc.conn.close()
            else:
                lc.conn.close()
            del cls.__conndict[lc.conn]
            await lc._mkconn()
        return lc

    async def testok(self):
        """Probe the connection with ``select 1``; True when it answers."""
        if self.async_mode:
            async with self.conn.cursor() as cur:
                try:
                    await cur.execute('select 1 as cnt')
                    return True
                except Exception:
                    return False
        else:
            cur = self.conn.cursor()
            try:
                cur.execute('select 1 as cnt')
                r = cur.fetchall()
                return True
            except Exception:
                return False
            finally:
                cur.close()
|
||||
|
||||
class ConnectionPool(object):
    """Fixed-size queue of LifeConnect wrappers for a single database."""

    def __init__(self, dbdesc, loop):
        self.dbdesc = dbdesc
        self.async_mode = dbdesc.get('async_mode', False)
        self.loop = loop
        # Resolve the DBAPI driver module named in the description.
        self.driver = myImport(self.dbdesc['driver'])
        self.maxconn = dbdesc.get('maxconn', 5)
        self.maxuse = dbdesc.get('maxuse', 1000)
        self._pool = Queue(self.maxconn)
        self._fillPool()
        self.using = []     # wrappers currently checked out
        self.use_cnt = 0
        self.max_use = 1000

    def _fillPool(self):
        """Populate the queue with maxconn fresh wrappers."""
        for _ in range(self.maxconn):
            self.connect()

    def connect(self):
        """Create a LifeConnect wrapper, park it in the queue, and return it."""
        wrapper = LifeConnect(
            self.driver.connect,
            self.dbdesc['kwargs'],
            use_max=self.maxuse,
            async_mode=self.async_mode,
        )
        self._pool.put(wrapper)
        return wrapper

    def isEmpty(self):
        """True when no wrapper is waiting in the queue."""
        return self._pool.empty()

    def isFull(self):
        """True when every wrapper is back in the queue."""
        return self._pool.full()

    async def aquire(self):
        """Check a wrapper out of the queue and return its live connection.

        Blocks (on the queue) when all connections are in use.
        """
        wrapper = self._pool.get()
        self.using.append(wrapper)
        return await wrapper.use()

    async def release(self, conn):
        """Hand *conn* back: account its use and requeue its wrapper."""
        wrapper = await LifeConnect.free(conn)
        self.using = [w for w in self.using if w != wrapper]
        self._pool.put(wrapper)
|
||||
|
||||
@SingletonDecorator
class DBPools:
    """Singleton registry of named ConnectionPools plus SQL helper decorators.

    ``databases`` maps a connection name to its dbdesc dict; pools are
    created lazily on first acquisition.
    """

    def __init__(self, databases=None, max_connect=10, loop=None):
        # Fix: avoid a mutable default argument ({} was shared across calls).
        # ``max_connect`` is accepted for interface compatibility but unused;
        # per-database 'maxconn' in the dbdesc controls pool size.
        if loop is None:
            loop = asyncio.get_event_loop()
        self.loop = loop
        self._cpools = {}    # dbname -> ConnectionPool, created lazily
        self.databases = databases if databases is not None else {}

    def addDatabase(self, name, desc):
        """Register (or replace) the dbdesc for database *name*."""
        self.databases[name] = desc

    async def getSqlor(self, name):
        """Build a SQLor for *name*, bound to a pooled connection and cursor."""
        desc = self.databases.get(name)
        sor = sqlorFactory(desc)
        sor.name = name
        a, conn, cur = await self._aquireConn(name)
        sor.setCursor(a, conn, cur)
        return sor

    async def freeSqlor(self, sor):
        """Return a sqlor's connection and cursor to its pool."""
        await self._releaseConn(sor.name, sor.conn, sor.cur)

    async def _aquireConn(self, dbname):
        """Acquire ``(async_mode, connection, cursor)`` for *dbname*."""
        p = self._cpools.get(dbname)
        if p is None:
            p = ConnectionPool(self.databases.get(dbname), self.loop)
            self._cpools[dbname] = p
        conn = await p.aquire()
        if self.isAsyncDriver(dbname):
            cur = await conn.cursor()
        else:
            cur = conn.cursor()
        return self.isAsyncDriver(dbname), conn, cur

    def isAsyncDriver(self, dbname):
        """True when *dbname* is configured with async_mode."""
        return self.databases[dbname].get('async_mode', False)

    async def _releaseConn(self, dbname, conn, cur):
        """Close *cur* and hand *conn* back to the pool for *dbname*."""
        if self.isAsyncDriver(dbname):
            await cur.close()
        else:
            try:
                # Drain any pending rows so close() cannot complain.
                cur.fetchall()
            except Exception:
                pass
            cur.close()
        p = self._cpools.get(dbname)
        if p is None:
            raise Exception('database (%s) not connected' % dbname)
        await p.release(conn)

    def inSqlor(self, func):
        """Decorator: run *func* with a live sqlor as its first argument.

        The wrapper's first positional argument is a placeholder (callers
        pass None); it is replaced with a freshly acquired sqlor, which is
        always released afterwards.
        """
        @wraps(func)
        async def wrap_func(sor, dbname, *args, **kw):
            sor = await self.getSqlor(dbname)
            try:
                return await func(sor, dbname, *args, **kw)
            except Exception:
                print('error', sor)
                raise
            finally:
                await self.freeSqlor(sor)
        return wrap_func

    def runSQL(self, func):
        """Decorator: *func* returns a SQL description that is then executed."""
        @wraps(func)
        async def wrap_func(dbname, NS, callback, **kw):
            sor = await self.getSqlor(dbname)
            desc = func(dbname, NS, callback, **kw)
            try:
                ret = await sor.runSQL(desc, NS, callback, **kw)
                # Fix: the original computed ret but never returned it, so
                # callers (see the README example) always received None.
                return ret
            except Exception as e:
                print('error:', e)
                raise
            finally:
                await self.freeSqlor(sor)
        return wrap_func

    def runSQLPaging(self, func):
        """Decorator: run *func*'s description as a paged query.

        Returns ``{'total': count, 'rows': records}``.
        """
        @wraps(func)
        async def wrap_func(dbname, NS, **kw):
            sor = await self.getSqlor(dbname)
            desc = func(dbname, NS, **kw)
            try:
                total = await sor.record_count(desc, NS)
                recs = await sor.pagingdata(desc, NS)
                data = {
                    "total": total,
                    "rows": recs
                }
                print(len(recs), 'records return')
                return data
            finally:
                # Fix: the original leaked the sqlor when the query raised.
                await self.freeSqlor(sor)
        return wrap_func

    async def runSQLResultFields(self, dbname, NS, **kwargs):
        # FIXME(review): this method looks broken/dead as written — ``func``
        # is undefined here and the original neither awaited getSqlor() nor
        # reached its release call (it sat after a return). Only the obvious
        # awaits are fixed; confirm the intended source of ``func`` before use.
        sor = await self.getSqlor(dbname)
        desc = func(dbname, NS, **kwargs)
        conn = await self._aquireConn(dbname)
        async with conn.cursor() as cur:
            sor.setCursor(conn, cur)
            await sor.sqlIterator(desc, NS)
            return [{'name': i[0], 'type': i[1]} for i in cur.description]

    async def getTables(self, dbname):
        """List the tables of *dbname* (delegates to SQLor.tables)."""
        @self.inSqlor
        async def _getTables(sor, dbname):
            return await sor.tables()
        return await _getTables(None, dbname)

    async def getTableFields(self, dbname, tblname):
        """List the fields of *tblname* in *dbname*."""
        @self.inSqlor
        async def _getTableFields(sor, dbname, tblname):
            return await sor.fields(tblname)
        return await _getTableFields(None, dbname, tblname)

    async def getTablePrimaryKey(self, dbname, tblname):
        """Return the primary-key columns of *tblname* in *dbname*."""
        @self.inSqlor
        async def _getTablePrimaryKey(sor, dbname, tblname):
            return await sor.primary(tblname)
        return await _getTablePrimaryKey(None, dbname, tblname)

    async def getTableForignKeys(self, dbname, tblname):
        """Return the foreign keys of *tblname* in *dbname*."""
        @self.inSqlor
        async def _getTableForignKeys(sor, dbname, tblname):
            return await sor.fkeys(tblname)
        return await _getTableForignKeys(None, dbname, tblname)
|
||||
|
55
build/lib/sqlor/ddl_template_mysql.py
Normal file
55
build/lib/sqlor/ddl_template_mysql.py
Normal file
@ -0,0 +1,55 @@
|
||||
# Jinja2 template that renders MySQL CREATE TABLE DDL (plus indexes) from a
# model description (context variables: summary, fields, validation).
# Fix: the original tested `type=='long' or type=='int' or type=='short'`
# BEFORE the `type=='long'` branch, so 'long' always mapped to `int` and the
# `bigint` branch was unreachable; 'long' now maps to bigint, matching
# MySqlor.model2dbTypemapping.
mysql_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
{%- if type=='str' -%}
VARCHAR({{len}})
{%- elif type=='char' -%}
CHAR({{len}})
{%- elif type=='int' or type=='short' -%}
int
{%- elif type=='long' -%}
bigint
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
double({{len}},{{dec}})
{%- elif type=='date' -%}
date
{%- elif type=='time' -%}
time
{%- elif type=='datetime' -%}
datetime
{%- elif type=='timestamp' -%}
TIMESTAMP
{%- elif type=='text' -%}
longtext
{%- elif type=='bin' -%}
longblob
{%- else -%}
{{type}}
{%- endif %}
{%- endmacro %}
{% macro nullStr(nullable) %}
{%- if nullable=='no' -%}
NOT NULL
{%- endif -%}
{% endmacro %}
{% macro primary() %}
{% if len(','.join(summary[0].primary))>0 %}
,primary key({{','.join(summary[0].primary)}})
{% endif %}
{% endmacro %}
drop table {{summary[0].name}};
CREATE TABLE {{summary[0].name}}
(
{% for field in fields %}
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}} {%if field.title -%} comment '{{field.title}}'{%- endif %}{%- if not loop.last -%},{%- endif -%}
{% endfor %}
{{primary()}}
)
engine=innodb
default charset=utf8
{% if summary[0].title %}comment '{{summary[0].title}}'{% endif %}
;
{% for v in validation %}
{% if v.oper=='idx' %}
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
{% endif %}
{%- endfor -%}
"""
|
49
build/lib/sqlor/ddl_template_oracle.py
Normal file
49
build/lib/sqlor/ddl_template_oracle.py
Normal file
@ -0,0 +1,49 @@
|
||||
oracle_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
|
||||
{%- if type=='str' -%}
|
||||
VARCHAR2({{len}})
|
||||
{%- elif type=='char' -%}
|
||||
CHAR({{len}})
|
||||
{%- elif type=='long' or type=='int' or type=='short' -%}
|
||||
NUMBER
|
||||
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
|
||||
NUMBER({{len}},{{dec}})
|
||||
{%- elif type=='date' or type=='time' -%}
|
||||
DATE
|
||||
{%- elif type=='timestamp' -%}
|
||||
TIMESTAMP
|
||||
{%- elif type=='text' -%}
|
||||
CLOB
|
||||
{%- elif type=='bin' -%}
|
||||
BLOB
|
||||
{%- else -%}
|
||||
{{type}}
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
{% macro nullStr(nullable) %}
|
||||
{%- if nullable=='no' -%}
|
||||
NOT NULL
|
||||
{%- endif -%}
|
||||
{% endmacro %}
|
||||
{% macro primary() %}
|
||||
{% if len(','.join(summary[0].primary))>0 %}
|
||||
,primary key({{','.join(summary[0].primary)}})
|
||||
{% endif %}
|
||||
{% endmacro %}
|
||||
drop table {{summary[0].name}};
|
||||
CREATE TABLE {{summary[0].name}}
|
||||
(
|
||||
{% for field in fields %}
|
||||
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}}{%- if not loop.last -%},{%- endif -%}
|
||||
{% endfor %}
|
||||
{{primary()}}
|
||||
);
|
||||
{% for v in validation %}
|
||||
{% if v.oper=='idx' %}
|
||||
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
|
||||
{% endif %}
|
||||
{%- endfor -%}
|
||||
COMMENT ON TABLE {{summary[0].name}} IS '{{summary[0].title}}';
|
||||
{% for field in fields %}
|
||||
COMMENT ON COLUMN {{summary[0].name}}.{{field.name}} is '{{field.title}}';
|
||||
{% endfor %}
|
||||
"""
|
45
build/lib/sqlor/ddl_template_postgresql.py
Normal file
45
build/lib/sqlor/ddl_template_postgresql.py
Normal file
@ -0,0 +1,45 @@
|
||||
# Jinja2 template that renders PostgreSQL CREATE TABLE DDL plus indexes and
# COMMENT ON statements from a model description (context variables:
# summary, fields, validation).
# Fixes: dropped the Python-2-only `.decode('utf8')` calls on the titles
# (str has no .decode in Python 3 — rendering raised AttributeError), and
# guarded the primary-key clause / per-field trailing comma the same way the
# MySQL, Oracle and SQL Server templates do (the unconditional comma produced
# invalid SQL when the table had no primary key).
postgresql_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
{%- if type=='str' -%}
VARCHAR({{len}})
{%- elif type=='char' -%}
CHAR({{len}})
{%- elif type=='long' or type=='int' or type=='short' -%}
INTEGER
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
NUMERIC({{len}},{{dec}})
{%- elif type=='date' -%}
DATE
{%- elif type=='time' -%}
TIME
{%- elif type=='timestamp' -%}
TIMESTAMP
{%- else -%}
{{type}}
{%- endif %}
{%- endmacro %}
{% macro nullStr(nullable) %}
{%- if nullable=='no' -%}
NOT NULL
{%- endif -%}
{% endmacro %}
{% macro primary() %}
{% if len(','.join(summary[0].primary))>0 %}
,primary key({{','.join(summary[0].primary)}})
{% endif %}
{% endmacro %}
DROP TABLE IF EXISTS {{summary[0].name}};
CREATE TABLE {{summary[0].name}}
(
{% for field in fields %}
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}}{%- if not loop.last -%},{%- endif -%}
{% endfor %}
{{primary()}}
);
{% for v in validation %}
{% if v.oper=='idx' %}
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
{% endif %}
{%- endfor -%}
COMMENT ON TABLE {{summary[0].name}} IS '{{summary[0].title}}';
{% for field in fields %}
COMMENT ON COLUMN {{summary[0].name}}.{{field.name}} is '{{field.title}}';
{% endfor %}
"""
|
51
build/lib/sqlor/ddl_template_sqlserver.py
Normal file
51
build/lib/sqlor/ddl_template_sqlserver.py
Normal file
@ -0,0 +1,51 @@
|
||||
sqlserver_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
|
||||
{%- if type=='str' -%}
|
||||
NVARCHAR({{len}})
|
||||
{%- elif type=='char' -%}
|
||||
CHAR({{len}})
|
||||
{%- elif type=='long' or type=='int' or type=='short' -%}
|
||||
NUMERIC
|
||||
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
|
||||
numeric({{len}},{{dec}})
|
||||
{%- elif type=='date' or type=='time' -%}
|
||||
DATE
|
||||
{%- elif type=='timestamp' -%}
|
||||
TIMESTAMP
|
||||
{%- elif type=='text' -%}
|
||||
NVARCHAR(MAX)
|
||||
{%- elif type=='bin' -%}
|
||||
IMAGE
|
||||
{%- else -%}
|
||||
{{type}}
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
{% macro nullStr(nullable) %}
|
||||
{%- if nullable=='no' -%}
|
||||
NOT NULL
|
||||
{%- endif -%}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro primary() %}
|
||||
{% if len(','.join(summary[0].primary))>0 %}
|
||||
,primary key({{','.join(summary[0].primary)}})
|
||||
{% endif %}
|
||||
{% endmacro %}
|
||||
|
||||
drop table dbo.{{summary[0].name}};
|
||||
CREATE TABLE dbo.{{summary[0].name}}
|
||||
(
|
||||
{% for field in fields %}
|
||||
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}}{%- if not loop.last -%},{%- endif -%}
|
||||
{% endfor %}
|
||||
{{primary()}}
|
||||
)
|
||||
{% for v in validation %}
|
||||
{% if v.oper=='idx' %}
|
||||
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
|
||||
{% endif %}
|
||||
{%- endfor -%}
|
||||
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'{{summary[0].title}}' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'{{summary[0].name}}'
|
||||
{% for field in fields %}
|
||||
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'{{field.title}}' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'{{summary[0].name}}', @level2type=N'COLUMN',@level2name=N'{{field.name}}'
|
||||
{% endfor %}
|
||||
"""
|
175
build/lib/sqlor/mssqlor.py
Normal file
175
build/lib/sqlor/mssqlor.py
Normal file
@ -0,0 +1,175 @@
|
||||
# -*- coding:utf8 -*-
|
||||
from .sor import SQLor
|
||||
from .ddl_template_sqlserver import sqlserver_ddl_tmpl
|
||||
|
||||
class MsSqlor(SQLor):
    """SQLor implementation for Microsoft SQL Server (pymssql driver)."""

    # Jinja2 DDL template used to generate CREATE TABLE statements.
    ddl_template = sqlserver_ddl_tmpl

    # Maps SQL Server column types to the abstract model types used by the
    # DDL templates.
    db2modelTypeMapping = {
        'bit':'short',
        'tinyint':'short',
        'date':'date',
        'bigint':'long',
        'smallint':'short',
        'int':'long',
        'decimal':'float',
        'numeric':'float',
        'smallmoney':'float',
        'money':'float',
        'real':'float',
        'float':'float',
        'datetime':'date',
        'timestamp':'timestamp',
        'uniqueidentifier':'timestamp',
        'char':'char',
        'varchar':'str',
        'text':'text',
        'nchar':'str',
        'nvarchar':'str',
        'ntext':'text',
        'binary':'str',
        'varbinary':'str',
        'image':'file',
    }
    # Maps abstract model types back to SQL Server column types.
    model2dbTypemapping = {
        'date':'datetime',
        'time':'date',
        'timestamp':'timestamp',
        'str':'nvarchar',
        'char':'char',
        'short':'int',
        'long':'numeric',
        'float':'numeric',
        'text':'ntext',
        'file':'image',
    }
    @classmethod
    def isMe(self,name):
        """Return True when *name* names the pymssql driver."""
        return name=='pymssql'

    def grammar(self):
        # NOTE(review): `select_stmt` is not defined anywhere in this module,
        # so calling grammar() raises NameError — confirm where select_stmt
        # was meant to come from before relying on this method.
        return {
            'select':select_stmt,
        }

    def placeHolder(self,varname):
        """Return the DBAPI placeholder for *varname*; '' for the main-sql marker."""
        if varname=='__mainsql__' :
            return ''
        return '%s'

    def dataConvert(self,dataList):
        """Convert a dict or a [{'name':..,'value':..}] list into a positional tuple."""
        if type(dataList) == type({}):
            d = [ i for i in dataList.values()]
        else:
            d = [ i['value'] for i in dataList]
        return tuple(d)

    def pagingSQLmodel(self):
        """Return the ROW_NUMBER()-based paging wrapper ( %s receives the inner query)."""
        return u"""select *
from (
select row_number() over(order by $[sort]$ $[order]$) as _row_id,page_s.*
from (%s) page_s
) A
where _row_id >= $[from_line]$ and _row_id < $[end_line]$"""

    def tablesSQL(self):
        """Return SQL listing user tables as (name, title) from sysobjects."""
        sqlcmd = u"""select
lower(d.name) as name,
lower(cast(Isnull(f.VALUE,d.name) as nvarchar )) title
from sysobjects d
left join sys.extended_properties f on d.id = f.major_id and f.minor_id = 0
where d.xtype = 'U'"""
        return sqlcmd

    def fieldsSQL(self,tablename=None):
        """Return SQL describing columns (all tables, or just *tablename*)."""
        sqlcmd=u"""SELECT name = lower(a.name)
,type = b.name
,length = Columnproperty(a.id,a.name,'PRECISION')
,dec = Isnull(Columnproperty(a.id,a.name,'Scale'),null)
,nullable = CASE
WHEN a.isnullable = 1 THEN 'yes'
ELSE 'no'
END
,title = lower(cast(Isnull(g.[value],a.name) as nvarchar) )
,table_name = lower(d.name)
FROM syscolumns a
LEFT JOIN systypes b
ON a.xusertype = b.xusertype
INNER JOIN sysobjects d
ON (a.id = d.id)
AND (d.xtype = 'U')
AND (d.name <> 'dtproperties')
INNER JOIN sys.all_objects c
ON d.id=c.object_id
AND schema_name(schema_id)='dbo'
LEFT JOIN sys.extended_properties g
ON (a.id = g.major_id)
AND (a.colid = g.minor_id)
LEFT JOIN sys.extended_properties f
ON (d.id = f.major_id)
AND (f.minor_id = 0)"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ where lower(d.name)='%s'
ORDER BY a.id,a.colorder""" % tablename.lower()
        else:
            sqlcmd = sqlcmd + """ ORDER BY a.id,a.colorder"""
        return sqlcmd

    def fkSQL(self,tablename=None):
        """Return SQL listing foreign keys (optionally only those referencing *tablename*)."""
        sqlcmd = u"""select
MainCol.name AS field -- [主表列名]
,oSub.name AS fk_table -- [子表名称],
,SubCol.name AS fk_field -- [子表列名],
from
sys.foreign_keys fk
JOIN sys.all_objects oSub
ON (fk.parent_object_id = oSub.object_id)
JOIN sys.all_objects oMain
ON (fk.referenced_object_id = oMain.object_id)
JOIN sys.foreign_key_columns fkCols
ON (fk.object_id = fkCols.constraint_object_id)
JOIN sys.columns SubCol
ON (oSub.object_id = SubCol.object_id
AND fkCols.parent_column_id = SubCol.column_id)
JOIN sys.columns MainCol
ON (oMain.object_id = MainCol.object_id
AND fkCols.referenced_column_id = MainCol.column_id)"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ where lower(oMain.name) = '%s'""" % tablename.lower()

        return sqlcmd

    def pkSQL(self,tablename=None):
        """Return SQL listing primary-key columns (all tables, or just *tablename*)."""
        sqlcmd = u"""select
lower(a.table_name) as table_name,
lower(b.column_name) as field_name
from information_schema.table_constraints a
inner join information_schema.constraint_column_usage b
on a.constraint_name = b.constraint_name
where a.constraint_type = 'PRIMARY KEY'"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(a.table_name) = '%s'""" % tablename.lower()
        return sqlcmd

    def indexesSQL(self,tablename=None):
        """Return SQL listing indexes with their type and columns (optionally for *tablename*)."""
        sqlcmd = """SELECT
index_name=lower(IDX.Name),
index_type=case when KC.type_desc is null then 'primary' WHEN IDX.is_unique=1 THEN 'unique' ELSE 'ununique' END,
table_name=lower(O.Name),
column_name=lower(C.Name)
FROM sys.indexes IDX
INNER JOIN sys.index_columns IDXC
ON IDX.[object_id]=IDXC.[object_id]
AND IDX.index_id=IDXC.index_id
LEFT JOIN sys.key_constraints KC
ON IDX.[object_id]=KC.[parent_object_id]
AND IDX.index_id=KC.unique_index_id
INNER JOIN sys.objects O
ON O.[object_id]=IDX.[object_id]
INNER JOIN sys.columns C
ON O.[object_id]=C.[object_id]
AND O.type='U'
AND O.is_ms_shipped=0
AND IDXC.Column_id=C.Column_id"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ where lower(O.name)='%s'""" % tablename.lower()
        return sqlcmd
|
179
build/lib/sqlor/mysqlor.py
Normal file
179
build/lib/sqlor/mysqlor.py
Normal file
@ -0,0 +1,179 @@
|
||||
# -*- coding:utf8 -*-
|
||||
from mysql import connector
|
||||
from appPublic.argsConvert import ArgsConvert,ConditionConvert
|
||||
|
||||
from .sor import SQLor
|
||||
from .ddl_template_mysql import mysql_ddl_tmpl
|
||||
class MySqlor(SQLor):
    """SQLor implementation for MySQL (mysql.connector driver)."""

    # Jinja2 DDL template used to generate CREATE TABLE statements.
    ddl_template = mysql_ddl_tmpl

    # Maps MySQL column types to the abstract model types used by the DDL
    # templates. Fixes: removed duplicate 'mediumblob'/'mediumtext' keys
    # (the duplicates silently overwrote the earlier entries with the same
    # values), corrected 'barbinary' -> 'varbinary' (MySQL has no barbinary
    # type), and 'datestamp' -> 'timestamp' (no 'datestamp' model type
    # exists anywhere in this package).
    db2modelTypeMapping = {
        'tinyint':'short',
        'smallint':'short',
        'mediumint':'long',
        'int':'long',
        'bigint':'long',
        'decimal':'float',
        'double':'float',
        'float':'float',
        'char':'char',
        'varchar':'str',
        'tinyblob':'text',
        'tinytext':'text',
        'mediumblob':'text',
        'mediumtext':'text',
        'blob':'text',
        'text':'text',
        'longblob':'bin',
        'longtext':'text',
        'varbinary':'text',
        'binary':'text',
        'date':'date',
        'time':'time',
        'datetime':'datetime',
        'timestamp':'timestamp',
        'year':'short',
    }
    # Maps abstract model types back to MySQL column types.
    model2dbTypemapping = {
        'date':'date',
        'time':'date',
        'timestamp':'timestamp',
        'str':'varchar',
        'char':'char',
        'short':'int',
        'long':'bigint',
        'float':'double',
        'text':'longtext',
        'bin':'longblob',
        'file':'longblob',
    }

    @classmethod
    def isMe(cls, name):
        """Return True when *name* names the mysql.connector driver."""
        return name == 'mysql.connector'

    def grammar(self):
        # NOTE(review): `select_stmt` is not defined anywhere in this module,
        # so calling grammar() raises NameError — confirm where select_stmt
        # was meant to come from before relying on this method.
        return {
            'select':select_stmt,
        }

    def _opendb(self):
        """Open a synchronous mysql.connector connection from the dbdesc kwargs."""
        self.conn = connector.connect(**self.dbdesc['kwargs'])

    def placeHolder(self, varname):
        """Return the DBAPI placeholder for *varname*; '' for the main-sql marker."""
        if varname == '__mainsql__':
            return ''
        return '%s'

    def dataConvert(self, dataList):
        """Convert a dict or a [{'name':..,'value':..}] list into a positional tuple."""
        if type(dataList) == type({}):
            d = [i for i in dataList.values()]
        else:
            d = [i['value'] for i in dataList]
        return tuple(d)

    def pagingSQL(self, sql, paging, NS):
        """Wrap *sql* in a LIMIT-based paging query driven by *NS* values.

        Returns *sql* unchanged when no sort column is supplied.
        """
        page = int(NS.get(paging['pagename'], 1))
        rows = int(NS.get(paging['rowsname'], 10))
        sort = NS.get(paging.get('sortname', 'sort'), None)
        order = NS.get(paging.get('ordername', 'asc'), 'asc')
        if not sort:
            return sql
        if page < 1:
            page = 1
        from_line = (page - 1) * rows
        end_line = page * rows + 1
        psql = self.pagingSQLmodel()
        ns = {
            'from_line':from_line,
            'end_line':end_line,
            'rows':rows,
            'sort':sort,
            'order':order,
        }
        ac = ArgsConvert('$[', ']$')
        psql = ac.convert(psql, ns)
        retSQL = psql % sql
        return retSQL

    def pagingSQLmodel(self):
        """Return the LIMIT-based paging wrapper ( %s receives the inner query)."""
        return u"""select * from (%s) A order by $[sort]$ $[order]$
limit $[from_line]$,$[rows]$"""

    def tablesSQL(self):
        """Return SQL listing the schema's tables as (name, title)."""
        sqlcmd = """SELECT lower(TABLE_NAME) as name, lower(TABLE_COMMENT) as title FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s'""" % self.dbdesc.get('dbname', 'unknown')
        return sqlcmd

    def fieldsSQL(self, tablename=None):
        """Return SQL describing columns (all tables, or just *tablename*)."""
        sqlcmd = """
select
lower(column_name) as name,
data_type as type,
case when character_maximum_length is null then NUMERIC_PRECISION
else character_maximum_length end
as length,
NUMERIC_SCALE as dec1,
lower(is_nullable) as nullable,
column_comment as title,
lower(table_name) as table_name
from information_schema.columns where lower(TABLE_SCHEMA) = '%s' """ % self.dbdesc.get('dbname', 'unknown').lower()
        if tablename is not None:
            sqlcmd = sqlcmd + """and lower(table_name)='%s';""" % tablename.lower()
        return sqlcmd

    def fkSQL(self, tablename=None):
        """Return SQL listing foreign keys referencing *tablename* (or all).

        Fix: the original had a stray ';' terminating the statement in the
        middle of the WHERE clause, which made the generated SQL invalid.
        """
        sqlcmd = """SELECT C.TABLE_SCHEMA 拥有者,
C.REFERENCED_TABLE_NAME 父表名称 ,
C.REFERENCED_COLUMN_NAME 父表字段 ,
C.TABLE_NAME 子表名称,
C.COLUMN_NAME 子表字段,
C.CONSTRAINT_NAME 约束名,
T.TABLE_COMMENT 表注释,
R.UPDATE_RULE 约束更新规则,
R.DELETE_RULE 约束删除规则
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE C
JOIN INFORMATION_SCHEMA. TABLES T
ON T.TABLE_NAME = C.TABLE_NAME
JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS R
ON R.TABLE_NAME = C.TABLE_NAME
AND R.CONSTRAINT_NAME = C.CONSTRAINT_NAME
AND R.REFERENCED_TABLE_NAME = C.REFERENCED_TABLE_NAME
WHERE C.REFERENCED_TABLE_NAME IS NOT NULL
and C.TABLE_SCHEMA = '%s'
""" % self.dbdesc.get('dbname', 'unknown').lower()
        if tablename is not None:
            sqlcmd = sqlcmd + " and C.REFERENCED_TABLE_NAME = '%s'" % tablename.lower()
        return sqlcmd

    def pkSQL(self, tablename=None):
        """Return SQL listing primary-key columns (all tables, or just *tablename*)."""
        sqlcmd = """SELECT
lower(c.table_name) as table_name,
lower(c.COLUMN_NAME) as field_name
FROM
INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS t,
INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS c
WHERE
t.CONSTRAINT_TYPE = 'PRIMARY KEY'
AND t.TABLE_SCHEMA = '%s'
AND t.TABLE_NAME = c.TABLE_NAME
""" % self.dbdesc.get('dbname', 'unknown').lower()
        if tablename is not None:
            sqlcmd = sqlcmd + " AND c.TABLE_NAME = '%s'" % tablename.lower()
        return sqlcmd

    def indexesSQL(self, tablename=None):
        """Return SQL listing indexes with their type and columns (optionally for *tablename*)."""
        sqlcmd = """SELECT DISTINCT
lower(index_name) as index_name,
lower(index_type) as index_type,
lower(table_name) as table_name,
lower(column_name) as column_name
FROM
information_schema.statistics
WHERE
table_schema = '%s'""" % self.dbdesc.get('dbname', 'unknown')
        if tablename is not None:
            sqlcmd = sqlcmd + """ AND table_name = '%s'""" % tablename.lower()
        return sqlcmd
|
131
build/lib/sqlor/oracleor.py
Normal file
131
build/lib/sqlor/oracleor.py
Normal file
@ -0,0 +1,131 @@
|
||||
from .sor import SQLor
|
||||
from .ddl_template_oracle import oracle_ddl_tmpl
|
||||
class Oracleor(SQLor):
    """SQLor dialect for Oracle databases (cx_Oracle driver).

    Supplies the Oracle flavours of bind-placeholder syntax, paging SQL
    and the data-dictionary queries used for schema introspection, plus
    the type mappings between Oracle column types and model field types.
    """

    # DDL template rendered by SQLor.createTable().
    ddl_template = oracle_ddl_tmpl

    # Oracle column type -> model field type.
    db2modelTypeMapping = {
        'char': 'char',
        'nchar': 'str',
        'varchar': 'str',
        'varchar2': 'str',
        'nvarchar2': 'str',
        'number': 'long',
        'integer': 'long',
        'binary_float': 'float',
        'binary_double': 'float',
        'float': 'float',
        'timestamp': 'timestamp',
        'timestamp with time zone': 'timestamp',
        'timestamp with local time zone': 'timestamp',
        # NOTE(review): 'moth' looks like a typo for 'month'; kept as-is
        # because the key must match whatever fieldsSQL() returns -- confirm.
        'interval year to moth': 'date',
        'interval day to second': 'timestamp',
        'clob': 'text',
        'nclob': 'text',
        'blob': 'file',
        'bfile': 'file',
        'date': 'date',
    }

    # model field type -> Oracle column type.
    model2dbTypemapping = {
        'date': 'date',
        'time': 'date',
        'timestamp': 'date',
        'str': 'varchar2',
        'char': 'char',
        'short': 'number',
        'long': 'number',
        'float': 'number',
        'text': 'nclob',
        'file': 'blob',
    }

    @classmethod
    def isMe(cls, name):
        """Return True when *name* identifies the cx_Oracle driver."""
        return name == 'cx_Oracle'

    def grammar(self):
        # NOTE(review): select_stmt is not defined anywhere in this
        # module, so calling grammar() raises NameError.  Kept unchanged
        # -- confirm whether this method is dead code or select_stmt
        # went missing in a refactor.
        return {
            'select': select_stmt,
        }

    def placeHolder(self, varname):
        """Oracle uses named bind variables (:name).  The '__mainsql__'
        marker variable produces no placeholder."""
        if varname == '__mainsql__':
            return ''
        return ':%s' % varname

    def dataConvert(self, dataList):
        """Convert [{'name':..,'value':..}] bind data to the name->value
        dict cx_Oracle expects; dicts pass through unchanged."""
        if isinstance(dataList, dict):
            return dataList
        return {i['name']: i['value'] for i in dataList}

    def pagingSQLmodel(self):
        """ROWNUM-based paging template; %s receives the inner query and
        the $[..]$ variables are filled in by SQLor.pagingSQL()."""
        return u"""select *
from (
    select page_s.*,rownum row_id
    from (%s) page_s
    order by $[sort]$ $[order]$
)
where row_id >=$[from_line]$ and row_id < $[end_line]$"""

    def tablesSQL(self):
        """SQL listing user tables, with their comment as the title."""
        return """select
    lower(table_name) as name,
    lower(decode(comments,null,table_name,comments)) as title
from USER_TAB_COMMENTS where table_type = 'TABLE'"""

    def fieldsSQL(self, tablename=None):
        """SQL describing columns (name/type/length/dec/nullable/title),
        for one table when *tablename* is given, else for all tables."""
        sqlcmd = """select lower(utc.COLUMN_NAME) name
    ,utc.DATA_TYPE type
    ,utc.DATA_LENGTH length
    ,utc.data_scale dec
    ,case when utc.nullable = 'Y' then 'yes' else 'no' end nullable
    ,lower(nvl(ucc.comments,utc.COLUMN_NAME)) title
    ,lower(utc.table_name) as table_name
from user_tab_cols utc left join USER_COL_COMMENTS ucc on utc.table_name = ucc.table_name and utc.COLUMN_NAME = ucc.COLUMN_NAME"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ where lower(utc.table_name) = '%s'""" % tablename.lower()
        return sqlcmd

    def fkSQL(self, tablename=None):
        """SQL listing foreign keys, optionally restricted to one table.

        Fixed: the original called tablename.lower() unconditionally
        before the None check, so the documented tablename=None default
        always raised AttributeError.
        """
        sqlcmd = """select
    distinct(ucc.column_name) as field,rela.table_name as fk_table,rela.column_name as fk_field
from
    user_constraints uc,user_cons_columns ucc,
    (
        select t2.table_name,t2.column_name,t1.r_constraint_name
        from user_constraints t1,user_cons_columns t2
        where t1.r_constraint_name=t2.constraint_name
    ) rela
where
    uc.constraint_name=ucc.constraint_name
    and uc.r_constraint_name=rela.r_constraint_name"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(uc.table_name)='%s'""" % tablename.lower()
        return sqlcmd

    def pkSQL(self, tablename=None):
        """SQL listing primary-key columns, optionally for one table."""
        sqlcmd = """
select
    lower(col.table_name) table_name,
    lower(col.column_name) as field_name
from
    user_constraints con,user_cons_columns col
where
    con.constraint_name=col.constraint_name and con.constraint_type='P'"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(col.table_name)='%s'""" % tablename.lower()
        return sqlcmd

    def indexesSQL(self, tablename=None):
        """SQL listing indexes and their columns, optionally per table."""
        sqlcmd = """select
    lower(a.index_name) index_name,
    lower(a.UNIQUENESS) index_type,
    lower(a.table_name) table_name,
    lower(b.column_name) column_name
from user_indexes a, user_ind_columns b
where a.index_name = b.index_name"""
        if tablename is not None:
            sqlcmd += """ and lower(a.table_name) = lower('%s')""" % tablename.lower()
        return sqlcmd
|
||||
|
199
build/lib/sqlor/postgresqlor.py
Normal file
199
build/lib/sqlor/postgresqlor.py
Normal file
@ -0,0 +1,199 @@
|
||||
from .sor import SQLor
|
||||
from .ddl_template_postgresql import postgresql_ddl_tmpl
|
||||
|
||||
class PostgreSQLor(SQLor):
    """SQLor dialect for PostgreSQL (psycopg2 driver).

    NOTE(review): several of the introspection queries below
    (USER_TAB_COMMENTS, user_tab_cols, user_constraints, user_indexes)
    are Oracle data-dictionary views that do not exist in PostgreSQL;
    the pg_* catalog queries kept in the docstrings look like the
    intended replacements -- confirm before relying on this class.
    """

    # DDL template rendered by SQLor.createTable().
    ddl_template = postgresql_ddl_tmpl

    # PostgreSQL column type -> model field type.
    db2modelTypeMapping = {
        'smallint': 'short',
        'integer': 'long',
        'bigint': 'llong',
        'decimal': 'float',
        'numeric': 'float',
        'real': 'float',
        'double': 'float',
        'serial': 'long',
        'bigserial': 'llong',
        'char': 'char',
        'character': 'char',
        'varchar': 'str',
        'character varying': 'str',
        'text': 'text',
        'timestamp': 'timestamp',
        'date': 'date',
        'time': 'time',
        'boolean': 'char',
        'bytea': 'file'
    }

    # model field type -> PostgreSQL column type.
    model2dbTypemapping = {
        'date': 'date',
        'time': 'date',
        'timestamp': 'timestamp',
        'str': 'varchar',
        'char': 'char',
        'short': 'smallint',
        'long': 'integer',
        'float': 'numeric',
        'text': 'text',
        'file': 'bytea',
    }

    @classmethod
    def isMe(cls, name):
        """Return True when *name* identifies the psycopg2 driver."""
        return name == 'psycopg2'

    def grammar(self):
        # NOTE(review): select_stmt is undefined in this module; calling
        # grammar() raises NameError -- confirm whether this is dead code.
        return {
            'select': select_stmt,
        }

    def placeHolder(self, varname):
        """Bind-variable placeholder for one variable.

        NOTE(review): psycopg2 expects %(name)s named placeholders, not
        Oracle-style :name as produced here -- confirm against runVarSQL
        usage before changing.
        """
        if varname == '__mainsql__':
            return ''
        return ':%s' % varname

    def dataConvert(self, dataList):
        """Convert [{'name':..,'value':..}] bind data to a name->value
        dict; dicts pass through unchanged."""
        if isinstance(dataList, dict):
            return dataList
        return {i['name']: i['value'] for i in dataList}

    def pagingSQLmodel(self):
        """Paging template.  NOTE(review): uses Oracle ROWNUM, which
        PostgreSQL lacks (LIMIT/OFFSET is the native form) -- confirm."""
        return u"""select *
from (
    select page_s.*,rownum row_id
    from (%s) page_s
    order by $[sort]$ $[order]$
)
where row_id >=$[from_line]$ and row_id < $[end_line]$"""

    def tablesSQL(self):
        """SQL listing tables (Oracle catalog -- see class note).

        The intended PostgreSQL form is presumably:
            SELECT tablename FROM pg_tables
            WHERE tablename NOT LIKE 'pg%'
              AND tablename NOT LIKE 'sql_%'
            ORDER BY tablename;
        """
        return """select
    lower(table_name) as name,
    lower(decode(comments,null,table_name,comments)) as title
from USER_TAB_COMMENTS where table_type = 'TABLE'"""

    def fieldsSQL(self, tablename=None):
        """SQL describing columns (Oracle catalog -- see class note).

        The intended PostgreSQL form is presumably:
            SELECT col_description(a.attrelid,a.attnum) as comment,
                   pg_type.typname as typename, a.attname as name,
                   a.attnotnull as notnull
            FROM pg_class as c, pg_attribute as a
                 inner join pg_type on pg_type.oid = a.atttypid
            WHERE c.relname = 'tablename'
              AND a.attrelid = c.oid AND a.attnum > 0
        """
        sqlcmd = """select lower(utc.COLUMN_NAME) name
    ,utc.DATA_TYPE type
    ,utc.DATA_LENGTH length
    ,utc.data_scale dec
    ,case when utc.nullable = 'Y' then 'yes' else 'no' end nullable
    ,lower(nvl(ucc.comments,utc.COLUMN_NAME)) title
    ,lower(utc.table_name) as table_name
from user_tab_cols utc left join USER_COL_COMMENTS ucc on utc.table_name = ucc.table_name and utc.COLUMN_NAME = ucc.COLUMN_NAME"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ where lower(utc.table_name) = '%s'""" % tablename.lower()
        return sqlcmd

    def fkSQL(self, tablename=None):
        """SQL listing foreign keys (Oracle catalog -- see class note).

        Fixed: the original called tablename.lower() unconditionally
        before the None check, so the documented tablename=None default
        always raised AttributeError.
        """
        sqlcmd = """select
    distinct(ucc.column_name) as field,rela.table_name as fk_table,rela.column_name as fk_field
from
    user_constraints uc,user_cons_columns ucc,
    (
        select t2.table_name,t2.column_name,t1.r_constraint_name
        from user_constraints t1,user_cons_columns t2
        where t1.r_constraint_name=t2.constraint_name
    ) rela
where
    uc.constraint_name=ucc.constraint_name
    and uc.r_constraint_name=rela.r_constraint_name"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(uc.table_name)='%s'""" % tablename.lower()
        return sqlcmd

    def pkSQL(self, tablename=None):
        """SQL listing primary-key columns (Oracle catalog -- see class
        note).

        The intended PostgreSQL form is presumably the pg_constraint /
        pg_attribute join with contype = 'p'.
        """
        sqlcmd = """
select
    lower(col.table_name) table_name,
    lower(col.column_name) as field_name
from
    user_constraints con,user_cons_columns col
where
    con.constraint_name=col.constraint_name and con.constraint_type='P'"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(col.table_name)='%s'""" % tablename.lower()
        return sqlcmd

    def indexesSQL(self, tablename=None):
        """SQL listing indexes and their columns (Oracle catalog -- see
        class note; the pg_indexes/pg_index join is the likely intended
        PostgreSQL replacement)."""
        sqlcmd = """select
    lower(a.index_name) index_name,
    lower(a.UNIQUENESS) index_type,
    lower(a.table_name) table_name,
    lower(b.column_name) column_name
from user_indexes a, user_ind_columns b
where a.index_name = b.index_name"""
        if tablename is not None:
            sqlcmd += """ and lower(a.table_name) = lower('%s')""" % tablename.lower()
        return sqlcmd
|
||||
|
454
build/lib/sqlor/sor.py
Normal file
454
build/lib/sqlor/sor.py
Normal file
@ -0,0 +1,454 @@
|
||||
import os
|
||||
os.environ['NLS_LANG'] = 'SIMPLIFIED CHINESE_CHINA.UTF8'
|
||||
import sys
|
||||
import codecs
|
||||
import re
|
||||
import json
|
||||
from appPublic.myImport import myImport
|
||||
from appPublic.dictObject import DictObject,dictObjectFactory
|
||||
from appPublic.unicoding import uDict
|
||||
from patterncoding.myTemplateEngine import MyTemplateEngine
|
||||
|
||||
|
||||
from appPublic.argsConvert import ArgsConvert,ConditionConvert
|
||||
|
||||
class SQLorException(Exception):
    """Base exception raised by the sqlor package.

    Carries a `dic` attribute with 'response'/'errtype'/'errmsg' keys
    so callers can serialize the error uniformly.

    Fixed: the original defined `__int__` (typo for `__init__`) and
    called `supper(SQLException, ...)` (two more typos), so `dic` was
    never set and `__str__` crashed with AttributeError.
    """

    def __init__(self, *args, **kvs):
        super().__init__(*args)
        self.dic = {
            'response': 'error',
            'errtype': 'SQLor',
            # first positional argument is the human-readable message
            'errmsg': args[0] if args else '',
        }

    def __str__(self):
        return 'errtype:%s,errmsg=%s' % (self.dic['errtype'], self.dic['errmsg'])
|
||||
|
||||
def setValues(params, ns):
    """Look *params* up in the *ns* mapping, falling back to the
    like-named environment variable (None when neither exists)."""
    return ns.get(params, os.getenv(params))
|
||||
|
||||
def findNamedParameters(sql):
    """Return every ${name}-style variable reference found in *sql*,
    in order of appearance (duplicates included)."""
    pattern = re.compile(r'\$\{[_a-zA-Z_][a-zA-Z_0-9]*\}')
    return pattern.findall(sql)
|
||||
|
||||
|
||||
def uniParams(params1):
    """Return *params1* with duplicates removed, keeping first-seen
    order (dict.fromkeys preserves insertion order)."""
    return list(dict.fromkeys(params1))
|
||||
|
||||
def readsql(fn):
    """Read the UTF-8 encoded SQL file *fn* and return its full text.

    Fixed: uses a context manager so the handle is closed even when the
    read raises; the original leaked the file object on error.
    """
    with codecs.open(fn, 'r', 'utf-8') as f:
        return f.read()
|
||||
|
||||
class SQLor(object):
    """Driver-neutral SQL runner.

    Renders templated SQL ($[..]$ text substitution and ${..}$ bind
    variables), converts bind data to the driver's format, and offers
    helpers for paging, filtering, pivoting and schema introspection.
    Dialect subclasses override the *SQL()/placeHolder()/dataConvert()
    hooks.  Execution works both synchronously and asynchronously,
    selected by the async_mode flag set via setCursor().
    """

    def __init__(self, dbdesc=None, sqltp='$[', sqlts=']$', sqlvp='${', sqlvs='}$'):
        """Remember the database description dict and the delimiter
        pairs for text substitution (sqltp/sqlts) and bind variables
        (sqlvp/sqlvs)."""
        self.conn = None
        self.cur = None
        self.async_mode = False
        self.sqltp = sqltp
        self.sqlts = sqlts
        self.sqlvp = sqlvp
        self.sqlvs = sqlvs
        self.dbdesc = dbdesc
        self.writer = None
        self.convfuncs = {}
        self.cc = ConditionConvert()

    def setCursor(self, async_mode, conn, cur):
        """Attach an open connection/cursor pair; *async_mode* selects
        whether cursor calls are awaited."""
        self.async_mode = async_mode
        self.conn = conn
        self.cur = cur

    def getConn(self):
        """Return the attached connection (may be None)."""
        return self.conn

    def setConvertFunction(self, typ, func):
        """Register *func* as the value converter for model type *typ*."""
        self.convfuncs.update({typ: func})

    def convert(self, typ, value):
        """Run the registered converter for *typ*, or return *value*
        unchanged when none is registered."""
        func = self.convfuncs.get(typ, None)
        if func is not None:
            return func(value)
        return value

    @classmethod
    def isMe(cls, name):
        """Driver-name probe; dialect subclasses match their DBAPI
        module name."""
        return name == 'sqlor'

    def pagingSQLmodel(self):
        """Paging SQL template; empty in the base class (no paging)."""
        return u""

    def placeHolder(self, varname):
        """Placeholder for one bind variable; qmark style by default.
        The '__mainsql__' marker variable produces no placeholder."""
        if varname == '__mainsql__':
            return ''
        return '?'

    def dataConvert(self, dataList):
        """Convert [{'name':..,'value':..}] bind data to the positional
        list qmark-style drivers expect."""
        return [i.get('value', None) for i in dataList]

    def dataList(self, k, v):
        """Wrap one name/value pair in the internal bind-data format."""
        return [{'name': k, 'value': v}]

    def cursor(self):
        """Return the attached cursor."""
        return self.cur

    def recordCnt(self, sql):
        """Wrap *sql* so it returns a single 'rcnt' row-count column."""
        return u"""select count(*) rcnt from (%s) rowcount_table""" % sql

    def pagingSQL(self, sql, paging, NS):
        """Wrap *sql* in the dialect's paging template.

        *paging* names the NS keys holding page number, page size, sort
        field and sort order.  Returns *sql* unchanged when no sort
        field is supplied (the template needs one for ORDER BY).
        """
        page = int(NS.get(paging['pagename'], 1))
        rows = int(NS.get(paging['rowsname'], 10))
        sort = NS.get(paging.get('sortname', 'sort'), None)
        order = NS.get(paging.get('ordername', 'asc'), 'asc')
        if not sort:
            return sql
        if page < 1:
            page = 1
        # 1-based inclusive start line, exclusive end line
        from_line = (page - 1) * rows + 1
        end_line = page * rows + 1
        psql = self.pagingSQLmodel()
        ns = {
            'from_line': from_line,
            'end_line': end_line,
            'rows': rows,
            'sort': sort,
            'order': order,
        }
        ac = ArgsConvert('$[', ']$')
        psql = ac.convert(psql, ns)
        return psql % sql

    def filterSQL(self, sql, filters, NS):
        """Wrap *sql* in a WHERE clause assembled from *filters*; any
        filter whose $[..]$ variables are not all present in NS is
        replaced by the no-op '1=1'."""
        ac = ArgsConvert('$[', ']$')
        fbs = []
        for f in filters:
            vars = ac.findAllVariables(f)
            if len(vars) > 0:
                ignoreIt = False
                for v in vars:
                    if not NS.get(v, False):
                        ignoreIt = True
                if not ignoreIt:
                    f = ac.convert(f, NS)
                else:
                    f = '1=1'
            fbs.append(f)
        fb = ' '.join(fbs)
        return u"""select * from (%s) filter_table where %s""" % (sql, fb)

    async def runVarSQL(self, cursor, sql, NS):
        """Render templated *sql* against namespace NS and execute it on
        *cursor* with the converted bind data.  Returns nothing; the
        cursor holds any result set."""
        markedSQL, datas = self.maskingSQL(sql, NS)
        datas = self.dataConvert(datas)
        try:
            # NOTE(review): the SQL is passed to the driver as utf-8
            # bytes; most DBAPI drivers also accept str -- confirm this
            # encode is actually required.
            markedSQL = markedSQL.encode('utf8')
            if self.async_mode:
                await cursor.execute(markedSQL, datas)
            else:
                cursor.execute(markedSQL, datas)
        except Exception as e:
            print("markedSQL=", markedSQL, datas, e)
            raise e
        return

    def maskingSQL(self, org_sql, NS):
        """Replace every ${X}$ variable (except '__mainsql__') in the
        SQL with the dialect's placeholder.

        First applies $[..]$ text substitution and condition conversion
        against NS, then swaps each remaining ${X}$ for placeHolder(X).
        The '__mainsql__' variable only marks the main (output) SQL and
        produces no bind value.

        Returns:
            (marked_sql, bind_list) where bind_list is a list of
            {'name':..,'value':..} dicts in placeholder order.
        """
        sqltextAC = ArgsConvert(self.sqltp, self.sqlts)
        sqlargsAC = ArgsConvert(self.sqlvp, self.sqlvs)
        sql1 = sqltextAC.convert(org_sql, NS)
        cc = ConditionConvert()
        sql1 = cc.convert(sql1, NS)
        vars = sqlargsAC.findAllVariables(sql1)
        phnamespace = {}
        [phnamespace.update({v: self.placeHolder(v)}) for v in vars]
        m_sql = sqlargsAC.convert(sql1, phnamespace)
        newdata = []
        for v in vars:
            if v != '__mainsql__':
                value = sqlargsAC.getVarValue(v, NS, None)
                newdata += self.dataList(v, value)
        return (m_sql, newdata)

    async def execute(self, sql, value, callback, **kwargs):
        """Run *sql* with bind namespace *value*.  When *callback* is
        given, invoke it once per result row with a {column: value}
        dict (column names lower-cased); extra kwargs are forwarded."""
        cur = self.cursor()
        await self.runVarSQL(cur, sql, value)
        if callback is not None:
            fields = [i[0].lower() for i in cur.description]
            if self.async_mode:
                rec = await cur.fetchone()
            else:
                rec = cur.fetchone()
            while rec is not None:
                dic = dict(zip(fields, rec))
                callback(dic, **kwargs)
                if self.async_mode:
                    rec = await cur.fetchone()
                else:
                    rec = cur.fetchone()

    async def executemany(self, sql, values):
        """Render *sql* once and execute it for every bind set in
        *values*.

        Fixed: the original tested the bare name 'async_mode' (NameError
        at runtime) instead of self.async_mode.
        """
        cur = self.cursor()
        markedSQL, datas = self.maskingSQL(sql, {})
        datas = [self.dataConvert(d) for d in values]
        if self.async_mode:
            await cur.executemany(markedSQL, datas)
        else:
            cur.executemany(markedSQL, datas)

    def pivotSQL(self, tablename, rowFields, columnFields, valueFields):
        """Build a pivot (cross-tab) query over *tablename*: one summed
        column per (valueField, distinct columnField value) pair,
        grouped by *rowFields*."""

        def maxValue(columnFields, valueFields, cfvalues):
            # outer aggregation: sum(..) per generated case-when column
            sql = ''
            for f in valueFields:
                i = 0
                for field in columnFields:
                    for v in cfvalues[field]:
                        sql += """
    ,sum(%s_%d) %s_%d""" % (f, i, f, i)
                        i += 1
            return sql

        def casewhen(columnFields, valueFields, cfvalues):
            # inner projection: case-when per (valueField, column value)
            sql = ''
            for f in valueFields:
                i = 0
                for field in columnFields:
                    for v in cfvalues[field]:
                        if v is None:
                            sql += """,case when %s is null then %s
    else 0 end as %s_%d -- %s
""" % (field, f, f, i, v)
                        else:
                            sql += """,case when trim(%s) = trim('%s') then %s
    else 0 end as %s_%d -- %s
""" % (field, v, f, f, i, v)
                        i += 1
            return sql

        cfvalues = {}
        for field in columnFields:
            sqlstring = 'select distinct %s from %s' % (field, tablename)
            v = []
            # NOTE(review): execute() is a coroutine and is not awaited
            # here, so v stays empty; pivotSQL likely needs to become
            # async -- confirm with callers before changing the interface.
            self.execute(sqlstring, {}, lambda x: v.append(x))
            cfvalues[field] = [i[field] for i in v]

        sql = """
select """ + ','.join(rowFields)
        sql += maxValue(columnFields, valueFields, cfvalues)
        sql += """ from
    (select """ + ','.join(rowFields)
        sql += casewhen(columnFields, valueFields, cfvalues)
        sql += """
from %s)
group by %s""" % (tablename, ','.join(rowFields))
        return sql

    async def pivot(self, desc, tablename, rowFields, columnFields, valueFields):
        """Build and run the pivot query, recording the generated SQL in
        desc['sql_string']."""
        sql = self.pivotSQL(tablename, rowFields, columnFields, valueFields)
        desc['sql_string'] = sql
        ret = []
        return await self.execute(sql, {}, lambda x: ret.append(x))

    def isSelectSql(self, sql):
        """Return True when *sql* is a SELECT statement.

        Fixed: the original computed the index of the first
        non-whitespace character and then ignored it, so any leading
        whitespace made the check return False.
        """
        return sql.lstrip('\r\n \t').lower().startswith('select ')

    def getSQLfromDesc(self, desc):
        """Return the SQL text for a description dict: the contents of
        desc['sql_file'] when present, else desc['sql_string']."""
        if 'sql_file' in desc.keys():
            return readsql(desc['sql_file'])
        return desc['sql_string']

    async def record_count(self, desc, NS):
        """Run the count(*) wrapper around desc's SQL and return the
        total row count (the 'rcnt' column of the single result row)."""
        cnt_desc = {}
        cnt_desc.update(desc)
        sql = self.getSQLfromDesc(desc)
        if desc.get('sql_file', False):
            del cnt_desc['sql_file']
        cnt_desc['sql_string'] = self.recordCnt(sql)

        class Cnt:
            # tiny collector: keeps every row handed to handler()
            def __init__(self):
                self.recs = []

            def handler(self, rec):
                self.recs.append(rec)

        c = Cnt()
        # NOTE(review): sqlIterator is not defined on SQLor -- presumably
        # provided by a subclass or mixin; confirm.
        await self.sqlIterator(cnt_desc, NS, c.handler)
        return c.recs[0]['rcnt']

    async def pagingdata(self, desc, NS):
        """Run desc's SQL wrapped in the dialect's paging query and
        return the collected page of records."""
        paging_desc = {}
        paging_desc.update(desc)
        paging_desc.update({
            "paging": {
                "rowsname": "rows",
                "pagename": "page",
                "sortname": "sort",
                "ordername": "order"
            }
        })
        if desc.get('sortfield', False):
            NS['sort'] = desc.get('sortfield')
        sql = self.getSQLfromDesc(desc)
        if desc.get('sql_file', False):
            # fixed: the original deleted from 'cnt_desc', a name that
            # does not exist in this method (NameError)
            del paging_desc['sql_file']
        paging_desc['sql_string'] = self.pagingSQL(sql,
                                                   paging_desc.get('paging'), NS)

        class Cnt:
            # tiny collector: keeps every row handed to handler()
            def __init__(self):
                self.recs = []

            def handler(self, rec):
                self.recs.append(rec)

        c = Cnt()
        await self.sqlIterator(paging_desc, NS, c.handler)
        return c.recs

    async def runSQL(self, desc, NS, callback, **kw):
        """Run desc's SQL.  For SELECTs with no explicit callback,
        collect the rows as DictObjects into NS[desc['dataname']]
        (default key 'dummy')."""

        class RecordHandler:
            # appends each row, wrapped as a DictObject, to ns[name]
            def __init__(self, ns, name):
                self.ns = ns
                self.name = name
                self.ns[name] = []

            def handler(self, rec):
                obj = DictObject(rec)
                self.ns[self.name].append(obj)

        sql = self.getSQLfromDesc(desc)
        if self.isSelectSql(sql):
            if callback is None:
                klass = desc.get('dataname', 'dummy')
                if klass is not None:
                    rh = RecordHandler(NS, klass)
                    callback = rh.handler
        await self.execute(sql, NS, callback)

    async def sqlExecute(self, desc, NS):
        """Run desc's SQL for its side effects only.

        Fixed: the original referenced 'sql' without ever assigning it
        (NameError); the SQL text now comes from getSQLfromDesc().
        """
        sql = self.getSQLfromDesc(desc)
        await self.execute(sql, NS, None)

    async def tables(self):
        """Return the table list ({'name':..,'title':..} dicts) via the
        dialect's tablesSQL()."""
        sqlstring = self.tablesSQL()
        ret = []
        await self.execute(sqlstring, {}, lambda x: ret.append(x))
        return ret

    def indexesSQL(self, tablename):
        """Dialect hook: SQL yielding {index_name, index_type,
        table_name, column_name} records, or None when the dialect has
        no index introspection."""
        return None

    async def indexes(self, tablename=None):
        """Return index records for *tablename* (all tables when None).

        Fixed: the original called tablename.lower() unconditionally,
        crashing on the documented tablename=None default.
        """
        sqlstring = self.indexesSQL(tablename.lower() if tablename else tablename)
        if sqlstring is None:
            return []
        recs = []
        await self.execute(sqlstring, {}, lambda x: recs.append(x))
        return recs

    async def fields(self, tablename=None):
        """Return column records with 'type' mapped to the model type
        ('unknown' when unmapped) and 'name' lower-cased."""
        sqlstring = self.fieldsSQL(tablename)
        recs = []
        await self.execute(sqlstring, {}, lambda x: recs.append(x))
        ret = []
        for r in recs:
            r.update({'type': self.db2modelTypeMapping.get(r['type'].lower(), 'unknown')})
            r.update({'name': r['name'].lower()})
            ret.append(r)
        return ret

    async def primary(self, tablename):
        """Return the primary-key column records for *tablename*."""
        sqlstring = self.pkSQL(tablename)
        recs = []
        await self.execute(sqlstring, {}, lambda x: recs.append(x))
        return recs

    async def fkeys(self, tablename):
        """Return the foreign-key records for *tablename*."""
        sqlstring = self.fkSQL(tablename)
        recs = []
        await self.execute(sqlstring, {}, lambda x: recs.append(x))
        return recs

    async def createTable(self, tabledesc):
        """Render the dialect's DDL template with *tabledesc* and run it."""
        te = MyTemplateEngine([], 'utf8', 'utf8')
        desc = {
            "sql_string": te.renders(self.ddl_template, tabledesc)
        }
        return await self.sqlExecute(desc, {})

    async def getTableDesc(self, tablename):
        """Assemble a model description for *tablename*: summary row,
        primary-key list, fields and index ('validation') entries.

        NOTE(review): records come back from execute() as plain dicts,
        so key access is used here; the original used attribute access
        (i.name, idxrec.index_name), assigned a str key on a *list*,
        and ran 'async for' over the indexes() coroutine -- all of
        which raised at runtime.  Confirm the record key names
        ('column_name' vs the original's 'field_name') against the
        dialects' indexesSQL column aliases.
        """
        desc = {}
        matches = [i for i in await self.tables() if tablename.lower() == i['name']]
        summary = matches[0] if matches else {}
        summary['primary'] = [i['field_name'] for i in await self.primary(tablename)]
        desc['summary'] = summary
        desc['fields'] = await self.fields(tablename=tablename)
        desc['validation'] = []
        idx = {}
        for idxrec in await self.indexes(tablename=tablename):
            if idxrec['index_name'] != idx.get('name', None):
                # new index started: flush the previous one
                if idx != {}:
                    desc['validation'].append(idx)
                idx = {'fields': []}
            idx['name'] = idxrec['index_name']
            idx['oper'] = 'idx'
            idx['fields'].append(idxrec['column_name'])
        if idx != {}:
            desc['validation'].append(idx)
        return desc
|
||||
|
||||
|
||||
|
100
build/lib/sqlor/sqlite3or.py
Normal file
100
build/lib/sqlor/sqlite3or.py
Normal file
@ -0,0 +1,100 @@
|
||||
import re
|
||||
from .sor import SQLor
|
||||
|
||||
class SQLite3or(SQLor):
    """SQLor dialect for SQLite3 (stdlib sqlite3 driver).

    SQLite exposes no SQL-queryable constraint catalog, so primary keys
    come from PRAGMA table_info and foreign-key introspection returns
    nothing.
    """

    # SQLite declared type -> model field type.
    db2modelTypeMapping = {
        'char': 'char',
        'nchar': 'str',
        'text': 'text',
        'ntext': 'text',
        'varchar': 'str',
        'nvarchar': 'str',
        'blob': 'file',
        'integer': 'long',
        'double': 'float',
        'date': 'date',
        'time': 'time',
        'timestamp': 'timestamp',
        'number': 'long',
    }

    # model field type -> SQLite declared type.
    model2dbTypemapping = {
        'date': 'date',
        'time': 'time',
        'timestamp': 'timestamp',
        'str': 'nvarchar',
        'char': 'char',
        'short': 'int',
        'long': 'integer',
        'float': 'double',
        'text': 'ntext',
        'file': 'blob',
    }

    @classmethod
    def isMe(cls, name):
        """Return True when *name* identifies the sqlite3 driver."""
        return name == 'sqlite3'

    def placeHolder(self, varname):
        """sqlite3 uses qmark-style positional placeholders.  The
        '__mainsql__' marker variable produces no placeholder."""
        if varname == '__mainsql__':
            return ''
        return '?'

    def dataConvert(self, dataList):
        """Convert bind data to the positional tuple sqlite3 expects;
        dicts contribute their values in insertion order."""
        if isinstance(dataList, dict):
            return tuple(dataList.values())
        return tuple(i['value'] for i in dataList)

    def pagingSQLmodel(self):
        # NOTE(review): SQLite "LIMIT x,y" means OFFSET x LIMIT y, but
        # $[from_line]$ is a 1-based start line and $[end_line]$ an
        # exclusive end line, so this pages off by one and with the
        # wrong row count -- confirm against SQLor.pagingSQL() and fix
        # together with it.
        return u"""select * from (%s) order by $[sort]$ $[order]$ limit $[from_line]$,$[end_line]$"""

    def tablesSQL(self):
        """SQL listing tables from sqlite_master (name doubles as title)."""
        return u"""select name, tbl_name as title from sqlite_master where upper(type) = 'TABLE'"""

    def fieldsSQL(self, tablename):
        """PRAGMA statement describing the columns of *tablename*."""
        return """PRAGMA table_info('%s')""" % tablename.lower()

    def fields(self, tablename):
        """Return column records for *tablename*, splitting the declared
        type (e.g. 'varchar(20,2)') into type/length/dec and mapping the
        type to the model's types."""
        m = u'(\w+)\(((\d+)(,(\d+)){0,1})\){0,1}'
        k = re.compile(m)

        def typesplit(typ):
            # 'varchar(20,2)' -> ('varchar', 20, 2); plain types -> (typ, 0, 0)
            d = k.search(typ)
            if d is None:
                return typ, 0, 0
            return (d.group(1),
                    int(d.group(3) if d.group(3) is not None else 0),
                    int(d.group(5) if d.group(5) is not None else 0))

        sqlstring = self.fieldsSQL(tablename)
        recs = []
        # Fixed: execute() requires the bind-value positional argument;
        # the original call omitted it and raised TypeError.
        # NOTE(review): execute() is a coroutine on the async base class
        # and is not awaited here, so recs stays empty under that base
        # -- confirm whether fields() should be async.
        self.execute(sqlstring, {}, lambda x: recs.append(x))
        for r in recs:
            t, l, d = typesplit(r['type'])
            r['type'] = t
            r['length'] = int(l)
            r['dec'] = int(d)
            r['title'] = r['name']
        ret = []
        for r in recs:
            r.update({'type': self.db2modelTypeMapping[r['type'].lower()]})
            r.update({'name': r['name'].lower()})
            ret.append(r)
        return ret

    def fkSQL(self, tablename):
        """SQLite has no FK catalog query here; returns empty SQL."""
        return ""

    def fkeys(self, tablename):
        """No foreign-key introspection for SQLite; always empty."""
        return []

    def primary(self, tablename):
        """Return the primary-key columns using the 'pk' flag from
        PRAGMA table_info."""
        recs = self.fields(tablename)
        return [{'field': r['name']} for r in recs if r['pk'] == 1]

    def pkSQL(self, tablename):
        """Primary keys come from fields()/PRAGMA, not SQL; empty."""
        return ""
|
393
build/lib/sqlor/sqlor.py
Normal file
393
build/lib/sqlor/sqlor.py
Normal file
@ -0,0 +1,393 @@
|
||||
import os
|
||||
os.environ['NLS_LANG'] = 'SIMPLIFIED CHINESE_CHINA.UTF8'
|
||||
import sys
|
||||
import codecs
|
||||
import re
|
||||
import json
|
||||
from appPublic.myImport import myImport
|
||||
from appPublic.dictObject import DictObject,dictObjectFactory
|
||||
from appPublic.unicoding import uDict
|
||||
from patterncoding.myTemplateEngine import MyTemplateEngine
|
||||
|
||||
|
||||
from appPublic.argsConvert import ArgsConvert,ConditionConvert
|
||||
|
||||
class SQLorException(Exception):
    """Base exception raised by the sqlor package.

    Carries a `dic` attribute with 'response'/'errtype'/'errmsg' keys
    so callers can serialize the error uniformly.

    Fixed: the original defined `__int__` (typo for `__init__`) and
    called `supper(SQLException, ...)` (two more typos), so `dic` was
    never set and `__str__` crashed with AttributeError.
    """

    def __init__(self, *args, **kvs):
        super().__init__(*args)
        self.dic = {
            'response': 'error',
            'errtype': 'SQLor',
            # first positional argument is the human-readable message
            'errmsg': args[0] if args else '',
        }

    def __str__(self):
        return 'errtype:%s,errmsg=%s' % (self.dic['errtype'], self.dic['errmsg'])
|
||||
|
||||
def setValues(params, ns):
    """Look *params* up in the *ns* mapping, falling back to the
    like-named environment variable (None when neither exists)."""
    return ns.get(params, os.getenv(params))
|
||||
|
||||
def findNamedParameters(sql):
    """Return every ${name}-style variable reference found in *sql*,
    in order of appearance (duplicates included)."""
    pattern = re.compile(r'\$\{[_a-zA-Z_][a-zA-Z_0-9]*\}')
    return pattern.findall(sql)
|
||||
|
||||
|
||||
def uniParams(params1):
    """Return *params1* with duplicates removed, keeping first-seen
    order (dict.fromkeys preserves insertion order)."""
    return list(dict.fromkeys(params1))
|
||||
|
||||
def readsql(fn):
    """Read the UTF-8 encoded SQL file *fn* and return its full text.

    Fixed: uses a context manager so the handle is closed even when the
    read raises; the original leaked the file object on error.
    """
    with codecs.open(fn, 'r', 'utf-8') as f:
        return f.read()
|
||||
|
||||
class SQLor(object):
|
||||
def __init__(self,dbdesc=None,sqltp = '$[',sqlts = ']$',sqlvp = '${',sqlvs = '}$'):
|
||||
self.conn = None
|
||||
self.cur = None
|
||||
self.sqltp = sqltp
|
||||
self.sqlts = sqlts
|
||||
self.sqlvp = sqlvp
|
||||
self.sqlvs = sqlvs
|
||||
self.dbdesc = dbdesc
|
||||
self.writer = None
|
||||
self.convfuncs = {}
|
||||
self.cc = ConditionConvert()
|
||||
|
||||
def getConn(self):
|
||||
return self.conn
|
||||
|
||||
def setConvertFunction(self,typ,func):
|
||||
self.convfuncs.update({typ:func})
|
||||
|
||||
def convert(self,typ,value):
|
||||
if self.convfuncs.get(typ,None) is not None:
|
||||
return self.convfuncs[typ](value)
|
||||
return value
|
||||
@classmethod
|
||||
def isMe(self,name):
|
||||
return name=='sqlor'
|
||||
|
||||
def pagingSQLmodel(self):
|
||||
return u""
|
||||
|
||||
def placeHolder(self,varname):
|
||||
if varname=='__mainsql__' :
|
||||
return ''
|
||||
return '?'
|
||||
|
||||
def dataConvert(self,dataList):
|
||||
return [ i.get('value',None) for i in dataList]
|
||||
|
||||
def dataList(self,k,v):
|
||||
a = []
|
||||
a.append({'name':k,'value':v})
|
||||
return a
|
||||
|
||||
def cursor(self):
|
||||
return self.cur
|
||||
|
||||
def recordCnt(self,sql):
|
||||
ret = u"""select count(*) rcnt from (%s) rowcount_table""" % sql
|
||||
return ret
|
||||
|
||||
def pagingSQL(self,sql,paging,NS):
|
||||
"""
|
||||
default it not support paging
|
||||
"""
|
||||
page = int(NS.get(paging['pagename'],1))
|
||||
rows = int(NS.get(paging['rowsname'],10))
|
||||
sort = NS.get(paging.get('sortname','sort'),None)
|
||||
order = NS.get(paging.get('ordername','asc'),'asc')
|
||||
if not sort:
|
||||
return sql
|
||||
if page < 1:
|
||||
page = 1
|
||||
from_line = (page - 1) * rows + 1
|
||||
end_line = page * rows + 1
|
||||
psql = self.pagingSQLmodel()
|
||||
ns={
|
||||
'from_line':from_line,
|
||||
'end_line':end_line,
|
||||
'rows':rows,
|
||||
'sort':sort,
|
||||
'order':order,
|
||||
}
|
||||
ac = ArgsConvert('$[',']$')
|
||||
psql = ac.convert(psql,ns)
|
||||
retSQL=psql % sql
|
||||
return retSQL
|
||||
|
||||
def filterSQL(self,sql,filters,NS):
|
||||
ac = ArgsConvert('$[',']$')
|
||||
fbs = []
|
||||
for f in filters:
|
||||
vars = ac.findAllVariables(f)
|
||||
if len(vars) > 0:
|
||||
ignoreIt = False
|
||||
for v in vars:
|
||||
if not NS.get(v,False):
|
||||
ignoreIt = True
|
||||
if not ignoreIt:
|
||||
f = ac.convert(f,NS)
|
||||
else:
|
||||
f = '1=1'
|
||||
fbs.append(f)
|
||||
fb = ' '.join(fbs)
|
||||
retsql = u"""select * from (%s) filter_table where %s""" % (sql,fb)
|
||||
return retsql
|
||||
|
||||
async def runVarSQL(self,cursor,sql,NS):
|
||||
"""
|
||||
using a opened cursor to run a SQL statment with variable, the variable is setup in NS namespace
|
||||
return a cursor with data
|
||||
"""
|
||||
markedSQL,datas = self.maskingSQL(sql,NS)
|
||||
datas = self.dataConvert(datas)
|
||||
try:
|
||||
markedSQL = markedSQL.encode('utf8')
|
||||
await cursor.execute(markedSQL,datas)
|
||||
except Exception as e:
|
||||
print( "markedSQL=",markedSQL,datas,e)
|
||||
raise e
|
||||
return
|
||||
|
||||
def maskingSQL(self, org_sql, NS):
    """
    Replace every ${X}$-style variable in *org_sql* (except the marker
    variable '__mainsql__') with the dialect's placeholder, and collect
    the corresponding bind values from NS.

    Returns (marked_sql, bind_value_list).
    """
    textConv = ArgsConvert(self.sqltp, self.sqlts)
    argConv = ArgsConvert(self.sqlvp, self.sqlvs)
    # first pass: splice literal text variables straight into the SQL
    expanded = textConv.convert(org_sql, NS)
    # second pass: resolve conditional sections
    expanded = ConditionConvert().convert(expanded, NS)
    names = argConv.findAllVariables(expanded)
    placeholders = {n: self.placeHolder(n) for n in names}
    marked = argConv.convert(expanded, placeholders)
    values = []
    for n in names:
        if n == '__mainsql__':
            continue  # marker only — contributes no bind value
        values += self.dataList(n, argConv.getVarValue(n, NS, None))
    return (marked, values)
|
||||
|
||||
async def execute(self, sql, value, callback, **kwargs):
    """
    Run *sql* with bind values from *value*; when *callback* is given,
    invoke callback(record_dict, **kwargs) once per fetched row with
    lower-cased column names.  The cursor is recycled afterwards.
    """
    cur = self.cursor()
    await self.runVarSQL(cur, sql, value)
    if callback is not None:
        fields = [col[0].lower() for col in cur.description]
        row = await cur.fetchone()
        while row is not None:
            record = uDict(dict(zip(fields, row)), coding='utf8')
            callback(record, **kwargs)
            row = await cur.fetchone()
    self.close_cursor()
|
||||
|
||||
async def isOK(self):
    """
    Health-check the underlying connection: True when a trivial query
    succeeds, False otherwise.

    Bug fix: the original called self.execute('select 1 as cnt', {})
    without the required *callback* argument, so it always raised
    TypeError and reported False even on healthy connections.
    """
    try:
        await self.execute('select 1 as cnt', {}, None)
        return True
    except Exception:
        # narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate
        return False
|
||||
|
||||
async def executemany(self, sql, values):
    """Run *sql* once per element of *values* (bulk insert/update);
    the statement is masked once and each element is converted to the
    driver's bind format."""
    cur = self.cursor()
    marked, _ = self.maskingSQL(sql, {})
    params = [self.dataConvert(v) for v in values]
    await cur.executemany(marked, params)
    self.close_cursor()
|
||||
|
||||
async def pivotSQL(self, tablename, rowFields, columnFields, valueFields):
    """
    Build a pivot (cross-tab) SELECT over *tablename*: one output row
    per rowFields combination, one summed column per (valueField,
    distinct columnField value) pair.

    Bug fix: the original invoked the coroutine self.execute() without
    awaiting it, so the distinct column values were never fetched and
    the generated SQL had no pivot columns.  The method is therefore
    now a coroutine itself (its only visible caller, pivot(), awaits it
    below).
    """
    def maxValue(columnFields, valueFields, cfvalues):
        # outer aggregation: sum each generated case-when column
        sql = ''
        for f in valueFields:
            i = 0
            for field in columnFields:
                for v in cfvalues[field]:
                    sql += """
,sum(%s_%d) %s_%d""" % (f, i, f, i)
                    i += 1
        return sql

    def casewhen(columnFields, valueFields, cfvalues):
        # inner projection: one case-when column per distinct value
        sql = ''
        for f in valueFields:
            i = 0
            for field in columnFields:
                for v in cfvalues[field]:
                    if v is None:
                        sql += """,case when %s is null then %s
else 0 end as %s_%d -- %s
""" % (field, f, f, i, v)
                    else:
                        sql += """,case when trim(%s) = trim('%s') then %s
else 0 end as %s_%d -- %s
""" % (field, v, f, f, i, v)
                    i += 1
        return sql

    # collect the distinct values of every pivot column
    cfvalues = {}
    for field in columnFields:
        vals = []
        await self.execute('select distinct %s from %s' % (field, tablename),
                           {}, vals.append)
        cfvalues[field] = [rec[field] for rec in vals]

    sql = """
select """ + ','.join(rowFields)
    sql += maxValue(columnFields, valueFields, cfvalues)
    sql += """ from
(select """ + ','.join(rowFields)
    sql += casewhen(columnFields, valueFields, cfvalues)
    sql += """
from %s)
group by %s""" % (tablename, ','.join(rowFields))
    return sql

async def pivot(self, desc, tablename, rowFields, columnFields, valueFields):
    """Build and run the pivot query, recording the generated SQL in
    desc['sql_string'].

    NOTE(review): as in the original, the collected rows in *ret* are
    discarded and the (None) result of execute() is returned — confirm
    whether callers expect the row list instead.
    """
    sql = await self.pivotSQL(tablename, rowFields, columnFields, valueFields)
    desc['sql_string'] = sql
    ret = []
    return await self.execute(sql, {}, ret.append)
|
||||
|
||||
def isSeelectSQL(self, sql):
    """
    Return True when *sql* is a SELECT statement, ignoring leading
    whitespace.  (The misspelled name is kept for caller compatibility.)

    Bug fix: the original advanced an index past leading whitespace but
    then tested the *unstripped* string, so '  select ...' was reported
    as not-a-select; it also raised IndexError on all-whitespace input.
    """
    return sql.lstrip().lower().startswith('select ')
|
||||
|
||||
async def sqlIterator(self, desc, NS, callback, **kw):
    """
    Run the SQL described by *desc* (either desc['sql_file'] loaded via
    readsql(), or desc['sql_string']).  For SELECT statements with no
    explicit *callback*, records are collected by a RecordHandler.

    Bug fix: the original called the undefined global isSelectSql(),
    raising NameError on every SELECT; the method on self is spelled
    isSeelectSQL.  An unused cursor acquisition was also removed.
    """
    class RecordHandler:
        """Accumulates each record, wrapped in *klass*, into self.recs."""
        def __init__(self, ns, klass):
            self.ns = ns
            self.klass = klass
            self.recs = []

        def handler(self, rec):
            # NOTE(review): klass defaults to the *string* 'DictObject'
            # (desc['classname']); calling it only works when a real
            # class object is supplied — confirm with callers.
            self.recs.append(self.klass(rec))

        def finish(self):
            # NOTE(review): 'name' is undefined here in the original as
            # well; finish() is never called from visible code.
            self.ns[name] = self.recs

    if 'sql_file' in desc:
        sql = readsql(desc['sql_file'])
    else:
        sql = desc['sql_string']
    if self.isSeelectSQL(sql):
        if callback is None:
            klass = desc.get('classname', 'DictObject')
            # klass can never be None here (string default), so the
            # original's dead "callback = None" branch was dropped
            rh = RecordHandler(NS, klass)
            callback = rh.handler
    await self.execute(sql, NS, callback)
|
||||
|
||||
async def sqlExecute(self, desc, NS):
    """
    Execute the statement described by *desc* without fetching results.

    Bug fix: the original referenced an undefined local 'sql' (NameError
    on every call); the statement is now resolved from *desc* the same
    way sqlIterator does.
    """
    if 'sql_file' in desc:
        sql = readsql(desc['sql_file'])
    else:
        sql = desc['sql_string']
    await self.execute(sql, NS, None)
|
||||
|
||||
async def tables(self):
    """Return all table records reported by the dialect's tablesSQL()."""
    found = []
    await self.execute(self.tablesSQL(), {}, found.append)
    return found
|
||||
|
||||
def indexesSQL(self, tablename):
    """
    Return the dialect-specific SQL listing index information for
    *tablename* — records of {index_name, index_type, table_name,
    column_name} — or None when the dialect provides none.

    This base implementation supports no dialect and returns None.
    """
    return None
|
||||
|
||||
async def indexes(self, tablename=None):
    """
    Return index records for *tablename* (or all tables when None);
    an empty list when the dialect implements no indexesSQL().

    Bug fix: *tablename* defaults to None but was unconditionally
    lower()-ed, raising AttributeError for the default call.
    """
    sqlstring = self.indexesSQL(tablename.lower() if tablename else None)
    if sqlstring is None:
        return []
    recs = []
    await self.execute(sqlstring, {}, recs.append)
    return recs
|
||||
|
||||
async def fields(self, tablename=None):
    """Return column records for *tablename*, with the db type mapped
    to the model type ('unknown' when unmapped) and names lower-cased."""
    raw = []
    await self.execute(self.fieldsSQL(tablename), {}, raw.append)
    result = []
    for rec in raw:
        rec['type'] = self.db2modelTypeMapping.get(rec['type'].lower(), 'unknown')
        rec['name'] = rec['name'].lower()
        result.append(rec)
    return result
|
||||
|
||||
async def primary(self, tablename):
    """Return the primary-key column records of *tablename*."""
    found = []
    await self.execute(self.pkSQL(tablename), {}, found.append)
    return found
|
||||
|
||||
async def fkeys(self, tablename):
    """Return the foreign-key records of *tablename*."""
    found = []
    await self.execute(self.fkSQL(tablename), {}, found.append)
    return found
|
||||
|
||||
async def createTable(self, tabledesc):
    """Render the dialect's DDL template with *tabledesc* and execute
    the resulting statement."""
    engine = MyTemplateEngine([], 'utf8', 'utf8')
    ddl = engine.renders(self.ddl_template, tabledesc)
    return await self.sqlExecute({"sql_string": ddl}, {})
|
||||
|
||||
async def getTableDesc(self, tablename):
    """
    Assemble a full table description: summary record (with primary-key
    columns attached), field list, and index definitions under
    'validation'.

    Bug fixes vs the original:
    * 'primary' was assigned on the summary *list* itself (TypeError);
      it now goes on the first summary record.
    * indexes() returns a plain list, not an async iterator, so
      'async for' raised TypeError; a normal loop over the awaited
      result is used.
    * the columns of a multi-column index were reset on every record of
      the same index, keeping only the last column; they now accumulate.
    """
    desc = {}
    summary = [t for t in await self.tables() if tablename.lower() == t.name]
    pk = [p.field_name for p in await self.primary(tablename)]
    if summary:
        summary[0]['primary'] = pk
    desc['summary'] = summary
    desc['fields'] = await self.fields(tablename=tablename)
    desc['validation'] = []
    idx = {}
    for rec in await self.indexes(tablename=tablename):
        if rec.index_name != idx.get('name', None):
            # new index starts: flush the previous one, if any
            if idx != {}:
                desc['validation'].append(idx)
            idx = {'fields': [], 'name': rec.index_name, 'oper': 'idx'}
        idx['fields'].append(rec.field_name)
    if idx != {}:
        desc['validation'].append(idx)
    return desc
|
||||
|
||||
|
||||
|
BIN
dist/sqlor-0.0.1-py3.7.egg
vendored
Normal file
BIN
dist/sqlor-0.0.1-py3.7.egg
vendored
Normal file
Binary file not shown.
42
setup.py
Executable file
42
setup.py
Executable file
@ -0,0 +1,42 @@
|
||||
# -*- coding: utf-8 -*-

"""Packaging script for sqlor.

usage:
    python setup.py bdist_wininst   # build a Windows installer
    python setup.py bdist_egg       # build an egg
"""

# Bug fix: the original imported setup from distutils.core and then
# immediately shadowed it with the setuptools import; the redundant
# (and deprecated) distutils import is dropped.
from setuptools import setup, find_packages

# Release information
version = "0.0.1"
description = "sqlor"
author = "yumoqing"
email = "yumoqing@gmail.com"

packages = find_packages()
package_data = {}

setup(
    name="sqlor",
    version=version,
    description=description,
    author=author,
    author_email=email,
    install_requires=[
    ],
    packages=packages,
    package_data=package_data,
    keywords=[
    ],
    classifiers=[
        'Development Status :: 1 - Alpha',
        'Operating System :: OS Independent',
        'Programming Language :: Python3.5',
        'Topic :: SQL execute :: Libraries :: Python Modules',
    ],
    platforms='any',
)
|
14
sqlor.egg-info/PKG-INFO
Normal file
14
sqlor.egg-info/PKG-INFO
Normal file
@ -0,0 +1,14 @@
|
||||
Metadata-Version: 1.1
|
||||
Name: sqlor
|
||||
Version: 0.0.1
|
||||
Summary: sqlor
|
||||
Home-page: UNKNOWN
|
||||
Author: yumoqing
|
||||
Author-email: yumoqing@gmail.com
|
||||
License: UNKNOWN
|
||||
Description: UNKNOWN
|
||||
Platform: any
|
||||
Classifier: Development Status :: 1 - Alpha
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python3.5
|
||||
Classifier: Topic :: SQL execute :: Libraries :: Python Modules
|
21
sqlor.egg-info/SOURCES.txt
Normal file
21
sqlor.egg-info/SOURCES.txt
Normal file
@ -0,0 +1,21 @@
|
||||
README.md
|
||||
setup.py
|
||||
sqlor/__init__.py
|
||||
sqlor/aiomysqlor.py
|
||||
sqlor/aiopostgresqlor.py
|
||||
sqlor/dbpools.py
|
||||
sqlor/ddl_template_mysql.py
|
||||
sqlor/ddl_template_oracle.py
|
||||
sqlor/ddl_template_postgresql.py
|
||||
sqlor/ddl_template_sqlserver.py
|
||||
sqlor/mssqlor.py
|
||||
sqlor/mysqlor.py
|
||||
sqlor/oracleor.py
|
||||
sqlor/postgresqlor.py
|
||||
sqlor/sor.py
|
||||
sqlor/sqlite3or.py
|
||||
sqlor.egg-info/PKG-INFO
|
||||
sqlor.egg-info/SOURCES.txt
|
||||
sqlor.egg-info/dependency_links.txt
|
||||
sqlor.egg-info/top_level.txt
|
||||
test/test.py
|
1
sqlor.egg-info/dependency_links.txt
Normal file
1
sqlor.egg-info/dependency_links.txt
Normal file
@ -0,0 +1 @@
|
||||
|
1
sqlor.egg-info/top_level.txt
Normal file
1
sqlor.egg-info/top_level.txt
Normal file
@ -0,0 +1 @@
|
||||
sqlor
|
BIN
sqlor/.dbpools.py.swp
Normal file
BIN
sqlor/.dbpools.py.swp
Normal file
Binary file not shown.
0
sqlor/__init__.py
Normal file
0
sqlor/__init__.py
Normal file
7
sqlor/aiomysqlor.py
Normal file
7
sqlor/aiomysqlor.py
Normal file
@ -0,0 +1,7 @@
|
||||
from .mysqlor import MySqlor
|
||||
|
||||
class AioMysqlor(MySqlor):
    """MySqlor variant selected when the configured driver is 'aiomysql'."""

    @classmethod
    def isMe(self, name):
        """Return True when *name* identifies the aiomysql driver."""
        return name == 'aiomysql'
|
||||
|
8
sqlor/aiopostgresqlor.py
Normal file
8
sqlor/aiopostgresqlor.py
Normal file
@ -0,0 +1,8 @@
|
||||
|
||||
from .postgresqlor import PostgreSQLor
|
||||
class AioPostgresqlor(PostgreSQLor):
    """PostgreSQLor variant selected when the configured driver is 'aiopg'."""

    @classmethod
    def isMe(self, name):
        """Return True when *name* identifies the aiopg driver."""
        return name == 'aiopg'
|
||||
|
||||
|
277
sqlor/dbpools.py
Normal file
277
sqlor/dbpools.py
Normal file
@ -0,0 +1,277 @@
|
||||
|
||||
import asyncio
|
||||
from queue import Queue
|
||||
from functools import wraps
|
||||
import codecs
|
||||
|
||||
from appPublic.myImport import myImport
|
||||
from appPublic.dictObject import DictObject
|
||||
from appPublic.Singleton import SingletonDecorator
|
||||
from appPublic.myjson import loadf
|
||||
from appPublic.jsonConfig import getConfig
|
||||
|
||||
from .sor import SQLor
|
||||
from .mssqlor import MsSqlor
|
||||
from .oracleor import Oracleor
|
||||
from .sqlite3or import SQLite3or
|
||||
from .mysqlor import MySqlor
|
||||
from .aiomysqlor import AioMysqlor
|
||||
from .aiopostgresqlor import AioPostgresqlor
|
||||
|
||||
|
||||
def sqlorFactory(dbdesc):
    """
    Instantiate the SQLor subclass whose isMe() accepts
    dbdesc['driver']; fall back to the generic SQLor when no subclass
    claims the driver name.
    """
    driver = dbdesc.get('driver', dbdesc)

    def locate(name, base):
        # depth-first search of the subclass tree for a matching dialect
        for sub in base.__subclasses__():
            if sub.isMe(name):
                return sub
            deeper = locate(name, sub)
            if deeper is not None:
                return deeper
        return None

    klass = locate(driver, SQLor)
    if klass is None:
        return SQLor(dbdesc=dbdesc)
    return klass(dbdesc=dbdesc)
|
||||
|
||||
def sqlorFromFile(dbdef_file, coding='utf8'):
    """Load a dbdesc definition from the JSON file *dbdef_file* and
    build the matching sqlor.

    NOTE(review): *coding* is accepted but unused — presumably loadf()
    handles encoding itself; confirm before removing the parameter.
    """
    return sqlorFactory(loadf(dbdef_file))
|
||||
|
||||
class LifeConnect:
    """A DB connection wrapper that lazily creates the underlying
    connection, recycles it after *use_max* uses, and transparently
    replaces connections that fail a health probe.

    The class-level registry maps raw connection objects back to their
    wrapper so free() can be called with just the connection.
    """
    __conndict = {}

    def __init__(self, connfunc, kw, use_max=1000, async_mode=False):
        self.connfunc = connfunc      # driver connect callable
        self.async_mode = async_mode  # True when connfunc is a coroutine
        self.use_max = use_max        # recycle the connection after this many uses
        self.kw = kw                  # kwargs passed to connfunc
        self.conn = None              # raw connection, created lazily

    def print(self):
        print(self.use_max)
        print(self.conn)

    async def _mkconn(self):
        """(Re)create the raw connection and register it."""
        if self.async_mode:
            self.conn = await self.connfunc(**self.kw)
        else:
            self.conn = self.connfunc(**self.kw)
        self.use_cnt = 0
        self.__conndict[self.conn] = self

    async def use(self):
        """Return a healthy raw connection, creating or replacing one
        as needed.

        Bug fix: after replacing a dead connection the original fell off
        the end and returned None; the new connection is now returned.
        """
        if self.conn is None:
            await self._mkconn()
        conn = self.conn
        if await self.testok():
            return conn
        del self.__conndict[conn]
        await self._mkconn()
        return self.conn

    @classmethod
    async def free(cls, conn):
        """Account one use of *conn*; recycle it once it reaches
        use_max uses.  Returns the owning wrapper.

        Bug fixes: _mkconn was misspelled _mkcomm (AttributeError on
        recycle); close() was awaited even for synchronous drivers; the
        stale registry entry for the closed connection is now removed.
        """
        lc = cls.__conndict[conn]
        lc.use_cnt = lc.use_cnt + 1
        if lc.use_cnt >= lc.use_max:
            if lc.async_mode:
                await lc.conn.close()
            else:
                lc.conn.close()
            del cls.__conndict[lc.conn]
            await lc._mkconn()
        return lc

    async def testok(self):
        """Probe the connection with a trivial query; True when usable."""
        if self.async_mode:
            async with self.conn.cursor() as cur:
                try:
                    await cur.execute('select 1 as cnt')
                    return True
                except Exception:
                    return False
        else:
            cur = self.conn.cursor()
            try:
                cur.execute('select 1 as cnt')
                r = cur.fetchall()
                return True
            except Exception:
                return False
            finally:
                cur.close()
|
||||
|
||||
class ConnectionPool(object):
    """Fixed-size pool of LifeConnect wrappers for one database
    description (the raw connections themselves are created lazily)."""

    def __init__(self, dbdesc, loop):
        self.dbdesc = dbdesc
        self.async_mode = dbdesc.get('async_mode', False)
        self.loop = loop
        self.driver = myImport(self.dbdesc['driver'])  # DBAPI2 module
        self.maxconn = dbdesc.get('maxconn', 5)
        self.maxuse = dbdesc.get('maxuse', 1000)
        self._pool = Queue(self.maxconn)
        self._fillPool()
        self.using = []    # wrappers currently handed out
        self.use_cnt = 0
        self.max_use = 1000

    def _fillPool(self):
        # pre-create maxconn wrappers; the original's dead `i = i + 1`
        # (the loop variable is managed by range) was removed
        for _ in range(self.maxconn):
            self.connect()

    def connect(self):
        """Create one LifeConnect wrapper, queue it, and return it."""
        lc = LifeConnect(self.driver.connect, self.dbdesc['kwargs'],
                         use_max=self.maxuse, async_mode=self.async_mode)
        self._pool.put(lc)
        return lc

    def isEmpty(self):
        return self._pool.empty()

    def isFull(self):
        return self._pool.full()

    async def aquire(self):
        """Take a wrapper from the pool and return its raw connection.

        NOTE(review): Queue.get() blocks the event loop when the pool is
        exhausted — confirm this is acceptable for the design.
        """
        lc = self._pool.get()
        self.using.append(lc)
        return await lc.use()

    async def release(self, conn):
        """Return *conn*'s wrapper to the pool."""
        lc = await LifeConnect.free(conn)
        self.using = [w for w in self.using if w != lc]
        self._pool.put(lc)
|
||||
|
||||
@SingletonDecorator
class DBPools:
    """Registry of named database connection pools plus decorator
    helpers that acquire/release a sqlor around a SQL-describing
    function."""

    def __init__(self, databases={}, max_connect=10, loop=None):
        # NOTE(review): mutable default kept for interface
        # compatibility; the singleton decorator means only one
        # instance ever exists anyway.
        if loop is None:
            loop = asyncio.get_event_loop()
        self.loop = loop
        self._cpools = {}         # dbname -> ConnectionPool
        self.databases = databases

    def addDatabase(self, name, desc):
        """Register (or replace) the description of database *name*."""
        self.databases[name] = desc

    async def getSqlor(self, name):
        """Build a sqlor bound to a pooled connection of database *name*."""
        desc = self.databases.get(name)
        sor = sqlorFactory(desc)
        sor.name = name
        async_mode, conn, cur = await self._aquireConn(name)
        sor.setCursor(async_mode, conn, cur)
        return sor

    async def freeSqlor(self, sor):
        """Release the connection/cursor held by *sor*."""
        await self._releaseConn(sor.name, sor.conn, sor.cur)

    async def _aquireConn(self, dbname):
        """Return (async_mode, connection, cursor) for *dbname*,
        creating its pool on first use."""
        pool = self._cpools.get(dbname)
        if pool is None:
            pool = ConnectionPool(self.databases.get(dbname), self.loop)
            self._cpools[dbname] = pool
        conn = await pool.aquire()
        if self.isAsyncDriver(dbname):
            cur = await conn.cursor()
        else:
            cur = conn.cursor()
        return self.isAsyncDriver(dbname), conn, cur

    def isAsyncDriver(self, dbname):
        """True when *dbname* is configured with async_mode."""
        return self.databases[dbname].get('async_mode', False)

    async def _releaseConn(self, dbname, conn, cur):
        """Close *cur* and hand *conn* back to its pool."""
        if self.isAsyncDriver(dbname):
            await cur.close()
        else:
            try:
                cur.fetchall()  # drain any unread result set first
            except Exception:
                pass
            cur.close()
        pool = self._cpools.get(dbname)
        if pool is None:
            raise Exception('database (%s) not connected' % dbname)
        await pool.release(conn)

    def inSqlor(self, func):
        """Decorator: replace the caller's placeholder first argument
        with a freshly acquired sqlor, releasing it afterwards."""
        @wraps(func)
        async def wrap_func(sor, dbname, *args, **kw):
            sor = await self.getSqlor(dbname)
            try:
                return await func(sor, dbname, *args, **kw)
            except Exception as e:
                print('error', sor)
                raise
            finally:
                await self.freeSqlor(sor)
        return wrap_func

    def runSQL(self, func):
        """Decorator: run the SQL description returned by *func*.

        Bug fix: the original discarded the query result; it is now
        returned to the caller.
        """
        @wraps(func)
        async def wrap_func(dbname, NS, callback, **kw):
            sor = await self.getSqlor(dbname)
            desc = func(dbname, NS, callback, **kw)
            try:
                return await sor.runSQL(desc, NS, callback, **kw)
            except Exception as e:
                print('error:', e)
                raise
            finally:
                await self.freeSqlor(sor)
        return wrap_func

    def runSQLPaging(self, func):
        """Decorator: run *func*'s description with paging and return
        {'total': row_count, 'rows': page_records}.

        Bug fix: the sqlor is now released even when the queries raise.
        """
        @wraps(func)
        async def wrap_func(dbname, NS, **kw):
            sor = await self.getSqlor(dbname)
            try:
                desc = func(dbname, NS, **kw)
                total = await sor.record_count(desc, NS)
                recs = await sor.pagingdata(desc, NS)
            finally:
                await self.freeSqlor(sor)
            print(len(recs), 'records return')
            return {
                "total": total,
                "rows": recs
            }
        return wrap_func

    async def runSQLResultFields(self, dbname, NS, **kwargs):
        """Run a statement and return its result-column metadata as
        [{'name': ..., 'type': ...}, ...].

        Bug fix: the original referenced undefined names (func, kw),
        never awaited getSqlor(), and had unreachable cleanup after the
        return.  NOTE(review): the SQL description is now taken from
        kwargs['desc'] (falling back to NS) — confirm against callers.
        """
        sor = await self.getSqlor(dbname)
        try:
            desc = kwargs.get('desc', NS)
            await sor.sqlIterator(desc, NS, None)
            return [{'name': i[0], 'type': i[1]} for i in sor.cur.description]
        finally:
            await self.freeSqlor(sor)

    async def getTables(self, dbname):
        """List the tables of *dbname*."""
        @self.inSqlor
        async def _getTables(sor, dbname):
            return await sor.tables()
        return await _getTables(None, dbname)

    async def getTableFields(self, dbname, tblname):
        """List the columns of *tblname* in *dbname*."""
        @self.inSqlor
        async def _getTableFields(sor, dbname, tblname):
            return await sor.fields(tblname)
        return await _getTableFields(None, dbname, tblname)

    async def getTablePrimaryKey(self, dbname, tblname):
        """List the primary-key columns of *tblname* in *dbname*."""
        @self.inSqlor
        async def _getTablePrimaryKey(sor, dbname, tblname):
            return await sor.primary(tblname)
        return await _getTablePrimaryKey(None, dbname, tblname)

    async def getTableForignKeys(self, dbname, tblname):
        """List the foreign keys of *tblname* in *dbname*."""
        @self.inSqlor
        async def _getTableForignKeys(sor, dbname, tblname):
            return await sor.fkeys(tblname)
        return await _getTableForignKeys(None, dbname, tblname)
|
||||
|
55
sqlor/ddl_template_mysql.py
Normal file
55
sqlor/ddl_template_mysql.py
Normal file
@ -0,0 +1,55 @@
|
||||
mysql_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
|
||||
{%- if type=='str' -%}
|
||||
VARCHAR({{len}})
|
||||
{%- elif type=='char' -%}
|
||||
CHAR({{len}})
|
||||
{%- elif type=='long' or type=='int' or type=='short' -%}
|
||||
int
|
||||
{%- elif type=='long' -%}
|
||||
bigint
|
||||
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
|
||||
double({{len}},{{dec}})
|
||||
{%- elif type=='date' -%}
|
||||
date
|
||||
{%- elif type=='time' -%}
|
||||
time
|
||||
{%- elif type=='datetime' -%}
|
||||
datetime
|
||||
{%- elif type=='timestamp' -%}
|
||||
TIMESTAMP
|
||||
{%- elif type=='text' -%}
|
||||
longtext
|
||||
{%- elif type=='bin' -%}
|
||||
longblob
|
||||
{%- else -%}
|
||||
{{type}}
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
{% macro nullStr(nullable) %}
|
||||
{%- if nullable=='no' -%}
|
||||
NOT NULL
|
||||
{%- endif -%}
|
||||
{% endmacro %}
|
||||
{% macro primary() %}
|
||||
{% if len(','.join(summary[0].primary))>0 %}
|
||||
,primary key({{','.join(summary[0].primary)}})
|
||||
{% endif %}
|
||||
{% endmacro %}
|
||||
drop table {{summary[0].name}};
|
||||
CREATE TABLE {{summary[0].name}}
|
||||
(
|
||||
{% for field in fields %}
|
||||
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}} {%if field.title -%} comment '{{field.title}}'{%- endif %}{%- if not loop.last -%},{%- endif -%}
|
||||
{% endfor %}
|
||||
{{primary()}}
|
||||
)
|
||||
engine=innodb
|
||||
default charset=utf8
|
||||
{% if summary[0].title %}comment '{{summary[0].title}}'{% endif %}
|
||||
;
|
||||
{% for v in validation %}
|
||||
{% if v.oper=='idx' %}
|
||||
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
|
||||
{% endif %}
|
||||
{%- endfor -%}
|
||||
"""
|
49
sqlor/ddl_template_oracle.py
Normal file
49
sqlor/ddl_template_oracle.py
Normal file
@ -0,0 +1,49 @@
|
||||
oracle_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
|
||||
{%- if type=='str' -%}
|
||||
VARCHAR2({{len}})
|
||||
{%- elif type=='char' -%}
|
||||
CHAR({{len}})
|
||||
{%- elif type=='long' or type=='int' or type=='short' -%}
|
||||
NUMBER
|
||||
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
|
||||
NUMBER({{len}},{{dec}})
|
||||
{%- elif type=='date' or type=='time' -%}
|
||||
DATE
|
||||
{%- elif type=='timestamp' -%}
|
||||
TIMESTAMP
|
||||
{%- elif type=='text' -%}
|
||||
CLOB
|
||||
{%- elif type=='bin' -%}
|
||||
BLOB
|
||||
{%- else -%}
|
||||
{{type}}
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
{% macro nullStr(nullable) %}
|
||||
{%- if nullable=='no' -%}
|
||||
NOT NULL
|
||||
{%- endif -%}
|
||||
{% endmacro %}
|
||||
{% macro primary() %}
|
||||
{% if len(','.join(summary[0].primary))>0 %}
|
||||
,primary key({{','.join(summary[0].primary)}})
|
||||
{% endif %}
|
||||
{% endmacro %}
|
||||
drop table {{summary[0].name}};
|
||||
CREATE TABLE {{summary[0].name}}
|
||||
(
|
||||
{% for field in fields %}
|
||||
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}}{%- if not loop.last -%},{%- endif -%}
|
||||
{% endfor %}
|
||||
{{primary()}}
|
||||
);
|
||||
{% for v in validation %}
|
||||
{% if v.oper=='idx' %}
|
||||
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
|
||||
{% endif %}
|
||||
{%- endfor -%}
|
||||
COMMENT ON TABLE {{summary[0].name}} IS '{{summary[0].title}}';
|
||||
{% for field in fields %}
|
||||
COMMENT ON COLUMN {{summary[0].name}}.{{field.name}} is '{{field.title}}';
|
||||
{% endfor %}
|
||||
"""
|
45
sqlor/ddl_template_postgresql.py
Normal file
45
sqlor/ddl_template_postgresql.py
Normal file
@ -0,0 +1,45 @@
|
||||
postgresql_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
|
||||
{%- if type=='str' -%}
|
||||
VARCHAR({{len}})
|
||||
{%- elif type=='char' -%}
|
||||
CHAR({{len}})
|
||||
{%- elif type=='long' or type=='int' or type=='short' -%}
|
||||
INTEGER
|
||||
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
|
||||
NUMERIC({{len}},{{dec}})
|
||||
{%- elif type=='date' -%}
|
||||
DATE
|
||||
{%- elif type=='time' -%}
|
||||
TIME
|
||||
{%- elif type=='timestamp' -%}
|
||||
TIMESTAMP
|
||||
{%- else -%}
|
||||
{{type}}
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
{% macro nullStr(nullable) %}
|
||||
{%- if nullable=='no' -%}
|
||||
NOT NULL
|
||||
{%- endif -%}
|
||||
{% endmacro %}
|
||||
{% macro primary() %}
|
||||
primary key({{','.join(summary[0].primary)}})
|
||||
{% endmacro %}
|
||||
DROP TABLE IF EXISTS {{summary[0].name}};
|
||||
CREATE TABLE {{summary[0].name}}
|
||||
(
|
||||
{% for field in fields %}
|
||||
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}},
|
||||
{% endfor %}
|
||||
{{primary()}}
|
||||
);
|
||||
{% for v in validation %}
|
||||
{% if v.oper=='idx' %}
|
||||
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
|
||||
{% endif %}
|
||||
{%- endfor -%}
|
||||
COMMENT ON TABLE {{summary[0].name}} IS '{{summary[0].title.decode('utf8')}}';
|
||||
{% for field in fields %}
|
||||
COMMENT ON COLUMN {{summary[0].name}}.{{field.name}} is '{{field.title.decode('utf8')}}';
|
||||
{% endfor %}
|
||||
"""
|
51
sqlor/ddl_template_sqlserver.py
Normal file
51
sqlor/ddl_template_sqlserver.py
Normal file
@ -0,0 +1,51 @@
|
||||
sqlserver_ddl_tmpl = """{% macro typeStr(type,len,dec) %}
|
||||
{%- if type=='str' -%}
|
||||
NVARCHAR({{len}})
|
||||
{%- elif type=='char' -%}
|
||||
CHAR({{len}})
|
||||
{%- elif type=='long' or type=='int' or type=='short' -%}
|
||||
NUMERIC
|
||||
{%- elif type=='float' or type=='double' or type=='ddouble' -%}
|
||||
numeric({{len}},{{dec}})
|
||||
{%- elif type=='date' or type=='time' -%}
|
||||
DATE
|
||||
{%- elif type=='timestamp' -%}
|
||||
TIMESTAMP
|
||||
{%- elif type=='text' -%}
|
||||
NVARCHAR(MAX)
|
||||
{%- elif type=='bin' -%}
|
||||
IMAGE
|
||||
{%- else -%}
|
||||
{{type}}
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
{% macro nullStr(nullable) %}
|
||||
{%- if nullable=='no' -%}
|
||||
NOT NULL
|
||||
{%- endif -%}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro primary() %}
|
||||
{% if len(','.join(summary[0].primary))>0 %}
|
||||
,primary key({{','.join(summary[0].primary)}})
|
||||
{% endif %}
|
||||
{% endmacro %}
|
||||
|
||||
drop table dbo.{{summary[0].name}};
|
||||
CREATE TABLE dbo.{{summary[0].name}}
|
||||
(
|
||||
{% for field in fields %}
|
||||
{{field.name}} {{typeStr(field.type,field.length,field.dec)}} {{nullStr(field.nullable)}}{%- if not loop.last -%},{%- endif -%}
|
||||
{% endfor %}
|
||||
{{primary()}}
|
||||
)
|
||||
{% for v in validation %}
|
||||
{% if v.oper=='idx' %}
|
||||
CREATE {% if v.value.idxtype=='unique' %}UNIQUE{% endif %} INDEX {{summary[0].name}}_{{v.name}} ON {{summary[0].name}}({{",".join(v.value.fields)}});
|
||||
{% endif %}
|
||||
{%- endfor -%}
|
||||
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'{{summary[0].title}}' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'{{summary[0].name}}'
|
||||
{% for field in fields %}
|
||||
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'{{field.title}}' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'{{summary[0].name}}', @level2type=N'COLUMN',@level2name=N'{{field.name}}'
|
||||
{% endfor %}
|
||||
"""
|
175
sqlor/mssqlor.py
Normal file
175
sqlor/mssqlor.py
Normal file
@ -0,0 +1,175 @@
|
||||
# -*- coding:utf8 -*-
|
||||
from .sor import SQLor
|
||||
from .ddl_template_sqlserver import sqlserver_ddl_tmpl
|
||||
|
||||
class MsSqlor(SQLor):
|
||||
ddl_template = sqlserver_ddl_tmpl
|
||||
db2modelTypeMapping = {
|
||||
'bit':'short',
|
||||
'tinyint':'short',
|
||||
'date':'date',
|
||||
'bigint':'long',
|
||||
'smallint':'short',
|
||||
'int':'long',
|
||||
'decimal':'float',
|
||||
'numeric':'float',
|
||||
'smallmoney':'float',
|
||||
'money':'float',
|
||||
'real':'float',
|
||||
'float':'float',
|
||||
'datetime':'date',
|
||||
'timestamp':'timestamp',
|
||||
'uniqueidentifier':'timestamp',
|
||||
'char':'char',
|
||||
'varchar':'str',
|
||||
'text':'text',
|
||||
'nchar':'str',
|
||||
'nvarchar':'str',
|
||||
'ntext':'text',
|
||||
'binary':'str',
|
||||
'varbinary':'str',
|
||||
'image':'file',
|
||||
}
|
||||
model2dbTypemapping = {
|
||||
'date':'datetime',
|
||||
'time':'date',
|
||||
'timestamp':'timestamp',
|
||||
'str':'nvarchar',
|
||||
'char':'char',
|
||||
'short':'int',
|
||||
'long':'numeric',
|
||||
'float':'numeric',
|
||||
'text':'ntext',
|
||||
'file':'image',
|
||||
}
|
||||
@classmethod
|
||||
def isMe(self,name):
|
||||
return name=='pymssql'
|
||||
|
||||
def grammar(self):
|
||||
return {
|
||||
'select':select_stmt,
|
||||
}
|
||||
|
||||
def placeHolder(self,varname):
|
||||
if varname=='__mainsql__' :
|
||||
return ''
|
||||
return '%s'
|
||||
|
||||
def dataConvert(self,dataList):
|
||||
if type(dataList) == type({}):
|
||||
d = [ i for i in dataList.values()]
|
||||
else:
|
||||
d = [ i['value'] for i in dataList]
|
||||
return tuple(d)
|
||||
|
||||
def pagingSQLmodel(self):
|
||||
return u"""select *
|
||||
from (
|
||||
select row_number() over(order by $[sort]$ $[order]$) as _row_id,page_s.*
|
||||
from (%s) page_s
|
||||
) A
|
||||
where _row_id >= $[from_line]$ and _row_id < $[end_line]$"""
|
||||
|
||||
def tablesSQL(self):
|
||||
sqlcmd = u"""select
|
||||
lower(d.name) as name,
|
||||
lower(cast(Isnull(f.VALUE,d.name) as nvarchar )) title
|
||||
from sysobjects d
|
||||
left join sys.extended_properties f on d.id = f.major_id and f.minor_id = 0
|
||||
where d.xtype = 'U'"""
|
||||
return sqlcmd
|
||||
|
||||
def fieldsSQL(self,tablename=None):
|
||||
sqlcmd=u"""SELECT name = lower(a.name)
|
||||
,type = b.name
|
||||
,length = Columnproperty(a.id,a.name,'PRECISION')
|
||||
,dec = Isnull(Columnproperty(a.id,a.name,'Scale'),null)
|
||||
,nullable = CASE
|
||||
WHEN a.isnullable = 1 THEN 'yes'
|
||||
ELSE 'no'
|
||||
END
|
||||
,title = lower(cast(Isnull(g.[value],a.name) as nvarchar) )
|
||||
,table_name = lower(d.name)
|
||||
FROM syscolumns a
|
||||
LEFT JOIN systypes b
|
||||
ON a.xusertype = b.xusertype
|
||||
INNER JOIN sysobjects d
|
||||
ON (a.id = d.id)
|
||||
AND (d.xtype = 'U')
|
||||
AND (d.name <> 'dtproperties')
|
||||
INNER JOIN sys.all_objects c
|
||||
ON d.id=c.object_id
|
||||
AND schema_name(schema_id)='dbo'
|
||||
LEFT JOIN sys.extended_properties g
|
||||
ON (a.id = g.major_id)
|
||||
AND (a.colid = g.minor_id)
|
||||
LEFT JOIN sys.extended_properties f
|
||||
ON (d.id = f.major_id)
|
||||
AND (f.minor_id = 0)"""
|
||||
if tablename is not None:
|
||||
sqlcmd = sqlcmd + """ where lower(d.name)='%s'
|
||||
ORDER BY a.id,a.colorder""" % tablename.lower()
|
||||
else:
|
||||
sqlcmd = sqlcmd + """ ORDER BY a.id,a.colorder"""
|
||||
return sqlcmd
|
||||
|
||||
def fkSQL(self,tablename=None):
|
||||
sqlcmd = u"""select
|
||||
MainCol.name AS field -- [主表列名]
|
||||
,oSub.name AS fk_table -- [子表名称],
|
||||
,SubCol.name AS fk_field -- [子表列名],
|
||||
from
|
||||
sys.foreign_keys fk
|
||||
JOIN sys.all_objects oSub
|
||||
ON (fk.parent_object_id = oSub.object_id)
|
||||
JOIN sys.all_objects oMain
|
||||
ON (fk.referenced_object_id = oMain.object_id)
|
||||
JOIN sys.foreign_key_columns fkCols
|
||||
ON (fk.object_id = fkCols.constraint_object_id)
|
||||
JOIN sys.columns SubCol
|
||||
ON (oSub.object_id = SubCol.object_id
|
||||
AND fkCols.parent_column_id = SubCol.column_id)
|
||||
JOIN sys.columns MainCol
|
||||
ON (oMain.object_id = MainCol.object_id
|
||||
AND fkCols.referenced_column_id = MainCol.column_id)"""
|
||||
if tablename is not None:
|
||||
sqlcmd = sqlcmd + """ where lower(oMain.name) = '%s'""" % tablename.lower()
|
||||
|
||||
return sqlcmd
|
||||
|
||||
def pkSQL(self,tablename=None):
|
||||
sqlcmd = u"""select
|
||||
lower(a.table_name) as table_name,
|
||||
lower(b.column_name) as field_name
|
||||
from information_schema.table_constraints a
|
||||
inner join information_schema.constraint_column_usage b
|
||||
on a.constraint_name = b.constraint_name
|
||||
where a.constraint_type = 'PRIMARY KEY'"""
|
||||
if tablename is not None:
|
||||
sqlcmd = sqlcmd + """ and lower(a.table_name) = '%s'""" % tablename.lower()
|
||||
return sqlcmd
|
||||
|
||||
def indexesSQL(self,tablename=None):
    """Build the query listing index name/type/table/column for user tables
    (sys.objects type 'U'); KC join distinguishes primary/unique indexes."""
    parts = ["""SELECT
    index_name=lower(IDX.Name),
    index_type=case when KC.type_desc is null then 'primary' WHEN IDX.is_unique=1 THEN 'unique' ELSE 'ununique' END,
    table_name=lower(O.Name),
    column_name=lower(C.Name)
FROM sys.indexes IDX
    INNER JOIN sys.index_columns IDXC
        ON IDX.[object_id]=IDXC.[object_id]
        AND IDX.index_id=IDXC.index_id
    LEFT JOIN sys.key_constraints KC
        ON IDX.[object_id]=KC.[parent_object_id]
        AND IDX.index_id=KC.unique_index_id
    INNER JOIN sys.objects O
        ON O.[object_id]=IDX.[object_id]
    INNER JOIN sys.columns C
        ON O.[object_id]=C.[object_id]
        AND O.type='U'
        AND O.is_ms_shipped=0
        AND IDXC.Column_id=C.Column_id"""]
    if tablename is not None:
        parts.append(""" where lower(O.name)='%s'""" % tablename.lower())
    return ''.join(parts)
|
179
sqlor/mysqlor.py
Normal file
179
sqlor/mysqlor.py
Normal file
@ -0,0 +1,179 @@
|
||||
# -*- coding:utf8 -*-
|
||||
from mysql import connector
|
||||
from appPublic.argsConvert import ArgsConvert,ConditionConvert
|
||||
|
||||
from .sor import SQLor
|
||||
from .ddl_template_mysql import mysql_ddl_tmpl
|
||||
class MySqlor(SQLor):
    """SQLor implementation for MySQL via the mysql.connector DBAPI2 driver."""

    # DDL template rendered by SQLor.createTable().
    ddl_template = mysql_ddl_tmpl

    # MySQL column type -> abstract model type (consumed by SQLor.fields()).
    # Fix: removed duplicated 'mediumblob'/'mediumtext' entries (same values).
    db2modelTypeMapping = {
        'tinyint':'short',
        'smallint':'short',
        'mediumint':'long',
        'int':'long',
        'bigint':'long',
        'decimal':'float',
        'double':'float',
        'float':'float',
        'char':'char',
        'varchar':'str',
        'tinyblob':'text',
        'tinytext':'text',
        'mediumblob':'text',
        'mediumtext':'text',
        'blob':'text',
        'text':'text',
        'longblob':'bin',
        'longtext':'text',
        'barbinary':'text',   # original (typo?) key kept for compatibility
        'varbinary':'text',   # corrected spelling added alongside
        'binary':'text',
        'date':'date',
        'time':'time',
        'datetime':'datetime',
        'timestamp':'datestamp',
        'year':'short',
    }
    # Abstract model type -> MySQL column type (used when emitting DDL).
    model2dbTypemapping = {
        'date':'date',
        'time':'date',
        'timestamp':'timestamp',
        'str':'varchar',
        'char':'char',
        'short':'int',
        'long':'bigint',
        'float':'double',
        'text':'longtext',
        'bin':'longblob',
        'file':'longblob',
    }

    @classmethod
    def isMe(cls,name):
        """Return True when *name* identifies the mysql.connector driver."""
        return name=='mysql.connector'

    def grammar(self):
        # NOTE(review): select_stmt is not defined anywhere in this module, so
        # calling grammar() raises NameError — confirm where select_stmt is
        # supposed to come from before relying on this method.
        return {
            'select':select_stmt,
        }

    def _opendb(self):
        """Open a connection using the keyword arguments from the db description."""
        self.conn = connector.connect(**self.dbdesc['kwargs'])

    def placeHolder(self,varname):
        """DBAPI 'format' paramstyle placeholder; '__mainsql__' is a marker, not data."""
        if varname=='__mainsql__' :
            return ''
        return '%s'

    def dataConvert(self,dataList):
        """Convert a dict or a [{'name':..,'value':..}] list into a positional tuple."""
        if type(dataList) == type({}):
            d = [ i for i in dataList.values()]
        else:
            d = [ i['value'] for i in dataList]
        return tuple(d)

    def pagingSQL(self,sql,paging,NS):
        """Wrap *sql* in a LIMIT-paged query driven by page/rows/sort/order in *NS*.

        Returns *sql* unchanged when no sort field is present.  Overrides the
        base class because MySQL's LIMIT takes a zero-based offset
        (from_line has no +1 here, unlike SQLor.pagingSQL).
        """
        page = int(NS.get(paging['pagename'],1))
        rows = int(NS.get(paging['rowsname'],10))
        sort = NS.get(paging.get('sortname','sort'),None)
        order = NS.get(paging.get('ordername','asc'),'asc')
        if not sort:
            return sql
        if page < 1:
            page = 1
        from_line = (page - 1) * rows
        end_line = page * rows + 1
        psql = self.pagingSQLmodel()
        ns={
            'from_line':from_line,
            'end_line':end_line,
            'rows':rows,
            'sort':sort,
            'order':order,
        }
        ac = ArgsConvert('$[',']$')
        psql = ac.convert(psql,ns)
        retSQL=psql % sql
        return retSQL

    def pagingSQLmodel(self):
        """LIMIT offset,count template; %s receives the inner query."""
        return u"""select * from (%s) A order by $[sort]$ $[order]$
limit $[from_line]$,$[rows]$"""

    def tablesSQL(self):
        """Query listing (name, title) for all tables of the configured schema."""
        sqlcmd = """SELECT lower(TABLE_NAME) as name, lower(TABLE_COMMENT) as title FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s'""" % self.dbdesc.get('dbname','unknown')
        return sqlcmd

    def fieldsSQL(self,tablename=None):
        """Query describing columns (name/type/length/dec1/nullable/title) of the
        configured schema, optionally restricted to *tablename*."""
        sqlcmd="""
select 
    lower(column_name) as name,
    data_type as type,
    case when character_maximum_length is null then NUMERIC_PRECISION
        else character_maximum_length end
        as length,
    NUMERIC_SCALE as dec1,
    lower(is_nullable) as nullable,
    column_comment as title,
    lower(table_name) as table_name
from information_schema.columns where lower(TABLE_SCHEMA) = '%s' """ % self.dbdesc.get('dbname','unknown').lower()
        if tablename is not None:
            sqlcmd = sqlcmd + """and lower(table_name)='%s';""" % tablename.lower()
        return sqlcmd

    def fkSQL(self,tablename=None):
        """Query listing foreign-key relations of the configured schema.

        Fix: the original statement had a stray ';' immediately after the
        WHERE clause, which made the appended schema / table conditions
        syntactically invalid SQL.
        """
        sqlcmd = """SELECT C.TABLE_SCHEMA 拥有者,
                C.REFERENCED_TABLE_NAME 父表名称 ,
                C.REFERENCED_COLUMN_NAME 父表字段 ,
                C.TABLE_NAME 子表名称,
                C.COLUMN_NAME 子表字段,
                C.CONSTRAINT_NAME 约束名,
                T.TABLE_COMMENT 表注释,
                R.UPDATE_RULE 约束更新规则,
                R.DELETE_RULE 约束删除规则
        FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE C
        JOIN INFORMATION_SCHEMA. TABLES T
            ON T.TABLE_NAME = C.TABLE_NAME
        JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS R
            ON R.TABLE_NAME = C.TABLE_NAME
            AND R.CONSTRAINT_NAME = C.CONSTRAINT_NAME
            AND R.REFERENCED_TABLE_NAME = C.REFERENCED_TABLE_NAME
        WHERE C.REFERENCED_TABLE_NAME IS NOT NULL
        and C.TABLE_SCHEMA = '%s'
        """ % self.dbdesc.get('dbname','unknown').lower()
        if tablename is not None:
            sqlcmd = sqlcmd + " and C.REFERENCED_TABLE_NAME = '%s'" % tablename.lower()
        return sqlcmd

    def pkSQL(self,tablename=None):
        """Query returning (table_name, field_name) pairs for primary keys."""
        sqlcmd = """SELECT
            lower(c.table_name) as table_name,
            lower(c.COLUMN_NAME) as field_name
        FROM
            INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS t,
            INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS c
        WHERE
            t.CONSTRAINT_TYPE = 'PRIMARY KEY'
            AND t.TABLE_SCHEMA = '%s'
            AND t.TABLE_NAME = c.TABLE_NAME
        """ % self.dbdesc.get('dbname','unknown').lower()
        if tablename is not None:
            sqlcmd = sqlcmd + " AND c.TABLE_NAME = '%s'" % tablename.lower()
        return sqlcmd

    def indexesSQL(self,tablename=None):
        """Query listing index name/type/table/column from information_schema.statistics."""
        sqlcmd = """SELECT DISTINCT
            lower(index_name) as index_name,
            lower(index_type) as index_type,
            lower(table_name) as table_name,
            lower(column_name) as column_name
        FROM
            information_schema.statistics
        WHERE
            table_schema = '%s'""" % self.dbdesc.get('dbname','unknown')
        if tablename is not None:
            sqlcmd = sqlcmd + """ AND table_name = '%s'""" % tablename.lower()
        return sqlcmd
|
131
sqlor/oracleor.py
Normal file
131
sqlor/oracleor.py
Normal file
@ -0,0 +1,131 @@
|
||||
from .sor import SQLor
|
||||
from .ddl_template_oracle import oracle_ddl_tmpl
|
||||
class Oracleor(SQLor):
|
||||
ddl_template = oracle_ddl_tmpl
|
||||
db2modelTypeMapping = {
|
||||
'char':'char',
|
||||
'nchar':'str',
|
||||
'varchar':'str',
|
||||
'varchar2':'str',
|
||||
'nvarchar2':'str',
|
||||
'number':'long',
|
||||
'integer':'long',
|
||||
'binary_float':'float',
|
||||
'binary_double':'float',
|
||||
'float':'float',
|
||||
'timestamp':'timestamp',
|
||||
'timestamp with time zone':'timestamp',
|
||||
'timestamp with local time zone':'timestamp',
|
||||
'interval year to moth':'date',
|
||||
'interval day to second':'timestamp',
|
||||
'clob':'text',
|
||||
'nclob':'text',
|
||||
'blob':'file',
|
||||
'bfile':'file',
|
||||
'date':'date',
|
||||
}
|
||||
model2dbTypemapping = {
|
||||
'date':'date',
|
||||
'time':'date',
|
||||
'timestamp':'date',
|
||||
'str':'varchar2',
|
||||
'char':'char',
|
||||
'short':'number',
|
||||
'long':'number',
|
||||
'float':'number',
|
||||
'text':'nclob',
|
||||
'file':'blob',
|
||||
}
|
||||
@classmethod
|
||||
def isMe(self,name):
|
||||
return name=='cx_Oracle'
|
||||
|
||||
def grammar(self):
|
||||
return {
|
||||
'select':select_stmt,
|
||||
}
|
||||
|
||||
def placeHolder(self,varname):
|
||||
if varname=='__mainsql__' :
|
||||
return ''
|
||||
return ':%s' % varname
|
||||
|
||||
def dataConvert(self,dataList):
|
||||
if type(dataList) == type({}):
|
||||
return dataList
|
||||
d = {}
|
||||
[ d.update({i['name']:i['value']}) for i in dataList ]
|
||||
return d
|
||||
|
||||
def pagingSQLmodel(self):
|
||||
return u"""select *
|
||||
from (
|
||||
select page_s.*,rownum row_id
|
||||
from (%s) page_s
|
||||
order by $[sort]$ $[order]$
|
||||
)
|
||||
where row_id >=$[from_line]$ and row_id < $[end_line]$"""
|
||||
|
||||
def tablesSQL(self):
|
||||
sqlcmd = """select
|
||||
lower(table_name) as name,
|
||||
lower(decode(comments,null,table_name,comments)) as title
|
||||
from USER_TAB_COMMENTS where table_type = 'TABLE'"""
|
||||
return sqlcmd
|
||||
|
||||
def fieldsSQL(self,tablename=None):
|
||||
sqlcmd="""select lower(utc.COLUMN_NAME) name
|
||||
,utc.DATA_TYPE type
|
||||
,utc.DATA_LENGTH length
|
||||
,utc.data_scale dec
|
||||
,case when utc.nullable = 'Y' then 'yes' else 'no' end nullable
|
||||
,lower(nvl(ucc.comments,utc.COLUMN_NAME)) title
|
||||
,lower(utc.table_name) as table_name
|
||||
from user_tab_cols utc left join USER_COL_COMMENTS ucc on utc.table_name = ucc.table_name and utc.COLUMN_NAME = ucc.COLUMN_NAME"""
|
||||
if tablename is not None:
|
||||
sqlcmd = sqlcmd + """ where lower(utc.table_name) = '%s'""" % tablename.lower()
|
||||
return sqlcmd
|
||||
|
||||
def fkSQL(self,tablename=None):
|
||||
tablename = tablename.lower()
|
||||
sqlcmd = """select
|
||||
distinct(ucc.column_name) as field,rela.table_name as fk_table,rela.column_name as fk_field
|
||||
from
|
||||
user_constraints uc,user_cons_columns ucc,
|
||||
(
|
||||
select t2.table_name,t2.column_name,t1.r_constraint_name
|
||||
from user_constraints t1,user_cons_columns t2
|
||||
where t1.r_constraint_name=t2.constraint_name
|
||||
) rela
|
||||
where
|
||||
uc.constraint_name=ucc.constraint_name
|
||||
and uc.r_constraint_name=rela.r_constraint_name"""
|
||||
if tablename is not None:
|
||||
sqlcmd = sqlcmd + """ and lower(uc.table_name)='%s'""" % tablename.lower()
|
||||
return sqlcmd
|
||||
|
||||
def pkSQL(self,tablename=None):
|
||||
sqlcmd = """
|
||||
select
|
||||
lower(col.table_name) table_name,
|
||||
lower(col.column_name) as field_name
|
||||
from
|
||||
user_constraints con,user_cons_columns col
|
||||
where
|
||||
con.constraint_name=col.constraint_name and con.constraint_type='P'"""
|
||||
if tablename is not None:
|
||||
sqlcmd = sqlcmd + """ and lower(col.table_name)='%s'""" % tablename.lower()
|
||||
return sqlcmd
|
||||
|
||||
def indexesSQL(self,tablename=None):
|
||||
sqlcmd = """select
|
||||
lower(a.index_name) index_name,
|
||||
lower(a.UNIQUENESS) index_type,
|
||||
lower(a.table_name) table_name,
|
||||
lower(b.column_name) column_name
|
||||
from user_indexes a, user_ind_columns b
|
||||
where a.index_name = b.index_name"""
|
||||
if tablename is not None:
|
||||
sqlcmd += """ and lower(a.table_name) = lower('%s')""" % tablename.lower()
|
||||
return sqlcmd
|
||||
|
199
sqlor/postgresqlor.py
Normal file
199
sqlor/postgresqlor.py
Normal file
@ -0,0 +1,199 @@
|
||||
from .sor import SQLor
|
||||
from .ddl_template_postgresql import postgresql_ddl_tmpl
|
||||
|
||||
class PostgreSQLor(SQLor):
    """SQLor implementation for PostgreSQL via the psycopg2 DBAPI2 driver.

    NOTE(review): most metadata queries below (USER_TAB_COMMENTS, decode(),
    user_tab_cols, user_constraints, user_indexes, rownum paging) are Oracle
    dialect and will not run on PostgreSQL — the correct pg_catalog queries
    are quoted in the method docstrings and need to be wired in.  Likewise
    placeHolder() emits Oracle-style ':name' binds while psycopg2 expects
    '%(name)s' — confirm before use.
    """

    # DDL template rendered by SQLor.createTable().
    ddl_template = postgresql_ddl_tmpl

    # PostgreSQL column type -> abstract model type (consumed by SQLor.fields()).
    db2modelTypeMapping = {
        'smallint':'short',
        'integer':'long',
        'bigint':'llong',
        'decimal':'float',
        'numeric':'float',
        'real':'float',
        'double':'float',
        'serial':'long',
        'bigserial':'llong',
        'char':'char',
        'character':'char',
        'varchar':'str',
        'character varying':'str',
        'text':'text',
        'timestamp':'timestamp',
        'date':'date',
        'time':'time',
        'boolean':'char',
        'bytea':'file'
    }
    # Abstract model type -> PostgreSQL column type (used when emitting DDL).
    model2dbTypemapping = {
        'date':'date',
        'time':'date',
        'timestamp':'timestamp',
        'str':'varchar',
        'char':'char',
        'short':'smallint',
        'long':'integer',
        'float':'numeric',
        'text':'text',
        'file':'bytea',
    }

    @classmethod
    def isMe(cls,name):
        """Return True when *name* identifies the psycopg2 driver."""
        return name=='psycopg2'

    def grammar(self):
        # NOTE(review): select_stmt is not defined anywhere in this module, so
        # calling grammar() raises NameError — confirm its intended source.
        return {
            'select':select_stmt,
        }

    def placeHolder(self,varname):
        # NOTE(review): ':name' is the Oracle bind style; psycopg2 uses
        # '%(name)s' (pyformat) — confirm before using runVarSQL with this class.
        if varname=='__mainsql__' :
            return ''
        return ':%s' % varname

    def dataConvert(self,dataList):
        """Convert [{'name':..,'value':..}] (or an already-built dict) to a bind dict."""
        if type(dataList) == type({}):
            return dataList
        d = {}
        [ d.update({i['name']:i['value']}) for i in dataList ]
        return d

    def pagingSQLmodel(self):
        # NOTE(review): rownum is Oracle-only; PostgreSQL paging should use
        # LIMIT/OFFSET or row_number() OVER (...).
        return u"""select *
from (
    select page_s.*,rownum row_id
    from (%s) page_s
    order by $[sort]$ $[order]$
)
where row_id >=$[from_line]$ and row_id < $[end_line]$"""

    def tablesSQL(self):
        """List table names.

        Reference PostgreSQL form:
            SELECT tablename FROM pg_tables
            WHERE tablename NOT LIKE 'pg%'
              AND tablename NOT LIKE 'sql_%'
            ORDER BY tablename;
        """
        # NOTE(review): Oracle dialect — invalid on PostgreSQL (see docstring).
        sqlcmd = """select
    lower(table_name) as name,
    lower(decode(comments,null,table_name,comments)) as title
from USER_TAB_COMMENTS where table_type = 'TABLE'"""
        return sqlcmd

    def fieldsSQL(self,tablename=None):
        """Describe the columns of *tablename*.

        Reference PostgreSQL form:
            SELECT col_description(a.attrelid,a.attnum) as comment,
                   pg_type.typname as typename, a.attname as name,
                   a.attnotnull as notnull
            FROM pg_class as c, pg_attribute as a
                 inner join pg_type on pg_type.oid = a.atttypid
            where c.relname = 'tablename' and a.attrelid = c.oid and a.attnum>0
        """
        # NOTE(review): Oracle dialect — invalid on PostgreSQL (see docstring).
        sqlcmd="""select lower(utc.COLUMN_NAME) name
    ,utc.DATA_TYPE type
    ,utc.DATA_LENGTH length
    ,utc.data_scale dec
    ,case when utc.nullable = 'Y' then 'yes' else 'no' end nullable
    ,lower(nvl(ucc.comments,utc.COLUMN_NAME)) title
    ,lower(utc.table_name) as table_name
from user_tab_cols utc left join USER_COL_COMMENTS ucc on utc.table_name = ucc.table_name and utc.COLUMN_NAME = ucc.COLUMN_NAME"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ where lower(utc.table_name) = '%s'""" % tablename.lower()
        return sqlcmd

    def fkSQL(self,tablename=None):
        """Query listing foreign keys (field, fk_table, fk_field).

        Fix: the original unconditionally called tablename.lower() before the
        None check, so fkSQL() with the default argument raised AttributeError.
        NOTE(review): the query body is Oracle dialect — invalid on PostgreSQL.
        """
        sqlcmd = """select
    distinct(ucc.column_name) as field,rela.table_name as fk_table,rela.column_name as fk_field
from
    user_constraints uc,user_cons_columns ucc,
    (
        select t2.table_name,t2.column_name,t1.r_constraint_name
        from user_constraints t1,user_cons_columns t2
        where t1.r_constraint_name=t2.constraint_name
    ) rela
where
    uc.constraint_name=ucc.constraint_name
    and uc.r_constraint_name=rela.r_constraint_name"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(uc.table_name)='%s'""" % tablename.lower()
        return sqlcmd

    def pkSQL(self,tablename=None):
        """Query returning the primary-key columns.

        Reference PostgreSQL form:
            select pg_attribute.attname as colname, pg_type.typname as typename,
                   pg_constraint.conname as pk_name
            from pg_constraint
                 inner join pg_class on pg_constraint.conrelid = pg_class.oid
                 inner join pg_attribute on pg_attribute.attrelid = pg_class.oid
                     and pg_attribute.attnum = pg_constraint.conkey[1]
                 inner join pg_type on pg_type.oid = pg_attribute.atttypid
            where pg_class.relname = 'tablename' and pg_constraint.contype='p'
        """
        # NOTE(review): Oracle dialect — invalid on PostgreSQL (see docstring).
        sqlcmd = """
select
    lower(col.table_name) table_name,
    lower(col.column_name) as field_name
from
    user_constraints con,user_cons_columns col
where
    con.constraint_name=col.constraint_name and con.constraint_type='P'"""
        if tablename is not None:
            sqlcmd = sqlcmd + """ and lower(col.table_name)='%s'""" % tablename.lower()
        return sqlcmd

    def indexesSQL(self,tablename=None):
        """Query listing indexes.

        Reference PostgreSQL form (abridged): join PG_AM, PG_CLASS,
        PG_STAT_ALL_INDEXES, PG_INDEX, PG_DESCRIPTION and PG_INDEXES on the
        index relid, filtered by schemaname/relname.
        """
        # NOTE(review): Oracle dialect — invalid on PostgreSQL (see docstring).
        sqlcmd = """select
    lower(a.index_name) index_name,
    lower(a.UNIQUENESS) index_type,
    lower(a.table_name) table_name,
    lower(b.column_name) column_name
from user_indexes a, user_ind_columns b
where a.index_name = b.index_name"""
        if tablename is not None:
            sqlcmd += """ and lower(a.table_name) = lower('%s')""" % tablename.lower()
        return sqlcmd
|
||||
|
454
sqlor/sor.py
Normal file
454
sqlor/sor.py
Normal file
@ -0,0 +1,454 @@
|
||||
import os
|
||||
os.environ['NLS_LANG'] = 'SIMPLIFIED CHINESE_CHINA.UTF8'
|
||||
import sys
|
||||
import codecs
|
||||
import re
|
||||
import json
|
||||
from appPublic.myImport import myImport
|
||||
from appPublic.dictObject import DictObject,dictObjectFactory
|
||||
from appPublic.unicoding import uDict
|
||||
from patterncoding.myTemplateEngine import MyTemplateEngine
|
||||
|
||||
|
||||
from appPublic.argsConvert import ArgsConvert,ConditionConvert
|
||||
|
||||
class SQLorException(Exception,object):
    """Package-level error type carrying a structured 'dic' payload.

    Fixes over the original: the constructor was named __int__ (never called),
    used the misspelled builtin 'supper', and referenced the undefined name
    'SQLException' — so instantiation could never have populated self.dic.
    """
    def __init__(self,*args,**kvs):
        super(SQLorException,self).__init__(*args)
        self.dic = {
            'response':'error',
            'errtype':'SQLor',
            # Exception.__str__ gives the plain message without recursing
            # into our own __str__ (which reads self.dic).
            'errmsg':Exception.__str__(self),
        }

    def __str__(self):
        return 'errtype:%s,errmsg=%s' % (self.dic['errtype'],self.dic['errmsg'])
|
||||
|
||||
def setValues(params,ns):
    """Resolve *params* from the namespace *ns*, falling back to the
    identically-named environment variable (None when neither is set)."""
    fallback = os.getenv(params)
    return ns.get(params, fallback)
|
||||
|
||||
def findNamedParameters(sql):
    """Return all ``${name}`` placeholders found in *sql*.

    Duplicates are kept in order of appearance; names must start with a
    letter or underscore.  Fix: the pattern is now a raw string — the
    original relied on invalid escape sequences (``\\$``, ``\\{``) in a
    plain string, which is deprecated and slated to become a SyntaxError.
    """
    re1 = r'\$\{[_a-zA-Z][a-zA-Z_0-9]*\}'
    params1 = re.findall(re1,sql)
    return params1
|
||||
|
||||
|
||||
def uniParams(params1):
    """Return *params1* with duplicates removed, keeping first-seen order."""
    # dict preserves insertion order, so fromkeys() deduplicates in order.
    return list(dict.fromkeys(params1))
|
||||
|
||||
def readsql(fn):
    """Read the UTF-8 encoded SQL file *fn* and return its full text."""
    # Context manager guarantees the handle is closed even on read errors.
    with codecs.open(fn,'r','utf-8') as f:
        return f.read()
|
||||
|
||||
class SQLor(object):
    """Driver-agnostic SQL helper base class.

    Renders templated SQL (``$[..]$`` text substitution and ``${..}$`` bind
    variables via appPublic.ArgsConvert), adapts bind data to the driver's
    paramstyle through subclass hooks (placeHolder / dataConvert), and runs
    statements on a cursor supplied by setCursor() in either sync or async
    mode.  Subclasses also provide the metadata queries (tablesSQL,
    fieldsSQL, pkSQL, fkSQL, indexesSQL) and a paging template.
    """
    def __init__(self,dbdesc=None,sqltp = '$[',sqlts = ']$',sqlvp = '${',sqlvs = '}$'):
        # Connection/cursor are injected later via setCursor().
        self.conn = None
        self.cur = None
        self.async_mode = False
        # Delimiters: sqltp/sqlts wrap text substitutions, sqlvp/sqlvs wrap
        # bind variables that become driver placeholders.
        self.sqltp = sqltp
        self.sqlts = sqlts
        self.sqlvp = sqlvp
        self.sqlvs = sqlvs
        self.dbdesc = dbdesc
        self.writer = None
        # Optional per-type value converters registered via setConvertFunction().
        self.convfuncs = {}
        self.cc = ConditionConvert()

    def setCursor(self,async_mode,conn,cur):
        """Attach an open connection/cursor pair; async_mode selects await vs direct calls."""
        self.async_mode = async_mode
        self.conn = conn
        self.cur = cur

    def getConn(self):
        """Return the attached connection (None until setCursor())."""
        return self.conn

    def setConvertFunction(self,typ,func):
        """Register *func* as the value converter for type key *typ*."""
        self.convfuncs.update({typ:func})

    def convert(self,typ,value):
        """Apply the registered converter for *typ*, or return *value* unchanged."""
        if self.convfuncs.get(typ,None) is not None:
            return self.convfuncs[typ](value)
        return value

    @classmethod
    def isMe(self,name):
        """Driver-name dispatch hook; subclasses override with their driver name."""
        return name=='sqlor'

    def pagingSQLmodel(self):
        """Paging SQL template; base class has none — subclasses override."""
        return u""

    def placeHolder(self,varname):
        """Generic qmark placeholder; '__mainsql__' is a marker, not a value."""
        if varname=='__mainsql__' :
            return ''
        return '?'

    def dataConvert(self,dataList):
        """Convert [{'name':..,'value':..}] into positional bind data."""
        return [ i.get('value',None) for i in dataList]

    def dataList(self,k,v):
        """Wrap one (name, value) pair in the internal bind-data list format."""
        a = []
        a.append({'name':k,'value':v})
        return a

    def cursor(self):
        """Return the attached cursor (None until setCursor())."""
        return self.cur

    def recordCnt(self,sql):
        """Wrap *sql* in a count(*) query exposing a single 'rcnt' column."""
        ret = u"""select count(*) rcnt from (%s) rowcount_table""" % sql
        return ret

    def pagingSQL(self,sql,paging,NS):
        """Wrap *sql* in the subclass paging template, driven by page/rows/
        sort/order values looked up in *NS*; returns *sql* unchanged when no
        sort field is present."""
        page = int(NS.get(paging['pagename'],1))
        rows = int(NS.get(paging['rowsname'],10))
        sort = NS.get(paging.get('sortname','sort'),None)
        order = NS.get(paging.get('ordername','asc'),'asc')
        if not sort:
            return sql
        if page < 1:
            page = 1
        # 1-based window [from_line, end_line) for rownum-style templates.
        from_line = (page - 1) * rows + 1
        end_line = page * rows + 1
        psql = self.pagingSQLmodel()
        ns={
            'from_line':from_line,
            'end_line':end_line,
            'rows':rows,
            'sort':sort,
            'order':order,
        }
        ac = ArgsConvert('$[',']$')
        psql = ac.convert(psql,ns)
        retSQL=psql % sql
        return retSQL

    def filterSQL(self,sql,filters,NS):
        """Wrap *sql* in a filtered subquery; each filter with unresolvable
        variables degrades to the no-op '1=1'.

        NOTE(review): fragments are joined with a plain space, so each filter
        string is presumably expected to carry its own AND/OR — confirm.
        """
        ac = ArgsConvert('$[',']$')
        fbs = []
        for f in filters:
            vars = ac.findAllVariables(f)
            if len(vars) > 0:
                ignoreIt = False
                for v in vars:
                    if not NS.get(v,False):
                        ignoreIt = True
                if not ignoreIt:
                    f = ac.convert(f,NS)
                else:
                    f = '1=1'
            fbs.append(f)
        fb = ' '.join(fbs)
        retsql = u"""select * from (%s) filter_table where %s""" % (sql,fb)
        return retsql

    async def runVarSQL(self,cursor,sql,NS):
        """Mask ${..}$ variables in *sql*, convert the bind data for the
        driver, and execute on *cursor* (awaited in async mode).

        Raises whatever the driver raises, after printing the statement.
        """
        markedSQL,datas = self.maskingSQL(sql,NS)
        datas = self.dataConvert(datas)
        try:
            # NOTE(review): encoding to bytes here means drivers receive a
            # bytes statement, not str — confirm every supported driver
            # accepts that (most modern DBAPI drivers expect str).
            markedSQL = markedSQL.encode('utf8')
            if self.async_mode:
                await cursor.execute(markedSQL,datas)
            else:
                cursor.execute(markedSQL,datas)

        except Exception as e:
            print( "markedSQL=",markedSQL,datas,e)
            raise e
        return

    def maskingSQL(self,org_sql,NS):
        """Replace every ${X}$ variable (except the '__mainsql__' marker) with
        the driver placeholder and collect the bind values.

        NS is the namespace dictionary the variables are resolved from.
        Returns (MarkedSQL, list_of_variable).
        """
        sqltextAC = ArgsConvert(self.sqltp,self.sqlts)
        sqlargsAC = ArgsConvert(self.sqlvp,self.sqlvs)
        # First pass: plain text substitution and conditional sections.
        sql1 = sqltextAC.convert(org_sql,NS)
        cc = ConditionConvert()
        sql1 = cc.convert(sql1,NS)
        # Second pass: swap each bind variable for its placeholder.
        vars = sqlargsAC.findAllVariables(sql1)
        phnamespace = {}
        [phnamespace.update({v:self.placeHolder(v)}) for v in vars]
        m_sql = sqlargsAC.convert(sql1,phnamespace)
        newdata = []
        for v in vars:
            if v != '__mainsql__':
                value = sqlargsAC.getVarValue(v,NS,None)
                newdata += self.dataList(v,value)

        return (m_sql,newdata)

    async def execute(self,sql,value,callback,**kwargs):
        """Run *sql* with bind namespace *value*; when *callback* is given,
        invoke it once per row with a dict keyed by lowercased column names."""
        cur = self.cursor()
        await self.runVarSQL(cur,sql,value)
        if callback is not None:
            fields = [ i[0].lower() for i in cur.description ]
            if self.async_mode:
                rec = await cur.fetchone()
            else:
                rec = cur.fetchone()

            while rec is not None:
                dic = {}
                for i in range(len(fields)):
                    dic.update({fields[i]:rec[i]})
                #dic = uDict(dic,coding='utf8')
                callback(dic,**kwargs)
                if self.async_mode:
                    rec = await cur.fetchone()
                else:
                    rec = cur.fetchone()

    async def executemany(self,sql,values):
        """Run *sql* once per bind set in *values* via cursor.executemany."""
        cur = self.cursor()
        markedSQL,datas = self.maskingSQL(sql,{})
        datas = [ self.dataConvert(d) for d in values ]
        # NOTE(review): bare name 'async_mode' is undefined — this raises
        # NameError; it presumably should be self.async_mode.
        if async_mode:
            await cur.executemany(markedSQL,datas)
        else:
            cur.executemany(markedSQL,datas)

    def pivotSQL(self,tablename,rowFields,columnFields,valueFields):
        """Build a pivot query: rows grouped by *rowFields*, one summed
        column per distinct value of each *columnFields* x *valueFields*."""
        def maxValue(columnFields,valueFields,cfvalues):
            # Outer SELECT: sum each generated case-when column.
            sql = ''
            for f in valueFields:
                i = 0
                for field in columnFields:
                    for v in cfvalues[field]:
                        sql += """
    ,sum(%s_%d) %s_%d""" % (f,i,f,i)
                        i+=1
            return sql
        def casewhen(columnFields,valueFields,cfvalues):
            # Inner SELECT: one case-when column per distinct column value.
            sql = ''
            for f in valueFields:
                i = 0
                for field in columnFields:
                    for v in cfvalues[field]:
                        if v is None:
                            sql += """,case when %s is null then %s
        else 0 end as %s_%d -- %s
        """ % (field,f,f,i,v)
                        else:
                            sql += """,case when trim(%s) = trim('%s') then %s
        else 0 end as %s_%d -- %s
        """ % (field,v,f,f,i,v)

                        i += 1
            return sql

        # Collect the distinct values for each pivoted column.
        cfvalues={}
        for field in columnFields:
            sqlstring = 'select distinct %s from %s' % (field,tablename)
            v = []
            # NOTE(review): execute() is a coroutine; this call is never
            # awaited from a sync method, so v stays empty — confirm intent.
            self.execute(sqlstring,{},lambda x: v.append(x))
            cfvalues[field] = [ i[field] for i in v ]

        sql ="""
        select """ + ','.join(rowFields)
        sql += maxValue(columnFields,valueFields,cfvalues)
        sql += """ from
        (select """ + ','.join(rowFields)
        sql += casewhen(columnFields,valueFields,cfvalues)
        sql += """
        from %s)
        group by %s""" % (tablename,','.join(rowFields))
        return sql

    async def pivot(self,desc,tablename,rowFields,columnFields,valueFields):
        """Build and run the pivot query, recording the SQL in desc['sql_string']."""
        sql = self.pivotSQL(tablename,rowFields,columnFields,valueFields)
        desc['sql_string'] = sql
        ret = []
        return await self.execute(sql,{},lambda x:ret.append(x))

    def isSelectSql(self,sql):
        """Heuristic: does *sql* look like a SELECT statement?

        NOTE(review): the whitespace-skipping index *i* is computed but never
        used, so a statement with leading whitespace is misclassified; an
        all-whitespace string also raises IndexError in the while loop.
        """
        i = 0
        while sql[i] in "\r\n \t":
            i = i + 1
        return sql.lower().startswith('select ')

    def getSQLfromDesc(self,desc):
        """Return the SQL text from a description dict: the contents of
        desc['sql_file'] when present, else desc['sql_string']."""
        sql = ''
        if 'sql_file' in desc.keys():
            sql = readsql(desc['sql_file'])
        else:
            sql = desc['sql_string']
        return sql

    async def record_count(self,desc,NS):
        """Run the count(*) wrapper of the described query and return the total."""
        cnt_desc = {}
        cnt_desc.update(desc)
        sql = self.getSQLfromDesc(desc)
        # The counting query is always inline SQL, even if the source was a file.
        if desc.get('sql_file',False):
            del cnt_desc['sql_file']
        cnt_desc['sql_string'] = self.recordCnt(sql)
        class Cnt:
            # Row collector used as the execute() callback target.
            def __init__(self):
                self.recs = []
            def handler(self,rec):
                self.recs.append(rec)

        c = Cnt()
        # NOTE(review): sqlIterator is not defined on this class or in this
        # file — confirm where it is provided (mixin/subclass?).
        await self.sqlIterator(cnt_desc,NS,c.handler)
        print(c.recs[0])
        t = c.recs[0]['rcnt']
        return t

    async def pagingdata(self,desc,NS):
        """Run the described query with paging applied and return its rows."""
        paging_desc = {}
        paging_desc.update(desc)
        paging_desc.update(
        {
            "paging":{
                "rowsname":"rows",
                "pagename":"page",
                "sortname":"sort",
                "ordername":"order"
            }
        })
        if desc.get('sortfield',False):
            NS['sort'] = desc.get('sortfield')
        sql = self.getSQLfromDesc(desc)
        if desc.get('sql_file',False):
            # NOTE(review): cnt_desc is undefined here (copy-paste from
            # record_count) — this raises NameError when sql_file is set;
            # presumably paging_desc was intended.
            del cnt_desc['sql_file']
        paging_desc['sql_string'] = self.pagingSQL(sql,
            paging_desc.get('paging'),NS)

        class Cnt:
            # Row collector used as the callback target.
            def __init__(self):
                self.recs = []
            def handler(self,rec):
                self.recs.append(rec)

        c = Cnt()
        # NOTE(review): sqlIterator is not defined on this class — see above.
        await self.sqlIterator(paging_desc,NS,c.handler)
        return c.recs

    async def runSQL(self,desc,NS,callback,**kw):
        """Run the described SQL; for SELECTs without an explicit callback,
        collect rows as DictObjects into NS[desc['dataname']]."""
        class RecordHandler:
            # Appends each row (as a DictObject) to a named list in *ns*.
            def __init__(self,ns,name):
                self.ns = ns
                self.name = name
                self.ns[name] = []

            def handler(self,rec):
                obj = DictObject(rec)
                self.ns[self.name].append(obj)

        # NOTE(review): cur is fetched but never used in this method.
        cur = self.cursor()
        sql = self.getSQLfromDesc(desc)
        if self.isSelectSql(sql):
            if callback is None:
                klass = desc.get('dataname','dummy')
                if klass is not None:
                    rh = RecordHandler(NS,klass)
                    callback = rh.handler
                else:
                    callback = None
        await self.execute(sql,NS,callback)

    async def sqlExecute(self,desc,NS):
        """Execute the described statement discarding any result rows.

        NOTE(review): bare name 'sql' is undefined here — this raises
        NameError; presumably self.getSQLfromDesc(desc) was intended.
        """
        await self.execute(sql,NS,None)

    async def tables(self):
        """Return the table list produced by the subclass tablesSQL() query."""
        sqlstring = self.tablesSQL()
        ret = []
        await self.execute(sqlstring,{},lambda x:ret.append(x))
        return ret

    def indexesSQL(self,tablename):
        """Subclass hook: return SQL producing records of {
            index_name,
            index_type,
            table_name,
            column_name
        }; None means "not supported"."""
        return None

    async def indexes(self,tablename=None):
        """Return index records for *tablename* (empty when unsupported).

        NOTE(review): tablename.lower() is called unconditionally, so the
        default tablename=None raises AttributeError — confirm intent.
        """
        sqlstring = self.indexesSQL(tablename.lower())
        if sqlstring is None:
            return []
        recs = []
        await self.execute(sqlstring,{},lambda x:recs.append(x))
        return recs

    async def fields(self,tablename=None):
        """Return column records with 'type' mapped through db2modelTypeMapping
        ('unknown' for unmapped types) and 'name' lowercased."""
        sqlstring = self.fieldsSQL(tablename)
        recs = []
        await self.execute(sqlstring,{},lambda x:recs.append(x))
        ret = []
        for r in recs:
            r.update({'type':self.db2modelTypeMapping.get(r['type'].lower(),'unknown')})
            r.update({'name':r['name'].lower()})
            ret.append(r)
        return ret

    async def primary(self,tablename):
        """Return the primary-key records produced by the subclass pkSQL() query."""
        sqlstring = self.pkSQL(tablename)
        recs = []
        await self.execute(sqlstring,{},lambda x:recs.append(x))
        return recs

    async def fkeys(self,tablename):
        """Return the foreign-key records produced by the subclass fkSQL() query."""
        sqlstring = self.fkSQL(tablename)
        recs = []
        await self.execute(sqlstring,{},lambda x:recs.append(x))
        return recs

    async def createTable(self,tabledesc):
        """Render the subclass DDL template with *tabledesc* and execute it.

        NOTE(review): ddl_template is only defined on subclasses, and
        sqlExecute currently references an undefined name — see above.
        """
        te = MyTemplateEngine([],'utf8','utf8')
        desc = {
            "sql_string":te.renders(self.ddl_template,tabledesc)
        }
        return await self.sqlExecute(desc,{})

    async def getTableDesc(self,tablename):
        """Assemble a full table description: summary, primary key, fields
        and index-based validations."""
        desc = {}
        summary = [ i for i in await self.tables() if tablename.lower() == i.name ]
        primary = [i.field_name for i in await self.primary(tablename) ]
        # NOTE(review): summary is a list here, so summary['primary'] raises
        # TypeError — presumably summary[0] (a record) was intended.
        summary['primary'] = primary
        desc['summary'] = summary
        desc['fields'] = await self.fields(tablename=tablename)
        desc['validation'] = []
        idx = {}
        # NOTE(review): indexes() is a coroutine returning a list, not an
        # async generator — 'async for' over it raises TypeError; a plain
        # 'for ... in await self.indexes(...)' was presumably intended.
        async for idxrec in self.indexes(tablename=tablename):
            if idxrec.index_name != idx.get('name',None):
                if idx != {}:
                    desc['validation'].append(idx)
                idx = {
                    'fields':[]
                }
            else:
                idx['fields'] = []
            idx['name'] = idxrec.index_name
            idx['oper'] = 'idx'
            idx['fields'].append(idxrec.field_name)
        if idx != {}:
            desc['validation'].append(idx)
        return desc
|
||||
|
||||
|
||||
|
100
sqlor/sqlite3or.py
Normal file
100
sqlor/sqlite3or.py
Normal file
@ -0,0 +1,100 @@
|
||||
import re
|
||||
from .sor import SQLor
|
||||
|
||||
class SQLite3or(SQLor):
    """SQLor implementation for the stdlib sqlite3 driver."""

    # SQLite column type -> abstract model type (consumed by fields()).
    db2modelTypeMapping = {
        'char':'char',
        'nchar':'str',
        'text':'text',
        'ntext':'text',
        'varchar':'str',
        'nvarchar':'str',
        'blob':'file',
        'integer':'long',
        'double':'float',
        'date':'date',
        'time':'time',
        'timestamp':'timestamp',
        'number':'long',
    }
    # Abstract model type -> SQLite column type (used when emitting DDL).
    model2dbTypemapping = {
        'date':'date',
        'time':'time',
        'timestamp':'timestamp',
        'str':'nvarchar',
        'char':'char',
        'short':'int',
        'long':'integer',
        'float':'double',
        'text':'ntext',
        'file':'blob',
    }

    @classmethod
    def isMe(cls,name):
        """Return True when *name* identifies the sqlite3 driver."""
        return name=='sqlite3'

    def placeHolder(self,varname):
        """sqlite3 qmark placeholder; '__mainsql__' is a marker, not data."""
        if varname=='__mainsql__' :
            return ''
        return '?'

    def dataConvert(self,dataList):
        """Convert a dict or a [{'name':..,'value':..}] list into a positional tuple."""
        if type(dataList) == type({}):
            d = [ i for i in dataList.values()]
        else:
            d = [ i['value'] for i in dataList]
        return tuple(d)

    def pagingSQLmodel(self):
        """LIMIT offset,count paging template; %s receives the inner query.

        Fix: the count term is the page size $[rows]$ — the original used
        $[end_line]$ (an absolute row number), which returned too many rows
        on every page after the first.
        """
        sql = u"""select * from (%s) order by $[sort]$ $[order]$ limit $[from_line]$,$[rows]$"""
        return sql

    def tablesSQL(self):
        """Query listing (name, title) for all tables from sqlite_master."""
        sqlcmd = u"""select name, tbl_name as title from sqlite_master where upper(type) = 'TABLE'"""
        return sqlcmd

    def fieldsSQL(self,tablename):
        """PRAGMA statement describing the columns of *tablename*."""
        sqlcmd="""PRAGMA table_info('%s')""" % tablename.lower()
        return sqlcmd

    async def fields(self,tablename):
        """Return model-typed field records parsed from PRAGMA table_info.

        Fixes: (a) async + awaited execute() with the required value
        namespace — the original sync call omitted the positional 'value'
        argument and never awaited the coroutine, so no rows were ever read;
        (b) the type-splitting regex is now a raw string.
        """
        m = r'(\w+)\(((\d+)(,(\d+)){0,1})\){0,1}'
        k = re.compile(m)
        def typesplit(typ):
            # Split e.g. 'varchar(20)' / 'decimal(10,2)' into (type, length, dec).
            d = k.search(typ)
            if d is None:
                return typ,0,0

            return d.group(1),int(d.group(3) if d.group(3) is not None else 0 ),int(d.group(5) if d.group(5) is not None else 0)

        sqlstring = self.fieldsSQL(tablename)
        recs = []
        await self.execute(sqlstring,{},lambda x:recs.append(x))
        for r in recs:
            t,l,d = typesplit(r['type'])
            r['type'] = t
            r['length'] = int(l)
            r['dec'] = int(d)
            r['title'] = r['name']
        ret = []
        for r in recs:
            r.update({'type':self.db2modelTypeMapping[r['type'].lower()]})
            r.update({'name':r['name'].lower()})
            ret.append(r)
        return ret

    def fkSQL(self,tablename):
        """SQLite exposes no FK query in this scheme; empty statement."""
        sqlcmd = ""
        return sqlcmd

    async def fkeys(self,tablename):
        """No foreign-key introspection for sqlite3; async to match the
        base-class contract (callers await fkeys())."""
        return []

    async def primary(self,tablename):
        """Derive primary-key columns from the PRAGMA 'pk' flag.

        Fix: async + awaits fields(), matching the base-class contract
        (getTableDesc does 'await self.primary(...)').
        """
        recs = await self.fields(tablename)
        ret = [ {'field':r['name']} for r in recs if r['pk'] == 1 ]
        return ret

    def pkSQL(self,tablename):
        """Primary keys come from PRAGMA via primary(); empty statement."""
        sqlcmd = ""
        return sqlcmd
|
47
test/primary.py
Normal file
47
test/primary.py
Normal file
@ -0,0 +1,47 @@
|
||||
import asyncio

from sqlor.dbpools import DBPools

# Two logical databases over the same MySQL schema: "aiocfae" through the
# asynchronous aiomysql driver, "cfae" through the synchronous
# mysql.connector driver.
dbs = {
    "aiocfae": {
        "driver": "aiomysql",
        "async_mode": True,
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
    "cfae": {
        "driver": "mysql.connector",
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
}

loop = asyncio.get_event_loop()
pool = DBPools(dbs, loop=loop)


async def show_tables(dbname):
    # List every table of the given database.
    tables = await pool.getTables(dbname)
    print('tables=', tables)


async def show_fields(dbname, tablename):
    # Describe the columns of one table.
    flds = await pool.getTableFields(dbname, tablename)
    print(dbname, tablename, 'fields=', flds)


async def show_primary(dbname, tablename):
    # Show the primary-key columns of one table.
    pk = await pool.getTablePrimaryKey(dbname, tablename)
    print(dbname, tablename, 'primary key=', pk)


loop.run_until_complete(show_tables('cfae'))
loop.run_until_complete(show_fields('cfae', 'product'))
loop.run_until_complete(show_primary('cfae', 'product'))
|
44
test/t1.py
Normal file
44
test/t1.py
Normal file
@ -0,0 +1,44 @@
|
||||
import asyncio

from sqlor.dbpools import DBPools

# Same MySQL schema reachable through both an asynchronous (aiomysql) and a
# synchronous (mysql.connector) driver.
dbs = {
    "aiocfae": {
        "driver": "aiomysql",
        "async_mode": True,
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
    "cfae": {
        "driver": "mysql.connector",
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
}

loop = asyncio.get_event_loop()
pool = DBPools(dbs, loop=loop)


async def demo_paging():
    # runSQLPaging wraps the description function into an awaitable paged
    # query; the namespace supplies page size, page number and sort key.
    @pool.runSQLPaging
    def sql(db, ns):
        return {
            "sql_string": "select * from product",
        }

    page = await sql('aiocfae', {'rows': 5, 'page': 1, "sort": "productid"})
    print('x=', page['total'], len(page['rows']))


loop.run_until_complete(demo_paging())
|
42
test/t2.py
Normal file
42
test/t2.py
Normal file
@ -0,0 +1,42 @@
|
||||
import asyncio

from sqlor.dbpools import DBPools

# Connection descriptions: one asynchronous (aiomysql) and one synchronous
# (mysql.connector) view of the same "cfae" MySQL database.
dbs = {
    "aiocfae": {
        "driver": "aiomysql",
        "async_mode": True,
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
    "cfae": {
        "driver": "mysql.connector",
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
}

loop = asyncio.get_event_loop()
pool = DBPools(dbs, loop=loop)


async def testfunc():
    # runSQL wraps the description function into an awaitable executor;
    # result rows are streamed to the callback (print here).
    @pool.runSQL
    def sql(db, ns, callback):
        return {
            "sql_string": "select * from product",
        }

    x = await sql('cfae', {}, print)


loop.run_until_complete(testfunc())
|
42
test/test.py
Normal file
42
test/test.py
Normal file
@ -0,0 +1,42 @@
|
||||
import asyncio

from sqlor.dbpools import DBPools

# Driver configuration: the "cfae" MySQL schema via the asynchronous
# aiomysql driver ("aiocfae") and via the synchronous mysql.connector
# driver ("cfae").
dbs = {
    "aiocfae": {
        "driver": "aiomysql",
        "async_mode": True,
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
    "cfae": {
        "driver": "mysql.connector",
        "coding": "utf8",
        "dbname": "cfae",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "test123",
            "host": "localhost",
        },
    },
}

loop = asyncio.get_event_loop()
pool = DBPools(dbs, loop=loop)


async def testfunc():
    # Decorating with pool.runSQL turns the description function into an
    # awaitable query; each result row is passed to the callback (print).
    @pool.runSQL
    def sql(db, ns, callback):
        return {
            "sql_string": "select * from product",
        }

    x = await sql('cfae', {}, print)


loop.run_until_complete(testfunc())
|
Loading…
Reference in New Issue
Block a user