Compare commits


No commits in common. "master" and "latest_branch" have entirely different histories.

108 changed files with 7649 additions and 995 deletions

View File

@ -0,0 +1,16 @@
Metadata-Version: 2.1
Name: appPublic
Version: 5.1.27
Summary: appPublic
Home-page: https://github.com/yumoqing/appPublic
Author: yumoqing
Author-email: yumoqing@gmail.com
Platform: any
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Description-Content-Type: text/markdown
# appPublic
a set of common modules for Python development
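A minimal usage sketch (illustration only, not part of the package metadata), assuming the package is installed under the name appPublic and using DictObject as it appears elsewhere in this diff:

from appPublic.dictObject import DictObject

d = DictObject(name='appPublic', lang='python')
print(d.name, d['lang'])   # attribute and key access on the same object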

appPublic.egg-info/SOURCES.txt Executable file
View File

@ -0,0 +1,87 @@
README.md
setup.py
appPublic/CSVData.py
appPublic/Config.py
appPublic/ExecFile.py
appPublic/FiniteStateMachine.py
appPublic/MiniI18N.py
appPublic/ObjectCache.py
appPublic/RSAutils.py
appPublic/SQLite3Utils.py
appPublic/Singleton.py
appPublic/__init__.py
appPublic/across_nat.bak.py
appPublic/across_nat.py
appPublic/app_logger.py
appPublic/argsConvert.py
appPublic/asynciorun.py
appPublic/audioplayer.py
appPublic/background.py
appPublic/country_cn_en.py
appPublic/csv_Data.py
appPublic/dataencoder.py
appPublic/datamapping.py
appPublic/dictExt.py
appPublic/dictObject.old.py
appPublic/dictObject.py
appPublic/easyExcel.py
appPublic/exceldata.py
appPublic/excelwriter.py
appPublic/find_player.py
appPublic/folderUtils.py
appPublic/genetic.py
appPublic/hf.py
appPublic/http_client.py
appPublic/httpclient.py
appPublic/i18n.py
appPublic/ipgetter.py
appPublic/iplocation.py
appPublic/jsonConfig.py
appPublic/jsonIO.py
appPublic/localefunc.py
appPublic/log.py
appPublic/macAddress.py
appPublic/myImport.py
appPublic/myTE.py
appPublic/myjson.py
appPublic/mylog.py
appPublic/oauth_client.py
appPublic/objectAction.py
appPublic/outip.py
appPublic/pickleUtils.py
appPublic/port_forward.py
appPublic/process_workers.py
appPublic/proxy.py
appPublic/rc4.py
appPublic/receiveMail.py
appPublic/registerfunction.py
appPublic/restrictedEnv.py
appPublic/rsaPeer.py
appPublic/rsawrap.py
appPublic/set_fgcolor.py
appPublic/sockPackage.py
appPublic/sshx.py
appPublic/strUtils.py
appPublic/t.py
appPublic/testdict.py
appPublic/thread_workers.py
appPublic/timeUtils.py
appPublic/timecost.py
appPublic/tworkers.py
appPublic/udp_comm.py
appPublic/uni_outip.py
appPublic/unicoding.py
appPublic/uniqueID.py
appPublic/version.py
appPublic/wcag_checker.py
appPublic/worker.py
appPublic/zmq_reqrep.py
appPublic/zmq_topic.py
appPublic/zmqapi.py
appPublic.egg-info/PKG-INFO
appPublic.egg-info/SOURCES.txt
appPublic.egg-info/dependency_links.txt
appPublic.egg-info/requires.txt
appPublic.egg-info/top_level.txt
test/test_across_nat.py
test/test_aioupnp.py

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,19 @@
xlrd
bs4
ffpyplayer
xlwt
nanoid
psutil
ujson
numpy
rsa
brotli
aiohttp
upnpclient
py-natpmp
asyncio
requests
jinja2
pyzmq
cryptography
asyncssh==2.13.2

View File

@ -0,0 +1 @@
appPublic

Binary file not shown.

Binary file not shown.

View File

@ -1,7 +1,6 @@
# -*- coding:utf8 -*-
import re
from appPublic.dictObject import DictObject
from appPublic.registerfunction import rfrun
class ConvertException(Exception):
pass
@ -14,8 +13,7 @@ class ArgsConvert(object):
sl2 = [ u'\\' + c for c in self.subfixString ]
ps = u''.join(sl1)
ss = u''.join(sl2)
# re1 = ps + r"[_a-zA-Z_\u4e00-\u9fa5][a-zA-Z_0-9\u4e00-\u9fa5\,\.\'\{\}\[\]\(\)\-\+\*\/]*" + ss
re1 = ps + r".*?" + ss
re1 = ps + r"[_a-zA-Z_\u4e00-\u9fa5][a-zA-Z_0-9\u4e00-\u9fa5\,\.\'\{\}\[\]\(\)\-\+\*\/]*" + ss
self.re1 = re1
# print( self.re1,len(self.re1),len(re1),type(self.re1))
@ -45,13 +43,11 @@ class ArgsConvert(object):
return vs[len(self.preString):-len(self.subfixString)]
def getVarValue(self,var,namespace,default):
ns = DictObject(**namespace.copy())
ns.rfrun = rfrun
v = default
try:
v = eval(var,ns)
v = eval(var,namespace)
except Exception as e:
v = ns.get(var, None)
v = namespace.get(var, None)
if v:
return v
if callable(default):

View File

@ -1,62 +0,0 @@
from eventpy.eventdispatcher import EventDispatcher
from appPublic.dictObject import DictObject
def bind(self, eventname, handler):
self.appendListener(eventname, handler)
def unbind(self, eventname, handler):
self.removeListener(eventname, handler);
EventDispatcher.bind = bind
EventDispatcher.unbind = unbind
class EventProperty:
def __init__(self, event_name, initial_value=None):
self._value = initial_value
self.event_name = event_name
def __get__(self, instance, owner):
return self._value
def __set__(self, instance, value):
if self._value != value:
self._value = value
d = DictObject()
d.target = instance
d.data = value
d.event = self.event_name
instance.dispatch(self.event_name, d)
if __name__ == '__main__':
class SomeClass(EventDispatcher):
state = EventProperty('onstate', 0)
age = EventProperty('onage', 20)
def __init__(self):
super().__init__()
def observer1(data):
print(f"Observer 1 received: {data}")
def observer2(data):
print(f"Observer 2 received: {data}")
def observer3(data):
print(f"Observer 3 received: {data}")
# create an instance
si = SomeClass()
# add listeners
si.bind('onstate', observer1)
si.bind('onstate', observer2)
si.bind('onage', observer3)
# change the state
si.state = 10 # output: Observer 1 received: 10, Observer 2 received: 10
# change the state again
# si.unbind('onstate', observer1)  # unbind raises an error
si.state = 20 # output: Observer 2 received: 20
# change age's value
si.age = 30

View File

@ -1,25 +1,7 @@
import os
from traceback import format_exc
import asyncio
import aiohttp
from aiohttp import FormData
import ssl
import certifi
import json
from appPublic.myTE import MyTemplateEngine
import re
from appPublic.log import info, debug, warning, error, exception, critical
from urllib.parse import urlparse
from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector
def get_domain(url):
# Prepend 'http://' if the URL lacks a scheme
if not url.startswith(('http://', 'https://')):
url = 'http://' + url
parsed_url = urlparse(url)
netloc = parsed_url.netloc
domain = netloc.split(':')[0]
return domain
RESPONSE_BIN = 0
RESPONSE_TEXT = 1
@ -39,49 +21,27 @@ class HttpError(Exception):
return str(self)
class HttpClient:
def __init__(self,coding='utf-8', socks5_proxy_url=None):
def __init__(self,coding='utf-8'):
self.coding = coding
self.session = None
self.cookies = {}
self.proxy_connector = None
self.socks5_proxy_url = socks5_proxy_url
self.blocked_domains = set()
self.load_cache()
def save_cache(self):
home_dir = os.path.expanduser('~')
cache_file = os.path.join(home_dir, '.proxytarget')
with open(cache_file, 'w') as f:
for d in self.blocked_domains:
f.write(f'{d}\n')
def load_cache(self):
# initialize the cache file
home_dir = os.path.expanduser('~')
cache_file = os.path.join(home_dir, '.proxytarget')
try:
with open(cache_file, 'r') as f:
for line in f:
domain = line.strip()
if domain:
self.blocked_domains.add(domain)
except FileNotFoundError:
# create an empty file
with open(cache_file, 'w') as f:
pass
async def close(self):
if self.session:
await self.session.close()
self.session = None
def url2domain(self,url):
parts = url.split('/')[:3]
pre = '/'.join(parts)
return pre
def setCookie(self,url,cookies):
name = get_domain(url)
name = self.url2domain(url)
self.cookies[name] = cookies
def getCookies(self,url):
name = get_domain(url)
name = self.url2domain(url)
return self.cookies.get(name,None)
def getsession(self,url):
@ -90,148 +50,52 @@ class HttpClient:
self.session = aiohttp.ClientSession(cookie_jar=jar)
return self.session
async def response_generator(self, url, resp):
async def handleResp(self,url,resp,resp_type, stream_func=None):
if resp.cookies is not None:
self.setCookie(url,resp.cookies)
async for chunk in resp.content.iter_chunked(1024):
yield chunk
async def response_handle(self,url, resp, resp_type=None, stream_func=None):
if resp.cookies is not None:
self.setCookie(url,resp.cookies)
if stream_func:
async for chunk in resp.content.iter_chunked(1024):
if stream_func:
await stream_func(chunk)
return None
if resp_type == RESPONSE_BIN:
return await resp.read()
if resp_type == RESPONSE_JSON:
return await resp.json()
if resp_type == RESPONSE_TEXT:
return await resp.text(self.coding)
async for chunk in resp.content.iter_chunked(1024):
if stream_func:
await stream_func(chunk)
def grapCookie(self,url):
session = self.getsession(url)
domain = get_domain(url)
domain = self.url2domain(url)
filtered = session.cookie_jar.filter_cookies(domain)
return filtered
async def make_request(self, url, method='GET',
params=None,
data=None,
jd=None,
headers=None,
use_proxy=False
):
if use_proxy:
connector = ProxyConnector.from_url(self.socks5_proxy_url)
reco = aiohttp.ClientSession(connector=connector)
else:
reco = aiohttp.ClientSession()
async with reco as session:
hp = {
}
if params:
hp['params'] = params
if data:
hp['data'] = data
if jd:
hp['jd'] = jd
if headers:
hp['headers'] = headers
if url.startswith('https://'):
debug(f'{url=} add_ssl_ctx')
hp['ssl_ctx'] = ssl.create_default_context(cafile=certifi.where())
# debug(f'{url=}, {hp=}')
return await session.request(method, url,
**hp
)
async def get_request_response(self, url, method='GET',
params=None,
data=None,
jd=None,
headers=None,
**kw
):
domain = get_domain(url)
try:
if self.socks5_proxy_url is None or domain not in self.blocked_domains:
return await self.make_request(url, method=method,
params=params,
data=data,
jd=jd,
use_proxy=False,
headers=headers
)
except:
e = Exception(f'make_request error')
exception(f'{e=}, {format_exc()}')
if self.socks5_proxy_url is None:
raise e
debug(f'{self.socks5_proxy_url=}, {self.blocked_domains=}, {domain=}')
if domain not in self.blocked_domains:
self.blocked_domains.add(domain)
self.save_cache()
return await self.make_request(url, method=method,
params=params,
data=data,
jd=jd,
use_proxy=True,
headers=headers
)
async def request(self, url, method='GET',
async def request(self, url, method,
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
stream_func=None,
headers=None,
**kw
):
resp = await self.get_request_response(url, method=method,
params=params,
data=data,
jd=jd,
headers=headers,
**kw
)
if resp.status==200:
return await self.response_handle(url, resp,
resp_type=response_type,
stream_func=stream_func)
**kw):
session = self.getsession(url)
if params == {}:
params = None
if data == {}:
data = None
if jd == {}:
jd = None
if headers == {}:
headers = None
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=})'
exception(msg)
raise HttpError(resp.status, msg)
async def __call__(self, url, method='GET',
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
headers=None,
stream=False,
use_proxy=False,
**kw
):
resp = await self.get_request_response(url, method=method,
params=params,
data=data,
jd=jd,
headers=headers,
**kw)
resp = await session.request(method, url,
params=params,
data=data,
json=jd,
headers=headers, **kw)
if resp.status==200:
async for d in self.response_generator(url, resp):
yield d
return
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=})'
return await self.handleResp(url, resp, response_type, stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
exception(msg)
raise HttpError(resp.status, msg)
@ -240,124 +104,14 @@ class HttpClient:
async def post(self,url, **kw):
return self.request(url, 'POST', **kw)
class JsonHttpAPI:
def __init__(self, env={}, socks5_proxy_url=None):
self.env = env
self.te = MyTemplateEngine([], env=env)
self.hc = HttpClient(socks5_proxy_url=socks5_proxy_url)
async def stream_func(self, chunk):
debug(f'{chunk=}')
d = self.chunk_buffer + chunk
a, b = d.split('\n', 1)
self.chunk_buffer = b
if self.resptmpl:
ns1 = json.loads(a)
a = self.te.renders(self.resptmpl, ns1)
if self.user_stream_func:
jd = json.loads(a)
await self.user_stream_func(jd)
async def chunk_handle(self, chunk, chunk_lead, chunk_end):
return chunk
async def __call__(self, url, method='GET', ns={},
headerstmpl=None,
paramstmpl=None,
datatmpl=None,
chunk_leading=None,
chunk_end="[done]",
resptmpl=None):
headers = None
self.chunk_buffer = ''
ns1 = self.env.copy()
ns1.update(ns)
if headerstmpl:
headers = json.loads(self.te.renders(headerstmpl, ns1))
info(f'{headers=},{ns=}, {headerstmpl=}')
params = None
if paramstmpl:
params = json.loads(self.te.renders(paramstmpl, ns1))
data = None
stream = False
if datatmpl:
datadic = json.loads(self.te.renders(datatmpl, ns1))
stream = datadic.get('stream', False)
data = json.dumps(datadic, ensure_ascii=False)
hc = HttpClient()
async for d in self.hc(url, method=method,
stream=stream,
headers=headers,
params=params,
data=data):
if stream:
d = self.chunk_handle(d, chunk_leading, chunk_end)
if resptmpl:
dic = json.loads(d)
ns1.update(dic)
d = self.te.renders(resptmpl, ns1)
yield d
async def call(self, url, method='GET', ns={},
stream_func=None,
headerstmpl=None,
paramstmpl=None,
datatmpl=None,
chunk_leading=None,
chunk_end="[done]",
resptmpl=None):
self.user_stream_func = stream_func
self.chunk_leading = chunk_leading
self.chunk_end = chunk_end
self.chunk_buffer = ''
self.resptmpl =resptmpl
headers = None
ns1 = self.env.copy()
ns1.update(ns)
if headerstmpl:
headers = json.loads(self.te.renders(headerstmpl, ns1))
info(f'{headers=},{ns=}, {headerstmpl=}')
params = None
if paramstmpl:
params = json.loads(self.te.renders(paramstmpl, ns1))
data = None
if datatmpl:
datadic = json.loads(self.te.renders(datatmpl, ns1))
data = json.dumps(datadic, ensure_ascii=False)
"""
data = FormData()
for k,v in datadic.items():
data.add_field(k, v)
headers['Content-Type'] = 'multipart/form-data'
"""
info(f'{data=},{ns=}, {headers=}')
if stream_func:
resp = await self.hc.request(url, method=method, headers=headers,
stream_func=stream_func,
params=params,
data=data)
else:
resp = await self.hc.request(url, method=method, headers=headers,
response_type=RESPONSE_JSON,
params=params,
data=data)
ret = resp
if resptmpl:
ns1 = self.env.copy()
ns1.update(resp)
rets = self.te.renders(resptmpl, ns1)
ret = json.loads(rets)
return ret
session = self.getsession(url)
if __name__ == '__main__':
async def main():
hc = HttpClient(socks5_proxy_url='socks5://localhost:1086')
async for d in hc('https://www.baidu.com'):
print(d)
r = await hc.request('https://www.google.com')
async def gbaidu(hc):
r = await hc.get('https://www.baidu.com')
print(r)
await hc.close()
loop = asyncio.new_event_loop()
loop.run_until_complete(main())
loop = asyncio.get_event_loop()
hc = HttpClient()
loop.run_until_complete(gbaidu(hc))

View File

@ -1,6 +1,5 @@
import sys
import codecs
from traceback import format_exc
from appPublic.timeUtils import timestampstr
from appPublic.Singleton import SingletonDecorator
import inspect
@ -16,9 +15,8 @@ def my_function():
@SingletonDecorator
class MyLogger:
levels={
"clientinfo":7,
"info":6,
"debug":5,
"info":6,
"warning":4,
"error":3,
"exception":2,
@ -65,11 +63,6 @@ class MyLogger:
self.logger.flush()
self.close_logger()
def clientinfo(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
logger.log('clientinfo', message, frame_info)
def info(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
@ -97,8 +90,6 @@ def critical(message):
def exception(message):
frame_info = inspect.currentframe()
tb_msg = format_exc()
msg = f'{message}\n{tb_msg}'
logger = MyLogger('exception')
logger.log('exception', msg, frame_info)
logger.log('exception', message, frame_info)

View File

@ -17,7 +17,7 @@ def string_template_render(tmp_string, data):
return rtemplate.render(**data)
class MyTemplateEngine:
def __init__(self,pathList,file_coding='utf-8',out_coding='utf-8', env={}):
def __init__(self,pathList,file_coding='utf-8',out_coding='utf-8'):
self.file_coding = file_coding
self.out_coding = out_coding
loader = FileSystemLoader(pathList, encoding=self.file_coding)
@ -41,8 +41,6 @@ class MyTemplateEngine:
'extname':lambda x:os.path.splitext(x)[-1],
}
self.env.globals.update(denv)
if env:
self.env.globals.update(env)
def set(self,k,v):
self.env.globals.update({k:v})
@ -82,17 +80,3 @@ def tmpTml(f, ns):
wf.write(b)
return p
if __name__ == '__main__':
import sys
import json
if len(sys.argv) < 3:
print(f'{sys.argv[0]} tmplfile jsonfile')
sys.exit(1)
te = MyTemplateEngine('.')
with codecs.open(sys.argv[1], 'r', 'utf-8') as f:
tmpl = f.read()
with codecs.open(sys.argv[2], 'r', 'utf-8') as f1:
ns = json.loads(f1.read())
print(te.renders(tmpl, ns))

View File

@ -1,56 +0,0 @@
import json
from time import sleep
from multiprocessing import Manager
from multiprocessing.shared_memory import SharedMemory
from multiprocessing.resource_tracker import unregister
class PSharedMemory:
tailstring=b'#:@#'
def __init__(self, name, datalen, data=None):
self.sm = None
self.name = name
self.datalen = datalen
self.lock = Manager().Lock()
if data:
self.sm = SharedMemory(name=self.name, create=True, size=self.datalen)
self.creator = True
self.set(data)
else:
self.sm = SharedMemory(name=self.name)
unregister(self.sm._name, 'shared_memory')
self.creator = False
def get(self):
b = self.sm.buf.tobytes().split(self.tailstring)[0]
d = b.decode('utf-8')
return json.loads(d)
def set(self, data):
with self.lock:
d = json.dumps(data)
b = d.encode('utf-8') + self.tailstring
if self.datalen < len(b):
raise Exception(f'SharedMemory allocated size is {self.datalen} set size is {len(b)}')
self.sm.buf[:len(b)] = b
def __del__(self):
if self.sm is None:
return
self.sm.close()
if self.creator:
self.sm.unlink()
if __name__ == '__main__':
import sys
data = {
"aaa":"134902t34gf",
"bbb":36
}
if len(sys.argv) > 1:
sm = PSharedMemory('rtgerigreth', datalen=200, data=data)
sleep(10000)
else:
sm = PSharedMemory('rtgerigreth', datalen=200 )
x = sm.get()
print(f'data in shared memory: {x}')

View File

@ -19,22 +19,13 @@ class RegisterFunction:
def get(self,name):
return self.registKW.get(name,None)
def run(self, name, *args, **kw):
f = self.get(name)
if iscoroutinefunction(f):
print(f'{name} is a coro')
return None
if f:
return f(*args, **kw)
error(f'{name} not register')
async def exe(self, name, *args, **kw):
f = self.get(name)
if f is None:
# error(f'{name=} function not registered')
error(f'{name=} function not registered')
return None
if iscoroutinefunction(f):
# info(f'{name=} is coroutine function');
info(f'{name=} is coroutine function');
return await f(*args, **kw)
return f(*args, **kw)
@ -73,14 +64,6 @@ def registerFunction(name, func):
rf = RegisterFunction()
rf.register(name, func)
async def rfexe(rfname, *args, **kw):
rf = RegisterFunction()
return await rf.exe(rfname, *args, **kw)
def rfrun(rfname, *args, **kw):
rf = RegisterFunction()
return rf.run(rfname, *args, **kw)
async def main():
d = {}
rf = RegisterCoroutine()

View File

@ -1,151 +0,0 @@
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.backends import default_backend
from cryptography.exceptions import InvalidSignature
def _load_private_key(filepath: str, password: bytes = None):
with open(filepath, "rb") as key_file:
key_data = key_file.read()
if b"BEGIN OPENSSH PRIVATE KEY" in key_data:
return serialization.load_ssh_private_key(key_data, password=password, backend=default_backend())
elif b"BEGIN RSA PRIVATE KEY" in key_data or b"BEGIN PRIVATE KEY" in key_data:
return serialization.load_pem_private_key(key_data, password=password, backend=default_backend())
else:
raise ValueError("Unsupported private key format")
def _load_public_key(filepath: str):
with open(filepath, "rb") as key_file:
key_data = key_file.read()
if key_data.startswith(b"ssh-"):
return serialization.load_ssh_public_key(key_data, backend=default_backend())
elif b"BEGIN PUBLIC KEY" in key_data:
return serialization.load_pem_public_key(key_data, backend=default_backend())
else:
raise ValueError("Unsupported public key format")
def _write_public_key(public_key, filepath, fmt="pem"):
if fmt.lower() == "pem":
encoding = serialization.Encoding.PEM
format = serialization.PublicFormat.SubjectPublicKeyInfo
elif fmt.lower() == "openssh":
encoding = serialization.Encoding.OpenSSH
format = serialization.PublicFormat.OpenSSH
else:
raise ValueError("Unsupported format. Use: pem or openssh")
pem = public_key.public_bytes(
encoding=encoding,
format=format
)
with open(filepath, "wb") as f:
f.write(pem)
def _write_private_key(key, filepath, fmt="pkcs8", password: bytes = None):
if fmt.lower() == "pkcs8":
encoding = serialization.Encoding.PEM
format = serialization.PrivateFormat.PKCS8
elif fmt.lower() == "pkcs1":
encoding = serialization.Encoding.PEM
format = serialization.PrivateFormat.TraditionalOpenSSL
elif fmt.lower() == "openssh":
encoding = serialization.Encoding.PEM
format = serialization.PrivateFormat.OpenSSH
else:
raise ValueError("Unsupported format. Use: pkcs1, pkcs8, openssh")
encryption = serialization.NoEncryption() if password is None else serialization.BestAvailableEncryption(password)
pem = key.private_bytes(
encoding=encoding,
format=format,
encryption_algorithm=encryption
)
with open(filepath, "wb") as f:
f.write(pem)
def _sign(prikey, data):
"""
use prikey to sign bytes type data
"""
signature = prikey.sign(
data,
padding.PKCS1v15(), # or use PSS instead
hashes.SHA256()
)
return signature
def _verify(pubkey, data, signature):
try:
pubkey.verify(
signature,
data,
padding.PKCS1v15(), # must match the padding used when signing
hashes.SHA256()
)
return True
except InvalidSignature:
return False
class RSAer:
def __init__(self):
self.prikey = None
self.pubkey = None
def create_key(self, keylen=2048):
self.prikey = rsa.generate_private_key(
public_exponent=65537,
key_size=keylen
)
def write_private_key(self, filepath, fmt="pkcs8", password: bytes = None):
if self.prikey is None:
raise Exception('private key is None')
_write_private_key(self.prikey, filepath, fmt=fmt, password=password)
def write_public_key(self, filepath, fmt="pem"):
if self.prikey is None:
raise Exception('private key is None')
if self.pubkey is None:
self.pubkey = self.prikey.public_key()
_write_public_key(self.pubkey, filepath, fmt=fmt)
def load_private_key(self, filepath, password=None):
self.prikey = _load_private_key(filepath, password=password)
def load_public_key(self, filepath):
self.pubkey = _load_public_key(filepath)
def encode(self, data):
pass # placeholder body; no implementation in the source
def decode(self, data):
pass # placeholder body; no implementation in the source
def sign(self, data):
return _sign(self.prikey, data)
def verify(self, data, signature):
return _verify(self.pubkey, data, signature)
if __name__ == '__main__':
# example: load the private and public keys
private_key = _load_private_key("path/to/private_key.pem", password=None) # password may be b"your_passphrase"
public_key = _load_public_key("path/to/public_key.pub")
print("private key type:", type(private_key))
print("public key type:", type(public_key))
# private_key is an RSAPrivateKey object (as returned by _load_private_key above)
_write_private_key(private_key, "private_pkcs8.pem", fmt="pkcs8")
_write_private_key(private_key, "private_pkcs1.pem", fmt="pkcs1")
_write_private_key(private_key, "private_openssh", fmt="openssh")
# public_key is an RSAPublicKey object
_write_public_key(public_key, "public.pem", fmt="pem")
_write_public_key(public_key, "id_rsa.pub", fmt="openssh")

View File

@ -2,7 +2,6 @@ import os
import sys
import time
import shlex
from contextlib import asynccontextmanager
from functools import partial
from threading import Thread
from appPublic.myTE import tmpTml
@ -13,15 +12,11 @@ class SSHNode:
username='root',
port=22,
password=None,
client_keys=[],
passphrase=None,
jumpers=[]):
self.server2 = {
"host":host,
"username":username,
"password":password,
"client_keys":client_keys,
"passphrase":passphrase,
"port":port
}
print(self.server2)
@ -45,91 +40,42 @@ class SSHNode:
def set_jumpers(self, jumpers):
self.jumpers = jumpers
async def _connect(self, **kw):
refconn = kw['refconn']
host = kw['host']
username = kw.get('username', 'root')
port = kw.get('port',22)
password= kw.get('password', None)
client_keys = kw.get('client_keys', [])
passphrase = kw.get('passphrase', None)
conn = None
if refconn:
if password:
conn = await refconn.connect_ssh(host,
username=username,
known_hosts=None,
keepalive_interval=60,
password=password,
port=port)
elif client_keys != []:
conn = await refconn.connect_ssh(host,
username=username,
known_hosts=None,
keepalive_interval=60,
client_keys=client_keys,
passphrase=passphrase,
port=port)
else:
conn = await refconn.connect_ssh(host,
username=username,
known_hosts=None,
keepalive_interval=60,
port=port)
else:
if password:
conn = await asyncssh.connect(host,
username=username,
known_hosts=None,
keepalive_interval=60,
password=password,
port=port)
elif client_keys:
conn = await asyncssh.connect(host,
username=username,
known_hosts=None,
keepalive_interval=60,
client_keys=client_keys,
passphrase=passphrase,
port=port)
else:
conn = await asyncssh.connect(host,
username=username,
known_hosts=None,
keepalive_interval=60,
port=port)
return conn
async def connect(self):
refconn = None
for jj in self.jumpers:
j = jj.copy()
j['refconn'] = refconn
refconn = await self._connect(**j)
for j in self.jumpers:
host = j['host']
username = j.get('username', 'root')
port = j.get('port',22)
password= j.get('password', None)
if refconn:
refconn = await refconn.connect_ssh(host,
username=username,
known_hosts=None,
password=password,
port=port)
else:
refconn = await asyncssh.connect(host,
username=username,
known_hosts=None,
password=password,
port=port)
self.jumper_conns.append(refconn)
j = self.server2.copy()
j['refconn'] = refconn
self.conn = await self._connect(**j)
@asynccontextmanager
async def get_connector(self):
refconn = None
for jj in self.jumpers:
j = jj.copy()
j['refconn'] = refconn
refconn = await self._connect(**j)
j = self.server2.copy()
j['refconn'] = refconn
conn = await self._connect(**j)
try:
yield SshConnector(conn, refconn=refconn)
except Exception as e:
exception(f'{e=}, {format_exc()}')
conn.close()
host = self.server2['host']
username = self.server2.get('username', 'root')
port = self.server2.get('port',22)
password = self.server2.get('password', None)
if refconn:
refconn.close()
self.conn = await refconn.connect_ssh(host,
username=username,
port=port,
password=password,
known_hosts=None)
else:
self.conn = await asyncssh.connect(host,
username=username,
password=password,
port=port)
def close(self):
self.conn.close()
@ -161,8 +107,9 @@ class SSHNode:
async def _xcmd(self, cmd, xmsgs=[], ns={},
show_input=None,
show_stdout=None):
proc = await self._process(cmd, term_type='xterm-256color',
term_size=(24, 80),
proc = await self._process(cmd, term_type='xterm',
term_size=(80,24),
encoding='utf-8'
)
@ -343,8 +290,8 @@ class SSHBash:
async def run(self, read_co, write_co):
await self.node.connect()
self.p_obj = await self.node._process('bash',
term_type='xterm-256color',
term_size=(24, 80),
term_type='vt100',
term_size=(80,24),
encoding=None)
if isinstance(self.p_obj, Exception):
print('Excetion:', self.p_obj)
@ -366,31 +313,6 @@ class SSHBash:
x = await self.p_obj.stdout.read(1024)
await write_co(x)
class SshConnector:
def __init__(self, conn, refconn=None):
self.conn = conn
self.refconn = refconn
async def r2l(self, rf, lf):
x = await asyncssh.scp((self.conn, rf),
lf,
preserve=True,
recurse=True)
return x
async def l2r(self, lf, rf):
x = await asyncssh.scp(lf, (self.conn, rf),
preserve=True,
recurse=True)
return x
async def run_process(self, *args, **kw):
a = await self.conn.create_process(*args, **kw)
return a
async def run(self, cmdline, input=None, stdin=None, stdout=None, stderr=None):
return await self.conn.run(cmdline, input=input, stdin=stdin, stdout=stdout)
if __name__ == '__main__':
async def sysstdin_read():
return os.read(sys.stdin.fileno(), 65535)

View File

@ -1,152 +0,0 @@
#!/Users/ymq/p3.12/bin/python
from traceback import format_exc
import aiohttp
import asyncio
from aiohttp_socks import ProxyConnector
from pathlib import Path
import certifi
import ssl
import os
from appPublic.log import exception, debug
async def liner(async_gen):
remainer = ''
async for chunk in async_gen:
chunk = chunk.decode('utf-8')
d = remainer + chunk
lst = d.split('\n', 1)
if len(lst) == 2:
if lst[0]:
yield lst[0]
remainer = lst[1]
else:
remainer = lst[0]
for l in remainer.split('\n'):
if l:
await asyncio.sleep(0)
yield l
class StreamHttpClient:
def __init__(self, socks5_url="socks5://127.0.0.1:1086"):
home = os.path.expanduser("~")
self.socks_urls_file = Path(f'{home}/.socksurls.txt')
self.socks5_url = socks5_url
self.socks_urls = set(self._load_socks_urls())
self.ssl_context = ssl.create_default_context(cafile=certifi.where())
def _load_socks_urls(self):
if self.socks_urls_file.exists():
return [line.strip() for line in self.socks_urls_file.read_text().splitlines() if line.strip()]
return []
def _save_socks_url(self, url):
if url not in self.socks_urls:
self.socks_urls.add(url)
with self.socks_urls_file.open("a") as f:
f.write(url + "\n")
async def __call__(self, method, url, *,
headers=None,
params=None,
data=None,
json=None,
files=None,
chunk_size=1024, **kw):
"""
Makes an HTTP request and yields response chunks (streamed).
"""
use_socks = url in self.socks_urls
try:
if use_socks:
debug(f"🔁 Using SOCKS5 directly for: {url}")
async for chunk in self._request_with_connector(
method, url,
headers=headers, params=params, data=data,
json=json, files=files,
use_socks=True, chunk_size=chunk_size, **kw
):
yield chunk
else:
debug(f"🌐 Trying direct request: {url}")
async for chunk in self._request_with_connector(
method, url,
headers=headers, params=params, data=data,
json=json, files=files,
use_socks=False, chunk_size=chunk_size, **kw
):
yield chunk
except Exception as e:
if use_socks:
exception(f"❌ SOCKS5 request failed: {e},{format_exc()}")
return
debug(f"❌ Direct request failed: {e}")
debug("🧦 Retrying with SOCKS5 proxy...")
try:
async for chunk in self._request_with_connector(
method, url,
headers=headers, params=params, data=data,
json=json, files=files,
use_socks=True, chunk_size=chunk_size, **kw
):
self._save_socks_url(url)
yield chunk
except Exception as e2:
exception(f"❌ SOCKS5 request also failed: {e2},{format_exc()}")
async def _request_with_connector(self, method, url,
headers=None, params=None, data=None,
json=None, files=None,
use_socks=False,
chunk_size=1024,
**kw):
connector = ProxyConnector.from_url(self.socks5_url) if use_socks else None
async with aiohttp.ClientSession(connector=connector) as session:
req_args = kw
req_args.update({
"headers": headers,
"params": params,
"ssl": self.ssl_context,
})
if files:
form = aiohttp.FormData()
if isinstance(data, dict):
for k, v in data.items():
form.add_field(k, str(v))
for name, file_info in files.items():
form.add_field(name, *file_info)
req_args["data"] = form
else:
if json is not None:
req_args["json"] = json
else:
req_args["data"] = data
async with session.request(method, url, **req_args) as response:
response.raise_for_status()
async for chunk in response.content.iter_chunked(chunk_size):
yield chunk
if __name__ == '__main__':
import asyncio
import sys
async def main():
if len(sys.argv) > 1:
prompt = sys.argv[1]
else:
prompt = 'who are you'
hc = StreamHttpClient()
url = 'http://devops.opencomputing.ai/v1/chat/completions'
headers={'Content-Type': 'application/json'}
data='{ "model": "devstral", "stream":true, "messages":[ { "role":"user", "content":"' + prompt + '" } ] }'
async for chunk in hc('POST', url, data=data, headers=headers):
print(chunk)
asyncio.new_event_loop().run_until_complete(main())

View File

@ -1,48 +0,0 @@
import re
def split_english_sentences(text):
# normalize a period that runs directly into the next sentence into '. '
text = re.sub(r'([a-zA-Z])\.([A-Z])', r'\1. \2', text)
# do not split on abbreviations (e.g. "Dr.", "U.S.A.")
abbreviations = r"(Mr|Mrs|Ms|Dr|St|Jr|Sr|vs|i\.e|e\.g|U\.S\.A|U\.K)\."
text = re.sub(abbreviations, lambda m: m.group(0).replace('.', '<DOT>'), text)
# split into sentences with a regex
sentences = re.split(r'(?<=[.!?])\s+', text.strip())
# restore the <DOT> placeholders inside abbreviations
sentences = [s.replace('<DOT>', '.') for s in sentences if s.strip()]
return sentences
def split_text_with_dialog_preserved(text):
# regex matching dialog wrapped in Chinese or English quotation marks
text = ''.join(text.split('\r'))
text = ' '.join(text.split('\n'))
dialog_pattern = r'([“"](.*?)[”"])'
parts = []
last_idx = 0
# extract all dialog first so it is never split up
for match in re.finditer(dialog_pattern, text, flags=re.DOTALL):
start, end = match.span()
# split the preceding non-dialog text into sentences
non_dialog = text[last_idx:start]
sentences = re.findall(r'[^。!?!?]*[。!?!?]', non_dialog, re.MULTILINE)
print(f'{non_dialog=}, {sentences=}')
if len(sentences) == 0:
sentences = split_english_sentences(non_dialog)
parts.extend([s.strip() for s in sentences if s.strip()])
# append the whole dialog segment
parts.append(match.group(1).strip())
last_idx = end
# handle the text after the last dialog segment
remaining = text[last_idx:]
sentences = re.findall(r'[^。!?!?]*[。!?!?]', remaining, re.MULTILINE)
if len(sentences) == 0:
sentences = split_english_sentences(remaining)
parts.extend([s.strip() for s in sentences if s.strip()])
return parts
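A brief usage sketch (not part of the original file) showing how quoted dialog stays intact while the surrounding text is split into sentences:

text = 'He said, "I will be late." Then he left. She nodded.'
print(split_text_with_dialog_preserved(text))
# expected: ['He said,', '"I will be late."', 'Then he left.', 'She nodded.']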

View File

@ -5,22 +5,6 @@ from datetime import date, timedelta, datetime
leapMonthDays = [0,31,29,31,30,31,30,31,31,30,31,30,31]
unleapMonthDays = [0,31,28,31,30,31,30,31,31,30,31,30,31]
def days_between(date_str1, date_str2):
# Convert the strings to datetime objects
date1 = datetime.strptime(date_str1, '%Y-%m-%d')
date2 = datetime.strptime(date_str2, '%Y-%m-%d')
# Calculate the difference between the two dates
delta = date2 - date1
# Get the number of days
days = abs(delta.days)
return days
def monthfirstday():
d = datetime.now()
return '%4d-%02d-01' % (d.year, d.month)
def curDatetime():
return datetime.now()

View File

@ -1 +1 @@
__version__ = '5.5.0'
__version__ = '5.1.27'

View File

@ -1,10 +0,0 @@
from pydub import AudioSegment
def convert_to_16k_mono(in_wav_path, out_wav_path):
audio = AudioSegment.from_wav(in_wav_path)
audio = audio.set_frame_rate(16000).set_channels(1)
audio.export(out_wav_path, format="wav")
# usage:
# convert_to_16k_mono("input.wav", "output_16k_mono.wav")

View File

@ -0,0 +1,65 @@
import csv
class CSVData:
def __init__(self,csvfile,names = None,headline = 0,dataline = 1):
self.csvfile = csvfile
self.names = names
self.headline = headline
self.dataline = dataline
def read(self):
f = open(self.csvfile,'r')
reader = csv.reader(f)
fields = None
if self.names is not None:
fields = self.names
data = []
lno = 0
for l in reader:
if fields is None and lno == self.headline:
fields = [f for f in l]
if lno >= self.dataline:
rec = {}
for i in range(len(fields)):
rec[fields[i]] = l[i]
data.append(rec)
lno += 1
f.close()
return data
def iterRead(self):
self.fd = open(self.csvfile,'r')
try:
reader = csv.reader(self.fd)
fields = None
if self.names is not None:
fields = self.names
lno = 0
self.onReadBegin()
for l in reader:
if fields is None and lno == self.headline:
fields = [f for f in l]
if lno >= self.dataline:
rec = {}
for i in range(len(fields)):
rec[fields[i]] = l[i]
self.onRecord(rec)
lno += 1
self.fd.close()
self.onFinish()
except Exception as e:
self.fd.close()
raise e
def onReadBegin(self):
pass
def onRecord(self,rec):
print(rec)
def onFinish(self):
print("onFinish() called")
if __name__ == '__main__':
import sys
cd = CSVData(sys.argv[1],names = ['st_date','open_price','max_price','min_price','close_price','volume','adj_price'])
cd.iterRead()

View File

@ -0,0 +1,37 @@
# Config.py
# Copyright (c) 2009 longtop Co.
# See LICENSE for details.
# author: yumoqing@gmail.com
# created date: 2009-02-01
# last modified date: 2009-02-05
import os,sys
from appPublic.ExecFile import ExecFile
from appPublic.dictObject import DictObject
from appPublic.Singleton import Singleton
from zope.interface import implements
CONFIG_FILE = 'conf/config.ini'
from appPublic.folderUtils import ProgramPath
class Node(object) :
pass
class Config:
__metaclass = Singleton
def __init__(self,configpath=None):
if configpath is None:
ps = CONFIG_FILE.split('/')
configpath = os.path.join(ProgramPath(),*ps)
self.configfile = configpath
self.__execfile = ExecFile(self,path=configpath)
self.__execfile.set('Node',Node)
self.__execfile.set('DictObject',DictObject)
self.__execfile.set('dict',DictObject)
r,msg = self.__execfile.run()
if not r:
print(r,msg)
def getConfig(path=None):
conf = Config(path)
return conf

View File

@ -0,0 +1,107 @@
# ExecFile.py
# usage :
# r = ExecFile()
# r.set('a','bbbb')
# r.run('test/cards.ini')
# r.cards
#
import os,sys
class DictConfig(dict):
def __init__(self,dic=None,path=None,str=None,namespace={}):
dict.__init__(self)
self.namespace=namespace
if dic is not None and type(dic) == dict:
self.__dict__.update(dic)
self.__subConfig()
if path is not None:
self.__path = path
self.__load(path)
if str is not None:
self.__confstr = str
try:
exec(str,self.namespace,self.__dict__)
self.__subConfig()
except:
pass
def keys(self):
return self.__dict__.keys()
def __getitem__(self,n):
return self.__dict__[n]
def __getattr__(self,name):
if name in self.__dict__:
return self.__dict__[name]
raise AttributeError(name)
def __subConfig(self):
for n in self.__dict__.keys():
if type(self.__dict__[n]) == dict:
self.__dict__[n] = DictConfig(dic=self.__dict__[n])
elif type(self.__dict__[n]) == type([]):
a = []
for i in self.__dict__[n]:
if type(i) == dict:
a.append(DictConfig(dic=i))
else:
a.append(i)
self.__dict__[n] = a
elif type(self.__dict__[n]) == type(()):
a = []
for i in self.__dict__[n]:
if type(i) == dict:
a.append(DictConfig(dic=i))
else:
a.append(i)
self.__dict__[n] = tuple(a)
def __load(self,path):
d = {}
c = {}
f = open(path,'r')
buf = f.read()
f.close()
try:
exec(buf,self.namespace,c)
#print d
#print "c=",c
self.__dict__.update(c)
#print self.__dict__
self.__subConfig()
return True
except Exception as e:
print(self.__path,e)
return False
class ExecFile(object) :
def __init__(self,obj=None,path=None,namespace={}):
self.namespace = namespace
if obj == None:
obj = self
self.__object = obj
#self.namespace.update(self.__object.__dict__)
self.__file = path
def set(self,name,v) :
setattr(self.__object,name,v)
def get(self,name,default=None) :
return getattr(self.__object,name,default)
def run(self,path=None) :
if path!=None:
self.__file = path
if self.__file is None:
raise Exception('exec file is none')
f = open(self.__file,'r')
buf = f.read()
f.close()
try :
exec(buf,globals(),self.__object.__dict__)
except Exception as e:
print("ExecFile()",e,self.__file)
return (False,e)
return (True,'')

View File

@ -0,0 +1,53 @@
# FiniteStateMachine.py
## a virtual State object of FSM
#
class BaseFSM(object):
def enterState(self, obj):
raise NotImplementedError()
def execState(self, obj):
raise NotImplementedError()
def exitState(self, obj):
raise NotImplementedError()
## a FSM Manager
# only one Manager is needed per FSM
class FSMManager(object):
def __init__(self):
self._fsms = {}
def addState(self,state,fsm):
self._fsms[state] = fsm
def delState(self,state):
del self._fsms[state]
def getFSM(self, state):
return self._fsms[state]
def frame(self, objs, state):
for obj in objs:
if state == obj.fsm_cur_state:
obj.keepState()
else:
obj.changeState(state, self._fsms[state])
## an object which has a Finite State Machine
#
class FSMObject(object):
def attachFSM(self,state,fsm):
self.fsm_state_object = fsm
self.fsm_cur_state = state
def changeState(self,new_state,newfsm):
self.fsm_cur_state = new_state
self.fsm_state_object.exitState(self)
self.fsm_state_object = newfsm
self.fsm_state_object.enterState(self)
self.fsm_state_object.execState(self)
def keepState(self):
self.fsm_state_object.execState(self)
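A hedged sketch (not part of the original file) of wiring the three classes together with a trivial two-state machine; it assumes the attribute-name fix in FSMManager.frame() noted above:

class PrintState(BaseFSM):
    def __init__(self, name): self.name = name
    def enterState(self, obj): print('enter', self.name)
    def execState(self, obj): print('exec', self.name)
    def exitState(self, obj): print('exit', self.name)

mgr = FSMManager()
mgr.addState('idle', PrintState('idle'))
mgr.addState('run', PrintState('run'))
obj = FSMObject()
obj.attachFSM('idle', mgr.getFSM('idle'))
mgr.frame([obj], 'run')   # exit idle, enter run, exec run
mgr.frame([obj], 'run')   # same state: keepState() just executes run again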

View File

@ -0,0 +1,150 @@
import os,re,sys
import codecs
from appPublic.folderUtils import _mkdir
from appPublic.Singleton import SingletonDecorator
from appPublic.folderUtils import ProgramPath
import threading
import time
import locale
comment_re = re.compile(r'\s*#.*')
msg_re = re.compile(r'\s*([^:]*)\s*:\s*([^\s].*)')
def dictModify(d, md) :
for i in md.keys() :
if md[i]!=None :
d[i] = md[i]
return d
convert_pairs = {':':'\\x3A',
'\n':'\\x0A',
'\r':'\\x0D',
}
def charEncode(s) :
r = ''
v = s.split('\\')
s = '\\\\'.join(v)
for i in convert_pairs.keys() :
v = s.split(i)
s = convert_pairs[i].join(v)
# print 'i=',i,'iv=',convert_pairs[i],'s=',s
return s
def charDecode(s) :
for i in convert_pairs.items() :
v = s.split(i[1])
s = i[0].join(v)
v = s.split('\\\\')
s = '\\'.join(v)
return s
def getTextDictFromLines(lines) :
d = {}
for l in lines :
l = ''.join(l.split('\r'))
if comment_re.match(l) :
continue
m = msg_re.match(l)
if m :
grp = m.groups()
d[charDecode(grp[0])] = charDecode(grp[1])
return d
def getFirstLang(lang) :
s = lang.split(',')
return s[0]
@SingletonDecorator
class MiniI18N:
"""
"""
def __init__(self,path,lang=None,coding='utf8') :
self.path = path
l = locale.getdefaultlocale()
self.curLang = l[0]
self.coding = coding
self.id = 'i18n'
self.langTextDict = {}
self.setupMiniI18N()
self.missed_pt = None
self.translated_pt = None
self.header_pt = None
self.footer_pt = None
self.show_pt=None
self.clientLangs = {}
self.languageMapping = {}
self.timeout = 600
def __call__(self,msg,lang=None) :
"""
"""
if type(msg) == type(b''):
msg = msg.decode(self.coding)
return self.getLangText(msg,lang)
def setLangMapping(self,lang,path):
self.languageMapping[lang] = path
def getLangMapping(self,lang):
return self.languageMapping.get(lang,lang)
def setTimeout(self,timeout=600):
self.timeout = timeout
def delClientLangs(self):
t = threading.currentThread()
tim = time.time() - self.timeout
[ self.clientLangs.pop(k,None) for k in list(self.clientLangs.keys()) if self.clientLangs[k]['timestamp'] < tim ]
def getLangDict(self,lang):
lang = self.getLangMapping(lang)
return self.langTextDict.get(lang,{})
def getLangText(self,msg,lang=None) :
"""
"""
if lang==None :
lang = self.getCurrentLang()
textMapping = self.getLangDict(lang)
return textMapping.get(msg,msg)
def setupMiniI18N(self) :
"""
"""
p = os.path.join(self.path,'i18n')
langs = []
for f in os.listdir(p) :
if os.path.isdir(os.path.join(p,f)) :
langs.append(f)
for dir in langs :
p1 = os.path.join(p,dir,'msg.txt')
if os.path.exists(p1) :
f = codecs.open(p1,'r',self.coding)
textDict = getTextDictFromLines(f.readlines())
f.close()
self.langTextDict[dir] = textDict
self._p_changed = 1
def setCurrentLang(self,lang):
lang = self.getLangMapping(lang)
t = time.time()
threadid = threading.currentThread()
a = dict(timestamp=t,lang=lang)
self.clientLangs[threadid] = a
def getCurrentLang(self) :
"""
"""
threadid = threading.currentThread()
return self.clientLangs[threadid]['lang']
def getI18N(coding='utf8'):
path = ProgramPath()
i18n = MiniI18N(path, coding=coding)
return i18n
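A hedged sketch (not part of the original file) of the expected message-file layout and a lookup. Per setupMiniI18N above, message files live at <path>/i18n/<lang>/msg.txt with one 'source: translation' pair per line and '#' lines treated as comments; the ./myapp path and fr_FR catalog below are hypothetical:

# ./myapp/i18n/fr_FR/msg.txt contains:
#   # greetings
#   hello: bonjour
i18n = MiniI18N('./myapp', coding='utf8')
i18n.setCurrentLang('fr_FR')
print(i18n('hello'))   # -> bonjour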

View File

@ -0,0 +1,53 @@
# !/usr/bin/env python
#
# ObjectCache is an object cache
# each cached object has to provide a "get_size" method so the
# cache can track how much space the objects take
import time
class ObjectCache(dict) :
def __init__(self,maxsize=10000000,*args) :
super(ObjectCache,self).__init__(*args)
self.maxsize = maxsize
self.size = 0
self._shadow = {}
def __setitem__(self,key,item) :
try :
size = item.get_size()
self.size += size
except :
return
if self.size >= self.maxsize :
tmp = [(t,key) for key,(t,size) in self._shadow.items() ]
tmp.sort()
for i in range(len(tmp)//2) :
del self[tmp[i][1]]
del tmp
super(ObjectCache,self).__setitem__(key,item)
self._shadow[key] = [time.time(),size]
def __getitem__(self,key) :
try :
item = super(ObjectCache,self).__getitem__(key)
except :
raise
else :
self._shadow[key][0] = time.time()
return item
def get(self,key,default=None) :
if key in self :
return self[key]
else :
return default
def __delitem__(self,key) :
try :
super(ObjectCache,self).__delitem__(key)
except :
raise
else :
self.size -= self._shadow[key][1]
del self._shadow[key]
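A hedged usage sketch (not part of the original file); it assumes the class-name and import fixes noted above and that cached objects expose get_size():

class Blob:
    def __init__(self, payload):
        self.payload = payload
    def get_size(self):
        return len(self.payload)

cache = ObjectCache(maxsize=1024)
cache['a'] = Blob(b'x' * 100)          # accepted because get_size() is defined
print(cache.get('a').get_size())       # -> 100
print(cache.get('missing', 'absent'))  # -> absent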

View File

@ -0,0 +1,89 @@
import codecs
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Cipher import PKCS1_v1_5 as V1_5
from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA512, SHA384, SHA256, SHA, MD5
from Crypto import Random
from base64 import b64encode, b64decode
hash = "SHA-256"
def readPublickey(fname):
with codecs.open(fname,'r','utf8') as f:
b = f.read()
k = RSA.importKey(b)
return k
return None
def readPrivatekey(fname,pwd):
with codecs.open(fname,'r','utf8') as f:
b = f.read()
k = RSA.importKey(b,pwd)
return k
return None
def newkeys(keysize):
random_generator = Random.new().read
key = RSA.generate(keysize, random_generator)
private, public = key, key.publickey()
return public, private
def importKey(externKey):
return RSA.importKey(externKey)
def getpublickey(priv_key):
return priv_key.publickey()
def encrypt(message, pub_key):
cipher = PKCS1_OAEP.new(pub_key)
return cipher.encrypt(message)
def decrypt(ciphertext, priv_key):
try:
cipher = PKCS1_OAEP.new(priv_key)
return cipher.decrypt(ciphertext)
except Exception as e:
print('e=',e)
cipher = V1_5.new(priv_key)
return cipher.decrypt(ciphertext, None)
def sign(message, priv_key, hashAlg = "SHA-256"):
global hash
hash = hashAlg
signer = PKCS1_v1_5.new(priv_key)
if (hash == "SHA-512"):
digest = SHA512.new()
elif (hash == "SHA-384"):
digest = SHA384.new()
elif (hash == "SHA-256"):
digest = SHA256.new()
elif (hash == "SHA-1"):
digest = SHA.new()
else:
digest = MD5.new()
digest.update(message)
return signer.sign(digest)
def verify(message, signature, pub_key):
signer = PKCS1_v1_5.new(pub_key)
if (hash == "SHA-512"):
digest = SHA512.new()
elif (hash == "SHA-384"):
digest = SHA384.new()
elif (hash == "SHA-256"):
digest = SHA256.new()
elif (hash == "SHA-1"):
digest = SHA.new()
else:
digest = MD5.new()
digest.update(message)
return signer.verify(digest, signature)
if __name__ == '__main__':
cipher="""WaMlLEYnhBk+kTDyN/4OJmQf4ccNdk6USgtKpb7eHsYsotq4iyXi3N5hB1E/PqrPSmca1AMDLUcumwIrLeGLT9it3eTBQgl1YQAsmPxa6lF/rDOZoLbwD5sJ6ab/0/fuM4GbotqN5/d0MeuOSELoo8cFWw+7XpRxn9EMYnw5SzsjDQRWxXjZptoaGa/8pBBkDmgLqINif9EWV+8899xqTd0e9w1Gqb7wbt/elRNVBpgsSuSZb+dtBlvNUjuTms8BETSRai5vhXetK26Ms8hrayiy38n7wwEKE8fZ9iFzLtwa6xbhD5KudWbKJFFOZAfpzWttGMwWlISbGQigcW4+Bg=="""
key = readPrivatekey('d:/dev/mecp/conf/RSA.private.key','ymq123')
t = decrypt(b64decode(cipher),key)
print('t=',t)

View File

@ -0,0 +1,220 @@
import os,sys
import thread
from sqlite3 import dbapi2 as sqlite
import time
from localefunc import *
from folderUtils import mkdir
from PublicData import public_data
from mylog import mylog
def logit(s) :
mylog('%s:%s' % (__file__,s))
class Record :
def __init__(self,data,localize=False) :
for i in data.keys() :
d = data[i]
if localize and type(d)==type('') :
d = localeString(d)
setattr(self,i.lower(),d)
def __getattr__(self,name) :
name = name.lower()
try :
return self.__dict__[name]
except :
raise AttributeError(name)
def __str__(self) :
a = self.__dict__
f = []
for i in a.keys() :
f.append("%s : %s" % (i,str(a[i])))
return '[%s]' % '\n'.join(f)
def str2unicode(s) :
if type(s) == type('') :
try :
ret = unicode(s,local_encoding)
return ret
except :
try :
ret = unicode(s,'utf8')
return ret
except :
return buffer(s)
return s
def unicode2str(s) :
t = type(s)
if t == type(5) :
return long(s)
if t == type(buffer('')) :
return str(s)
if t == type(u"w") :
return s.encode('utf8')
return s
def argConvert(args) :
if args==None :
return None
t = type(args)
if t==type(()) or t==type([]) :
return [str2unicode(i) for i in args]
if t==type({}) :
for i in args.keys() :
args[i] = str2unicode(args[i])
return args
return args
class SQLite3 :
def __init__(self,dbpath,localize=False) :
self.__dict__['threadMap'] = {}
self.__dict__['localize'] = localize
self.__dict__['dbpath'] = dbpath
self.results = None
self.con = None
self.cursor = None
self.sqlcmd = ''
self._connection(dbpath)
def _connection(self,dbpath=None) :
if dbpath!=None :
self.dbpath = dbpath
self.con = sqlite.connect(self.dbpath)
self.cursor = self.con.cursor()
self.result = None
self.sqlcmd = ''
def __setattr__(self, name, value):
id = thread.get_ident()
if not self.__dict__['threadMap'].has_key(id):
self.__dict__['threadMap'][id] = {}
self.threadMap[id][name] = value
def __getattr__(self, name):
id = thread.get_ident()
if not self.__dict__['threadMap'].has_key(id) :
self.__dict__['threadMap'][id] = {}
if self.__dict__['threadMap'][id].has_key(name) :
return self.__dict__['threadMap'][id][name]
raise AttributeError(name)
def tables(self) :
self.SQL("select * from sqlite_master where type='table'")
r = self.FETCH()
ts = []
while r :
ts.append(r.name)
r = self.FETCH()
return ts
def columns(self,tablename) :
self.SQL('select * from %s' % tablename)
self.desc = self.results.description
return self.desc
def FETCHALL(self) :
all=[]
r = True
r = self.cursor.fetchall()
return r
def _eatCursorNext(self) :
if self.cursor==None :
return None
r = 1
while r :
try :
r = self.cursor.next()
except :
return
def SQL(self,cmd,args=(),retry=0) :
if self.con==None :
print("self.con==None",cmd)
self._connection()
return self.SQL(cmd,args,retry)
return -1
self._eatCursorNext()
args = argConvert(args)
self.lastSQL = cmd
self.desc = None
try :
if len(cmd.split(';'))>1 :
self.results = self.cursor.executescript(cmd)
else :
self.results = self.cursor.execute(cmd,args)
return True
except Exception as e:
print('execute:',cmd,'error',e)
self.results = None
raise
return True
def FETCH(self) :
if self.results == None :
return None
if self.desc == None :
try :
self.desc = self.results.description
except Exception as e:
print("fetch error",self.lastSQL,e)
raise
try :
desc = self.desc
d = self.results.next()
data = {}
for i in range(len(d)) :
data[desc[i][0]] = unicode2str(d[i])
return Record(data,self.localize)
except StopIteration :
return None
except Exception as e:
print("error happen",e,self,lastSQL)
raise
def COMMIT(self) :
self.SQL('PRAGMA case_sensitive_like = 1')
try :
self.cursor.fetchall()
except :
pass
def ROLLBACK(self) :
self.SQL('ROLLBACK')
def BEGIN(self) :
# self.SQL('BEGIN')
return
def CLOSE(self) :
self.con = None
self.cursor = None
def getDataBase(name) :
a_name='db_%s' % name
db = public_data.get(a_name,None)
if db==None :
dbpath = public_data.get('dbpath_%s' % name,None)
if dbpath==None :
p = public_data.get('ProgramPath',None)
if p==None:
raise Exception('public_data must has a "ProgramPath" variable')
p1 = os.path.join(p,'var')
mkdir(p1)
dbpath = os.path.join(p1,'%s.db3' % name)
public_data.set('dbpath_%s' % name,dbpath)
db = SQLite3(dbpath)
public_data.set(a_name,db)
try :
con = db.con
except :
dbpath = public_data.get('dbpath_%s' % name,None)
db._connection(dbpath)
return db

View File

@ -0,0 +1,42 @@
#
from appPublic.dictObject import DictObject
class SingletonDecorator:
def __init__(self,klass):
self.klass = klass
self.instance = None
def __call__(self,*args,**kwds):
if self.instance == None:
self.instance = self.klass(*args,**kwds)
return self.instance
@SingletonDecorator
class GlobalEnv(DictObject):
pass
if __name__ == '__main__':
@SingletonDecorator
class Child(object):
def __init__(self,name):
print("clild.init")
self.name = name
def __str__(self):
return 'HAHA' + self.name
def __expr__(self):
print(self.name)
@SingletonDecorator
class Handle(object):
def __init__(self,name):
self.name = name
def __expr__(self):
print(self.name)
c = Child('me')
d = Child('he')
print(str(c),str(d))
e = Handle('hammer')
f = Handle('nail');
print(str(e),str(f))

View File

@ -0,0 +1 @@
from .version import __version__

View File

@ -0,0 +1,106 @@
from natpmp import NATPMP as pmp
from aioupnp.upnp import UPnP
from requests import get
from .background import Background
class AcrossNat(object):
def __init__(self):
self.external_ip = None
self.upnp = None
self.pmp_supported = True
self.upnp_supported = True
self.init_pmp()
async def init_upnp(self):
if self.upnp is None:
self.upnp = await UPnP.discover()
def init_pmp(self):
try:
self.external_ip = pmp.get_public_address()
except pmp.NATPMPUnsupportedError:
self.pmp_supported = False
async def get_external_ip(self):
if self.pmp_supported:
self.external_ip = pmp.get_public_address()
return self.external_ip
if self.upnp_supported:
if self.upnp is None:
await self.init_upnp()
return await self.upnp.get_external_ip()
try:
return get('https://api.ipify.org').text
except:
return get('https://ipapi.co/ip/').text
async def upnp_map_port(self, inner_port,
protocol='TCP', from_port=40003, ip=None, desc=None):
if self.upnp is None:
await self.init_upnp()
protocol = protocol.upper()
if ip is None:
ip = self.upnp.lan_address
all_mappings = [i for i in await self.upnp.get_redirects()]
x = [ i for i in all_mappings if i.internal_port == inner_port \
and i.lan_address == ip \
and i.protocol == protocol ]
if len(x) > 0:
return x[0].external_port
occupied_ports = [ i.external_port for i in all_mappings if i.protocol == protocol ]
external_port = from_port
while external_port < 52333:
if external_port not in occupied_ports:
break
external_port += 1
if external_port < 52333:
await self.upnp.add_port_mapping(external_port,
protocol,
inner_port,
ip,
desc or 'user added')
return external_port
return None
async def is_port_mapped(self, external_port, protocol='TCP'):
if self.upnp is None:
await self.init_upnp()
protocol = protocol.upper()
if self.upnp_supported:
x = await self.upnp.get_specific_port_mapping(external_port,
protocol)
if len(x) == 0:
return True
return False
raise Exception('not implemented')
async def port_unmap(self, external_port, protocol='TCP'):
if self.upnp is None:
await self.init_upnp()
protocol = protocol.upper()
if self.upnp_supported:
await self.upnp.delete_port_mapping(external_port, protocol)
return
raise Exception('not implemented')
def pmp_map_port(self, inner_port, protocol='TCP', from_port=40003):
if protocol.upper() == 'TCP':
x = pmp.map_tcp_port(from_port, inner_port,
lifetime=999999999)
return x.public_port
x = pmp.map_udp_port(from_port, inner_port,
lifetime=999999999)
return x.public_port
async def map_port(self, inner_port, protocol='tcp', from_port=40003, lan_ip=None, desc=None):
if self.pmp_supported:
return self.pmp_map_port(inner_port, protocol=protocol)
return await self.upnp_map_port( inner_port, protocol=protocol, ip=lan_ip, desc=desc)
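A hedged sketch (not part of the original file) of mapping a local port, assuming a NAT-PMP or UPnP capable gateway is reachable on the LAN:

import asyncio

async def demo():
    an = AcrossNat()
    print('external ip:', await an.get_external_ip())
    eport = await an.map_port(8080, protocol='tcp')
    print('mapped external port:', eport)

asyncio.run(demo())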

View File

@ -0,0 +1,118 @@
from traceback import print_exc
from natpmp import NATPMP as pmp
import upnpclient
from requests import get
from .background import Background
class AcrossNat(object):
def __init__(self):
self.external_ip = None
self.upnp = None
self.pmp_supported = True
self.upnp_supported = True
self.init_pmp()
self.init_upnp()
def init_upnp(self):
try:
igd = upnpclient.discover()[0]
s_names = [ n for n in igd.service_map.keys() if 'WAN' in n and 'Conn' in n]
self.upnp = igd.service_map[s_names[0]]
except Exception as e:
print(e)
print_exc()
self.upnp_supported = False
def init_pmp(self):
try:
self.external_ip = pmp.get_public_address()
except pmp.NATPMPUnsupportedError:
self.pmp_supported = False
def get_external_ip(self):
if self.pmp_supported:
try:
self.external_ip = pmp.get_public_address()
return self.external_ip
except:
self.pmp_supported = False
if self.upnp_supported:
try:
x = self.upnp.GetExternalIPAddress()
return x['NewExternalIPAddress']
except:
self.upnp_supported = False
try:
return get('https://api.ipify.org').text
except:
pass
try:
return get('https://ipapi.co/ip/').text
except:
return None
def upnp_check_external_port(self, eport, protocol='TCP'):
try:
self.upnp.GetSpecificPortMappingEntry(NewExternalPort=eport,
NewProtocol=protocol,
NewRemoteHost='')
return True
except:
return False
def upnp_map_port(self, inner_port,
protocol='TCP', from_port=40003,
ip=None, desc='test'):
protocol = protocol.upper()
external_port = from_port
while external_port < 52333:
if self.upnp_check_external_port(external_port,
protocol=protocol):
external_port += 1
continue
try:
self.upnp.AddPortMapping(NewRemoteHost='',
NewExternalPort=external_port,
NewProtocol=protocol,
NewInternalPort=inner_port,
NewInternalClient=ip,
NewEnabled='1',
NewPortMappingDescription=desc,
NewLeaseDuration=0
)
return external_port
except:
return None
return None
def is_port_mapped(self, external_port, protocol='TCP'):
protocol = protocol.upper()
if self.upnp_supported:
return self.upnp_check_external_port(external_port,
protocol=protocol)
raise Exception('not implemented')
def port_unmap(self, external_port, protocol='TCP'):
protocol = protocol.upper()
if self.upnp_supported:
self.upnp.delete_port_mapping(external_port, protocol)
return
raise Exception('not implemented')
def pmp_map_port(self, inner_port, protocol='TCP', from_port=40003):
if protocol.upper() == 'TCP':
x = pmp.map_tcp_port(from_port, inner_port,
lifetime=999999999)
return x.public_port
x = pmp.map_udp_port(from_port, inner_port,
lifetime=999999999)
return x.public_port
def map_port(self, inner_port, protocol='tcp', from_port=40003, lan_ip=None, desc=None):
if self.pmp_supported:
return self.pmp_map_port(inner_port, protocol=protocol)
return self.upnp_map_port( inner_port, protocol=protocol, ip=lan_ip, desc=desc)

View File

@ -0,0 +1,88 @@
import os
import sys
import logging
from functools import partial
from appPublic.timeUtils import timestampstr
levels={
"debug":logging.DEBUG,
"info":logging.INFO,
"warning":logging.WARNING,
"error":logging.error,
"critical":logging.CRITICAL
}
defaultfmt = '%(asctime)s[%(name)s][%(levelname)s][%(filename)s:%(lineno)s]%(message)s'
logfile = -1
logger = None
g_levelname='info'
level = levels.get('info')
def create_logger(name, formater=defaultfmt, levelname=None, file=None):
global logger, logfile, level, g_levelname
if logfile == -1:
logfile = file
if logger:
return logger
logger = logging.getLogger(name)
if levelname:
g_levelname = levelname
else:
levelname = g_levelname
level = levels.get(levelname, levels.get('info'))
logger.setLevel(level)
format = logging.Formatter(formater)
file_handler = None
if logfile is not None:
file_handler = logging.FileHandler(logfile)
else:
file_handler = logging.StreamHandler()
file_handler.setFormatter(format)
logger.addHandler(file_handler)
return logger
def info(*args, **kw):
global logger
if logger is None:
return
logger.info(*args, **kw)
def debug(*args, **kw):
global logger
if logger is None:
return
logger.debug(*args, **kw)
def warning(*args, **kw):
global logger
if logger is None:
return
logger.warning(*args, **kw)
def error(*args, **kw):
global logger
if logger is None:
return
logger.error(*args, **kw)
def critical(*args, **kw):
global logger
if logger is None:
return
logger.critical(*args, **kw)
def exception(*args, **kw):
global logger
if logger is None:
return
logger.exception(*args, **kw)
class AppLogger:
def __init__(self):
self.logger = create_logger(self.__class__.__name__)
self.debug = self.logger.debug
self.info = self.logger.info
self.warning = self.logger.warning
self.error = self.logger.error
self.critical = self.logger.critical
self.exception = self.logger.exception
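# A minimal usage sketch, assuming stderr logging is acceptable: create_logger()
# must run once before the module-level helpers emit anything; pass file= to log
# to a file instead.
if __name__ == '__main__':
    create_logger('demo', levelname='debug')
    debug('debug message')
    info('hello from %s', 'app_logger')
    error('something went wrong')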

View File

@ -0,0 +1,182 @@
# -*- coding:utf8 -*-
import re
from appPublic.dictObject import DictObject
class ConvertException(Exception):
pass
class ArgsConvert(object):
def __init__(self,preString,subfixString,coding='utf-8'):
self.preString = preString
self.subfixString = subfixString
self.coding=coding
sl1 = [ u'\\' + c for c in self.preString ]
sl2 = [ u'\\' + c for c in self.subfixString ]
ps = u''.join(sl1)
ss = u''.join(sl2)
re1 = ps + r"[_a-zA-Z_\u4e00-\u9fa5][a-zA-Z_0-9\u4e00-\u9fa5\,\.\'\{\}\[\]\(\)\-\+\*\/]*" + ss
self.re1 = re1
# print( self.re1,len(self.re1),len(re1),type(self.re1))
def convert(self,obj,namespace,default=''):
""" obj can be a string,[],or dictionary """
if isinstance(obj, str):
return self.convertString(obj,namespace,default)
if isinstance(obj, list):
ret = []
for o in obj:
ret.append(self.convert(o,namespace,default))
return ret
if isinstance(obj, dict):
ret = DictObject()
ret.update({k:self.convert(v,namespace,default) for k,v in obj.items()})
return ret
return obj
def findAllVariables(self,src):
r = []
for ph in re.findall(self.re1,src):
dl = self.getVarName(ph)
r.append(dl)
return r
def getVarName(self,vs):
return vs[len(self.preString):-len(self.subfixString)]
def getVarValue(self,var,namespace,default):
v = default
try:
v = eval(var,namespace)
except Exception as e:
v = namespace.get(var, None)
if v:
return v
if callable(default):
return default(var)
return default
return v
def convertString(self,s,namespace,default):
args = re.findall(self.re1,s)
for arg in args:
dl = s.split(arg)
var = self.getVarName(arg)
v = self.getVarValue(var,namespace,default)
if not isinstance(v, str):
if len(args) == 1 and s.startswith(self.preString) and s.endswith(self.subfixString):
return v
v = str(v)
s = v.join(dl)
return s
class ConditionConvert(object):
def __init__(self,pString = u'$<',sString=u'>$',coding='utf-8'):
self.coding = coding
self.pString = pString
self.sString = sString
pS = ''.join([u'\\'+i for i in self.pString ])
sS = ''.join([u'\\'+i for i in self.sString ])
self.re1 = re.compile(u'(' + pS + '/?' + u'[_a-zA-Z_\u4e00-\u9fa5][a-zA-Z_0-9\u4e00-\u9fa5\,\.\'\{\}\[\]\(\)\-\+\*\/]*' + sS + u')')
self.buffer1 = []
def convert(self,obj,namespace):
""" obj can be a string,[],or dictionary """
if type(obj) == type(u''):
return self.convertUnicode(obj,namespace)
if type(obj) == type(''):
return self.convertString(obj,namespace)
if type(obj) == type([]):
ret = []
for o in obj:
ret.append(self.convert(o,namespace))
return ret
if type(obj) == type({}):
ret = {}
for k in obj.keys():
ret.update({k:self.convert(obj.get(k),namespace)})
return ret
# print( type(obj),"not converted")
return obj
def getVarName(self,vs):
return vs[len(self.pString):-len(self.sString)]
def getVarValue(self,var,namespace):
v = None
try:
v = eval(var,namespace)
except Exception as e:
v = namespace.get(var,None)
return v
def convertList(self,alist,namespace):
ret = []
handleList = alist
while len(handleList) > 0:
i = handleList[0]
handleList = handleList[1:]
if len(self.re1.findall(i)) < 1:
ret.append(i)
else:
name = self.getVarName(i)
if name[0] == u'/':
name = name[1:]
if len(self.buffer1) < 1:
raise ConvertException('name(%s) not match' % name)
if self.buffer1[-1] != name:
raise ConvertException('name(%s) not match(%s)' % (self.buffer1[-1],name))
val = self.getVarValue(name,namespace)
self.buffer1 = self.buffer1[:-1]
if val is not None:
return u''.join(ret),handleList
else:
return u'',handleList
else:
self.buffer1.append(name)
subStr,handleList = self.convertList(handleList,namespace)
ret.append(subStr)
if len(self.buffer1)>0:
raise ConvertException('name(s)(%s) not closed' % ','.join(self.buffer1))
return u''.join(ret),[]
def convertUnicode(self,s,namespace):
ret = []
parts = self.re1.split(s)
s,b = self.convertList(parts,namespace)
return s
def convertString(self,s,namespace):
ret = self.convertUnicode(s,namespace)
return ret
if __name__ == '__main__':
"""
ns = {
'a':12,
'b':'of',
'c':'abc',
'':'is',
'd':{
'a':'doc',
'b':'gg',
}
}
AC = ArgsConvert('%{','}%')
s1 = u"%{a}% is a number,%{d['b']}% is %{是}% undefined,%{c}% is %{d['a']+'(rr)'}% string"
arglist=['this is a descrciption %{b}% selling book',123,'ereg%{a}%,%{c}%']
argdict={
'my':arglist,
'b':s1
}
print(s1,'<=>',AC.convert(s1,ns))
print(argdict,'<=>',AC.convert(argdict,ns))
"""
cc = ConditionConvert()
s2 = u"Begin $<abc>$this is $<ba>$ba = 100 $</ba>$condition out$</abc>$ end"
s3 = """select * from RPT_BONDRATINGS
where 1=1
$<rtype>$and ratingtype=${rtype}$$</rtype>$
$<bond>$and bond_id = ${bond}$$</bond>$"""
print(f("result=",cc.convert(s2,{'ba':23})))
print(f("result = ",cc.convert(s3,{'bond':'943','rtype':'1'})))

View File

@ -0,0 +1,15 @@
import asyncio
import sys
from sqlor.dbpools import DBPools
from appPublic.jsonConfig import getConfig
def run(coro):
p = '.'
if len(sys.argv) > 1:
p = sys.argv[1]
config = getConfig(p, {'workdir':p})
DBPools(config.databases)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(coro())
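# A minimal usage sketch, assuming a config directory (optionally given as the
# first CLI argument) with a "databases" section; main() here is only a
# placeholder coroutine.
if __name__ == '__main__':
    async def main():
        print('config loaded and DB pools initialized')
    run(main)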

View File

@ -0,0 +1,157 @@
import time
from ffpyplayer.player import MediaPlayer
from ffpyplayer.tools import set_log_callback, get_log_callback, formats_in
class AudioPlayer:
def __init__(self, source=None, autoplay=False,
loop=False,
on_stop=None):
self.volume = 1
self.state = None
self.source = None
self.quitted = False
self.loop = loop
self.autoplay = autoplay
self.player = None
self.on_stop = on_stop
self.cmds = []
if source:
self.set_source(source)
def set_source(self, source):
self.source = source
self.load()
def player_callback(self, selector, value):
print(f'player_callback(): {selector}, {value}')
if self.player is None:
print(f'player_callback():self.player is None, {selector}, {value}')
return
if selector == 'quit':
def close(*args):
self.quitted = True
self.unload()
close()
elif selector == 'eof':
self._do_eos()
def load(self):
if self.source is None:
return
source = self.source
self.unload()
ff_opts = {'vn':True, 'sn':True}
self.player = MediaPlayer(source,
callback=self.player_callback,
loglevel='info',
ff_opts=ff_opts)
player = self.player
self.state = 'pause'
s = time.perf_counter()
while (player.get_metadata()['duration'] is None and
not self.quitted and
time.perf_counter() - s < 10.):
time.sleep(0.005)
player.toggle_pause()
time.sleep(0.04)
player.set_volume(self.volume)
if self.autoplay:
self.play()
def unload(self):
self.player = None
self.state = 'stop'
self.quitted = False
def __del__(self):
self.unload()
def play(self):
if self.player is None:
self.load()
if self.player is None:
print('play():self.player is None')
return
if self.state == 'play':
return
self.player.toggle_pause()
self.state = 'play'
def pause(self):
if self.player is None:
self.load()
if self.player is None:
print('pause():self.player is None')
return
if self.state == 'pause':
return
self.player.toggle_pause()
self.state = 'pause'
def is_busy(self):
if self.player and self.state == 'play':
return True
return False
def stop(self):
if self.player is None:
return
if self.state == 'play':
self.player.toggle_pause()
self.state = 'stop'
self.seek(0)
if self.on_stop:
self.on_stop()
def seek(self, pos):
if self.player is None:
print('seek():self.player is None')
return
self.player.seek(pos, relative=False)
def get_pos(self):
if self.player is None:
return 0
return self.player.get_pts()
def _do_eos(self, *args):
print('_do_eos() called ...')
if self.loop:
self.seek(0.)
else:
print('go stop')
self.stop()
if __name__ == '__main__':
import sys
p = AudioPlayer(autoplay=True, loop=True)
p.source = sys.argv[1]
p.load()
p.play()
while True:
print("""
play: play it,
stop: stop play
pause:pause it
quit: exit
""")
x = input()
if x == 'quit':
p.quitted = True
p.stop()
break
if x == 'play':
p.play()
continue
if x == 'stop':
p.stop()
continue
if x == 'pause':
p.pause()
continue

View File

@ -0,0 +1,11 @@
from threading import Thread
class Background(Thread):
def __init__(self,func, *args,**kw):
Thread.__init__(self)
self.__callee = func
self.__args = args
self.__kw = kw
def run(self):
return self.__callee(*self.__args, **self.__kw)
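# A minimal usage sketch: Background just runs the given callable in a plain
# thread; join() waits for it to finish.
if __name__ == '__main__':
    import time
    def work(n, tag='job'):
        time.sleep(0.1)
        print(tag, 'finished', n)
    t = Background(work, 3, tag='demo')
    t.start()
    t.join()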

View File

@ -0,0 +1,247 @@
# -*- coding = utf-8 -*-
ecc = {
"Afghanistan":"阿富汗",
"Aland Islands":"奥兰群岛",
"Albania":"阿尔巴尼亚",
"Algeria":"阿尔及利亚",
"American Samoa":"美属萨摩亚",
"Andorra":"安道尔",
"Angola":"安哥拉",
"Anguilla":"安圭拉",
"Antigua and Barbuda":"安提瓜和巴布达",
"Argentina":"阿根廷",
"Armenia":"亚美尼亚",
"Aruba":"阿鲁巴",
"Australia":"澳大利亚",
"Austria":"奥地利",
"Azerbaijan":"阿塞拜疆",
"Bangladesh":"孟加拉",
"Bahrain":"巴林",
"Bahamas":"巴哈马",
"Barbados":"巴巴多斯",
"Belarus":"白俄罗斯",
"Belgium":"比利时",
"Belize":"伯利兹",
"Benin":"贝宁",
"Bermuda":"百慕大",
"Bhutan":"不丹",
"Bolivia":"玻利维亚",
"Bosnia and Herzegovina":"波斯尼亚和黑塞哥维那",
"Botswana":"博茨瓦纳",
"Bouvet Island":"布维岛",
"Brazil":"巴西",
"Brunei":"文莱",
"Bulgaria":"保加利亚",
"Burkina Faso":"布基纳法索",
"Burundi":"布隆迪",
"Cambodia":"柬埔寨",
"Cameroon":"喀麦隆",
"Canada":"加拿大",
"Cape Verde":"佛得角",
"Central African Republic":"中非",
"Chad":"乍得",
"Chile":"智利",
"Christmas Islands":"圣诞岛",
"Cocos (keeling) Islands":"科科斯(基林)群岛",
"Colombia":"哥伦比亚",
"Comoros":"科摩罗",
"Congo (Congo-Kinshasa)":"刚果(金)",
"Congo":"刚果",
"Cook Islands":"库克群岛",
"Costa Rica":"哥斯达黎加",
"Cote DIvoire":"科特迪瓦",
"China":"中国",
"Croatia":"克罗地亚",
"Cuba":"古巴",
"Czech":"捷克",
"Cyprus":"塞浦路斯",
"Denmark":"丹麦",
"Djibouti":"吉布提",
"Dominica":"多米尼加",
"East Timor":"东帝汶",
"Ecuador":"厄瓜多尔",
"Egypt":"埃及",
"Equatorial Guinea":"赤道几内亚",
"Eritrea":"厄立特里亚",
"Estonia":"爱沙尼亚",
"Ethiopia":"埃塞俄比亚",
"Faroe Islands":"法罗群岛",
"Fiji":"斐济",
"Finland":"Finland",
"France":"法国",
"Franch Metropolitan":"法国大都会",
"Franch Guiana":"法属圭亚那",
"French Polynesia":"法属波利尼西亚",
"Gabon":"加蓬",
"Gambia":"冈比亚",
"Georgia":"格鲁吉亚",
"Germany":"德国",
"Ghana":"加纳",
"Gibraltar":"直布罗陀",
"Greece":"希腊",
"Grenada":"格林纳达",
"Guadeloupe":"瓜德罗普岛",
"Guam":"关岛",
"Guatemala":"危地马拉",
"Guernsey":"根西岛",
"Guinea-Bissau":"几内亚比绍",
"Guinea":"几内亚",
"Guyana":"圭亚那",
"Hong Kong":"香港 (中国)",
"Haiti":"海地",
"Honduras":"洪都拉斯",
"Hungary":"匈牙利",
"Iceland":"冰岛",
"India":"印度",
"Indonesia":"印度尼西亚",
"Iran":"伊朗",
"Iraq":"伊拉克",
"Ireland":"爱尔兰",
"Isle of Man":"马恩岛",
"Israel":"以色列",
"Italy":"意大利",
"Jamaica":"牙买加",
"Japan":"日本",
"Jersey":"泽西岛",
"Jordan":"约旦",
"Kazakhstan":"哈萨克斯坦",
"Kenya":"肯尼亚",
"Kiribati":"基里巴斯",
"Korea (South)":"韩国",
"Korea (North)":"朝鲜",
"Kuwait":"科威特",
"Kyrgyzstan":"吉尔吉斯斯坦",
"Laos":"老挝",
"Latvia":"拉脱维亚",
"Lebanon":"黎巴嫩",
"Lesotho":"莱索托",
"Liberia":"利比里亚",
"Libya":"利比亚",
"Liechtenstein":"列支敦士登",
"Lithuania":"立陶宛",
"Luxembourg":"卢森堡",
"Macau":"澳门(中国)",
"Macedonia":"马其顿",
"Malawi":"马拉维",
"Malaysia":"马来西亚",
"Madagascar":"马达加斯加",
"Maldives":"马尔代夫",
"Mali":"马里",
"Malta":"马耳他",
"Marshall Islands":"马绍尔群岛",
"Martinique":"马提尼克岛",
"Mauritania":"毛里塔尼亚",
"Mauritius":"毛里求斯",
"Mayotte":"马约特",
"Mexico":"墨西哥",
"Micronesia":"密克罗尼西亚",
"Moldova":"摩尔多瓦",
"Monaco":"摩纳哥",
"Mongolia":"蒙古",
"Montenegro":"黑山",
"Montserrat":"蒙特塞拉特",
"Morocco":"摩洛哥",
"Mozambique":"莫桑比克",
"Myanmar":"缅甸",
"Namibia":"纳米比亚",
"Nauru":"瑙鲁",
"Nepal":"尼泊尔",
"Netherlands":"荷兰",
"New Caledonia":"新喀里多尼亚",
"New Zealand":"新西兰",
"Nicaragua":"尼加拉瓜",
"Niger":"尼日尔",
"Nigeria":"尼日利亚",
"Niue":"纽埃",
"Norfolk Island":"诺福克岛",
"Norway":"挪威",
"Oman":"阿曼",
"Pakistan":"巴基斯坦",
"Palau":"帕劳",
"Palestine":"巴勒斯坦",
"Panama":"巴拿马",
"Papua New Guinea":"巴布亚新几内亚",
"Paraguay":"巴拉圭",
"Peru":"秘鲁",
"Philippines":"菲律宾",
"Pitcairn Islands":"皮特凯恩群岛",
"Poland":"波兰",
"Portugal":"葡萄牙",
"Puerto Rico":"波多黎各",
"Qatar":"卡塔尔",
"Reunion":"留尼汪岛",
"Romania":"罗马尼亚",
"Rwanda":"卢旺达",
"Russian Federation":"俄罗斯联邦",
"Saint Helena":"圣赫勒拿",
"Saint Kitts-Nevis":"圣基茨和尼维斯",
"Saint Lucia":"圣卢西亚",
"Saint Vincent and the Grenadines":"圣文森特和格林纳丁斯",
"El Salvador":"萨尔瓦多",
"Samoa":"萨摩亚",
"San Marino":"圣马力诺",
"Sao Tome and Principe":"圣多美和普林西比",
"Saudi Arabia":"沙特阿拉伯",
"Senegal":"塞内加尔",
"Seychelles":"塞舌尔",
"Sierra Leone":"塞拉利昂",
"Singapore":"新加坡",
"Serbia":"塞尔维亚",
"Slovakia":"斯洛伐克",
"Slovenia":"斯洛文尼亚",
"Solomon Islands":"所罗门群岛",
"Somalia":"索马里",
"South Africa":"南非",
"Spain":"西班牙",
"Sri Lanka":"斯里兰卡",
"Sudan":"苏丹",
"Suriname":"苏里南",
"Swaziland":"斯威士兰",
"Sweden":"瑞典",
"Switzerland":"瑞士",
"Syria":"叙利亚",
"Tajikistan":"塔吉克斯坦",
"Tanzania":"坦桑尼亚",
"Taiwan":"台湾 (中国)",
"Thailand":"泰国",
"Trinidad and Tobago":"特立尼达和多巴哥",
"Timor-Leste":"东帝汶",
"Togo":"多哥",
"Tokelau":"托克劳",
"Tonga":"汤加",
"Tunisia":"突尼斯",
"Turkey":"土耳其",
"Turkmenistan":"土库曼斯坦",
"Tuvalu":"图瓦卢",
"Uganda":"乌干达",
"Ukraine":"乌克兰",
"United Arab Emirates":"阿拉伯联合酋长国",
"United Kingdom":"英国",
"United States":"美国",
"Uruguay":"乌拉圭",
"Uzbekistan":"乌兹别克斯坦",
"Vanuatu":"瓦努阿图",
"Vatican City":"梵蒂冈",
"Venezuela":"委内瑞拉",
"Vietnam":"越南",
"Wallis and Futuna":"瓦利斯群岛和富图纳群岛",
"Western Sahara":"西撒哈拉",
"Yemen":"也门",
"Yugoslavia":"南斯拉夫",
"Zambia":"赞比亚",
"Zimbabwe":"津巴布韦"
}
cec = {v:k for k,v in ecc.items()}
def get_en_country_name(country):
x = cec.get(country)
if x:
return x
return country
def get_cn_country_name(country):
x = ecc.get(country)
if x:
return x
return country

View File

@ -0,0 +1,54 @@
import codecs
import csv
class Reader:
def __init__(self,f,delimiter):
self.f = f
self.delimiter = delimiter
self.line = 0
def __iter__(self):
return self
def next(self):
l = self.f.readline()
if l == '':
raise StopIteration()
while l and l[-1] in [ '\n','\r']:
l = l[:-1]
r = [ i if i != '' else None for i in l.split(self.delimiter) ]
self.line = self.line + 1
return r
__next__ = next
class CSVData:
def __init__(self,filename,coding='utf8',delimiter=','):
self.filename = filename
self.coding = coding
self.f = codecs.open(filename,'rb',self.coding)
self.reader = Reader(self.f,delimiter)
self.fields = self.reader.next()
def __del__(self):
self.f.close()
def __iter__(self):
return self
def next(self):
try:
r = self.reader.next()
if len(r) != len(self.fields):
print("length diff",len(r),len(self.fields),"at line %d" % self.reader.line)
raise StopIteration()
d = {}
[d.update({self.fields[i]:r[i]}) for i in range(len(self.fields))]
return d
except:
raise StopIteration()
__next__ = next
if __name__ == '__main__':
import sys
cd = CSVData(sys.argv[1])
for r in cd:
print(r)

View File

@ -0,0 +1,170 @@
try:
import ujson as json
except:
import json
from appPublic.rsawrap import RSA
from appPublic.rc4 import RC4
from appPublic.uniqueID import getID
# import brotli
import zlib
import struct
DATA_TYPE_BYTES = 1
DATA_TYPE_STR = 2
DATA_TYPE_JSON = 3
class DataEncoder:
"""
security data packing - unpacking object
packs data:
encode data with random key's rc4 crypt algorithm,
encode rc4's key with receiver's public key
sign data with sender's private key
packs data using struct in follows order
0: data format(18 bytes)
1. datatype(c)
2. encoded data(length=len(d))
3. encoded_rc4key(length=len(k))
4. sign(signs from (0+1+2+3) data) (length=len(s))
5. compress data and return the compressed data
return packed data
unpacks data:
0. decompress data
1. get 18 bytes fmt data, erase tails b'\x00'
2. using fmt to unpack data[18:]
3. verify sign
4. decode k
5. decode data using decoded k with rc4 algorithm
6. convert data type to origin data type
7. return converted data
"""
def __init__(self, myid, func_get_peer_pubkey, private_file=None):
self.myid = myid
self.func_get_peer_pubkey = func_get_peer_pubkey
self.public_keys = {}
self.private_file = private_file
self.rsa = RSA()
self.rc4 = RC4()
if self.private_file:
self.private_key = self.rsa.read_privatekey(self.private_file)
else:
self.private_key = self.rsa.create_privatekey()
self.public_key = self.rsa.create_publickey(self.private_key)
def identify_datatype(self, data):
if isinstance(data, bytes):
return DATA_TYPE_BYTES, data
if isinstance(data, str):
return DATA_TYPE_STR, data.encode('utf-8')
data = json.dumps(data).encode('utf-8')
return DATA_TYPE_JSON, data
def my_text_publickey(self):
return self.rsa.publickeyText(self.public_key)
def exist_peer_publickeys(self, peer_id):
return True if self.public_keys.get(peer_id, False) else False
def set_peer_pubkey(self, peer_id, pubkey):
self.public_keys[peer_id] = pubkey
def get_peer_text_pubkey(self, peer_id):
pk = self.get_peer_pubkey(peer_id)
txtpk = self.rsa.publickeyText(pk)
return txtpk
def set_peer_text_pubkey(self, peer_id, text_pubkey):
pk = self.rsa.publickeyFromText(text_pubkey)
self.set_peer_pubkey(peer_id, pk)
def get_peer_pubkey(self, peer_id):
pubkey = self.public_keys.get(peer_id)
if not pubkey:
try:
self.func_get_peer_pubkey(peer_id)
except:
raise Exception('Can not get peer public key(%s)' % peer_id)
pubkey = self.public_keys.get(peer_id)
return pubkey
def pack(self, peer_id, data, uncrypt=False):
t, d = self.identify_datatype(data)
if uncrypt:
return zlib.compress(b'\x00' * 18 + \
bytes(chr(t),'utf-8') + \
d)
pk = self.get_peer_pubkey(peer_id)
d, k = self.encode_data(pk, d)
f = 'b%05ds%03ds' % (len(d), len(k))
f1 = f + '256s'
pd1 = struct.pack('18s', f1.encode('utf-8'))
pd2 = struct.pack(f, t, d, k)
pd = pd1 + pd2
s = self.sign_data(pd)
pd += s
self.pack_d = [t,d,k,s]
origin_len = len(pd)
pd = zlib.compress(pd)
return pd
def unpack(self, peer_id, data):
data = zlib.decompress(data)
if data[:18] == b'\x00' * 18:
data = data[18:]
t = data[0]
d = data[1:]
if t == DATA_TYPE_BYTES:
return d
d = d.decode('utf-8')
if t == DATA_TYPE_STR:
return d
return json.loads(d)
org_data = data
pk = self.get_peer_pubkey(peer_id)
f = data[:18]
while f[-1] == 0 and len(f) > 0:
f = f[:-1]
f = f.decode('utf-8')
data = data[18:]
t, d, k, s = struct.unpack(f, data)
self.unpack_d = [t,d,k,s]
data1 = org_data[:org_data.index(s)]
if not self.verify_sign(data1, s, pk):
raise Exception('data sign verify failed')
data = self.decode_data(d, k)
if t == DATA_TYPE_BYTES:
return data
if t == DATA_TYPE_STR:
return data.decode('utf-8')
return json.loads(data)
def encode_data(self, peer_pubkey, data):
key = getID()
if isinstance(key, str):
key = key.encode('utf-8')
ctext = self.rc4.encode_bytes(data, key)
encoded_key = self.rsa.encode_bytes(peer_pubkey, key)
return ctext, encoded_key
def sign_data(self, data):
return self.rsa.sign_bdata(self.private_key, data)
def decode_data(self, data, encoded_key):
key = self.rsa.decode_bytes(self.private_key, encoded_key)
return self.rc4.decode_bytes(data, key)
def verify_sign(self, data, sign, peer_pubkey):
return self.rsa.check_sign_bdata(peer_pubkey, data, sign)
def quotedstr(s):
def conv(c):
if c == '"':
return '\\"'
if c == '\n':
return '\\n'
return c
x = [ conv(c) for c in s ]
return ''.join(x)
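# A minimal usage sketch, assuming the RSA/RC4 wrappers behave as this module
# expects. The two peers exchange text public keys by hand instead of relying on
# func_get_peer_pubkey, and the peer ids are placeholders.
if __name__ == '__main__':
    a = DataEncoder('peer-a', lambda peer_id: None)
    b = DataEncoder('peer-b', lambda peer_id: None)
    a.set_peer_text_pubkey('peer-b', b.my_text_publickey())
    b.set_peer_text_pubkey('peer-a', a.my_text_publickey())
    packed = a.pack('peer-b', {'hello': 'world'})
    print(b.unpack('peer-a', packed))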

View File

@ -0,0 +1,50 @@
#dataMapping
from appPublic.dictObject import DictObject
def keyMapping(dic,mappingtab,keepmiss=True):
"""
keyMapping mappingtab
{
"a1":"b1",
"a2":'b2",
...
}
"an" is key in dic
"bn" is key in result dictionary
"""
ret = {}
keys = [ k for k in dic.keys()]
if not keepmiss:
keys = [ k for k in dic.keys() if k in mappingtab.keys() ]
[ ret.update({mappingtab.get(k,k):dic[k]}) for k in keys ]
return ret
def valueMapping(dic,mappingtab):
"""
mappingtab format:
{
"field1":{
"a":"1",
"b":"2",
"__default__":"5"
},
"field2":{
"a":"3",
"b":"4"
}
}
field1,field2 is in dic.keys()
"""
ret = {}
for k in dic.keys():
mt = mappingtab.get(k,None)
if mt is None:
ret[k] = dic[k]
else:
dv = mt.get('__default__',dic[k])
v = mt.get(dic[k],dv)
ret[k] = v
return DictObject(**ret)
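# A minimal usage sketch; the field names and mapping tables are illustrative only.
if __name__ == '__main__':
    rec = {'a1': 'tom', 'a2': 'x'}
    print(keyMapping(rec, {'a1': 'name', 'a2': 'flag'}))
    print(valueMapping(rec, {'a2': {'x': 'enabled', '__default__': 'disabled'}}))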

View File

@ -0,0 +1,38 @@
def arrayExtend(s,addon):
ret = []
s_cnt = len(s)
a_cnt = len(addon)
for i,v in enumerate(addon):
if i < s_cnt:
if type(v)!=type(s[i]):
ret.append(v)
continue
if isinstance(v,dict):
x = dictExtend(v,s[i])
ret.append(x)
continue
ret.append(v)
if s_cnt < a_cnt:
ret += s[i:]
return ret
def dictExtend(s,addon):
ret = {}
ret.update(s)
skeys = ret.keys()
for k,v in addon.items():
if k not in skeys:
ret[k] = v
continue
if type(v)!=type(ret[k]):
ret[k] = v
continue
if type(v)==type({}):
ret[k] = dictExtend(ret[k],v)
continue
if type(v)==type([]):
ret[k] = arrayExtend(ret[k],v)
continue
ret[k] = v
return ret
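# A minimal usage sketch: nested dicts are merged recursively, while scalars and
# mismatched types are overwritten by the addon values.
if __name__ == '__main__':
    base = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
    addon = {'db': {'port': 5433}, 'debug': True}
    print(dictExtend(base, addon))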

View File

@ -0,0 +1,161 @@
import json
from json import JSONEncoder
from inspect import ismethod, isfunction, isbuiltin, isabstract
def multiDict2Dict(md):
ns = {}
for k,v in md.items():
ov = ns.get(k,None)
if ov is None:
ns[k] = v
elif type(ov) == type([]):
ov.append(v)
ns[k] = ov
else:
ns[k] = [ov,v]
return ns
class DictObject:
def __init__(self,**kw):
self.org_keys__ = []
self.org_keys__ = [ k for k in self.__dict__.keys()]
for k,v in kw.items():
self.update({k:self.__DOitem(v)})
def __getattr__(self,name):
if name in self._addon().keys():
return self.__getitem__(name)
return None
def update(self,kw):
self.__dict__.update(kw)
def _addon(self):
ks = [ k for k in self.__dict__.keys() if k not in self.org_keys__]
return {k:v for k,v in self.__dict__.items() if k in ks}
def clear(self):
for k in self._addon().keys():
self.__dict__.pop(k)
def get(self,name,default=None):
return self._addon().get(name,default)
def pop(self,k,default=None):
return self.__dict__.pop(k,default)
def popitem(self):
return self.__dict__.popitem()
def items(self):
return self._addon().items()
def keys(self):
return self._addon().keys()
def values(self):
return self._addon().values()
def __delitem__(self,key):
self.pop(key)
def __getitem__(self,name):
return self._addon().get(name)
def __setitem__(self,name,value):
self.__dict__[name] = value
def __str__(self):
return str(self._addon())
def __repr__(self):
return repr(self._addon())
def copy(self):
return {k:v for k,v in self._addon().items()}
def to_dict(self):
d = self._addon()
newd = self.dict_to_dict(d)
return newd
def dict_to_dict(self,dic):
d = {}
for k,v in dic.items():
if isinstance(v,DictObject):
d[k] = v.to_dict()
elif isinstance(v,dict):
d[k] = self.dict_to_dict(v)
elif isinstance(v,list):
d[k] = self.array_to_dict(v)
elif k == '__builtins__':
pass
elif isbuiltin(v) or isfunction(v) or ismethod(v) or isabstract(v):
pass
else:
d[k] = v
return d
def array_to_dict(self,v):
r = []
for i in v:
if isinstance(i,list):
r.append(self.array_to_dict(i))
elif isinstance(i,dict):
r.append(self.dict_to_dict(i))
elif isinstance(i,DictObject):
r.append(i.to_dict())
elif isbuiltin(i) or isfunction(i) or ismethod(i) or isabstract(i):
pass
else:
r.append(i)
return r
@classmethod
def isMe(self,name):
return name == 'DictObject'
def __DOArray(self,a):
b = [ self.__DOitem(i) for i in a ]
return b
def __DOitem(self, i):
if isinstance(i,DictObject):
return i
if isinstance(i,dict):
i = {k:v for k,v in i.items() if isinstance(k,str)}
try:
d = DictObject(**i)
return d
except Exception as e:
print("****************",i,"*******dictObject.py")
raise e
if type(i) == type([]) or type(i) == type(()) :
return self.__DOArray(i)
return i
class DictObjectEncoder(JSONEncoder):
def default(self, o):
return o._addon()
def dictObjectFactory(_klassName__,**kwargs):
def findSubclass(_klassName__,klass):
for k in klass.__subclasses__():
if k.isMe(_klassName__):
return k
k1 = findSubclass(_klassName__,k)
if k1 is not None:
return k1
return None
try:
if _klassName__=='DictObject':
return DictObject(**kwargs)
k = findSubclass(_klassName__,DictObject)
if k is None:
return DictObject(**kwargs)
return k(**kwargs)
except Exception as e:
print("dictObjectFactory()",e,_klassName__)
raise e

View File

@ -0,0 +1,130 @@
import json
from json import JSONEncoder
from inspect import ismethod, isfunction, isbuiltin, isabstract
def multiDict2Dict(md):
ns = {}
for k,v in md.items():
ov = ns.get(k,None)
if ov is None:
ns[k] = v
elif type(ov) == type([]):
ov.append(v)
ns[k] = ov
else:
ns[k] = [ov,v]
return ns
class DictObjectEncoder(JSONEncoder):
def default(self, o):
return dict(o)
class DictObject(dict):
def __getattr__(self, attr):
"""
Access values in the dict via attribute (dot) syntax
"""
try:
v = self.__DOitem(self[attr])
self[attr] = v
return v
except KeyError:
return None
def has(self, key):
try:
v = super().__getitem__(key)
return True
except KeyError:
return False
def get_data_by_keys(self, keys):
try:
ks = keys.split('.', 1)
if '[' not in ks[0]:
d = getattr(self, ks[0])
if len(ks) == 1:
return d
if isinstance(d, DictObject):
return d.get_data_by_keys(ks[1])
return None
ks1 = ks[0].split('[', 1)
k = ks1[0]
idx = int(ks1[1].split(']',1)[0])
d = getattr(self, k)[idx]
if len(ks) == 1:
return d
if isinstance(d, DictObject):
return d.get_data_by_keys(ks[1])
return None
except:
return None
def __getitem__(self, key):
try:
v = self.__DOitem(super().__getitem__(key))
self[key] = v
return v
except KeyError:
return None
def __setattr__(self, attr, value):
"""
Set values in the dict via attribute (dot) syntax
"""
self[attr] = value
def get(self, k, defv=None):
if self.has(k):
return self[k]
else:
return defv
def copy(self):
return self.__DOitem(super().copy())
@classmethod
def isMe(self,name):
return name == 'DictObject'
def to_dict(self):
return self
def __DOArray(self,a):
b = [ self.__DOitem(i) for i in a ]
return b
def __DOitem(self, i):
if isinstance(i,DictObject):
return i
if isinstance(i,dict):
i = {k:v for k,v in i.items() if isinstance(k,str)}
try:
d = DictObject(**i)
return d
except Exception as e:
raise e
if type(i) == type([]) or type(i) == type(()) :
return self.__DOArray(i)
return i
def dictObjectFactory(_klassName__,**kwargs):
def findSubclass(_klassName__,klass):
for k in klass.__subclasses__():
if k.isMe(_klassName__):
return k
k1 = findSubclass(_klassName__,k)
if k1 is not None:
return k1
return None
try:
if _klassName__=='DictObject':
return DictObject(**kwargs)
k = findSubclass(_klassName__,DictObject)
if k is None:
return DictObject(**kwargs)
return k(**kwargs)
except Exception as e:
print("dictObjectFactory()",e,_klassName__)
raise e
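# A minimal usage sketch: attribute access and get_data_by_keys() both yield None
# for missing keys, and nested dicts/lists are wrapped on access.
if __name__ == '__main__':
    d = DictObject(user={'name': 'tom', 'roles': [{'id': 1}, {'id': 2}]})
    print(d.user.name)
    print(d.get_data_by_keys('user.roles[1].id'))
    print(d.missing)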

View File

@ -0,0 +1,71 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from win32com.client import Dispatch
import win32com.client
class EasyExcel:
"""A utility to make it easier to get at Excel. Remembering
to save the data is your problem, as is error handling.
Operates on one workbook at a time."""
def __init__(self, filename=None):
self.xlApp = win32com.client.Dispatch('Excel.Application')
if filename:
self.filename = filename
self.xlBook = self.xlApp.Workbooks.Open(filename)
#self.xlBook.Visible = False
else:
self.xlBook = self.xlApp.Workbooks.Add()
self.filename = ''
def save(self, newfilename=None):
if newfilename:
self.filename = newfilename
self.xlBook.SaveAs(newfilename)
else:
self.xlBook.Save()
def setSheetName(self,sheet,name):
self.xlBook.Worksheets(sheet).Name = name
def newSheet(self,sheetName):
pass
def close(self):
self.xlBook.Close(SaveChanges=0)
self.xlApp.Quit()
del self.xlApp
def getCell(self, sheet, row, col):
"Get value of one cell"
sht = self.xlBook.Worksheets(sheet)
return sht.Cells(row, col).Value
def setCell(self, sheet, row, col, value):
"set value of one cell"
sht = self.xlBook.Worksheets(sheet)
sht.Cells(row, col).Value = value
def getRange(self, sheet, row1, col1, row2, col2):
"return a 2d array (i.e. tuple of tuples)"
sht = self.xlBook.Worksheets(sheet)
return sht.Range(sht.Cells(row1, col1), sht.Cells(row2, col2)).Value
def addPicture(self, sheet, pictureName, Left, Top, Width, Height):
"Insert a picture in sheet"
sht = self.xlBook.Worksheets(sheet)
sht.Shapes.AddPicture(pictureName, 1, 1, Left, Top, Width, Height)
def cpSheet(self, before):
"copy sheet"
shts = self.xlBook.Worksheets
shts(1).Copy(None,shts(1))
if __name__ == "__main__":
PNFILE = r'c:\screenshot.bmp'
xls = EasyExcel(r'D:\test.xls')
xls.addPicture('Sheet1', PNFILE, 20,20,1000,1000)
xls.cpSheet('Sheet1')
xls.save()
xls.close()

View File

@ -0,0 +1,281 @@
import xlrd
import os
import sys
import datetime
from appPublic.strUtils import *
TCS={
'int':int,
'float':float,
'str':str,
}
def isEmptyCell(cell):
return cell.ctype == xlrd.XL_CELL_EMPTY
def isCommentValue(v):
if type(v)==type('') and v[0] == '#':
return True
return False
def purekey(k):
return k.split(':')[0]
def castedValue(v,k):
ki = k.split(':')
if len(ki)<2 or v is None:
return v
ki = ki[1:]
if 'list' in ki:
if type(v) == type(''):
v = v.split(',')
elif type(v) != type([]):
v = [v]
for k,tc in TCS.items():
if k in ki:
if type(v) == type([]):
return [ tc(i) for i in v ]
else:
return tc(v)
return v
class ExcelData:
_indictors = {
':__dict__':'ff',
':__list__':'ff',
':__include__':'ff',
}
def __init__(self,xlsfile,encoding='UTF8',startrow=0,startcol=0):
self._book = xlrd.open_workbook(xlsfile)
self.encoding = encoding
self._filename = xlsfile
self.startrow=0
self.startcol=0
self._dataset = self.dataload()
def __del__(self):
del self._book
del self._dataset
def cellvalue(self,sheet,x,y):
if sheet.cell_type(x,y)==xlrd.XL_CELL_EMPTY:
return None
if sheet.cell_type(x,y)==xlrd.XL_CELL_DATE:
y,m,d,hh,mm,ss = xlrd.xldate_as_tuple(sheet.cell_value(x,y),self._book.datemode)
return datetime.date(y,m,d)
s = sheet.cell_value(x,y)
return self.trimedValue(s)
def isCommentCell(self,cell):
if isEmptyCell(cell):
return False
v = self.trimedValue(cell.value)
return isCommentValue(v)
def dateMode(self):
return self._book.datemode
def trimedValue(self,v):
if type(v) == type(u' '):
v = v.encode(self.encoding)
if type(v) == type(''):
v = lrtrim(v)
return v
def dataload(self):
dat = {}
for name in self._book.sheet_names():
sheet = self._book.sheet_by_name(name)
#name = name.encode(self.encoding)
dat[self.trimedValue(name)] = self.loadSheetData(sheet)
return dat
def findDataRange(self,sheet,pos,maxr):
x,y = pos
j = y + 1
while j < sheet.ncols:
if isEmptyCell(sheet.cell(x,j)) or self.isCommentCell(sheet.cell(x,y)):
maxy = j
break
j += 1
i = x + 1
maxx = maxr
while i < maxr:
if not isEmptyCell(sheet.cell(i,y)):
maxx = i
break
i += 1
return maxx
def loadSheetData(self,sheet):
return self.loadSheetDataRange(sheet,(self.startrow,self.startcol),sheet.nrows)
def include(self,filename,id):
try:
sub = ExcelData(filename,self.encoding)
except Exception as e:
print(e,filename)
return None
if id is None:
return sub.dict()
env = {'data':sub.dict()}
try:
exec("""resutl__ = data%s""" % id,globals(),env)
except Exception as e:
print(e,id)
return None
return env['result__']
def loadSingleData(self,sheet,pos):
x,y = pos
if sheet.ncols==y:
v = self.cellvalue(sheet,x,y)
if isCommentValue(v):
return None
return v
ret = []
while y < sheet.ncols:
v = self.cellvalue(sheet,x,y)
if v is None:
break
if isCommentValue(v):
break
ret.append(v)
y += 1
if len(ret) < 1:
return None
if len(ret)<2:
return ret[0]
if ret[0] == '__include__':
if len(ret)<2:
print("include mode error: __include__ filename id")
return None
id = None
if len(ret)>=3:
id = ret[2]
return self.include(ret[1],id)
return ret
def loadDictData(self,sheet,pos,maxr):
ret = {}
x,y = pos
while x < maxr:
mr = self.findDataRange(sheet,(x,y),maxr)
#print "loadDictData:debug:",x,y,maxr,mr
k = self.cellvalue(sheet,x,y)
if isCommentValue(k):
x = x + 1
continue
if k is not None:
if 'records' in k.split(':'):
v = self.loadRecords(sheet,(x,y+1),maxr)
else:
v = self.loadSheetDataRange(sheet,(x,y+1),mr)
ret[purekey(k)] = castedValue(v,k)
x = mr
return ret
def loadSheetDataRange(self,sheet,pos,maxr):
x,y = pos
#print "debug1:",pos,maxr
if maxr - x < 1 :
#print "debug1-1:",pos,maxr
return None
if isEmptyCell(sheet.cell(x,y)):
#print "debug1-2:",pos,maxr
return None
cv = self.cellvalue(sheet,x,y)
#print cv
if isCommentValue(cv):
pos = (x+1,y)
return self.loadSheetDataRange(sheet,pos,maxr)
if cv == '__include__':
return self.include(self.cellvalue(sheet,x,y+1),self.cellvalue(sheet,x,y+2))
if cv == '__dict__':
#print "cv==__dict__"
i = x + 1
vs = []
while i < maxr:
v = self.cellvalue(sheet,i,y)
if v == '__dict__':
vs.append(self.loadDictData(sheet,(x+1,y),i))
x = i
i += 1
vs.append(self.loadDictData(sheet,(x+1,y),i))
if len(vs) < 1:
return None
if len(vs) < 2:
return vs[0]
return vs
return self.loadDictData(sheet,(x+1,y),maxr)
if cv == '__list__':
i = x + 1
vs = []
while i < maxr:
v = self.loadSingleData(sheet,(i,y))
vs.append(v)
i += 1
return vs
if maxr - x < 2:
v = self.loadSingleData(sheet,(x,y))
return v
return self.loadRecords(sheet,pos,maxr)
def loadRecords(self,sheet,pos,maxr):
x,y = pos
v = self.cellvalue(sheet,x,y)
if v==None or isCommentValue(v):
return self.loadRecords(sheet,(x+1,y),maxr)
data = []
i = x + 1
j = y
keys = [ self.trimedValue(k.value) for k in sheet.row(x)[y:] ]
while i < maxr:
d = {}
j = y
while j < sheet.ncols:
k = self.cellvalue(sheet,x,j)
if k is None or isCommentValue(k):
break
if sheet.cell_type(x,j) == xlrd.XL_CELL_EMPTY:
break
v = self.cellvalue(sheet,i,j)
if sheet.cell_type(x,j) != xlrd.XL_CELL_EMPTY:
d[purekey(k)] = castedValue(v,k)
j += 1
data.append(d)
i += 1
return data
def dict(self):
return self._dataset
class ExcelDataL(ExcelData):
def dataload(self):
ret = []
for name in self._book.sheet_names():
dat = {}
sheet = self._book.sheet_by_name(name)
name = name.encode(self.encoding)
dat[name] = self.loadSheetData(sheet)
ret.append(dat)
return ret
if __name__ == '__main__':
if len(sys.argv)<2:
print("Usage:\n%s execlfile" % sys.argv[0])
sys.exit(1)
ed = ExcelData(sys.argv[1])
print(ed.dict())

View File

@ -0,0 +1,144 @@
import xlwt
from appPublic.strUtils import *
class ExcelWriter:
def __init__(self,encoding='gb2312'):
self.encoding = encoding
def writeV(self,sheet,x,y,v):
if type(v) == type([]):
return self.writeList(sheet,x,y,v)
if type(v) == type({}):
return self.writeDict(sheet,x,y,v)
if type(v) not in (type({}),type([])):
if type(v) == type(' '):
v = lrtrim(v)
sheet.write(x,y,v)
return 1
def write(self,excelfile,dictdata):
wb = xlwt.Workbook(encoding=self.encoding)
for tbl in dictdata.keys():
ws = wb.add_sheet(tbl,cell_overwrite_ok=True)
self.writeV(ws,0,0,dictdata[tbl])
wb.save(excelfile)
def createRecordTitle(self,ws,x,y,title,poss,isList=False):
if isList:
poss['__list__'][title] = True
if title in poss.keys():
return
if len(poss.keys()) > 1:
d_ = {}
for k,v in poss.items():
if k != '__list__':
d_[k] = v
y = max(d_.values()) + 1
# ws.write(x,y,title)
poss[title] = y
def writeRecordTitle(self,ws,x,poss):
for k in poss.keys():
if k == '__list__':
continue
if k in poss['__list__'].keys():
ws.write(x,poss[k],k+':list')
else:
ws.write(x,poss[k],k)
def writeRecords(self,ws,x,y,alist):
ox = x
oy = y
poss = {'__list__':{}}
x = ox + 1
for r in alist:
for k,v in r.items():
isList = False
if type(v) == type([]):
isList = True
v = ','.join(v)
self.createRecordTitle(ws,ox,oy,k,poss,isList)
ws.write(x,poss[k],v)
x = x + 1
self.writeRecordTitle(ws,ox,poss)
return x - ox
def isRecords(self,alist):
records = True
for r in alist:
if type(r) != type({}):
return False
for k,v in r.items():
if type(v) == type({}):
return False
if type(v) == type([]):
for c in v:
if type(c) in [type([]),type({})]:
return False
return True
def writeDict(self,ws,x,y,adict):
ox = x
ws.write(x,y,'__dict__')
x = x + 1
for k in adict.keys():
ws.write(x,y,k)
cnt = self.writeV(ws,x,y+1,adict[k])
x = x + cnt
# print "writeV return ",cnt,"handled key=",k,"next row=",x
return x - ox
def writeList(self,ws,x,y,alist,singlecell=False):
if self.isRecords(alist):
return self.writeRecords(ws,x,y,alist)
ox = x
if singlecell is True:
s = ','.join([ str(i) for i in alist ])
ws.write(x,y,s)
return 1
multiline = False
for d in alist:
if type(d) == type({}):
multiline=True
if multiline is True:
for i in alist:
if type(i) == type({}):
rows = self.writeDict(ws,x,y,i)
elif type(i) == type([]):
rows = self.writeMultiLineList(ws,x,y,i)
else:
ws.write(x,y,i)
rows = 1
x = x + rows
return x - ox
else:
for i in alist:
if type(i) == type([]):
self.writeList(ws,x,y,i,singlecell=True)
else:
ws.write(x,y,i)
y = y + 1
return 1
def writeMultiLineList(self,ws,x,y,alist):
ox = x
ws.write(x,y,'__list__')
x = x + 1
for i in alist:
ws.write(x,y,i)
x = x + 1
return x - ox
if __name__ == '__main__':
data = {
'my1':['23423','423424','t334t3',2332,'erfverfefew'],
'my2':[{'aaa':1,'bbb':'bbb'},{'aaa':1,'bbb':'bbb'}],
}
w = ExcelWriter()
w.write('d:\\text.xls',data)

View File

@ -0,0 +1,60 @@
# !/usr/bin/env python
# -*- coding:UTF-8 -*-
from socket import *
import json
from appPublic.sockPackage import get_free_local_addr
from appPublic.background import Background
BUFSIZE = 1024
class BroadcastServer:
def __init__(self, port, info):
self.info = info
self.port = port
self.udpSerSock = socket(AF_INET, SOCK_DGRAM)
# set blocking mode
self.udpSerSock.setblocking(1)
# set a 1 second timeout
# self.udpSerSock.settimeout(1)
self.udpSerSock.bind(('' ,port))
self.run_flg = True
self.thread = Background(self.run)
self.thread.start()
def run(self):
while self.run_flg:
try:
data, addr = self.udpSerSock.recvfrom(BUFSIZE)
ret = json.dumps(self.info).encode('utf-8')
self.udpSerSock.sendto(ret, addr)
except Exception as e:
print('exception happened:',e)
pass
def stop(self):
self.run_flg = False
self.udpSerSock.close()
def find_players(port):
# broadcast_addr = '.'.join(host.split('.')[:-1]) + '.255'
host = get_free_local_addr()[0]
udpCliSock = socket(AF_INET, SOCK_DGRAM)
# set blocking mode
#udpCliSock.setblocking(2)
# set receive timeout
udpCliSock.settimeout(5)
udpCliSock.bind(('', 0))
udpCliSock.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
udpCliSock.sendto(b'findplayers', ('255.255.255.255',port))
players = []
while True:
try:
data,addr = udpCliSock.recvfrom(BUFSIZE)
if addr[0] != host and data:
data = data.decode('utf-8')
d = json.loads(data)
d['ip'] = addr[0]
players.append(d)
except Exception as e:
break
udpCliSock.close()
return players
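# A minimal usage sketch; the port and the info dict are placeholders. Note that
# find_players() drops replies coming from the local address, so run the server
# and the probe on different machines to see results.
if __name__ == '__main__':
    srv = BroadcastServer(45678, {'name': 'demo-player', 'version': '1.0'})
    print(find_players(45678))
    srv.stop()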

View File

@ -0,0 +1,195 @@
# -*- coding: utf-8 -*-
import os
import sys
import stat
import os.path
import platform
import time
import random
import tempfile
"""
import win32api
"""
import sys
def temp_file(suffix=None, prefix=None, dir=None, text=False):
x = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
os.close(x[0])
return x[1]
def filepoolpath(root):
paths=[191,193,197,199,97]
v = random.randint(0,9999999)
path = os.path.abspath(os.path.join(root,
str(v % paths[0]),
str(v % paths[1]),
str(v % paths[2]),
str(v % paths[3]),
str(v % paths[4])))
return path
def startsWith(text,s):
return text[:len(s)] == s
def endsWith(text,s):
return text[-len(s):] == s
def ProgramPath():
filename = sys.argv[0]
if getattr(sys,'frozen',False):
filename = sys.executable
p = os.path.dirname(os.path.abspath(filename))
return p
def timestamp2datatiemStr(ts):
t = time.localtime(ts)
return '%04d-%02d-%02d %02d:%02d:%02d' % (t.tm_year,t.tm_mon,t.tm_mday,t.tm_hour,t.tm_min,t.tm_sec)
"""
def findAllDrives():
Drives=[]
# print "Searching for drives..."
drives=win32api.GetLogicalDriveStrings().split(":")
for i in drives:
# print "i=",i,":"
dr=i[-1].lower()
if dr.isalpha():
dr+=":\\"
inf=None
try:
inf=win32api.GetVolumeInformation(dr)
except:
pass # Removable drive, not ready
# You'll still get the drive letter, but inf will be None
Drives.append([dr,inf])
return Drives
"""
## list all folder name under folder named by path
#
def listFolder(path, rescursive=False) :
for name in os.listdir(path) :
full_name = os.path.join(path,name)
if os.path.isdir(full_name):
for f in listFolder(full_name, rescursive=rescursive):
yield f
yield full_name
def listFile(folder,suffixs=[],rescursive=False):
subffixs = [ i.lower() for i in suffixs ]
for f in os.listdir(folder):
p = os.path.join(folder,f)
if rescursive and os.path.isdir(p):
for p1 in listFile(p,suffixs=suffixs,rescursive=True):
yield p1
if os.path.isfile(p):
e = p.lower()
if suffixs == [] :
yield p
for s in subffixs:
if e.endswith(s):
yield p
def folderInfo(root,uri=''):
relpath = uri
if uri.startswith('/'):
relpath = uri[1:]
path = os.path.join(root,*relpath.split('/'))
ret = []
for name in os.listdir(path):
full_name = os.path.join(path,name)
s = os.stat(full_name)
if stat.S_ISDIR(s.st_mode):
ret.append( {
'id':relpath + '/' + name,
'name':name,
'path':relpath,
'type':'dir',
'size':s.st_size,
'mtime':timestamp2datatiemStr(s.st_mtime),
})
if stat.S_ISREG(s.st_mode):
ret.append( {
'id':relpath + '/' + name,
'name':name,
'path':relpath,
'type':'file',
'size':s.st_size,
'mtime':timestamp2datatiemStr(s.st_mtime),
})
return ret
def rmdir_recursive(dir):
"""Remove a directory, and all its contents if it is not already empty."""
for name in os.listdir(dir):
full_name = os.path.join(dir, name)
# on Windows, if we don't have write permission we can't remove
# the file/directory either, so turn that on
if not os.access(full_name, os.W_OK):
os.chmod(full_name, 0o600)
if os.path.isdir(full_name):
rmdir_recursive(full_name)
else:
os.remove(full_name)
os.rmdir(dir)
def _mkdir(newdir) :
"""works the way a good mkdir should :)
- already exists, silently complete
- regular file in the way, raise an exception
- parent directory(ies) does not exist, make them as well
"""
if os.path.isdir(newdir):
pass
elif os.path.isfile(newdir):
raise OSError("a file with the same name as the desired " \
"dir, '%s', already exists." % newdir)
else:
head, tail = os.path.split(newdir)
if head and not os.path.isdir(head):
_mkdir(head)
#print "_mkdir %s" % repr(newdir)
if tail:
os.mkdir(newdir)
def _copyfile(fp,dir) :
fs = open(fp,'rb')
name = os.path.basename(fp)
newfp = os.path.join(dir,getFileName(name,dir))
f = open(newfp,'wb')
while True :
data = fs.read(65536)
if not data :
break
f.write(data)
fs.close()
f.close()
return True
def _copydir(fp,dir,topdistinct) :
name = os.path.basename(fp)
newname = getFileName(name,dir)
debug(newname)
newfp = os.path.join(dir,newname)
_mkdir(newfp)
if fp==topdistinct :
return True
flist = os.listdir(fp)
for name in flist :
full_name = os.path.join(fp,name)
if os.path.isdir(full_name) :
p = os.path.join(dir,name)
_copydir(full_name,newfp,topdistinct)
else :
if os.path.isfile(full_name) :
_copyfile(full_name,newfp)
return True
mkdir=_mkdir
copyfile = _copyfile
copydir = _copydir
rmdir = rmdir_recursive

View File

@ -0,0 +1,44 @@
class Genetic:
"""
A Base class for genetical objects,
all the instances can inherite attributes from its parent.
"""
def __init__(self):
self.__parent__ = None
self.__children__ = []
#print dir(self)
def __getattr__(self,n):
d = self.__dict__
if n in d.keys():
return d[n]
p = self.__parent__ #d['__parent__']
if p is not None:
return getattr(p,n)
raise AttributeError(n)
def addChild(self,c):
self.__children__.append(c)
c.__parent__ = self
def setParent(self,p):
p.addChild(self)
if __name__ == '__main__':
class A(Genetic):
def __init__(self,a1,a2):
Genetic.__init__(self)
self.a1 = a1
self.a2 = a2
class B(Genetic):
def __init__(self,b):
Genetic.__init__(self)
self.b = b
gp = A(1,2)
p = B(3)
c = A(4,5)
gc = B(6)
gc.setParent(c)
c.setParent(p)
p.setParent(gp)

20
build/lib/appPublic/hf.py Normal file
View File

@ -0,0 +1,20 @@
import requests
from huggingface_hub import configure_http_backend, get_session
def hf_socks5proxy(proxies={
"http": "socks5h://127.0.0.1:1086",
"https": "socks5h://127.0.0.1:1086"
}):
# Create a factory function that returns a Session with configured proxies
print(f'proxies={proxies}')
def backend_factory() -> requests.Session:
session = requests.Session()
session.proxies = proxies
print(f'socks5 proxy set {proxies=}')
return session
# Set it as the default session factory
configure_http_backend(backend_factory=backend_factory)
if __name__ == '__main__':
hf_socks5proxy()

View File

@ -0,0 +1,131 @@
import requests
class NeedLogin(Exception):
pass
class InsufficientPrivilege(Exception):
pass
class HTTPError(Exception):
def __init__(self,resp_code,url=None):
self.resp_code = resp_code
self.url = url
Exception.__init__(self)
def __repr__(self):
return f'{self.url}:{self.resp_code}'
def __str__(self):
return f'{self.url}:{self.resp_code}'
hostsessions = {}
class Http_Client:
def __init__(self):
self.s = requests.Session()
self.s.verify = False
self.s.hooks['response'].append(self.response_handler)
def prepped_handler(self, prepped):
pass
def response_handler(self, resp, *args, **kw):
return resp
def url2domain(self,url):
parts = url.split('/')[:3]
pre = '/'.join(parts)
return pre
def _webcall(self,url,method="GET",
params={},
files={},
headers={},
stream=False):
domain = self.url2domain(url)
sessionid = hostsessions.get(domain,None)
if sessionid:
headers.update({'session':sessionid})
if method in ['GET']:
req = requests.Request(method,url,
params=params,headers=headers)
else:
req = requests.Request(method,url,
data=params,files=files,headers=headers)
prepped = self.s.prepare_request(req)
self.prepped_handler(prepped)
resp = self.s.send(prepped)
if resp.status_code == 200:
h = resp.headers.get('Set-Cookie',None)
if h:
sessionid = h.split(';')[0]
hostsessions[domain] = sessionid
if resp.status_code == 401:
print('NeedLogin:',url)
raise NeedLogin
if resp.status_code == 403:
raise InsufficientPrivilege
if resp.status_code != 200:
print('Error', url, method,
params, resp.status_code,
type(resp.status_code))
raise HTTPError(resp.status_code,url)
return resp
def webcall(self,url,method="GET",
params={},
files={},
headers={},
stream=False):
resp = self._webcall(url,method=method,
params=params,
files=files,
headers=headers,
stream=stream)
if stream:
return resp
try:
data = resp.json()
if type(data) != type({}):
return data
status = data.get('status',None)
if status is None:
return data
if status == 'OK':
return data.get('data')
return data
except:
return resp.text
def __call__(self,url,method="GET",
params={},
headers={},
files={},
stream=False):
return self.webcall(url, method=method,
params=params, files=files,
headers=headers, stream=stream)
def get(self, url, params={}, headers={}, stream=False):
return self.__call__(url,method='GET',params=params,
headers=headers, stream=stream)
def post(self, url, params={}, headers={}, files={}, stream=False):
return self.__call__(url,method='POST',params=params, files=files,
headers=headers, stream=stream)
def put(self, url, params={}, headers={}, stream=False):
return self.__call__(url,method='PUT',params=params,
headers=headers, stream=stream)
def delete(self, url, params={}, headers={}, stream=False):
return self.__call__(url,method='DELETE',params=params,
headers=headers, stream=stream)
def option(self, url, params={}, headers={}, stream=False):
return self.__call__(url,method='OPTION',params=params,
headers=headers, stream=stream)
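# A minimal usage sketch; the URL is a placeholder. webcall() unwraps
# {"status": "OK", "data": ...} envelopes and falls back to raw text for
# non-JSON responses.
if __name__ == '__main__':
    hc = Http_Client()
    try:
        print(hc.get('https://httpbin.org/ip'))
    except HTTPError as e:
        print('request failed:', e)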

View File

@ -0,0 +1,117 @@
import asyncio
import aiohttp
import re
from appPublic.log import info, debug, warning, error, exception, critical
RESPONSE_BIN = 0
RESPONSE_TEXT = 1
RESPONSE_JSON = 2
RESPONSE_FILE = 3
RESPONSE_STREAM = 4
class HttpError(Exception):
def __init__(self, code, msg, *args, **kw):
super().__init__(msg, *args, **kw)
self.code = code
self.msg = msg
def __str__(self):
return f"Error Code:{self.code}, {self.msg}"
def __repr__(self):
return str(self)
class HttpClient:
def __init__(self,coding='utf-8'):
self.coding = coding
self.session = None
self.cookies = {}
async def close(self):
if self.session:
await self.session.close()
self.session = None
def url2domain(self,url):
parts = url.split('/')[:3]
pre = '/'.join(parts)
return pre
def setCookie(self,url,cookies):
name = self.url2domain(url)
self.cookies[name] = cookies
def getCookies(self,url):
name = self.url2domain(url)
return self.cookies.get(name,None)
def getsession(self,url):
if self.session is None:
jar = aiohttp.CookieJar(unsafe=True)
self.session = aiohttp.ClientSession(cookie_jar=jar)
return self.session
async def handleResp(self,url,resp,resp_type, stream_func=None):
if resp.cookies is not None:
self.setCookie(url,resp.cookies)
if resp_type == RESPONSE_BIN:
return await resp.read()
if resp_type == RESPONSE_JSON:
return await resp.json()
if resp_type == RESPONSE_TEXT:
return await resp.text(self.coding)
async for chunk in resp.content.iter_chunked(1024):
if stream_func:
await stream_func(chunk)
def grapCookie(self,url):
session = self.getsession(url)
domain = self.url2domain(url)
filtered = session.cookie_jar.filter_cookies(domain)
return filtered
async def request(self, url, method,
response_type=RESPONSE_TEXT,
params=None,
data=None,
jd=None,
stream_func=None,
headers=None,
**kw):
session = self.getsession(url)
if params == {}:
params = None
if data == {}:
data = None
if jd == {}:
jd = None
if headers == {}:
headers = None
resp = await session.request(method, url,
params=params,
data=data,
json=jd,
headers=headers, **kw)
if resp.status==200:
return await self.handleResp(url, resp, response_type, stream_func=stream_func)
msg = f'http error({resp.status}, {url=},{params=}, {data=}, {jd=}, {headers=}, {kw=})'
exception(msg)
raise HttpError(resp.status, msg)
async def get(self,url,**kw):
return await self.request(url, 'GET', **kw)
async def post(self,url, **kw):
return await self.request(url, 'POST', **kw)
if __name__ == '__main__':
async def gbaidu(hc):
r = await hc.get('https://www.baidu.com')
print(r)
await hc.close()
loop = asyncio.get_event_loop()
hc = HttpClient()
loop.run_until_complete(gbaidu(hc))

161
build/lib/appPublic/i18n.py Normal file
View File

@ -0,0 +1,161 @@
import os,re,sys
import codecs
from appPublic.folderUtils import _mkdir
from appPublic.Singleton import SingletonDecorator
from appPublic.folderUtils import ProgramPath
from appPublic.jsonConfig import getConfig
import threading
import time
import locale
comment_re = re.compile(r'\s*#.*')
msg_re = re.compile(r'\s*([^:]*)\s*:\s*([^\s].*)')
def dictModify(d, md) :
for i in md.keys() :
if md[i]!=None :
d[i] = md[i]
return d
convert_pairs = {':':'\\x3A',
'\n':'\\x0A',
'\r':'\\x0D',
}
def charEncode(s) :
r = ''
v = s.split('\\')
s = '\\\\'.join(v)
for i in convert_pairs.keys() :
v = s.split(i)
s = convert_pairs[i].join(v)
# print 'i=',i,'iv=',convert_pairs[i],'s=',s
return s
def charDecode(s) :
for i in convert_pairs.items() :
v = s.split(i[1])
s = i[0].join(v)
v = s.split('\\\\')
s = '\\'.join(v)
return s
def getTextDictFromLines(lines) :
d = {}
for l in lines :
l = ''.join(l.split('\r'))
if comment_re.match(l) :
continue
m = msg_re.match(l)
if m :
grp = m.groups()
d[charDecode(grp[0])] = charDecode(grp[1])
return d
def getFirstLang(lang) :
s = lang.split(',')
return s[0]
@SingletonDecorator
class MiniI18N:
"""
"""
def __init__(self,path,lang=None,coding='utf8') :
self.path = path
l = locale.getdefaultlocale()
self.curLang = l[0]
self.coding = coding
self.id = 'i18n'
self.langTextDict = {}
self.messages = {}
self.setupMiniI18N()
self.missed_pt = None
self.translated_pt = None
self.header_pt = None
self.footer_pt = None
self.show_pt=None
self.clientLangs = {}
self.languageMapping = {}
self.timeout = 600
config = getConfig()
for l1,l in config.langMapping.items():
self.setLangMapping(l1,l)
def __call__(self,msg,lang=None) :
"""
"""
if type(msg) == type(b''):
msg = msg.decode(self.coding)
return self.getLangText(msg,lang)
def setLangMapping(self,lang,path):
self.languageMapping[lang] = path
def getLangMapping(self,lang):
return self.languageMapping.get(lang,lang)
def setTimeout(self,timeout=600):
self.timeout = timeout
def delClientLangs(self):
t = threading.currentThread()
tim = time.time() - self.timeout
[ self.clientLangs.pop(k,None) for k in list(self.clientLangs.keys()) if self.clientLangs[k]['timestamp'] < tim ]
def getLangDict(self,lang):
lang = self.getLangMapping(lang)
return self.langTextDict.get(lang,{})
def getLangText(self,msg,lang=None) :
"""
"""
if lang==None :
lang = self.getCurrentLang()
textMapping = self.getLangDict(lang)
return textMapping.get(msg,msg)
def setupMiniI18N(self) :
"""
"""
p = os.path.join(self.path,'i18n')
langs = []
for f in os.listdir(p) :
if os.path.isdir(os.path.join(p,f)) :
langs.append(f)
for dir in langs :
p1 = os.path.join(p,dir,'msg.txt')
if os.path.exists(p1) :
f = codecs.open(p1,'r',self.coding)
textDict = getTextDictFromLines(f.readlines())
f.close()
d = {}
if dir in self.langTextDict :
d = self.langTextDict[dir]
self.langTextDict[dir] = textDict
for i in textDict.keys() :
self.messages[i] = ''
self._p_changed = 1
def setCurrentLang(self,lang):
lang = self.getLangMapping(lang)
t = time.time()
threadid = threading.currentThread()
a = dict(timestamp=t,lang=lang)
self.clientLangs[threadid] = a
def getCurrentLang(self) :
"""
"""
threadid = threading.currentThread()
return self.clientLangs[threadid]['lang']
def getI18N(path=None, coding='utf8'):
if path is None:
path = ProgramPath()
i18n = MiniI18N(path, coding=coding)
return i18n
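# A minimal usage sketch, assuming getConfig() has already loaded a config with a
# langMapping section and that an i18n/<lang>/msg.txt tree (lines like "hello:你好")
# exists under the program path; the language code is a placeholder.
if __name__ == '__main__':
    i18n = getI18N()
    i18n.setCurrentLang('zh_CN')
    print(i18n('hello'))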

View File

@ -0,0 +1,238 @@
#!/usr/bin/env python
"""
This module is designed to fetch your external IP address from the internet.
It is used mostly when behind a NAT.
It picks your IP randomly from a serverlist to minimize request
overhead on a single server
If you want to add or remove your server from the list contact me on github
API Usage
=========
>>> import ipgetter
>>> myip = ipgetter.myip()
>>> myip
'8.8.8.8'
>>> ipgetter.IPgetter().test()
Number of servers: 47
IP's :
8.8.8.8 = 47 ocurrencies
Copyright 2014 phoemur@gmail.com
This work is free. You can redistribute it and/or modify it under the
terms of the Do What The Fuck You Want To Public License, Version 2,
as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
"""
import re
import json
import time
import random
import socket
from threading import Timer
from sys import version_info
import future.moves.urllib.request
urllib = future.moves.urllib.request
PY3K = version_info >= (3, 0)
__version__ = "0.6"
def myip():
return IPgetter().get_external_ip()
class IPgetter(object):
"""
This class is designed to fetch your external IP address from the internet.
It is used mostly when behind a NAT.
It picks your IP randomly from a serverlist to minimize request overhead
on a single server
# 'http://ip.dnsexit.com',
# 'http://checkip.dyndns.org/plain',
# 'http://ipogre.com/linux.php',
# 'http://whatismyipaddress.com/',
# 'http://ip.my-proxy.com/',
# 'http://websiteipaddress.com/WhatIsMyIp',
# 'http://www.iplocation.net/',
# 'http://www.howtofindmyipaddress.com/',
# 'http://www.ip-adress.com/',
# 'http://checkmyip.com/',
# 'http://www.tracemyip.org/',
# 'http://checkmyip.net/',
# 'http://www.findmyip.co/',
# 'http://www.dslreports.com/whois',
# 'http://www.mon-ip.com/en/my-ip/',
# 'http://www.myip.ru',
# 'http://www.whatsmyipaddress.net/',
# 'http://formyip.com/',
# 'https://check.torproject.org/',
# 'http://www.displaymyip.com/',
# 'http://www.bobborst.com/tools/whatsmyip/',
# 'https://www.whatsmydns.net/whats-my-ip-address.html',
# 'https://www.privateinternetaccess.com/pages/whats-my-ip/',
# 'http://www.infosniper.net/',
# 'http://ipinfo.io/',
# 'http://myexternalip.com/',
"""
def __init__(self):
self.server_list = [
'http://ifconfig.me/ip',
'http://ipecho.net/plain',
'http://getmyipaddress.org/',
'http://www.my-ip-address.net/',
'http://www.canyouseeme.org/',
'http://www.trackip.net/',
'http://icanhazip.com/',
'http://www.ipchicken.com/',
'http://whatsmyip.net/',
'http://www.lawrencegoetz.com/programs/ipinfo/',
'http://ip-lookup.net/',
'http://ipgoat.com/',
'http://www.myipnumber.com/my-ip-address.asp',
'http://www.geoiptool.com/',
'http://checkip.dyndns.com/',
'http://www.ip-adress.eu/',
'http://wtfismyip.com/',
'http://httpbin.org/ip',
]
self.parsers = {}
self.timeout = 1.6
self.url = None
def get_external_ip(self):
"""
This function gets your IP from a random server
"""
random.shuffle(self.server_list)
myip = ''
for server in self.server_list:
myip = self.defaultparser(self.fetch(server))
if myip != '' and not (myip.startswith('192.') or myip.startswith('10.')) and not myip.startswith('127'):
return myip
else:
continue
return ''
def add_server(self, server, parser):
self.server_list.append(server)
self.parsers[server] = parser
def defaultparser(self, content):
p = '(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.('
p += '25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|['
p += '01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
try:
m = re.search(p, content)
myip = m.group(0)
if len(myip) > 0:
return myip
else:
return ''
except:
return ''
def handle_timeout(self, url):
if self.url is not None:
self.url.close()
self.url = None
def fetch(self, server):
"""
This function gets your IP from a specific server
"""
t = None
socket_default_timeout = socket.getdefaulttimeout()
opener = urllib.build_opener()
opener.addheaders = [('User-agent',
"Mozilla/5.0 (X11; Linux x86_64; rv:24.0)"
" Gecko/20100101 Firefox/24.0")]
try:
# Close url resource if fetching not finished within timeout.
t = Timer(self.timeout, self.handle_timeout, [self.url])
t.start()
# Open URL.
if version_info[0:2] == (2, 5):
# Support for Python 2.5.* using socket hack
# (Changes global socket timeout.)
socket.setdefaulttimeout(self.timeout)
self.url = opener.open(server)
else:
self.url = opener.open(server, timeout=self.timeout)
# Read response.
content = self.url.read()
			# Didn't want to import chardet. Preferred to stick to stdlib
if PY3K:
try:
content = content.decode('UTF-8')
except UnicodeDecodeError:
content = content.decode('ISO-8859-1')
parser = self.parsers.get(server, self.defaultparser)
return parser(content)
except Exception as e:
print(server, e)
return ''
finally:
if self.url is not None:
self.url.close()
self.url = None
if t is not None:
t.cancel()
# Reset default socket timeout.
if socket.getdefaulttimeout() != socket_default_timeout:
socket.setdefaulttimeout(socket_default_timeout)
def all_result(self):
x= []
for s in self.server_list:
x.append([s, self.fetch(s)])
print(x)
def test(self):
"""
This functions tests the consistency of the servers
on the list when retrieving your IP.
All results should be the same.
"""
resultdict = {}
for server in self.server_list:
resultdict.update(**{server: self.fetch(server)})
ips = sorted(resultdict.values())
ips_set = set(ips)
print('\nNumber of servers: {}'.format(len(self.server_list)))
print("IP's :")
for ip, ocorrencia in zip(ips_set, map(lambda x: ips.count(x), ips_set)):
			print('{0} = {1} occurrence{2}'.format(ip if len(ip) > 0 else 'broken server', ocorrencia, '' if ocorrencia == 1 else 's'))
print('\n')
print(resultdict)
if __name__ == '__main__':
def p(content):
d = json.loads(content)
return d['ip']
g = IPgetter()
server = 'http://ipinfo.io/json'
g.add_server(server, p)
print(g.get_external_ip())

View File

@ -0,0 +1,75 @@
import os
import sys
from requests import get
from bs4 import BeautifulSoup
from appPublic.http_client import Http_Client
from appPublic.sockPackage import get_free_local_addr
public_headers = {
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3100.0 Safari/537.36"
}
def get_outip():
ip = get('https://api.ipify.org').content.decode('utf8')
return ip
def ipip(ip=None):
# ipip.net
if ip is None:
ip = get_outip()
api= f"http://freeapi.ipip.net/{ip}"
hc = Http_Client()
r= hc.get(api, headers=public_headers)
return {
'country':r[0],
'city':r[2]
}
def ipapi_co(ip):
url = f'https://ipapi.co/{ip}/json/'
hc = Http_Client()
r = hc.get(url)
r['City'] = r['city']
r['lat'] = r['latitude']
r['lon'] = r['longitude']
return r
def ip_api_com(ip):
url = f'http://ip-api.com/json/{ip}'
hc = Http_Client()
r = hc.get(url)
r['City'] = r['city']
return r
def iplocation(ip=None):
if ip is None:
ip = get_outip()
	# apikey comes from
	# https://app.apiary.io/globaliptv/tests/runs
	# using my github account
	apikey='c675f89c4a0e9315437a1a5edca9b92c'
	api = f"https://www.iplocate.io/api/lookup/{ip}?apikey={apikey}"
hc = Http_Client()
r= hc.get(api, headers=public_headers)
return r
def get_ip_location(ip):
funcs = [
ip_api_com,
ipapi_co,
ipip,
iplocation
]
hc = Http_Client()
for f in funcs:
try:
r = f(ip)
return r
except:
pass
if __name__ == '__main__':
print(get_free_local_addr())
if len(sys.argv) > 1:
info = get_ip_location(sys.argv[1])
print(info)
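# Usage sketch: resolve the caller's public IP first, then try the lookup
# providers above in order; the keys in the returned dict depend on which
# provider answered, so treat them as provider specific.
def show_my_location():
	ip = get_outip()
	print(ip, get_ip_location(ip))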

View File

@ -0,0 +1,74 @@
import os,sys
import json
from pathlib import Path
from appPublic.dictObject import DictObject
from appPublic.Singleton import SingletonDecorator
from appPublic.folderUtils import ProgramPath
from appPublic.argsConvert import ArgsConvert
def key2ansi(dict):
	# python3 dict keys are already str, nothing to convert here
	return dict
class JsonObject(DictObject):
"""
JsonObject class load json from a json file
"""
def __init__(self,jsonholder,keytype='ansi',NS=None):
jhtype = type(jsonholder)
if jhtype == type("") or jhtype == type(u''):
f = open(jsonholder,'r')
else:
f = jsonholder
		try:
			a = json.load(f)
		except Exception as e:
			print("exception:", jsonholder, e)
			raise
		finally:
			if isinstance(jsonholder, str):
				f.close()
if NS is not None:
ac = ArgsConvert('$[',']$')
a = ac.convert(a,NS)
a['__jsonholder__'] = jsonholder
a['NS'] = NS
DictObject.__init__(self,**a)
@SingletonDecorator
class JsonConfig(JsonObject):
pass
def getConfig(path=None,NS=None):
pp = ProgramPath()
if path==None:
path = os.getcwd()
cfname = os.path.abspath(os.path.join(path,"conf","config.json"))
# print __name__,cfname
ns = {
'home':str(Path.home()),
'workdir':path,
'ProgramPath':pp
}
if NS is not None:
ns.update(NS)
a = JsonConfig(cfname,NS=ns)
return a
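# Minimal sketch of how the NS substitution works: ArgsConvert('$[', ']$')
# is assumed to walk the loaded json and replace $[name]$ markers with
# values from the NS dict, and DictObject is assumed to expose keys as
# attributes. The json content and names below are placeholders.
def ns_example():
	import io
	holder = io.StringIO('{"workroot": "$[home]$/work", "port": 8080}')
	cfg = JsonObject(holder, NS={'home': '/home/demo'})
	print(cfg.workroot, cfg.port)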
if __name__ == '__main__':
conf = JsonConfig(sys.argv[1])
#print conf.db,conf.sql
#conf1 = JsonConfig(sys.argv[1],keytype='unicode')
conf1 = JsonConfig(sys.argv[1],keytype='ansi')
print("conf=",dir(conf))
print("conf1=",dir(conf1) )

View File

@ -0,0 +1,38 @@
import json
def uni_str(a, encoding):
if a is None:
return None
if isinstance(a, (list, tuple)):
s = []
for i, k in enumerate(a):
s.append(uni_str(k, encoding))
return s
elif isinstance(a, dict):
s = {}
for i, k in enumerate(a.items()):
key, value = k
s[uni_str(key, encoding)] = uni_str(value, encoding)
return s
	elif isinstance(a, bool):
		return a
	elif isinstance(a, str):
		return a
	elif isinstance(a, bytes):
		return a.decode(encoding)
	elif hasattr(a, '__str__') and callable(getattr(a, '__str__')):
		return str(a)
	else:
		return a
def success(data):
return dict(success=True,data=data)
def error(errors):
return dict(success=False,errors=errors)
def jsonEncode(data,encode='utf-8'):
return json.dumps(uni_str(data, encode))
def jsonDecode(jsonstring):
return json.loads(jsonstring)
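# Usage sketch: wrap a payload in the success/error envelope and round-trip
# it through jsonEncode/jsonDecode.
if __name__ == '__main__':
	body = jsonEncode(success({'name': '张三', 'age': 30}))
	print(body)
	print(jsonDecode(body))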

View File

@ -0,0 +1,43 @@
# These helpers work around problems when opening files whose names are not English (non-ASCII).
#
import sys
import locale
language, local_encoding = locale.getdefaultlocale()
if sys.platform == 'win32':
import locale, codecs
local_encoding = locale.getdefaultlocale()[1]
if local_encoding.startswith('cp'): # "cp***" ?
try:
codecs.lookup(local_encoding)
except LookupError:
import encodings
encodings._cache[local_encoding] = encodings._unknown
encodings.aliases.aliases[local_encoding] = 'mbcs'
def locale_open(filename,mode='rb') :
	# python3 open() accepts unicode filenames directly
	return open(filename,mode)
def localeString(s) :
	try :
		if isinstance(s, bytes):
			s = s.decode('utf-8')
		return s.encode(local_encoding)
	except :
		return s
def utf8String(s) :
	try :
		if isinstance(s, bytes):
			s = s.decode(local_encoding)
		return s.encode('utf-8')
	except :
		return s
def charsetString(s,charset) :
	for coding in (local_encoding,'utf-8') :
		try :
			if isinstance(s, bytes):
				return s.decode(coding).encode(charset)
			return s.encode(charset)
		except :
			continue
	return s

View File

@ -0,0 +1,95 @@
import sys
import codecs
from appPublic.timeUtils import timestampstr
from appPublic.Singleton import SingletonDecorator
import inspect
def my_function():
frame_info = inspect.currentframe()
caller_frame = frame_info.f_back
file_name = inspect.getframeinfo(caller_frame).filename
line_number = inspect.getframeinfo(caller_frame).lineno
print(f"Called from file: {file_name}, line: {line_number}")
@SingletonDecorator
class MyLogger:
	levels={
		"debug":6,
		"info":5,
		"warning":4,
		"error":3,
		"exception":2,
		"critical":1
	}
formater='%(timestamp)s[%(name)s][%(levelname)s][%(filename)s:%(lineno)s]%(message)s\n'
def __init__(self, name, levelname='debug', logfile=None):
self.name = name
self.levelname = levelname
self.level = self.levels.get(levelname)
self.logfile = logfile
def open_logger(self):
if self.logfile:
self.logger = codecs.open(self.logfile, 'a', 'utf-8')
else:
self.logger = sys.stdout
	def close_logger(self):
		if self.logfile:
			self.logger.close()
		self.logger = None
def log(self, levelname, message, frame_info):
caller_frame = frame_info.f_back
filename = inspect.getframeinfo(caller_frame).filename
lineno = inspect.getframeinfo(caller_frame).lineno
level = self.levels.get(levelname)
		if level > self.level:
			return
data = {
'timestamp':timestampstr(),
'name':self.name,
'levelname':levelname,
'message':message,
'filename':filename,
'lineno':lineno
}
self.open_logger()
s = self.formater % data
self.logger.write(s)
self.logger.flush()
self.close_logger()
def info(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
logger.log('info', message, frame_info)
def debug(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
logger.log('debug', message, frame_info)
def warning(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
logger.log('warning', message, frame_info)
def error(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
logger.log('error', message, frame_info)
def critical(message):
frame_info = inspect.currentframe()
logger = MyLogger('Test')
logger.log('critical', message, frame_info)
def exception(message):
frame_info = inspect.currentframe()
logger = MyLogger('exception')
logger.log('exception', message, frame_info)
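# Usage sketch: the module level helpers all share one MyLogger instance
# (SingletonDecorator is assumed to hand back the already-created object on
# later calls), so create the logger you want first; logfile=None writes to
# stdout and 'demo' is just a placeholder name.
if __name__ == '__main__':
	MyLogger('demo', levelname='info')
	info('service started')
	error('something went wrong')
	debug('this one is filtered out at info level')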

View File

@ -0,0 +1,38 @@
#! /usr/bin/env python
import locale
import psutil
import socket
def getAllAddress():
iocounts = psutil.net_io_counters(pernic=True)
ns = [ k for k in iocounts.keys() if iocounts[k].bytes_sent>0 and iocounts[k].bytes_recv>0 ]
stats = psutil.net_if_stats()
stat = [ i for i in stats.keys() if i in ns ]
hds = psutil.net_if_addrs()
for n,v in hds.items():
if n not in stat:
continue
for i in v:
if i.family == socket.AF_INET:
yield n,i.address
def getAllMacAddress():
coding = locale.getdefaultlocale()[1]
iocounts = psutil.net_io_counters(pernic=True)
ns = [ k for k in iocounts.keys() if iocounts[k].bytes_sent>0 and iocounts[k].bytes_recv>0 ]
stats = psutil.net_if_stats()
stat = [ i for i in stats.keys() if i in ns ]
hds = psutil.net_if_addrs()
for n,v in hds.items():
if n not in stat:
continue
for i in v:
if i.family == socket.AF_PACKET:
yield n,i.address
if __name__ == '__main__':
def test():
for i in getAllAddress():
print("mac=",i)
test()

View File

@ -0,0 +1,6 @@
def myImport(modulename):
modules = modulename.split('.')
if len(modules) > 1:
a = __import__(modules[0])
return eval('a.' + '.'.join(modules[1:]))
return __import__(modulename)
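# Usage sketch: import a dotted module path and get the leaf module back.
if __name__ == '__main__':
	osp = myImport('os.path')
	print(osp.join('a', 'b'))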

View File

@ -0,0 +1,82 @@
import os
import sys
try:
import ujson as json
except:
import json
from jinja2 import Environment,FileSystemLoader, BaseLoader
import codecs
from appPublic.argsConvert import ArgsConvert
from appPublic.dictObject import DictObject
def isNone(obj):
return obj is None
def string_template_render(tmp_string, data):
rtemplate = Environment(loader=BaseLoader()).from_string(tmp_string)
return rtemplate.render(**data)
class MyTemplateEngine:
def __init__(self,pathList,file_coding='utf-8',out_coding='utf-8'):
self.file_coding = file_coding
self.out_coding = out_coding
loader = FileSystemLoader(pathList, encoding=self.file_coding)
self.env = Environment(loader=loader, enable_async=False)
denv={
'json':json,
'hasattr':hasattr,
'int':int,
'float':float,
'str':str,
'type':type,
'isNone':isNone,
'len':len,
'render':self.render,
'renders':self.renders,
'ArgsConvert':ArgsConvert,
'renderJsonFile':self.renderJsonFile,
'ospath':lambda x:os.path.sep.join(x.split(os.altsep)),
'basename':lambda x:os.path.basename(x),
'basenameWithoutExt':lambda x:os.path.splitext(os.path.basename(x))[0],
'extname':lambda x:os.path.splitext(x)[-1],
}
self.env.globals.update(denv)
def set(self,k,v):
self.env.globals.update({k:v})
def _render(self,template,data):
# print('**********template=',template,'**data=',data,'type_data=',type(data),'************')
uRet = template.render(**data)
return uRet
def renders(self,tmplstring,data):
def getGlobal():
return data
self.set('global',getGlobal)
template = self.env.from_string(tmplstring)
return self._render(template,data)
def render(self,tmplfile,data):
def getGlobal():
return data
self.set('global',getGlobal)
template = self.env.get_template(tmplfile)
return self._render(template,data)
def renderJsonFile(self,tmplfile,jsonfile):
with codecs.open(jsonfile,"r",self.file_coding) as f:
data = json.load(f)
return self.render(tmplfile,data)
def tmpTml(f, ns):
te = MyTemplateEngine('.')
with codecs.open(f, 'r', 'utf-8') as fd:
d = fd.read()
b = te.renders(d, ns)
filename = os.path.basename(f)
p = f'/tmp/{filename}'
with codecs.open(p, 'w', 'utf-8') as wf:
wf.write(b)
return p
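# Minimal usage sketch: render an inline template string; '.' as the only
# template path and the template text below are placeholders.
if __name__ == '__main__':
	te = MyTemplateEngine(['.'])
	print(te.renders('hello {{ name }}, ext={{ extname("a/b.txt") }}', {'name': 'world'}))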

View File

@ -0,0 +1,22 @@
try:
import ujson as json
except:
import json
import codecs
def loadf(fn,coding='utf8'):
f = codecs.open(fn,'r',coding)
d = json.load(f)
f.close()
return d
def dumpf(obj,fn,coding='utf8'):
f = codecs.open(fn,'w',coding)
json.dump(obj,f)
f.close()
load = json.load
dump = json.dump
loads = json.loads
dumps = json.dumps

View File

@ -0,0 +1,89 @@
import os
from datetime import datetime
from PublicData import public_data
from folderUtils import mkdir
myLogPath = '.'
AllCatelogs=['SYSError',
'SYSWarn',
'APPError',
'APPWarn',
'APPInfo',
'DEBUG1',
'DEBUG2',
'DEBUG3',
'DEBUG4',
'DEBUG5',
]
class MyLog :
def __init__(self,path) :
self.setLogPath(path)
def setLogPath(self,path='.') :
self.myLogPath = path
logp=os.path.join(path,'log')
mkdir(logp)
def __call__(self,msg='') :
p = os.path.join(self.myLogPath,'log','my.log')
f = open(p,'a')
d = datetime.now()
f.write('%04d-%02d-%02d %02d:%02d:%02d %s\n' % ( d.year,d.month,d.day,d.hour,d.minute,d.second,msg))
f.close()
class LogMan :
def __init__(self) :
self.logers = {}
self.catelogs = AllCatelogs
def addCatelog(self,catelog) :
if catelog not in self.catelogs :
self.catelogs.append(catelog)
def addLoger(self,name,func,catelog) :
if type(catelog)!=type([]) :
catelog = [catelog]
catelog = [ i for i in catelog if i in self.catelogs ]
log = {
'name':name,
'func':func,
'catelog':catelog,
}
self.logers[name] = log
def delLoger(self,name) :
if name in self.logers.keys() :
del self.logers[name]
def setCatelog(self,name,catelog) :
if type(catelog)!=type([]) :
catelog = [catelog]
catelog = [ i for i in catelog if i in self.catelogs ]
if name in self.logers.keys() :
log = self.logers[name]
log['catelog'] = catelog
self.logers[name] = log
def __call__(self,msg='',catelog='APPInfo') :
for name,loger in self.logers.items() :
c = loger['catelog']
if type(c)!=type([]) :
c = [c]
if catelog in c :
f = loger['func']
f(msg)
def mylog(s,catelog='APPInfo') :
logman = public_data.get('mylog',None)
if logman==None :
path = public_data.get('ProgramPath',None)
if path==None :
raise Exception('ProgramPath Not found in "public_data"')
log = MyLog(path)
logman = LogMan()
logman.addLoger('mylog',log,AllCatelogs)
public_data.set('mylog',logman)
return logman(s,catelog)

View File

@ -0,0 +1,188 @@
import json
from appPublic.httpclient import HttpClient, RESPONSE_TEXT, RESPONSE_JSON, RESPONSE_BIN,RESPONSE_FILE, RESPONSE_STREAM, HttpError
from appPublic.argsConvert import ArgsConvert
from appPublic.dictObject import DictObject
class OAuthClient:
"""
OauthClient is a http(s) client for request a api annouce by other
it send https request and get back a json data
we can check the response json data to check if the call is success or failed
desc has the following format
{
"data": predefined data, and if nessory, you can add to new data to it
"method_name":{
"url"
"method",
"headers",
"params":arguments will appended to url with ?a=a&b=b...
"data":data will send in the request body, json format
"resp":[
{
"name":
"converter":default none, if not, convert response data first before set the returen data
"keys":resposne json data keys to achieve data
}
]
"error_if":{
"keys":
"op",
"value"
}
}
}
return:
if error:
return {
"status":"error",
"code":code returned by server
"message":"message returned by server
}
else:
return {
"status":"ok",
"data":...
}
"""
def __init__(self, desc, converters={}):
assert desc.get('data')
self.desc = desc
self.data = desc.get('data')
self.converters = converters
self.ac = ArgsConvert('${', '}')
def setup_req_data(self, data=[], ns={}):
d = {}
if data is None:
return None
for h in data:
d1 = self.setup_req_kv(h, ns)
d.update(d1)
if d == {}:
return None
return d
def setup_req_kv(self, d, ns):
rd = {
d.name:d.value
}
nd = self.datalize(rd, ns)
if d.converter:
f = self.converters.get(d.converter)
if f:
nd[d.name] = f(nd.get(d.name))
return nd
async def __call__(self, host, mapi, params):
if not self.desc.get(mapi):
raise Exception(f'{mapi} not defined')
self.api = self.desc[mapi]
if not self.api:
return {
"status":"error",
"code":'9999',
"message":f"{mapi} not defined"
}
path = self.datalize(self.api.path, params)
url = host + path
method = self.api.get('method', 'GET')
myheaders = self.setup_req_data(self.api.headers, params)
myparams = self.setup_req_data(self.api.params, params)
mydata = self.setup_req_data(self.api.data, params)
response_type = RESPONSE_JSON
hc = HttpClient()
print(f'{url=}, {method=}, {myparams=}, {mydata=}, {myheaders=}')
resp_data = None
try:
resp_data = await hc.request(url, method, response_type=response_type,
params=None if not myparams else myparams,
data=None if not mydata else mydata,
headers=myheaders)
resp_data = DictObject(**resp_data)
print(resp_data)
except HttpError as e:
return {
"status":"error",
"code":e.code,
"message":e.msg
}
if resp_data is None:
return {
"status":"error",
"code":None,
"message":"https error"
}
err = self.check_if_error(resp_data)
if err:
return err
return self.setup_return_data(resp_data)
def datalize(self, dic, data={}):
mydata = self.data.copy()
mydata.update(data)
s1 = self.ac.convert(dic, mydata)
return s1
def get_resp_data(self, resp, keys, converter=None):
d = resp.get_data_by_keys(keys)
if converter:
f = self.converters.get(converter)
if f:
d = f(d)
return d
def setup_return_data(self, resp):
data = {}
if not self.api.resp:
return {
'status':'ok',
'data':{}
}
for desc in self.api.resp:
k = desc.name
v = self.get_resp_data(resp, desc.resp_keys, desc.converter)
data[k] = v
return {
"status":"ok",
"data":data
}
def check_if_error(self, resp):
if not self.api.error_if:
return None
ei = self.api.error_if
v = resp.get_data_by_keys(ei.error_keys)
v1 = ei.value
if ei.converter:
f = self.converters.get(ei.converter)
if f:
v = f(v)
if not ei.op:
ei.op = '=='
print(f'{ei.keys=},{v=}, {v1=}, {ei.op=}{v==v1}, {resp.base_resp.status_code=}')
if (ei.op == '==' and v == v1) or (ei.op == '!=' and v != v1):
print(f'{v=}, {v1=}, {ei.op=}{v==v1}')
code = None
message = None
if ei.code_keys:
code = resp.get_data_by_keys(ei.code_keys)
if ei.msg_keys:
message = resp.get_data_by_keys(ei.msg_keys)
return {
"status":"error",
"code":code,
"message":message
}
print(f'check_if_error ok:{v=}, {v1=}, {ei.op=}{v==v1}')
return None
def set_data(self, resp_data, data_desc):
for dd in data_desc:
f = dd['field']
n = dd['name']
if resp_data.get(f):
self.data[n] = resp_data[f]
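# Sketch of a minimal 'desc' accepted by OAuthClient. It assumes DictObject
# gives recursive attribute access over nested dicts and lists (the class
# above reads entries as d.name / d.value / desc.resp_keys / ei.op). The
# host, path, parameter names and response keys below are hypothetical
# placeholders, not a real API.
if __name__ == '__main__':
	example_desc = DictObject(**{
		"data": {"appid": "my-app", "secret": "my-secret"},
		"get_token": {
			"path": "/oauth/token",
			"method": "GET",
			"headers": None,
			"params": [
				{"name": "appid", "value": "${appid}", "converter": None},
				{"name": "secret", "value": "${secret}", "converter": None}
			],
			"data": None,
			"resp": [
				{"name": "access_token", "resp_keys": "access_token", "converter": None}
			],
			"error_if": None
		}
	})
	async def example_call():
		client = OAuthClient(example_desc)
		return await client('https://example.com', 'get_token', {})
	# run with: asyncio.run(example_call())  -- this performs a real request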

View File

@ -0,0 +1,62 @@
from appPublic.Singleton import SingletonDecorator
@SingletonDecorator
class ObjectAction(object):
def __init__(self):
self.actionList = {}
def init(self,id,action):
idA = self.actionList.get(id,None)
if idA is None:
idA = self.actionList[id] = {}
self.actionList[id][action] = []
def add(self,id,action,func):
idA = self.actionList.get(id,None)
if idA is None:
idA = self.actionList[id] = {}
fL = idA.get(action,None)
if fL is None:
fL = self.actionList[id][action] = []
self.actionList[id][action].append(func)
def execute(self,id,action,data,callback=None):
if action in ['#','*']:
return data
idA = self.actionList.get(id,None)
if idA is None:
return data
fL = idA.get(action,[])
fL += idA.get('*',[])
for f in fL:
data = f(id,action,data)
if len(fL)==0:
for f in idA.get('#',[]):
data = f(id,action,data)
if callback is not None:
callback(data)
return data
if __name__ == '__main__':
def f(id,act,data):
return data
def f1(id,act,data):
return data
def f2(id,act,data):
return data
def add():
oa = ObjectAction()
oa.add('test','b',f)
#oa.add('test','*',f1)
oa.add('test','#',f2)
def exe():
oa = ObjectAction()
oa.execute('test','a','data1')
add()
exe()

View File

@ -0,0 +1,93 @@
import re
import time
import requests
class IpGetter:
def __init__(self, url, parser):
self.url = url
self.parser = parser
self.cnt = 0
self.total_time = 0
self.avg_time = 0
def get(self):
try:
tim1 = time.time()
r = requests.get(self.url)
txt = r.text
ip = self.parser(txt)
tim2 = time.time()
cost_tim = tim2 - tim1
self.cnt += 1
self.total_time += cost_tim
self.avg_time = self.total_time / self.cnt
ret = self.check_ip(ip)
if ret:
return ret
self.avg_time = 10000
print('Error, get=', ip)
return None
except Exception as e:
print(f'{self.url=}. {e=}')
self.avg_time = cost_tim = 10000
return None
def check_ip(self, ip):
		ret = re.compile(r'(\d+\.\d+\.\d+\.\d+)').search(ip)
if ret:
return ret.group(1)
print('ip format check failed', ip, self.url)
return None
def get_average_time(self):
return self.avg_time
def __str__(self):
return f'{self.url=},{self.avg_time=}'
class OutIP:
def __init__(self):
self.getters = []
self.set_known_getters()
def set_known_getters(self):
g = IpGetter('http://ipinfo.io/ip', lambda x: x)
self.add_getter(g)
g = IpGetter('https://api.ipify.org', lambda x: x)
self.add_getter(g)
g = IpGetter('https://ident.me', lambda x: x)
self.add_getter(g)
# g = IpGetter('https://ipapi.co/ip/', lambda x: x)
# self.add_getter(g)
g = IpGetter('http://myip.dnsomatic.com', lambda x: x)
self.add_getter(g)
g = IpGetter('https://checkip.amazonaws.com', lambda x: x.strip())
self.add_getter(g)
def f(t):
			return re.compile(r'Address: (\d+\.\d+\.\d+\.\d+)').search(t).group(1)
g = IpGetter('http://checkip.dyndns.com', f)
self.add_getter(g)
def add_getter(self, getter):
self.getters.append(getter)
def get(self):
gs = self.getters.copy()
gs.sort(key=lambda a: a.get_average_time())
for g in gs:
# print(*[str(g) for g in self.getters ])
ip = g.get()
if ip:
return ip
return None
if __name__ == '__main__':
oi = OutIP()
i = 0
while i < 100:
ip = oi.get()
print('ip = ', ip)
time.sleep(1)
i += 1

View File

@ -0,0 +1,16 @@
import pickle
def saveData(fn,*args):
f = open(fn,'wb')
a = [ pickle.dump(arg,f) for arg in args ]
f.close()
def loadData(fn,cnt):
a = [None] * cnt
try:
f = open(fn,'rb')
a = [ pickle.load(f) for i in range(cnt) ]
f.close()
return a
except:
return a
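# Usage sketch: persist a few objects and read them back in the same order;
# loadData returns a list of None values when the file is missing or holds
# fewer objects than requested.
if __name__ == '__main__':
	import tempfile, os
	fn = os.path.join(tempfile.gettempdir(), 'pickleUtils_demo.pkl')
	saveData(fn, {'a': 1}, [1, 2, 3])
	d, l = loadData(fn, 2)
	print(d, l)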

View File

@ -0,0 +1,162 @@
import sys
import select
import paramiko
import socket
from appPublic.background import Background
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
class ForwardServer(SocketServer.ThreadingTCPServer):
daemon_threads = True
allow_reuse_address = True
server_ready = False
ready_callback = None
def service_actions(self):
super().service_actions()
if not self.server_ready:
self.server_ready = True
if self.ready_callback:
self.ready_callback()
def shutdown(self):
self.server_ready = False
super().shutdown()
g_verbose = True
def verbose(s):
if g_verbose:
print(s)
class Handler(SocketServer.BaseRequestHandler):
def handle(self):
try:
chan = self.ssh_transport.open_channel(
"direct-tcpip",
(self.chain_host, self.chain_port),
self.request.getpeername(),
)
except Exception as e:
verbose(
"Incoming request to %s:%d failed: %s"
% (self.chain_host, self.chain_port, repr(e))
)
return
if chan is None:
verbose(
"Incoming request to %s:%d was rejected by the SSH server."
% (self.chain_host, self.chain_port)
)
return
verbose(
"Connected! Tunnel open %r -> %r -> %r"
% (
self.request.getpeername(),
chan.getpeername(),
(self.chain_host, self.chain_port),
)
)
while True:
r, w, x = select.select([self.request, chan], [], [])
if self.request in r:
data = self.request.recv(1024)
if len(data) == 0:
break
chan.send(data)
if chan in r:
data = chan.recv(1024)
if len(data) == 0:
break
self.request.send(data)
peername = self.request.getpeername()
chan.close()
self.request.close()
verbose("Tunnel closed from %r" % (peername,))
def connect_ssh_server(host, port, user, password):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host, port=port, username=user,
password=password)
return ssh
class SSHPortForward:
def __init__(self, local_port, remote_host, remote_port,
ssh_host, ssh_port, ssh_user, ssh_password):
self.local_port = int(local_port)
self.remote_host = remote_host
self.remote_port = int(remote_port)
self.ssh_host = ssh_host
self.ssh_port = int(ssh_port)
self.ssh_user = ssh_user
self.ssh_password = ssh_password
self.running = False
self._ready = False
def service_ready(self):
		print('service ready .....')
self._ready = True
def run(self):
if self.running:
return
self.running = True
b = Background(self._run)
b.start()
def _run(self):
self.ssh = connect_ssh_server(self.ssh_host,
self.ssh_port,
self.ssh_user,
self.ssh_password)
self.transport = self.ssh.get_transport()
class MyForwardServer(ForwardServer):
ready_callback = self.service_ready
class SubHandler(Handler):
chain_host = socket.gethostbyname(self.remote_host)
chain_port = self.remote_port
local_port = self.local_port
ssh_transport = self.transport
self.forward_server = MyForwardServer((socket.gethostbyname('localhost'), self.local_port), SubHandler)
self.forward_server.serve_forever()
print('forward ....')
def stop(self):
if not self.running:
return
self.running = False
self.forward_server.shutdown()
self.forward_server.server_close()
self.transport.close()
self.ssh.close()
if __name__ == '__main__':
	if len(sys.argv) < 8:
		print(f"""Usage:
{sys.argv[0]} local_port remote_host remote_port ssh_host ssh_port ssh_user ssh_password
""")
		sys.exit(1)
s = SSHPortForward(*sys.argv[1:])
while True:
print("""start) start server,
stop) stop server
quit) quit
""")
x = input()
if x == 'start':
s.run()
continue
if x == 'stop':
s.stop()
continue
if x == 'quit':
s.stop()
break
print('error input')

View File

@ -0,0 +1,36 @@
import time
from multiprocessing import Process
import threading
import random
from appPublic.background import Background
class ProcessWorkers:
def __init__(self, worker_cnt=10):
self.semaphore = threading.Semaphore(value=worker_cnt)
self.co_worker = 0
def _do(self, func, *args, **kwargs):
self.semaphore.acquire()
self.co_worker += 1
p = Process(target=func, args=args, kwargs=kwargs)
p.start()
p.join()
self.co_worker -= 1
self.semaphore.release()
def do(self, func, *args, **kwargs):
b = Background(self._do, func, *args, **kwargs)
b.start()
def get_workers(self):
return self.co_worker
if __name__ == '__main__':
def k(worker):
t = random.randint(1,4)
print('current workers=',worker.get_workers(), 'sleep=', t)
time.sleep(t)
w = ProcessWorkers()
for i in range(100):
w.do(k, w)

View File

@ -0,0 +1,12 @@
import socket
import socks
import requests
original_socket = socket.socket
def set_socks_proxy(host, port):
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, host, port)
socket.socket = socks.socksocket
def unset_proxy():
socket.socket = original_socket
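# Usage sketch: route every socket (requests included) through a SOCKS5
# proxy and restore the original socket class afterwards. 127.0.0.1:1080 is
# a placeholder for a proxy you actually run.
if __name__ == '__main__':
	set_socks_proxy('127.0.0.1', 1080)
	try:
		print(requests.get('https://api.ipify.org', timeout=5).text)
	except Exception as e:
		print('request through proxy failed:', e)
	finally:
		unset_proxy()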

215
build/lib/appPublic/rc4.py Normal file
View File

@ -0,0 +1,215 @@
# -*- coding: utf-8 -*-
import time
import datetime
import random, base64
from hashlib import sha1
class RC4:
def __init__(self,data_coding='utf8'):
self.bcoding = 'iso-8859-1'
self.dcoding = data_coding
self.salt = b'AFUqx9WZuI32lnHk'
def _crypt(self,data,key):
"""RC4 algorithm return bytes"""
x = 0
box = [i for i in range(256) ]
for i in range(256):
x = (x + box[i] + key[i % len(key)]) % 256
box[i], box[x] = box[x], box[i]
x = y = 0
out = []
for char in data:
x = (x + 1) % 256
y = (y + box[x]) % 256
box[x], box[y] = box[y], box[x]
out.append(chr(char ^ box[(box[x] + box[y]) % 256]))
return ''.join(out).encode(self.bcoding)
def encode_bytes(self, bdata, key):
a = sha1(key + self.salt)
k = a.digest()
data = self.salt + self._crypt(bdata, k)
return data
def encode(self,data, key,encode=base64.b64encode, salt_length=16):
"""RC4 encryption with random salt and final encoding"""
if type(data)==type(''):
data = data.encode(self.dcoding)
key = key.encode(self.bcoding)
code = self.encode_bytes(data, key)
if encode:
code = encode(code)
return code.decode(self.dcoding)
return code
def decode_bytes(self, data, key):
salt_length = 16
salt = data[:salt_length]
a = sha1(key + self.salt)
k = a.digest() #.decode('iso-8859-1')
r = self._crypt(data[salt_length:], k)
return r
def decode(self,data, key,decode=base64.b64decode, salt_length=16):
"""RC4 decryption of encoded data"""
if type(data)==type(''):
data = data.encode(self.dcoding)
key = key.encode(self.bcoding)
if decode:
data = decode(data)
r = self.decode_bytes(data, key)
return r.decode(self.dcoding)
class KeyChain(object):
def __init__(self, seed_str, crypter=None, keylen=23, period=600, threshold=60, time_delta=0):
self.seed_str = seed_str
if isinstance(self.seed_str, str):
self.seed_str = self.seed_str.encode('utf-8')
self.period = int(period)
self.threshold = int(threshold)
self.crypter = crypter
self.time_delta = time_delta
if crypter is None:
self.crypter = RC4()
self.keylen = keylen
self.keypool = {
}
delta = datetime.timedelta(0)
self.timezone = datetime.timezone(delta, name='gmt')
def get_timestamp(self):
ts = int(time.time()) - self.time_delta
return ts
def is_near_bottom(self, indicator=None):
ts = self.get_timestamp()
i = indicator
if i is None:
i = self.get_indicator(ts)
if i + self.threshold > ts:
return True
return False
def is_near_top(self, indicator=None):
ts = self.get_timestamp()
i = indicator
if i is None:
i = self.get_indicator(ts)
if i + self.period - self.threshold < ts:
return True
return False
def get_indicator(self, ts=None):
if ts is None:
ts = self.get_timestamp()
return int(ts / self.period) * self.period
def genKey(self, indicator):
vv = indicator
if self.keypool.get(vv):
return self.keypool[vv]
v = vv
k1 = 0
k = ''
m = len(self.seed_str)
while k1 < self.keylen:
j = v % self.keylen
v = v - (j + k1) * m + self.keylen
k = k + chr(self.seed_str[j])
k1 += self.threshold / 2
key = k.encode('utf-8')
self.keypool[vv] = key
dates = [ d for d in self.keypool.keys() ]
for d in dates:
if d < indicator - self.period:
del self.keypool[d]
return key
def encode(self, text):
bdata = text.encode('utf-8')
return self.encode_bytes(bdata)
def encode_bytes(self, bdata):
indicator = self.get_indicator()
key = self.genKey(indicator)
data = key + bdata
return self.crypter.encode_bytes(data, key)
def _decode(self, data, key):
d = self.crypter.decode_bytes(data, key)
if d[:len(key)] == key:
return d[len(key):]
return None
def decode_bytes(self, data):
indicator = self.get_indicator()
key = self.genKey(indicator)
d = self._decode(data, key)
if d is not None:
return d
if self.is_near_bottom(indicator):
indicator -= self.period
key = self.genKey(indicator)
return self._decode(data, key)
if self.is_near_top(indicator):
indicator += self.period
key = self.genKey(indicator)
return self._decode(data, key)
return None
def decode(self, data):
d = self.decode_bytes(data)
if d is None:
return None
return d.decode('utf-8')
pwdkey = 'ytguiojbhvhbnkl'
def password(pwdtxt, key=pwdkey):
rc = RC4()
code = rc.encode(pwdtxt, key)
t = rc.decode(code, key)
if (t == pwdtxt):
return code
else:
return None
def unpassword(code, key=pwdkey):
rc = RC4()
t = rc.decode(code, key)
return t
"""
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
print(password(sys.argv[1]))
sys.exit(0)
ps = [
'45dr6tcfyvguh',
'ft7gy8uh9ij0',
'opiluykhcgjfncm'
]
for p in ps:
print(password(p))
"""
if __name__=='__main__':
	# there is no length limit on the data to be encrypted
	# the key
data=b"231r3 feregrenerjk gkht324g8924gnfw k;ejkvwkjerv"
key = b'123456'
rc4 = RC4()
kc = KeyChain('in the heaven, we are equal', rc4)
print(data)
	# encode
encoded_data = kc.encode_bytes(data)
print(encoded_data,len(encoded_data) )
	# decode
decoded_data = kc.decode_bytes(encoded_data)
print(data, decoded_data, decoded_data==data)

View File

@ -0,0 +1,137 @@
import poplib,pdb,email,re,time
from email import header
import datetime
import os
POP_ADDR = r'pop.126.com'
USER = ''
PASS = ''
CONFIG = ''
def getYear(date):
rslt = re.search(r'\b2\d{3}\b', date)
return int(rslt.group())
def getMonth(date):
monthMap = {'Jan':1,'Feb':2,'Mar':3,'Apr':4,'May':5,'Jun':6,
'Jul':7,'Aug':8,'Sep':9,'Oct':10,'Nov':11,'Dec':12,}
rslt = re.findall(r'\b\w{3}\b', date)
for i in range(len(rslt)):
month = monthMap.get(rslt[i])
if None != month:
break
return month
def getDay(date):
rslt = re.search(r'\b\d{1,2}\b', date)
return int(rslt.group())
def getTime(date):
rslt = re.search(r'\b\d{2}:\d{2}:\d{2}\b', date)
timeList = rslt.group().split(':')
for i in range(len(timeList)):
timeList[i] = int(timeList[i])
return timeList
def transformDate(date):
rslt = getYear(date)
rslt = rslt * 100
rslt = rslt + getMonth(date)
rslt = rslt * 100
rslt = rslt + getDay(date)
timeList = getTime(date)
for i in range(len(timeList)):
rslt = rslt * 100
rslt = rslt + timeList[i]
return rslt
def getRecentReadMailTime():
fp = open(CONFIG, 'r')
rrTime = fp.read()
fp.close()
return rrTime
def setRecentReadMailTime():
fp = open(CONFIG, 'w')
fp.write(time.ctime())
fp.close()
return
def getTimeEarly(period):
	def years(n):
		# timedelta has no years argument; approximate a year as 365 days
		return datetime.timedelta(days=365*n)
	def months(n):
		# timedelta has no months argument; approximate a month as 30 days
		return datetime.timedelta(days=30*n)
def days(n):
return datetime.timedelta(days=n)
def hours(n):
return datetime.timedelta(hours=n)
def minutes(n):
return datetime.timedelta(minutes=n)
def seconds(n):
return datetime.timedelta(seconds=n)
funcs={
'y':years,
'm':months,
'd':days,
'H':hours,
'M':minutes,
'S':seconds,
}
	pattern=r'(\d*)([ymdHMS])'
r=re.compile(pattern)
s = r.findall(period)
t = datetime.datetime.now()
for v,ty in s:
td = funcs[ty](int(v))
t = t - td
return time.ctime(t.timestamp())
def parseMailContent(msg):
if msg.is_multipart():
for part in msg.get_payload():
parseMailContent(part)
else:
bMsgStr = msg.get_payload(decode=True)
charset = msg.get_param('charset')
msgStr = 'Decode Failed'
try:
if None == charset:
msgStr = bMsgStr.decode()
else:
msgStr = bMsgStr.decode(charset)
except:
pass
print(msgStr)
def recvEmail(POP_ADDR,USER,PASS,PERIOD,callback):
server = poplib.POP3(POP_ADDR)
server.user(USER)
server.pass_(PASS)
mailCount,size = server.stat()
mailNoList = list(range(mailCount))
mailNoList.reverse()
FROMTIME = getTimeEarly(PERIOD)
hisTime = transformDate(FROMTIME)
#pdb.set_trace()
for i in mailNoList:
message = server.retr(i+1)[1]
mail = email.message_from_bytes(b'\n'.join(message))
if transformDate(mail.get('Date')) > hisTime:
if not callback(mail):
break
#parseMailContent(mail)
else:
break
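# Usage sketch: fetch mails newer than one day and hand each one to a
# callback; returning False from the callback stops the scan. The server and
# credentials below are placeholders.
if __name__ == '__main__':
	def on_mail(mail):
		print(mail.get('Date'), mail.get('Subject'))
		return True
	recvEmail('pop.126.com', 'user@126.com', 'password', '1d', on_mail)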

View File

@ -0,0 +1,89 @@
import asyncio
from inspect import isfunction, iscoroutinefunction
from functools import partial
from appPublic.dictObject import DictObject
from appPublic.Singleton import SingletonDecorator
from appPublic.log import info, error
@SingletonDecorator
class RegisterFunction:
def __init__(self):
self.registKW = {}
def register(self,name,func):
if not isfunction(func) and not iscoroutinefunction(func):
error(f'RegisterFunction.register({name}, {func}): func is not a function or routine')
return
self.registKW[name] = func
def get(self,name):
return self.registKW.get(name,None)
async def exe(self, name, *args, **kw):
f = self.get(name)
if f is None:
error(f'{name=} function not registed')
return None
if iscoroutinefunction(f):
info(f'{name=} is coroutine function');
return await f(*args, **kw)
return f(*args, **kw)
@SingletonDecorator
class RegisterCoroutine:
def __init__(self):
self.kw = DictObject()
def register(self, name, func):
if not isfunction(func) and not iscoroutinefunction(func):
error(f'RegisterFunction.register({name}, {func}): func is not a function or routine')
return
if not self.kw.get(name):
self.kw[name] = [func]
else:
self.kw[name].append(func)
async def exe(self, name, *args, **kw):
fs = self.kw.get(name)
if fs is None:
return
fs = fs.copy()
fs.reverse()
if fs:
for f in fs:
if iscoroutinefunction(f):
await f(*args, **kw)
else:
f(*args, **kw)
return None
def getRegisterFunctionByName(name):
rf = RegisterFunction()
return rf.get(name)
def registerFunction(name, func):
rf = RegisterFunction()
rf.register(name, func)
async def main():
d = {}
rf = RegisterCoroutine()
rf.register('test', z)
rf.register('test', y)
rf.register('test', x)
nd = await rf.exe('test', d)
print(nd)
if __name__ == '__main__':
def x(dic):
dic['a'] = 'a'
return dic
async def y(dic):
dic['b'] = 'b'
return dic
def z(dic):
dic['c'] = 1
return dic
asyncio.get_event_loop().run_until_complete(main())

View File

@ -0,0 +1,34 @@
import appPublic.timeUtils as tu
import datetime as dt
class RestrictedEnv:
def __init__(self):
self.reg('today',self.today)
self.reg('date',self.date)
self.reg('datetime',self.datetime)
self.reg('now',dt.datetime.now)
def reg(self,k,v):
self.__dict__[k] = v
def run(self,dstr):
dstr = '__tempkey__ = %s' % dstr
exec(dstr,globals(),self.__dict__)
return self.__tempkey__
def today(self):
now = dt.datetime.now()
return tu.ymdDate(now.year,now.month,now.day)
def date(self,dstr):
return tu.str2Date(dstr)
def datetime(self,dstr):
return tu.str2Datetime(dstr)
if __name__ == '__main__':
ns = RestrictedEnv()
a = ns.run('today()')
b = ns.run("date('2011-10-31')")
c = ns.run('datetime("2012-03-12 10:22:22")')
d = ns.run('now()')

View File

@ -0,0 +1,128 @@
from appPublic.rsawrap import RSA
from appPublic.rc4 import RC4
try:
import ujson as json
except:
import json
import random
class DeliverPacket:
def __init__(self,sender,c,k,s):
self.sender = sender
self.c = c
self.k = k
self.s = s
def pack(self):
d = {
"sender":self.sender,
"c":self.c,
"k":self.k,
"s":self.s,
}
return json.dumps(d)
def unpack(self,body):
d = json.loads(body)
		self.sender = d['sender']
self.c = d['c']
self.k = d['k']
self.s = d['s']
class RSAPeer:
def __init__(self,myid,myPrikey,pearPubKey=None):
self.myid = myid
self.mypri = myPrikey
self.peerpub = pearPubKey
self.rsa = RSA()
def getPeerPublicKey(self,id):
pass
def _genSystematicKey(self):
t = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890~!@#$%^&*'
kl = random.randint(10,15)
ky = []
klen = len(t) - 1
for k in range(kl):
i = random.randint(0,klen)
# print(k,klen,i)
ky.append(t[i])
return ''.join(ky)
def encode(self,text):
"""
		returns a json text whose object has three attributes:
		k: encrypted rc4 key
		s: signature
		c: ciphertext encrypted by the key
"""
d = {"id":self.myid,"data":text}
text = json.dumps(d)
sk = self._genSystematicKey()
		rc4 = RC4()
		c = rc4.encode(text, sk)
s = self.rsa.sign(self.mypri,sk)
if self.peerpub is None:
return None
k = self.rsa.encode(self.peerpub,sk)
d = {
'c':c,
'k':k,
's':s
}
return json.dumps(d)
def decode(self,body):
"""
		decodes a json text whose object has three attributes:
		k: encrypted rc4 key
		s: signature
		c: ciphertext encrypted by the key
"""
d = json.loads(body)
signature = d['s']
sk = self.rsa.decode(self.mypri,d['k'])
# print('sk=',sk,'k=',d['k'],type(d['k']))
		rc4 = RC4()
		t = rc4.decode(d['c'], sk)
d = json.loads(t)
ret = d['data']
if self.peerpub is not None and not self.rsa.check_sign(self.peerpub,sk,signature):
return None
if self.peerpub is None:
peerpub = self.getPeerPublicKey(d['id'])
if peerpub is None:
return None
if not self.rsa.check_sign(peerpub,sk,signature):
return None
return ret
if __name__ == '__main__':
r = RSA()
mary_pri = r.create_privatekey()
mary_pub = r.create_publickey(mary_pri)
john_pri = r.create_privatekey()
john_pub = r.create_publickey(john_pri)
	john_rp = RSAPeer('john', john_pri, mary_pub)
	mary_rp = RSAPeer('mary', mary_pri, john_pub)
txt = '''hello python 爱的实打实大师大师大师的发送到发送到而非个人格个二哥而而二哥而个人各位,UDP是一种无连接对等通信协议没有服务器和客户端概念通信的任何一方均可通过通信原语直接和其他方通信
HOME FAQ DOCS DOWNLOAD
index
next |
previous |
Twisted 18.9.0 documentation » Twisted Names (DNS) » Developer Guides » '''
c = john_rp.encode(txt)
newtxt = mary_rp.decode(c)
print(txt)
print('<===>')
print(c)
print('<===>')
print(newtxt)

View File

@ -0,0 +1,108 @@
import rsa
class RSA:
def __init__(self, keylength=4096, coding='iso8859'):
self.coding = coding
self.keylength = keylength
def write_privatekey(self,private_key,fname,password=None):
bd = private_key.save_pkcs1()
with open(fname, 'wb') as f:
f.write(bd)
def publickeyText(self,public_key):
bd = public_key.save_pkcs1()
return bd.decode(self.coding)
def write_publickey(self,public_key,fname):
bd = public_key.save_pkcs1()
with open(fname, 'wb') as f:
f.write(bd)
def read_privatekey(self,fname,password=None):
with open(fname, 'rb') as pf:
kd = pf.read()
return rsa.PrivateKey.load_pkcs1(kd)
def publickeyFromText(self,text):
bd = text.encode(self.coding)
return rsa.PublicKey.load_pkcs1(bd)
def read_publickey(self,fname):
with open(fname, 'rb') as pf:
kd = pf.read()
return rsa.PublicKey.load_pkcs1(kd)
def create_privatekey(self, keylength=4096):
_, prik = rsa.newkeys(keylength)
return prik
def create_publickey(self,private_key):
return rsa.PublicKey(private_key.n, private_key.e)
def encode_bytes(self, public_key, bdata):
return rsa.encrypt(bdata, public_key)
def encode(self,public_key,text):
bdata = text.encode(self.coding)
bc = self.encode_bytes(public_key, bdata)
return bc.decode(self.coding)
def decode_bytes(self, private_key, bdata):
return rsa.decrypt(bdata, private_key)
def decode(self,private_key,cipher):
bc = cipher.encode(self.coding)
bd = self.decode_bytes(private_key, bc)
return bd.decode(self.coding)
def sign_bdata(self, private_key, data_to_sign):
return rsa.sign(data_to_sign, private_key, 'SHA-1')
def sign(self,private_key,message):
bd = message.encode(self.coding)
bs = self.sign_bdata(private_key, bd)
return bs.decode(self.coding)
def check_sign_bdata(self, public_key, bdata, sign):
try:
r = rsa.verify(bdata, sign, public_key)
if r == 'SHA-1':
return True
print(f'verify()={r}')
return False
except Exception as e:
print(f'check_sign_bdata() raise Exception{e}')
return False
def check_sign(self,public_key,plain_text,signature):
bd = plain_text.encode(self.coding)
bs = signature.encode(self.coding)
return self.check_sign_bdata(public_key, bd, bs)
if __name__ == '__main__':
import os
prikey1_file = os.path.join(os.path.dirname(__file__),'..','test', 'prikey1.rsa')
r = RSA()
mpri = r.create_privatekey(2048)
mpub = r.create_publickey(mpri)
zpri = r.create_privatekey(2048)
zpub = r.create_publickey(zpri)
l = 100
while True:
text = 'h' * l
cipher = r.encode(mpub,text)
ntext = r.decode(mpri,cipher)
print('textlen=', l, 'encode text=', text, \
'decode result=', ntext,
			'cipher size=', len(cipher),
'check if equal=', text==ntext)
signature = r.sign(zpri,text)
check = r.check_sign(zpub,text,signature)
print('sign and verify=',len(signature),check)
l += 1

View File

@ -0,0 +1,33 @@
# -*- coding=utf-8 -*-
"""
kivy color:
[ r, g, b, a]
Any color maps to a perceived gray level via the well known psychovisual formula:
gray = red*0.299 + green*0.587 + blue*0.114
When the gray level is greater than 0.5 a dark foreground is used, otherwise a bright one.
colors: an optional pair of candidate colors; when empty, the two colors built into the function are used.
"""
def color_gray_rate(color):
graylevel = 0.299 * color[0] + \
0.587 * color[1] + \
0.114 * color[2]
return graylevel
def get_fgcolor_from_bgcolor(bgcolor, colors=None):
dark_fgcolor=[0.11,0.11,0.11,1]
bright_fgcolor=[0.89,0.89,0.89,1]
graylevel = color_gray_rate(bgcolor)
if colors == None:
if graylevel > 0.5:
return dark_fgcolor
else:
return bright_fgcolor
r1 = color_gray_rate(colors[0])
r2 = color_gray_rate(colors[1])
if abs(graylevel - r1) > abs(graylevel - r2):
return colors[0]
return colors[1]
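# Usage sketch: pick a readable foreground for a given background color
# (kivy-style [r, g, b, a] values in the 0..1 range).
if __name__ == '__main__':
	bg = [0.95, 0.95, 0.90, 1]
	print(get_fgcolor_from_bgcolor(bg))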

View File

@ -0,0 +1,136 @@
import os
import time
import threading
import sys
import socket
def get_free_local_addr():
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
s.connect(("8.8.8.8", 80))
return s.getsockname()
# return (ip,port)
class background(threading.Thread) :
def __init__(self,func,kw) :
threading.Thread.__init__(self)
self.func = func
self.kw = kw
def run(self) :
if self.func!=None :
self.func(**self.kw)
return
def BackgroundCall(func,datas) :
b=background(func,datas)
b.start()
return
class SocketServerError(Exception) :
pass
class SocketClientError(Exception) :
pass
class SocketServer(threading.Thread) :
def __init__(self,host,port,max_connect=10,callee=None) :
threading.Thread.__init__(self, name = 'SocketServer')
self.setDaemon(False)
self.host = host
self.port = int(port)
self.max_c = max_connect
self.ready = False
self.keep_running = 0
self.callee = callee
self.setSocketServer()
def setSocketServer(self) :
try :
self.sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.sock.bind((self.host,self.port))
self.sock.listen(self.max_c)
self.ready = True
except Exception as e:
print('setSocketServer() Error:%s\nhost=%s,port=%d' % (e,self.host,self.port))
pass
def run(self) :
if not self.ready :
raise SocketServerError('not ready')
callee = self.callee
if self.callee!=None :
callee = self.callee
self.keep_running = 1
while self.keep_running :
conn,addr = self.sock.accept()
BackgroundCall(callee,{'conn':conn,'addr':addr})
# conn.close()
def stop(self) :
self.keep_running = 0
	def callee(self,conn,addr) :
		while 1 :
			d = conn.recv(1024)
			if not d :
				break
			conn.send(d)
		conn.close()
class SocketClient :
def __init__(self,host,port) :
self.host = host
self.port = port
self.ready = False
self.connect()
# if tim ==0 not blocking
def timeout(self,tim) :
if self.ready :
self.sock.setblocking(tim>0)
if tim>0 :
self.sock.settimeout(tim)
def connect(self) :
try :
self.sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.sock.connect((self.host,self.port))
self.ready = True
except Exception as e:
self.ready = False
print('Socket connect error,%s\nhost=%s,port=%s' % (e,self.host,self.port))
raise SocketClientError('connect error')
def read(self,size) :
try :
data = self.sock.recv(size)
return data
except Exception as e:
print('recv error,%s' % e)
raise SocketClientError('recv error')
def write(self,data) :
try :
self.sock.send(data)
except Exception as e:
print('recv error,%s' % e)
raise SocketClientError('send error')
def close(self) :
self.sock.close()
self.ready = False
if __name__ == '__main__' :
s = SocketServer('localhost',12232)
s.start()
time.sleep(5)
while 1 :
c = SocketClient('localhost',12232)
		msg = b'msg1'
print("send:",msg)
c.write(msg)
d = c.read(1024)
print("get:",d)
time.sleep(1)

335
build/lib/appPublic/sshx.py Normal file
View File

@ -0,0 +1,335 @@
import os
import sys
import time
import shlex
from functools import partial
from threading import Thread
from appPublic.myTE import tmpTml
import asyncio, asyncssh, sys
class SSHNode:
def __init__(self, host,
username='root',
port=22,
password=None,
jumpers=[]):
self.server2 = {
"host":host,
"username":username,
"password":password,
"port":port
}
print(self.server2)
self.jumpers = jumpers
self.conn = None
self.jumper_conns = []
self.batch_cmds = []
def info(self):
d = {
"jumpers":self.jumpers,
}
d.update(self.server2)
return d
def asjumper(self):
a = self.jumpers.copy()
a.append(self.server2)
return a
def set_jumpers(self, jumpers):
self.jumpers = jumpers
async def connect(self):
refconn = None
for j in self.jumpers:
host = j['host']
username = j.get('username', 'root')
port = j.get('port',22)
password= j.get('password', None)
if refconn:
refconn = await refconn.connect_ssh(host,
username=username,
known_hosts=None,
password=password,
port=port)
else:
refconn = await asyncssh.connect(host,
username=username,
known_hosts=None,
password=password,
port=port)
self.jumper_conns.append(refconn)
host = self.server2['host']
username = self.server2.get('username', 'root')
port = self.server2.get('port',22)
password = self.server2.get('password', None)
if refconn:
self.conn = await refconn.connect_ssh(host,
username=username,
port=port,
password=password,
known_hosts=None)
else:
self.conn = await asyncssh.connect(host,
username=username,
password=password,
port=port)
def close(self):
self.conn.close()
cnt = len(self.jumper_conns)
cnt -= 1
while cnt >= 0:
self.jumper_conns[cnt].close()
cnt -= 1
self.jumper_conns = []
self.conn = None
async def _l2r(self, lf, rf):
x = await asyncssh.scp(lf, (self.conn, rf),
preserve=True, recurse=True)
return x
async def _process(self, *args, **kw):
a = await self.conn.create_process(*args, **kw)
return a
async def _r2l(self, rf, lf):
x = await asyncssh.scp((self.conn, rf), lf,
preserve=True, recurse=True)
return x
async def _cmd(self, cmd, input=None, stdin=None, stdout=None):
return await self.conn.run(cmd, input=input, stdin=stdin, stdout=stdout)
async def _xcmd(self, cmd, xmsgs=[], ns={},
show_input=None,
show_stdout=None):
proc = await self._process(cmd, term_type='xterm',
term_size=(80,24),
encoding='utf-8'
)
keyin = False
def feed_data(xmsgs, debug_input):
if len(xmsgs) == 0:
print('#####++##### xmsgs has zero elements')
return
keyin = True
a = xmsgs.pop(0)
while True:
if a[1] is None:
proc.stdin.write_eof()
self.running = False
else:
s = a[1].format(**ns)
proc.stdin.write(s)
if len(xmsgs) == 0 or xmsgs[0][0]:
break
a = xmsgs.pop(0)
already_output = False
callee = None
loop = asyncio.get_event_loop()
self.running = True
while self.running:
if keyin:
keyin = False
await proc.stdin.drain()
if proc.stdout.at_eof():
break
tup = proc.collect_output()
x = tup[0]
if x!='' and show_stdout:
if x is None:
break
if callee:
callee.cancel()
callee = None
show_stdout(x)
else:
if callee is None:
if len(xmsgs) > 0:
f = partial(feed_data, xmsgs, show_input)
t = xmsgs[0][0] or 0
callee = loop.call_later(t, f)
await asyncio.sleep(0.05)
		print('########## finished ##########')
async def _run(self, cmd, input=None, stdin=None, stdout=None):
if cmd.startswith('l2r'):
args = shlex.split(cmd)
if len(args) == 3:
x = await self._l2r(args[1], args[2])
return x
if cmd.startswith('r2l'):
args = shlex.split(cmd)
if len(args) == 3:
x = await self._r2l(args[1], args[2])
return x
return await self._cmd(cmd, input=input, stdin=stdin, stdout=stdout)
def show_result(self, x):
if isinstance(x, Exception):
			print('Exception:', x)
else:
print('stdout:', x.stdout)
print('stderr:', x.stderr)
async def run(self, cmd, input=None, stdin=None, stdout=None):
await self.connect()
result = await self._run(cmd, input=input,
stdin=stdin, stdout=stdout)
self.close()
return result
class SSHNodes:
	def __init__(self, nodes, username='root', port=22, jumpers=[]):
		self.nodes = [ SSHNode(n, username=username, port=port, jumpers=jumpers) for n in nodes ]
self.batch_cmds = []
def append_cmd(self, cmd, stdin=None, stdout=None):
self.batch_cmds.append({
"cmd":cmd,
"stdin":stdin,
"stdout":stdout})
def show_result(self, result, i=0):
if isinstance(result, Exception):
print(f'Task {i} failed:{result}')
elif result.exit_status != 0:
print(f'Task {i} exit {result.exit_status}')
print(result.stderr, end='')
else:
print(f'Task {i} successed:')
print(result.stdout, end='')
async def run(self, cmd, stdin=None, stdout=None):
tasks = [ n.run(cmd, stdin=stdin, stdout=stdout) for n in self.nodes ]
results = await asyncio.gather(*tasks, return_exceptions=True)
return results
async def exe_batch(self):
tasks = [ n.exe_batch(self.batch_cmds) for n in self.nodes ]
		results = await asyncio.gather(*tasks, return_exceptions=True)
return results
for i, result in enumerate(results):
self.show_result(result,i)
async def main():
if len(sys.argv) < 3:
print(f'{sys.argv[0]} cmd host1 host2 ....')
sys.exit(1)
cmd = sys.argv[1]
jumpor = {
"host":"glib.cc",
"username":"ceni",
"port":10022
}
hosts = sys.argv[2:]
mn = SSHNodes(hosts, jumpers=[jumpor])
while True:
print('input command:')
cmd = input()
print('input stdin:')
stdin = input()
if stdin == '':
stdin = None
print('input stdout:(default is stdout)')
stdout = input()
if stdout == '':
stdout = None
x = await mn.run(cmd, stdin=stdin, stdout=stdout)
for r in x:
if isinstance(r, Exception):
print(r)
else:
print(r.stdout)
class SSHBash:
def __init__(self, node, loop=None):
if loop is None:
loop = asyncio.get_event_loop()
self.node = node
self.loop = loop
self.conn = None
self.stdin_need = False
self.subloop = asyncio.new_event_loop()
self.subthread = Thread(target=self.start_thread_loop)
self.subthread.setDaemon(True)
self.subthread.start()
def start_thread_loop(self):
asyncio.set_event_loop(self.subloop)
self.subloop.run_forever()
def exit(self):
if self.conn:
self.node.close(self.conn)
self.p_obj.close()
self.subloop.stop()
self.loop.stop()
async def feed_stdin(self, f):
self.stdin_need = False
x = await f(65535)
if x is None:
self.exit()
self.p_obj.stdin.write(x)
await self.p_obj.stdin.drain()
self.stdin_need = True
async def run(self, read_co, write_co):
await self.node.connect()
self.p_obj = await self.node._process('bash',
term_type='vt100',
term_size=(80,24),
encoding=None)
if isinstance(self.p_obj, Exception):
			print('Exception:', self.p_obj)
self.exit()
return
if self.p_obj is None:
print('self.p_obj is None')
self.exit()
return
# self.loop.add_reader(sys.stdin.fileno(), self.read_input)
self.stdin_need = True
while True:
if self.stdin_need:
asyncio.run_coroutine_threadsafe(self.feed_stdin(read_co), self.subloop)
if self.p_obj.stdout.at_eof():
self.exit()
break
x = await self.p_obj.stdout.read(1024)
await write_co(x)
if __name__ == '__main__':
async def sysstdin_read():
return os.read(sys.stdin.fileno(), 65535)
async def sysstdout_write(x):
sys.stdout.write(x.decode('utf-8'))
async def test_sshbash():
jp = {
"host":"glib.cc",
"username":"ceni",
"port":10022
}
jn = SSHNode('k3', jumpers=[jp])
bash = SSHBash(jn)
await bash.run(sysstdin_read, sysstdout_write)
loop = asyncio.get_event_loop()
loop.run_until_complete(test_sshbash())

View File

@ -0,0 +1,20 @@
# strUtils
def rtrim(ss):
s = ss
if s=='':
return s
while s[-1] == ' ':
s = s[:-1]
return s
def ltrim(ss):
s = ss
if s=='':
return s
while s[0] == ' ':
s = s[1:]
return s
def lrtrim(ss):
s = ltrim(ss)
s = rtrim(s)
return s

8
build/lib/appPublic/t.py Normal file
View File

@ -0,0 +1,8 @@
import json
from dataencoder import quotedstr
d = {
"gret":"HGREert",
"ynh":"RtghretbertBHER"
}
print(quotedstr(json.dumps(d)))

View File

@ -0,0 +1,6 @@
import ExecFile
c = ExecFile.DictConfig(path='./config.dict')
print(c.d.b[1].c,c.d.c.a,c.d.c.b,c.d.c.c[3].f)
print(c.d.c.c[1])
print(c.d.c.d)

View File

@ -0,0 +1,41 @@
import time
import threading
import random
from appPublic.background import Background
class ThreadWorkers:
def __init__(self, max_workers=10):
self.semaphore = threading.Semaphore(value=max_workers)
self.co_worker = 0
def _do(self, func, *args, **kwargs):
try:
self.semaphore.acquire()
self.co_worker += 1
func(*args, **kwargs)
finally:
self.co_worker -= 1
self.semaphore.release()
def do(self, func, *args, **kwargs):
b = Background(self._do, func, *args, **kwargs)
b.start()
def get_workers(self):
return self.co_worker
def until_done(self):
time.sleep(0.1)
while self.co_worker > 0:
time.sleep(0.01)
if __name__ == '__main__':
def k(worker):
t = random.randint(1,4)
print('current workers=',worker.get_workers(), 'sleep=', t)
time.sleep(t)
w = ThreadWorkers(max_workers=30)
for i in range(100000):
w.do(k, w)

View File

@ -0,0 +1,253 @@
import os,sys
import time
from datetime import date, timedelta, datetime
leapMonthDays = [0,31,29,31,30,31,30,31,31,30,31,30,31]
unleapMonthDays = [0,31,28,31,30,31,30,31,31,30,31,30,31]
def curDatetime():
return datetime.now()
def curDateString():
d = curDatetime()
return '%04d-%02d-%02d' %(d.year,d.month,d.day)
def curTimeString():
d = curDatetime()
return '%02d:%02d:%02d' %(d.hour,d.minute,d.second)
def timestampstr():
d = curDatetime()
return '%04d-%02d-%02d %02d:%02d:%02d.%03d' % (d.year,
d.month,
d.day,
d.hour,
d.minute,
d.second,
d.microsecond/1000)
def isMonthLastDay(d):
dd = timedelta(1)
d1 = d + dd
if d1.month != d.month:
return True
return False
def isLeapYear(year):
	# leap years are divisible by 4, except century years not divisible by 400
	return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def timestamp(dt):
return int(time.mktime((dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second,dt.microsecond,0,0)))
def timeStampSecond(dt):
return int(time.mktime((dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second,0,0,0)))
def addSeconds(dt,s):
ndt = dt + timedelta(0,s)
return ndt
def monthMaxDay(y,m):
if isLeapYear(y):
return leapMonthDays[m]
return unleapMonthDays[m]
def date2str(dt=None):
if dt is None:
dt = curDatetime()
return '%04d-%02d-%02d' % (dt.year,dt.month,dt.day)
def time2str(dt):
	return '%02d:%02d:%02d' % (dt.hour,dt.minute,dt.second)
def str2Date(dstr):
try:
haha = dstr.split(' ')
y,m,d = haha[0].split('-')
H = M = S = 0
if len(haha) > 1:
H,M,S = haha[1].split(':')
return ymdDate(int(y),int(m),int(d),int(H),int(M),int(S))
except Exception as e:
print(e)
return None
def ymdDate(y,m,d,H=0,M=0,S=0):
return datetime(y,m,d,H,M,S)
def str2Datetime(dstr):
x = dstr.split(' ')
d = x[0]
t = '00:00:00'
if len(x) > 1:
t = x[1]
y,m,d = d.split('-')
H,M,S = t.split(':')
return datetime(int(y),int(m),int(d),int(H),int(M),int(S))
def strdate_add(date_str, days=0, months=0, years=0):
dt = str2Datetime(date_str)
dt = dateAdd(dt, days=days, months=months, years=years)
ds = date2str(dt)
return ds
def addMonths(dt,months):
y = dt.year
m = dt.month + months
d = dt.day
mm = (m - 1) % 12 + 1
md = int((m - 1) / 12)
y += md
m = mm
maxd = monthMaxDay(y,m)
if d > maxd:
d = maxd
return ymdDate(y,m,d)
def addYears(dt,years):
y = dt.year + years
m = dt.month
d = dt.day
maxd = monthMaxDay(y,m)
if d > maxd:
d = maxd
return ymdDate(y,m,d)
def dateAdd(dt,days=0,months=0,years=0):
if days != 0:
dd = timedelta(days)
dt = dt + dd
if months != 0:
dt = addMonths(dt,months)
if years != 0:
dt = addYears(dt,years)
return dt
def firstSunday(dt):
	# weekday(): Monday=0 ... Sunday=6; return the first Sunday on or after dt
	f = dt.weekday()
	if f < 6:
		return dt + timedelta(6 - f)
	return dt
DTFORMAT = '%Y%m%d %H%M%S'
def getCurrentTimeStamp() :
t = time.localtime()
return TimeStamp(t)
def TimeStamp(t) :
return time.strftime(DTFORMAT,t)
def StepedTimestamp(baseTs,ts,step) :
if step<2 :
return ts
offs = int(timestampSub(ts,baseTs))
step = int(step)
r,m = divmod(offs,step)
if m < step/2 :
return timestampAdd(baseTs,step * r)
else :
return timestampAdd(baseTs,step * (r+1))
def timestampAdd(ts1,ts2) :
t1 = time.strptime(ts1,DTFORMAT)
tf = time.mktime(t1)
	if isinstance(ts2, str):
t2 = time.strptime(ts2,DTFORMAT)
ts2 = time.mktime(t2)
tf += ts2
t = time.localtime(tf)
return TimeStamp(t)
def timestampSub(ts1,ts2) :
t1 = time.strptime(ts1,DTFORMAT)
t2 = time.strptime(ts2,DTFORMAT)
ret = time.mktime(t1) - time.mktime(t2)
return int(ret)
def timestamp2dt(t):
return datetime.fromtimestamp(t)
def date_weekinyear(date_str):
w = datetime.strptime(date_str, '%Y-%m-%d').strftime('%W')
return date_str[:5] + w
def date_season(date_str):
m = date_str[5:7]
sl = {
'01':'1',
'02':'1',
'03':'1',
'04':'2',
'05':'2',
'06':'2',
'07':'3',
'08':'3',
'09':'3',
'10':'4',
'11':'4',
'12':'4',
}
s = sl.get(m)
return date_str[:5] + s
"""
Patterns =
'D'
'W[0-6]'
'M[00-31]'
'S[0-2]-[00-31]'
'Y[01-12]-[00-31]'
}
"""
def str2date(sd):
a = [ int(i) for i in sd.split('-') ]
return date(*a)
def is_monthend(dt):
if isinstance(dt, str):
dt = str2date(dt)
nxt_day = dt + timedelta(days=1)
if dt.month != nxt_day.month:
return True
return False
def is_match_pattern(pattern, strdate):
"""
R:代表实时
D代表日
W[0-6]代表周日到周六
M[00-31]:代表月末月到某一天
S[1-3]-[00-31]:代表季度第几个月的第几天
Y[1-12]-[00-31]:代表一年中的某个月的某一天
"""
if pattern == 'D':
return True
dt = str2date(strdate)
if pattern.startswith('W'):
w = (int(pattern[1]) + 6) % 7
if dt.weekday() == w:
return True
return False
if pattern.startswith('M'):
day = int(pattern[1:])
if day == 0 and is_monthend(dt):
return True
if day == dt.day:
return True
return False
if pattern.startswith('S'):
m,d = [ int(i) for i in pattern[1:].split('-') ]
		if (dt.month - 1) % 3 + 1 == m and d == dt.day:
return True
return False
if pattern.startswith('Y'):
m,d = [ int(i) for i in pattern[1:].split('-') ]
print(f'{m=}-{d=}, {dt.month=} {dt.day}')
if m == dt.month and d == dt.day:
return True
return False
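
A brief usage sketch for the date helpers above (not part of the diff; it assumes the module is importable as appPublic.timeUtils, per SOURCES.txt):

from appPublic.timeUtils import is_match_pattern, strdate_add

# 'D' matches every day; 'M00' matches only a month-end date
print(is_match_pattern('D', '2024-03-30'))      # True
print(is_match_pattern('M00', '2024-03-31'))    # True
print(is_match_pattern('M00', '2024-03-30'))    # False
# add one calendar month to a date string, clamping to the month end
print(strdate_add('2024-01-31', months=1))      # 2024-02-29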

View File

@ -0,0 +1,42 @@
import time
import datetime
from .Singleton import SingletonDecorator
timerecord = {}
class TimeCost:
def __init__(self,name):
self.name = name
def __enter__(self):
self.begin_time = time.time()
def __exit__(self,*args):
self.end_time = time.time()
d = timerecord.get(self.name,[])
d.append(self.end_time - self.begin_time)
timerecord[self.name] = d
	@classmethod
	def clear_all(cls):
		timerecord.clear()
	@classmethod
	def clear(cls, name):
		timerecord[name] = []
@classmethod
def show(self):
def getTimeCost(name):
x = timerecord.get(name,[])
if len(x) == 0:
return 0,0,0
return len(x), sum(x), sum(x)/len(x)
print('TimeCost ....')
for name in timerecord.keys():
print(name, * getTimeCost(name))
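
A minimal usage sketch (not part of the diff), assuming the class is importable as appPublic.timecost.TimeCost:

import time
from appPublic.timecost import TimeCost

for _ in range(3):
	with TimeCost('sleep-block'):
		time.sleep(0.01)

# prints call count, total time and average time for each named block
TimeCost.show()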

View File

@ -0,0 +1,62 @@
import sys
import time
import threading
from threading import Thread
from queue import Queue, Empty
class Worker(Thread):
def __init__(self, rqueue, timeout=1):
Thread.__init__(self)
self.timeout = timeout
		self.daemon = False
self.r_queue = rqueue
self.start()
def run(self):
emptyQueue = False
while True:
try:
callable,args,kw = self.r_queue.get(timeout=self.timeout)
if callable is None:
break
callable(*args,**kw)
except Empty:
time.sleep(1)
def resulthandler(self,rez):
pass
class ThreadWorkers:
def __init__(self,num_workers=20):
self.workQueue = Queue()
self.worker_cnt = num_workers
self.workers = []
self.__createThreadPool(num_workers)
def __createThreadPool(self,num):
for i in range(num):
thread = Worker(self.workQueue)
self.workers.append(thread)
def wait_for_complete(self):
for i in range(self.worker_cnt):
self.add_job(None,None,None)
while len(self.workers):
thread = self.workers.pop()
			if thread.is_alive():
thread.join()
def add_job(self,callable,args=[],kw={}):
self.workQueue.put([callable,args,kw])
if __name__ == '__main__':
import requests
def get(url):
x = requests.get(url)
print(x.status_code)
tw = ThreadWorkers()
for i in range(10000):
tw.add_job(get,['http://www.baidu.com'])
tw.wait_for_complete()
print('finished')

View File

@ -0,0 +1,103 @@
# -*- coding:UTF-8 -*-
import time
from traceback import print_exc
from socket import *
from select import select
import json
from appPublic.sockPackage import get_free_local_addr
from appPublic.background import Background
BUFSIZE = 1024 * 64
class UdpComm:
def __init__(self, port, callback, timeout=0):
self.buffer = []
self.callback = callback
self.timeout = timeout
self.host = get_free_local_addr()[0]
self.port = port
self.udpSerSock = socket(AF_INET, SOCK_DGRAM)
		# set blocking mode
		# self.udpSerSock.setblocking(1 if timeout > 0 else 0)
		# set a receive timeout in seconds
		# self.udpSerSock.settimeout(timeout)
self.udpSerSock.bind(('' ,port))
self.run_flg = True
self.thread = Background(self.run)
self.thread.start()
def run(self):
sock = self.udpSerSock
while self.run_flg:
outs = []
if len(self.buffer) > 0:
outs = [sock]
in_s, out_s, exc_s = select([sock], outs, [], 0.1)
if sock in in_s:
b, addr = sock.recvfrom(BUFSIZE)
				# first byte is a type tag: b'b' for raw bytes, b'j' for JSON
				t = b[:1]
				b = b[1:]
				if t == b'b':
self.callback(b, addr)
else:
try:
txt = b.decode('utf-8')
d = json.loads(txt)
self.callback(d, addr)
except Exception as e:
print('except:',e)
print_exc()
print(t, b)
break
if sock in out_s:
while len(self.buffer) > 0:
d,addr = self.buffer.pop(0)
sock.sendto(d, addr)
time.sleep(0.1)
self.run_flg = False
self.udpSerSock.close()
def stop(self):
self.run_flg = False
self.udpSerSock.close()
self.thread.join()
def broadcast(self, data):
broadcast_host = '.'.join(self.host.split('.')[:-1]) + '.255'
udpCliSock = socket(AF_INET, SOCK_DGRAM)
# udpCliSock.settimeout(1)
udpCliSock.bind(('', 0))
udpCliSock.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
		# tag the payload the same way send() does, so the receiver can decode it
		if isinstance(data, bytes):
			b = b'b' + data
		else:
			b = b'j' + json.dumps(data).encode('utf-8')
udpCliSock.sendto(b, (broadcast_host,self.port))
def send(self,data,addr):
b = data
if not isinstance(data, bytes):
b = b'j' + json.dumps(data).encode('utf-8')
else:
b = b'b' + data
if isinstance(addr,list):
addr = tuple(addr)
self.buffer.append((b, addr))
def sends(self,data, addrs):
for a in addrs:
self.send(data, a)
if __name__ == '__main__':
import sys
def msg_handle(data, addr):
print('addr:', addr, 'data=', data, len(data))
port = 50000
if len(sys.argv)>1:
port = int(sys.argv[1])
d = UdpComm(port, msg_handle)
x = input()
while x:
port, data = x.split(':')
d.send(data, ('', int(port)))
x = input()

View File

@ -0,0 +1,70 @@
import os
import time
from natpmp import NATPMP as pmp
import upnpclient
from appPublic.ipgetter import IPgetter
from multiprocessing import Process, Pipe
def pmp_get_external_ip():
try:
return pmp.get_public_address()
except:
return None
def upnp_get_external_ip():
try:
igd = upnpclient.discover()[0]
print(igd.service_map)
s_names = [ n for n in igd.service_map.keys() if 'WAN' in n and 'Conn' in n]
upnp = igd.service_map[s_names[0]]
x = upnp.GetExternalIPAddress()
return x.get('NewExternalIPAddress', None)
except Exception as e:
print(f'e={e}')
return None
def ipgetter_get_external_ip():
getter = IPgetter()
ip = None
while ip is None:
try:
ip = getter.get_external_ip()
except:
ip = None
if ip:
return ip
time.sleep(0.1)
def get_external_ip():
ip = pmp_get_external_ip()
if ip:
return ip
ip = upnp_get_external_ip()
if ip:
return ip
return ipgetter_get_external_ip()
def outip(w):
os.dup2(w.fileno(), 1)
ip = get_external_ip()
print(ip)
def get_ip():
r, w = Pipe()
reader = os.fdopen(r.fileno(), 'r')
p = Process(None, outip, 'TESTER', (w, ))
p.start()
ip = reader.readline()
p.join()
return ip.strip()
def run():
while True:
ip = get_ip()
if ip:
print(f'{ip=}')
time.sleep(10)
if __name__ == '__main__':
run()

View File

@ -0,0 +1,39 @@
# unicoding.py
import locale
def unicoding(d,coding='utf8'):
if type(d) == type(''):
return d
if type(d) == type(b''):
try:
			if coding is not None:
return d.decode(coding)
else:
return d.decode(locale.getdefaultlocale()[1])
except:
try:
return d.decode(locale.getdefaultlocale()[1])
except:
try:
return d.decode('utf8')
except:
return d
return d
def uObject(obj,coding='utf8'):
otype = type(obj)
if otype == type(u''):
return obj
if otype == type({}):
return uDict(obj,coding)
if otype == type([]):
return [uObject(i,coding) for i in obj ]
if hasattr(obj,'decode'):
return obj.decode(coding)
return obj
def uDict(dict,coding='utf8'):
d = {}
for k,v in dict.items():
		d[uObject(k,coding)] = uObject(v,coding)
return d

View File

@ -0,0 +1,36 @@
import uuid
from nanoid import generate
def setNode(n='ff001122334455'):
pass
def getID(size=21):
return generate(size=size)
def validate_code(id, cnt=6):
b = int(len(id) / cnt)
j = 0
code = []
v = 0
print(f'{b=}, {cnt=}')
for c in id:
if j >= b:
v = v % 10
code.append(str(v))
j = 0
v += ord(c)
j += 1
if len(code) >= cnt:
break
return ''.join(code)
def check_code(id, code):
c = validate_code(id)
return c==code
if __name__ == '__main__':
id = getID()
code = validate_code(id)
b = check_code(id, code)
print(id, code, b)

View File

@ -0,0 +1 @@
__version__ = '5.1.27'

View File

@ -0,0 +1,27 @@
def calculate_luminence(rgba):
	# simplified luminance from the RGB channels (alpha, if present, is ignored)
	return 0.2126 * rgba[0] + \
		0.7152 * rgba[1] + \
		0.0722 * rgba[2]
def get_contrast_ratio(lumA, lumB):
	lighter = max(lumA, lumB)
	darker = min(lumA, lumB)
	return (lighter + 0.05) / (darker + 0.05)
def get_color_contrast_ratio(color1, color2):
	lum1 = calculate_luminence(color1)
	lum2 = calculate_luminence(color2)
	return get_contrast_ratio(lum1, lum2)
def wcag_check(color1, color2, font_size=14):
	# WCAG thresholds: large text (>=18pt) needs 3.0 (AA) / 4.5 (AAA),
	# normal text needs 4.5 (AA) / 7.0 (AAA)
	aa = 3.0
	aaa = 4.5
	if font_size < 18:
		aa = 4.5
		aaa = 7.0
	ratio = get_color_contrast_ratio(color1, color2)
	return ratio >= aa, ratio >= aaa
if __name__ == '__main__':
pass
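
A short usage sketch (not part of the diff). It assumes colors are passed as RGB sequences with channel values in 0..1, so the 0.05 offsets in the contrast ratio behave like the WCAG formula; note the luminance above is a simplified, non-gamma-corrected version:

from appPublic.wcag_checker import wcag_check

black = (0.0, 0.0, 0.0)
white = (1.0, 1.0, 1.0)
gray  = (0.5, 0.5, 0.5)

# white on black: ratio 21:1, passes both AA (4.5) and AAA (7.0) at normal size
print(wcag_check(black, white))             # (True, True)
# gray on white: about 1.9:1 with this simplified luminance, fails both
print(wcag_check(gray, white))              # (False, False)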

View File

@ -0,0 +1,74 @@
import time
import random
import asyncio
import inspect
from functools import wraps, partial
def awaitify(sync_func):
"""Wrap a synchronous callable to allow ``await``'ing it"""
@wraps(sync_func)
async def async_func(*args, **kw):
loop = asyncio.get_event_loop()
		# run_in_executor() does not accept keyword arguments, so bind them with partial
		return await loop.run_in_executor(None, partial(sync_func, *args, **kw))
return async_func
def coroutinify(func):
@wraps(func)
async def async_func(*args):
loop = asyncio.get_event_loop()
return await loop.run_in_executor(None, func, *args)
return async_func
def to_func(func):
@wraps(func)
def wraped_func(*args,**kw):
if inspect.iscoroutinefunction(func):
task = asyncio.ensure_future(func(*args,**kw))
ret = asyncio.gather(task)
return ret
return func(*args, **kw)
return wraped_func
class AsyncWorker:
def __init__(self,maxtask=50):
self.semaphore = asyncio.Semaphore(maxtask)
async def __call__(self,callee,*args,**kw):
async with self.semaphore:
if inspect.iscoroutinefunction(callee):
return await callee(*args,**kw)
return callee(*args, **kw)
async def run(self,cmd):
async with self.semaphore:
proc = await asyncio.create_subprocess_shell(cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE)
			stdout, stderr = await proc.communicate()
return stdout, stderr
if __name__ == '__main__':
def hello(cnt,greeting):
t = random.randint(1,10)
print(cnt,'will sleep ',t,'seconds')
time.sleep(t)
print(cnt,'cost ',t,'seconds to',greeting)
async def ahello(cnt,greeting):
t = random.randint(1,10)
print(cnt,'will sleep ',t,'seconds')
await asyncio.sleep(t)
print(cnt,'cost ',t,'seconds to',greeting)
async def run():
w = AsyncWorker()
f = awaitify(hello)
g = [ asyncio.create_task(w(f,i,'hello world')) for i in range(100) ]
await asyncio.wait(g)
print('aaaaaaaaaaaaaaaaaaa')
loop = asyncio.get_event_loop()
loop.run_until_complete(run())

View File

@ -0,0 +1,135 @@
# zmq_reqresp.py
import asyncio
import zmq
import zmq.asyncio
from .background import Background
from inspect import iscoroutinefunction
class ZmqRequester(object):
def __init__(self, url, async_mode=False, timeout=0):
super().__init__()
self.async_mode = async_mode
self.url = url
self.timeout = timeout
self._connect()
def __del__(self):
self._close()
def _connect(self):
if self.async_mode:
self.ctx = zmq.asyncio.Context()
else:
self.ctx = zmq.Context()
# Socket to talk to server
self.sock = self.ctx.socket(zmq.REQ)
self.sock.connect(self.url)
if self.timeout > 0:
self.sock.setsockopt(zmq.LINGER, 0)
self.poller = zmq.Poller()
self.poller.register(self.sock, zmq.POLLIN)
def _close(self):
self.sock.close()
self.ctx.term()
def send(self, msg):
"""
		send a string to the replier and return the reply as a string
"""
if self.async_mode:
raise Exception('ZMQ_Requester: in async mode, use asend instead')
b = msg.encode('utf-8')
r = self.send_b(b)
if r is not None:
return r.decode('utf-8')
return None
def send_b(self, b):
"""
		send bytes and return the reply as bytes
"""
if self.async_mode:
raise Exception('ZMQ_Requester: in async mode, use asend_b instead')
self.sock.send(b)
if self.timeout > 0:
if self.poller.poll(self.timeout * 1000):
return self.sock.recv()
else:
self._close()
self._connect()
return None
else:
return self.sock.recv()
async def asend_b(self, b):
if not self.async_mode:
raise Exception('ZMQ_Requester: not in async mode, use send_b instead')
await self.sock.send_multipart([b])
if self.timeout > 0:
if self.poller.poll(self.timeout * 1000):
r = await self.sock.recv_multipart()
				return r[0]
else:
self._close()
self._connect()
return None
r = await self.sock.recv_multipart()
return r[0]
async def asend(self, msg):
if not self.async_mode:
raise Exception('ZMQ_Requester: not in async mode, use send instead')
b = msg.encode('utf-8')
r = await self.asend_b(b)
if r is None:
return None
return r.decode('utf-8')
class ZmqReplier(object):
def __init__(self, url, handler, async_mode=False):
self.async_mode = async_mode
self.url = url
if not self.async_mode and iscoroutinefunction(handler):
			raise Exception('not in async mode, handler can not be a coroutine')
self.handler = handler
if self.async_mode:
self.ctx = zmq.asyncio.Context()
else:
self.ctx = zmq.Context()
self.sock = self.ctx.socket(zmq.REP)
self.sock.bind(self.url)
self.keep_running = True
async def async_run(self):
while self.keep_running:
bs = await self.sock.recv_multipart()
			b = bs[0]
if iscoroutinefunction(self.handler):
rb = await self.handler(b)
else:
				rb = self.handler(b)
if isinstance(rb, str):
rb = rb.encode('utf-8')
await self.sock.send_multipart([rb])
def run(self):
self.background = Background(self._run)
self.background.daemon = True
self.background.start()
def _run(self):
while self.keep_running:
b = self.sock.recv()
rb = self.handler(b)
if isinstance(rb, str):
rb = rb.encode('utf-8')
self.sock.send(rb)
def stop(self):
self.keep_running = False
		if hasattr(self, 'background'):
			self.background.join()
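
A minimal request/reply sketch wiring the two classes above (not part of the diff; the endpoint and handler are illustrative only):

from appPublic.zmq_reqrep import ZmqRequester, ZmqReplier

def echo_handler(b):
	# the handler receives the raw request bytes and returns str or bytes
	return b'echo: ' + b

replier = ZmqReplier('tcp://127.0.0.1:9901', echo_handler)
replier.run()                          # serves on a background daemon thread

requester = ZmqRequester('tcp://127.0.0.1:9901')
print(requester.send('hello'))         # echo: hello
# the reply loop runs on a daemon thread, so the process can exit without stop()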

View File

@ -0,0 +1,123 @@
import sys
import zmq
import time
from zmq import Context
from appPublic.jsonConfig import getConfig
class MissTopicServerConfig(Exception):
	pass
class TopicServer:
def __init__(self, address='127.0.0.1', pub_port='5566', sub_port='5567'):
# get ZeroMQ version
print("Current libzmq version is %s" % zmq.zmq_version())
print("Current pyzmq version is %s" % zmq.pyzmq_version())
self.context = Context.instance()
# 2 sockets, because we can only bind once to a socket (as opposed to connect)
self.pub_port = "tcp://{}:{}".format(address, pub_port)
self.sub_port = "tcp://{}:{}".format(address, sub_port)
self.xpub_xsub_proxy()
# N publishers to 1 sub; proxy 1 sub to 1 pub; publish to M subscribers
def xpub_xsub_proxy(self):
print("Init proxy")
# Socket subscribing to publishers
frontend_pubs = self.context.socket(zmq.XSUB)
frontend_pubs.bind(self.pub_port)
# Socket publishing to subscribers
backend_subs = self.context.socket(zmq.XPUB)
backend_subs.bind(self.sub_port)
print("Try: Proxy... CONNECT!")
zmq.proxy(frontend_pubs, backend_subs)
print("CONNECT successful!")
"""
while True:
time.sleep(1)
"""
class ConfiguredTopicServer(TopicServer):
"""
in config file has a topicserver key
{
"topicserver":{
"address":"11.11.1.11",
"pub_port":1234,
"sub_server":1235
}
}
"""
def __init__(self):
config = getConfig()
params = config.topicserver
if not params:
raise MissTopicServerConfig
super(ConfiguredTopicServer, self).__init__(**params)
class TopicPublisher:
def __init__(self, topic='en', address='127.0.0.1', port='5566'):
# get ZeroMQ version
print("Current libzmq version is %s" % zmq.zmq_version())
print("Current pyzmq version is %s" % zmq.pyzmq_version())
self.topic = topic
self._topic = topic.encode('utf-8')
self.context = Context.instance()
self.url = "tcp://{}:{}".format(address, port)
self.pub = self.context.socket(zmq.PUB)
self.pub.connect(self.url)
time.sleep(0.5)
def send(self, message):
self.pub.send_multipart([self._topic, message.encode('utf-8')])
class ConfiguredTopicPublisher(TopicPublisher):
def __init__(self, topic=''):
config = getConfig()
params = config.topicserver
if not params:
raise MissTopicServerConfig
super(ConfiguredTopicPublisher, self).__init__(topic=topic,
address = params.address,
port=params.pub_port)
class TopicSubscriber:
def __init__(self, topic='', address='127.0.0.1', port='5567', callback=None):
# get ZeroMQ version
print("Current libzmq version is %s" % zmq.zmq_version())
print("Current pyzmq version is %s" % zmq.pyzmq_version())
self.callback = callback
self.topic = topic
self.context = Context.instance()
self.url = "tcp://{}:{}".format(address, port)
self.sub = self.context.socket(zmq.SUB)
self.sub.connect(self.url)
# subscribe to topic 'en' or 'jp'
if isinstance(self.topic, list):
for t in self.topic:
self.sub.setsockopt(zmq.SUBSCRIBE, t.encode('utf-8'))
else:
self.sub.setsockopt(zmq.SUBSCRIBE, self.topic.encode('utf-8'))
def run(self):
# keep listening to all published message, filtered on topic
print("Sub {}: Going to wait for messages!".format(self.topic))
while True:
msg_received = self.sub.recv_multipart()
print("sub {}: {}".format(self.topic, msg_received))
if self.callback:
self.callback(msg_received)
class ConfiguredTopicSubscriber(TopicSubscriber):
def __init__(self, topic=''):
config = getConfig()
params = config.topicserver
if not params:
raise MissTopicServerConfig
super(ConfiguredTopicSubscriber, self).__init__(topic=topic,
address=params.address,
port=params.sub_port)

View File

@ -0,0 +1,176 @@
import asyncio
from collections.abc import Coroutine
# from asyncio.coroutines import iscoroutine
import zmq
import zmq.asyncio
import json
class Publisher:
def __init__(self,port,coding='utf-8',msgid=1000):
self.port = port
self.socket = None
self.coding = coding
self.msgid = msgid
context = zmq.asyncio.Context()
self.socket = context.socket(zmq.PUB)
self.socket.bind('tcp://*:%d' % self.port)
async def publish(self,msg,msgtype='text',msgid=-1):
print(msg,msgtype)
if msgid == -1:
msgid = self.msgid
if msgtype != 'text':
msg = json.dumps(msg)
msgtype = 'json'
s = '%d %s %s' % (msgid,msgtype,msg)
print(s,msgtype,msgid)
b = s.encode(self.coding)
await self.socket.send(b)
def __del__(self):
self.socket.close()
class Subscriber:
def __init__(self,host,ports,msgid,coding='utf-8'):
self.host = host
self.ports = ports
self.msgid = msgid
self.coding = coding
context = zmq.asyncio.Context()
self.socket = context.socket(zmq.SUB)
f = b'%d' % self.msgid
self.socket.setsockopt(zmq.SUBSCRIBE, f)
for p in self.ports:
self.socket.connect("tcp://%s:%d" % (self.host,p))
def addPort(self,port):
self.socket.connect("tcp://%s:%d" % (self.host,port))
#f = b'%d' % self.msgid
#self.socket.setsockopt(zmq.SUBSCRIBE, f)
async def subscribe(self):
ret = await self.socket.recv()
ret = ret.decode(self.coding)
msgid, msgtype, body = ret.split(' ',2)
print('msgid=',msgid,'msgtype=',msgtype,'body=',body)
if msgtype == 'json':
return json.loads(body)
return body
def __del__(self):
self.socket.close()
class RRServer:
"""
a request / response mode server
"""
def __init__(self,port,handler=None):
self.port = port
self.handler = handler
print(type(self.handler))
async def run(self):
running = True
context = zmq.asyncio.Context()
socket = context.socket(zmq.REP)
socket.bind('tcp://*:%s' % self.port)
while running:
rmsg = await socket.recv()
wmsg = rmsg
if self.handler is not None:
wmsg = self.handler(rmsg)
if isinstance(wmsg,Coroutine):
wmsg = await wmsg
await socket.send(wmsg)
socket.close()
class RRClient:
"""
a request / response mode client
"""
def __init__(self,host,port):
self.host = host
self.port = port
context = zmq.asyncio.Context()
self.socket = context.socket(zmq.REQ)
self.socket.connect('tcp://%s:%d' % (self.host,self.port))
async def request(self,msg):
await self.socket.send(msg)
return await self.socket.recv()
class PPPusher:
"""
pusher of Push / Pull mode
"""
def __init__(self,host,port):
self.host = host
self.port = port
context = zmq.asyncio.Context()
self.socket = context.socket(zmq.PUSH)
self.socket.bind('tcp://%s:%d' % (self.host,self.port))
async def push(self,msg):
await self.socket.send(msg)
class PPPuller:
"""
puller of Push / Pull mode
"""
def __init__(self,host,port,handler=None):
self.host = host
self.port = port
self.handler = handler
async def run(self):
self.running = True
context = zmq.asyncio.Context()
socket = context.socket(zmq.PULL)
socket.bind('tcp://%s:%d' % (self.host,self.port))
while self.running:
			msg = await socket.recv()
if self.handler is not None:
x = self.handler(msg)
if isinstance(x,Coroutine):
await x
class PairClient:
"""
client of Pair mode
"""
def __init__(self,host,port):
self.host = host
self.port = port
context = zmq.asyncio.Context()
self.socket = context.socket(zmq.PAIR)
		self.socket.connect('tcp://%s:%d' % (self.host,self.port))
async def request(self,msg):
await self.socket.send(msg)
return await self.socket.recv()
class PairServer:
"""
server of Pair mode
"""
def __init__(self,port,handler=None):
self.port = port
self.handler = handler
self.running = True
async def run(self):
self.running = True
context = zmq.asyncio.Context()
socket = context.socket(zmq.PAIR)
socket.bind('tcp://*:%d' % self.port)
while self.running:
msg = await socket.recv()
ret = msg
if self.handler is not None:
				ret = self.handler(msg)
if isinstance(ret,Coroutine):
ret = await ret
await socket.send(ret)
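
A small asyncio request/reply sketch for RRServer / RRClient above (not part of the diff; the port is arbitrary and the server loop here never terminates on its own):

import asyncio
from appPublic.zmqapi import RRServer, RRClient

def upper(msg):
	# RRServer hands the received bytes to the handler; bytes (or an awaitable) comes back
	return msg.upper()

async def main():
	server = RRServer(9902, handler=upper)
	asyncio.create_task(server.run())
	await asyncio.sleep(0.5)           # give the server a moment to bind
	client = RRClient('127.0.0.1', 9902)
	reply = await client.request(b'hello')
	print(reply)                       # b'HELLO'

asyncio.run(main())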

BIN
dist/appPublic-5.1.26-py3-none-any.whl vendored Executable file

Binary file not shown.

Some files were not shown because too many files have changed in this diff.