master
yumoqing 2024-06-14 17:03:09 +08:00
parent 335653a23b
commit 2bdd7150f3
3 changed files with 155 additions and 4 deletions

View File

@@ -2,7 +2,7 @@ import aiohttp
from aiohttp import web, BasicAuth
from aiohttp import client
from appPublic.dictObject import DictObject
from .llm_client import LlmProxy
from .llm_client import StreamLlmProxy, AsyncLlmProxy, SyncLlmProxy
from .baseProcessor import *
class LlmProcessor(BaseProcessor):
@@ -24,9 +24,58 @@ class LlmProcessor(BaseProcessor):
async def datahandle(self,request):
chunk_size = 40960
d = await self.path_call(request)
llm = LlmProxy(self, d)
llm = StreamLlmProxy(self, d)
self.retResponse = await llm(request, self.run_ns.params_kw)
def setheaders(self):
pass
class LlmSProcessor(BaseProcessor):
    """Processor for synchronous (single request/response) LLM calls.

    Claims resources registered under the name 'llms'.  The resource
    template is rendered into a JSON API description, and SyncLlmProxy
    performs the actual upstream HTTP call; the converted response is
    stored as the processor's content.
    """

    @classmethod
    def isMe(self, name):
        # Dispatch hook used by the processor registry.
        return name == 'llms'

    async def path_call(self, request, params=None):
        """Render this resource's template and parse it as an API description.

        params, when given, is merged into the run namespace before
        rendering.  A None sentinel replaces the original mutable {}
        default argument.

        Returns a DictObject built from the rendered JSON.
        """
        if params is None:
            params = {}
        await self.set_run_env(request)
        url = self.resource.entireUrl(request, self.path)
        ns = self.run_ns
        ns.update(params)
        te = self.run_ns['tmpl_engine']
        txt = await te.render(url, **ns)
        data = json.loads(txt)
        return DictObject(**data)

    async def datahandle(self, request):
        # Build the API description, then delegate the call to the
        # synchronous proxy.  (The original declared an unused
        # chunk_size local here; it has been removed.)
        d = await self.path_call(request)
        llm = SyncLlmProxy(self, d)
        self.content = await llm(request, self.run_ns.params_kw)

    def setheaders(self):
        # Headers are left to the framework defaults.
        pass
class LlmAProcessor(BaseProcessor):
    """Processor for asynchronous (streamed response) LLM calls.

    Claims resources registered under the name 'llma'.  The resource
    template is rendered into a JSON API description, and AsyncLlmProxy
    performs the upstream call, returning an aiohttp response object
    that is stored as retResponse.
    """

    @classmethod
    def isMe(self, name):
        # Dispatch hook used by the processor registry.
        return name == 'llma'

    async def path_call(self, request, params=None):
        """Render this resource's template and parse it as an API description.

        params, when given, is merged into the run namespace before
        rendering.  A None sentinel replaces the original mutable {}
        default argument.

        Returns a DictObject built from the rendered JSON.
        """
        if params is None:
            params = {}
        await self.set_run_env(request)
        url = self.resource.entireUrl(request, self.path)
        ns = self.run_ns
        ns.update(params)
        te = self.run_ns['tmpl_engine']
        txt = await te.render(url, **ns)
        data = json.loads(txt)
        return DictObject(**data)

    async def datahandle(self, request):
        # Build the API description, then delegate the call to the
        # async proxy.  (The original declared an unused chunk_size
        # local here; it has been removed.)
        d = await self.path_call(request)
        llm = AsyncLlmProxy(self, d)
        self.retResponse = await llm(request, self.run_ns.params_kw)

    def setheaders(self):
        # Headers are left to the framework defaults.
        pass

View File

@@ -11,7 +11,7 @@ def encode_imagefile(fn):
with open(fn, 'rb') as f:
return base64.b64encode(f.read()).decode('utf-8')
class LlmProxy:
class StreamLlmProxy:
def __init__(self, processor, desc):
assert desc.name
self.name = desc.name
@@ -38,6 +38,8 @@ class LlmProxy:
'==':eq,
'!=':ne
}
if '[DONE]' in ll:
return
try:
print('write_chunk(),l=', ll)
l = self.line_chunk_match(ll)
@@ -182,3 +184,103 @@ class LlmProxy:
s1 = self.ac.convert(dic, mydata)
return s1
class SyncLlmProxy(StreamLlmProxy):
    """Non-streaming LLM proxy: one HTTP request, one converted JSON reply."""

    async def __call__(self, request, params):
        """Invoke the API named by params.mapi and return the converted response.

        Returns an error dict ({"status": "Error", "message": ...})
        rather than raising when the API is undefined or the upstream
        call yields no data.
        """
        self.user = await self.processor.run_ns.get_user()
        mapi = params.mapi
        if not self.desc[mapi]:
            return {
                "status": "Error",
                "message": f'{mapi} not defined'
            }
        d = self.desc[mapi]
        self.api = d
        # Either run the auth sub-api or fall back to a stored API key.
        if self.api.need_auth and self.auth_api:
            await self.do_auth(request)
        else:
            self.data = await self.get_apikey(self.name)
        assert d.get('url')
        method = d.get('method', 'POST')
        headers = {}
        for h in d.get('headers', {}):
            headers[h.get('name')] = h.get('value')
        mydata = {}
        for p in d.get('data', {}):
            mydata[p.get('name')] = p.get('value')
        myparams = {}
        for p in d.get('params', {}):
            myparams[p.get('name')] = p.get('value')
        url = d.get('url')
        # Substitute runtime values into the templated params/headers/body.
        _params = self.datalize(myparams, params)
        _headers = self.datalize(headers, params)
        _data = self.datalize(mydata, params)
        response_type = RESPONSE_JSON
        hc = HttpClient()
        print(f'{url=},{method=},{_params=},{_data=},{_headers=}')
        resp_data = await hc.request(url, method, response_type=response_type,
                                     params=_params,
                                     data=None if _data == {} else json.dumps(_data),
                                     headers=_headers)
        # BUG FIX: the original wrapped resp_data in DictObject() BEFORE
        # testing it for None, so the None check was dead code, and the
        # error branch reused the wrong message ('{mapi} not defined').
        if resp_data is None:
            return {
                "status": "Error",
                "message": f'no response from {mapi}'
            }
        return self.convert_resp(DictObject(resp_data))

    def convert_resp(self, resp):
        """Map the raw response onto the names declared in api.resp."""
        j = {}
        for r in self.api.resp or []:
            j[r.name] = resp.get_data_by_keys(r.value)
        return j
class AsyncLlmProxy(StreamLlmProxy):
    """LLM proxy that answers the client over an aiohttp StreamResponse.

    BUG FIX: the original file defined AsyncLlmProxy twice -- first as an
    empty StreamLlmProxy subclass, then as a bare class that shadowed it.
    The shadowing definition lost the inherited helpers (do_auth,
    get_apikey, datalize, write_chunk) that this __call__ relies on.
    The two definitions are merged into this single subclass.
    """

    async def __call__(self, request, params):
        """Invoke the API named by params.mapi, replying via a StreamResponse.

        Raises Exception when params.mapi is not defined in the API
        description.  Returns the prepared StreamResponse.
        """
        self.user = await self.processor.run_ns.get_user()
        mapi = params.mapi
        stream = params.stream
        self.resp = web.StreamResponse()
        await self.resp.prepare(request)
        if stream is None:
            # NOTE(review): 'stream' is never read after this point --
            # confirm whether it was meant to control the request mode.
            stream = True
        self.remain_str = ''
        if not self.desc[mapi]:
            raise Exception(f'{mapi} not defined')
        d = self.desc[mapi]
        self.api = d
        self.chunk_match = d.chunk_match
        # Either run the auth sub-api or fall back to a stored API key.
        if self.api.need_auth and self.auth_api:
            await self.do_auth(request)
        else:
            self.data = await self.get_apikey(self.name)
        assert d.get('url')
        method = d.get('method', 'POST')
        headers = {}
        for h in d.get('headers', {}):
            headers[h.get('name')] = h.get('value')
        mydata = {}
        for p in d.get('data', {}):
            mydata[p.get('name')] = p.get('value')
        myparams = {}
        for p in d.get('params', {}):
            myparams[p.get('name')] = p.get('value')
        url = d.get('url')
        # Substitute runtime values into the templated params/headers/body.
        _params = self.datalize(myparams, params)
        _headers = self.datalize(headers, params)
        _data = self.datalize(mydata, params)
        response_type = RESPONSE_JSON
        hc = HttpClient()
        print(f'{url=},{method=},{_params=},{_data=},{_headers=}')
        # NOTE(review): the request's return value is discarded here --
        # presumably the chunks are forwarded to the client as they
        # arrive (via write_chunk); confirm against HttpClient.request.
        resp_data = await hc.request(url, method, response_type=response_type,
                                     params=_params,
                                     data=None if _data == {} else json.dumps(_data),
                                     headers=_headers)
        # Flush any partial line left over from chunk reassembly.
        if self.remain_str != '':
            await self.write_chunk(self.remain_str)
        return self.resp

View File

@@ -44,7 +44,7 @@ from .baseProcessor import getProcessor, BricksUIProcessor, TemplateProcessor
from .baseProcessor import PythonScriptProcessor, MarkdownProcessor
from .xlsxdsProcessor import XLSXDataSourceProcessor
from .llmProcessor import LlmProcessor
from .llmProcessor import LlmProcessor, LlmSProcessor, LlmAProcessor
from .websocketProcessor import WebsocketProcessor, XtermProcessor
from .sqldsProcessor import SQLDataSourceProcessor
from .functionProcessor import FunctionProcessor