Merge branch 'main' of git.kaiyuancloud.cn:yumoqing/llmengine

This commit is contained in:
ymq1 2025-06-26 11:05:17 +08:00
commit 68c35a7c10
14 changed files with 708 additions and 67 deletions

View File

@@ -0,0 +1,23 @@
from abc import ABC, abstractmethod
from typing import Dict
# Global registry mapping model-key substrings to collection classes.
model_pathMap = {}

def collection_register(model_key, Klass):
    """Register a model class for a given model key."""
    # Plain mutation of the module-level dict; no `global` needed since
    # the name is never rebound.
    model_pathMap[model_key] = Klass

def get_collection_class(model_path):
    """Return the registered class whose key occurs in *model_path*.

    Falls back to None (dumping the registry to stdout for debugging)
    when no registered key is a substring of the path.
    """
    for k, klass in model_pathMap.items():
        # Idiomatic substring test replaces the original
        # `len(model_path.split(k)) > 1` round-trip.
        if k in model_path:
            return klass
    print(f'{model_pathMap=}')
    return None
class BaseCollection(ABC):
    # Abstract interface for collection-creation backends; concrete
    # implementations (e.g. a Milvus backend) are looked up through the
    # module-level registry populated by collection_register().
    @abstractmethod
    def create_collection(self, db_type: str) -> Dict:
        """Create a Milvus collection for the given db_type."""
        pass

View File

@@ -0,0 +1,23 @@
from abc import ABC, abstractmethod
from typing import Dict
# Global registry mapping model-key substrings to deletion classes.
model_pathMap = {}

def collection_delete_register(model_key, Klass):
    """Register a model class for a given model key."""
    # Plain mutation of the module-level dict; no `global` needed since
    # the name is never rebound.
    model_pathMap[model_key] = Klass

def get_collection_delete_class(model_path):
    """Return the registered class whose key occurs in *model_path*.

    Falls back to None (dumping the registry to stdout for debugging)
    when no registered key is a substring of the path.
    """
    for k, klass in model_pathMap.items():
        # Idiomatic substring test replaces the original
        # `len(model_path.split(k)) > 1` round-trip.
        if k in model_path:
            return klass
    print(f'{model_pathMap=}')
    return None
class BaseCollectionDelete(ABC):
    # Abstract interface for collection-deletion backends; concrete
    # implementations are looked up through the module-level registry
    # populated by collection_delete_register().
    @abstractmethod
    def delete_collection(self, db_type: str) -> Dict:
        """Delete a Milvus collection for the given db_type."""
        pass

151
llmengine/client/chat Executable file
View File

@@ -0,0 +1,151 @@
#!/d/ymq/py3/bin/python
from traceback import format_exc
import asyncio
import codecs
import json
import base64
import argparse
from appPublic.streamhttpclient import liner, StreamHttpClient
from appPublic.log import MyLogger
# Maps file extensions to (content-part type, data-URL MIME prefix).
filetypes = {
    'png': ['image_url', 'data:image/png'],
    'jpg': ['image_url', 'data:image/jpeg'],
    'jpeg': ['image_url', 'data:image/jpeg'],
    'wav': ['audio_url', 'data:audio/wav'],
    'mp3': ['audio_url', 'data:audio/mp3'],
    'mp4': ['video_url', 'data:video/mp4'],
    'avi': ['video_url', 'data:video/avi']
}

def file2base64_content(f):
    """Return an OpenAI-style multimodal content part embedding file *f*
    as a base64 data URL.

    Raises ValueError for an unsupported file extension.
    """
    ft = f.split('.')[-1].lower()
    entry = filetypes.get(ft)
    if entry is None:
        # Original unpacked the '' default into two names and crashed;
        # fail with an explicit error instead.
        raise ValueError(f'unsupported file type: {ft}')
    typ, mime = entry
    with open(f, 'rb') as fp:  # renamed so the filename argument isn't shadowed
        b64 = base64.b64encode(fp.read()).decode('ascii')
    return {
        'type': typ,
        typ: {
            # Original dropped the encoded payload entirely; embed it as a
            # proper data URL.
            'url': f'{mime};base64,{b64}'
        }
    }

# Backward-compatible alias for the original (typo'd) definition name.
file2base64i_content = file2base64_content
class M2T:
    """Builds OpenAI-style multimodal (text/image/audio/video) chat messages."""

    def system_message(self, prompt):
        """Return a system-role message carrying *prompt* as a text part."""
        return {
            'role': 'system',
            'content': [{
                'type': 'text',
                # Fixed syntax error: original read `'text'" prompt`.
                'text': prompt
            }]
        }

    def assistant_message(self, prompt):
        """Return an assistant-role message carrying *prompt* as a text part."""
        return {
            'role': 'assistant',
            'content': [{
                'type': 'text',
                'text': prompt  # fixed typo: was `promot`
            }]
        }

    def user_message(self, prompt, textfile=None,
            audiofile=None,
            videofile=None,
            imagefile=None):
        """Return a user-role message.

        *textfile* content (if any) is appended to the prompt; each media
        argument may be a single path or a list of paths, embedded as
        base64 content parts.
        """
        txt = prompt
        if textfile:  # fixed: original tested the undefined name `textpath`
            txt = f'{prompt}: {self.user_file(textfile)}'
        content = [
            {
                'type': 'text',
                'text': txt
            }
        ]
        for f in [audiofile, videofile, imagefile]:
            if isinstance(f, list):  # fixed: isinstance(f, []) raises TypeError
                for f1 in f:
                    content.append(file2base64_content(f1))
            elif isinstance(f, str):
                content.append(file2base64_content(f))  # fixed missing ')'
        return {
            'role': 'user',
            'content': content
        }

    def user_file(self, fn):
        """Return the UTF-8 text content of file *fn*."""
        with codecs.open(fn, 'r', 'utf-8') as f:
            return f.read()
class T2T:
    """Builds plain text-only chat messages (string content, no parts)."""

    def system_message(self, prompt):
        """Return a system-role message with *prompt* as its content."""
        return {
            'role': 'system',
            'content': prompt
        }

    def assistant_message(self, prompt):
        """Return an assistant-role message with *prompt* as its content."""
        return {
            'role': 'assistant',
            'content': prompt  # fixed typo: was `promot`
        }

    def user_message(self, prompt, filepath=None):
        """Return a user-role message, appending *filepath* text when given."""
        if filepath:
            # Fixed: `user_file` is a method here, the bare call raised NameError.
            prompt += f':{self.user_file(filepath)}'
        return {
            'role': 'user',
            'content': prompt
        }

    def user_file(self, fn):
        """Return the UTF-8 text content of file *fn*."""
        with codecs.open(fn, 'r', 'utf-8') as f:
            return f.read()
async def main():
    """CLI entry: build chat messages from the arguments and stream the
    LLM completion text to stdout."""
    parser = argparse.ArgumentParser(prog='llmclient')
    parser.add_argument('-f', '--textfile')
    parser.add_argument('-i', '--imagefile')
    parser.add_argument('-v', '--videofile')
    parser.add_argument('-a', '--audiofile')
    parser.add_argument('-s', '--sys_prompt')
    parser.add_argument('-S', '--sessionfile')
    parser.add_argument('-m', '--model')
    parser.add_argument('url')
    parser.add_argument('prompt')
    args = parser.parse_args()
    # Fixed: original called bare system_message()/user_message() (undefined
    # at module level) and read the nonexistent attribute args.file.
    # Use the multimodal builder when any media file is supplied.
    if args.imagefile or args.videofile or args.audiofile:
        builder = M2T()
        umsg = builder.user_message(args.prompt,
                textfile=args.textfile,
                audiofile=args.audiofile,
                videofile=args.videofile,
                imagefile=args.imagefile)
    else:
        builder = T2T()
        umsg = builder.user_message(args.prompt, filepath=args.textfile)
    messages = [builder.system_message(args.sys_prompt)] if args.sys_prompt else []
    messages.append(umsg)
    d = {
        'model': args.model,
        'stream': True,
        'messages': messages
    }
    hc = StreamHttpClient()
    headers = {
        'Content-Type': 'application/json'
    }
    reco = hc('POST', args.url, headers=headers, data=json.dumps(d))
    async for chunk in liner(reco):
        chunk = chunk[6:]  # strip the SSE 'data: ' prefix
        if chunk != '[DONE]':
            try:
                f = json.loads(chunk)
            except Exception as e:
                print(f'****{chunk=} error {e} {format_exc()}')
                continue
            # Print incremental delta text until finish_reason is set.
            if not f['choices'][0]['finish_reason']:
                print(f['choices'][0]['delta']['content'], end='', flush=True)
    print('\n\n')

if __name__ == '__main__':
    MyLogger('null', levelname='error', logfile='/dev/null')
    asyncio.new_event_loop().run_until_complete(main())

View File

@@ -1,67 +0,0 @@
#!/d/ymq/py3/bin/python
from traceback import format_exc
import asyncio
import codecs
import json
import argparse
from appPublic.streamhttpclient import liner, StreamHttpClient
from appPublic.log import MyLogger
def system_message(prompt):
    """Wrap *prompt* as a system-role chat message."""
    return dict(role='system', content=prompt)

def user_message(prompt, filepath=None):
    """Wrap *prompt* as a user-role message, appending the contents of
    *filepath* (prefixed with ':') when one is given."""
    text = prompt if not filepath else f'{prompt}:{user_file(filepath)}'
    return dict(role='user', content=text)

def user_file(fn):
    """Return the UTF-8 text content of file *fn*."""
    with codecs.open(fn, 'r', 'utf-8') as fp:
        return fp.read()
async def main():
    """CLI entry point: POST a streaming chat request to *url* and print
    the completion text to stdout as it arrives."""
    parser = argparse.ArgumentParser(prog='devops')
    parser.add_argument('-f', '--file')
    parser.add_argument('-p', '--prompt')
    parser.add_argument('-s', '--sys_prompt')
    parser.add_argument('-m', '--model')
    parser.add_argument('url')
    args = parser.parse_args()
    # Optional system prompt first, then the user prompt (with optional file text).
    messages = [ system_message(args.sys_prompt) ] if args.sys_prompt else []
    messages.append(user_message(args.prompt, filepath=args.file))
    d = {
        'model': args.model,
        'stream': True,
        'messages': messages
    }
    hc = StreamHttpClient()
    headers = {
        'Content-Type': 'application/json'
    }
    # NOTE(review): i and buffer are never used below — dead locals.
    i = 0
    buffer = ''
    reco = hc('POST', args.url, headers=headers, data=json.dumps(d))
    async for chunk in liner(reco):
        # Each SSE line starts with 'data: '; strip the 6-character prefix.
        chunk = chunk[6:]
        if chunk != '[DONE]':
            try:
                f = json.loads(chunk)
            except Exception as e:
                print(f'****{chunk=} error {e} {format_exc()}')
                continue
            # Print incremental delta text until finish_reason is set.
            if not f['choices'][0]['finish_reason']:
                print(f['choices'][0]['delta']['content'], end='', flush=True)
            else:
                pass
    print('\n\n')

if __name__ == '__main__':
    # Suppress logging: only errors, routed to /dev/null.
    MyLogger('null', levelname='error', logfile='/dev/null')
    asyncio.new_event_loop().run_until_complete(main())

77
llmengine/collection.py Normal file
View File

@@ -0,0 +1,77 @@
from traceback import format_exc
import os
import sys
import argparse
from llmengine.milvus_collection import *
from llmengine.base_collection import get_collection_class
from typing import Dict
from appPublic.registerfunction import RegisterFunction
from appPublic.worker import awaitify
from appPublic.log import debug, exception
from ahserver.serverenv import ServerEnv
from ahserver.globalEnv import stream_response
from ahserver.webapp import webserver
# Help text served by the /docs endpoint.
helptext = """Milvus Collection Creation API:
1. Create Collection Endpoint:
path: /v1/collections
headers: {
"Content-Type": "application/json"
}
data: {
"db_type": "textdb"
}
response: {
"status": "success",
"collection_name": "ragdb_textdb",
"message": "集合 ragdb_textdb 创建成功"
}
2. Docs Endpoint:
path: /v1/docs
response: This help text
"""

def init():
    """Register URL route names with their handler coroutines."""
    routes = RegisterFunction()
    for name, handler in (('collections', create_collection), ('docs', docs)):
        routes.register(name, handler)

async def docs(request, params_kw, *params, **kw):
    """Serve the module help text."""
    return helptext
async def create_collection(request, params_kw, *params, **kw):
    """HTTP handler: create a Milvus collection for params_kw['db_type'].

    Delegates to the engine configured on ServerEnv, wrapped with
    awaitify so the blocking call runs off the event loop.
    Raises ValueError when db_type is missing.
    """
    debug(f'{params_kw=}')
    se = ServerEnv()
    engine = se.engine
    f = awaitify(engine.create_collection)
    db_type = params_kw.get('db_type')
    if db_type is None:
        # Fixed: original did `raise exception(...)`, but exception() is a
        # logging helper — its return value is presumably not an Exception,
        # so the raise itself would fail. Log, then raise a real error.
        exception('db_type is None')
        raise ValueError('db_type is None')
    result = await f(db_type)
    debug(f'{result=}')
    return result
def main():
    """Service entry: map model_path to a registered collection class and
    start the web server."""
    parser = argparse.ArgumentParser(prog="Milvus Collection Service")
    parser.add_argument('-w', '--workdir')
    parser.add_argument('-p', '--port')
    parser.add_argument('model_path')
    args = parser.parse_args()
    # Resolve the backend implementation from the registry by path substring.
    Klass = get_collection_class(args.model_path)
    if Klass is None:
        e = Exception(f'{args.model_path} has not mapping to a model class')
        exception(f'{e}, {format_exc()}')
        raise e
    se = ServerEnv()
    # Shared engine instance consumed by the request handlers.
    se.engine = Klass(args.model_path)
    workdir = args.workdir or os.getcwd()
    port = args.port
    debug(f'{args=}')
    webserver(init, workdir, port)

if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,77 @@
from traceback import format_exc
import os
import sys
import argparse
from llmengine.milvus_collection_delete import *
from llmengine.base_collection_delete import get_collection_delete_class
from typing import Dict
from appPublic.registerfunction import RegisterFunction
from appPublic.worker import awaitify
from appPublic.log import debug, exception
from ahserver.serverenv import ServerEnv
from ahserver.globalEnv import stream_response
from ahserver.webapp import webserver
# Help text served by the /docs endpoint.
helptext = """Milvus Collection Deletion API:
1. Delete Collection Endpoint:
path: /v1/collections/delete
headers: {
"Content-Type": "application/json"
}
data: {
"db_type": "textdb"
}
response: {
"status": "success",
"collection_name": "ragdb_textdb",
"message": "集合 ragdb_textdb 删除成功"
}
2. Docs Endpoint:
path: /v1/docs
response: This help text
"""

def init():
    """Register URL route names with their handler coroutines."""
    routes = RegisterFunction()
    for name, handler in (('collections/delete', delete_collection), ('docs', docs)):
        routes.register(name, handler)

async def docs(request, params_kw, *params, **kw):
    """Serve the module help text."""
    return helptext
async def delete_collection(request, params_kw, *params, **kw):
    """HTTP handler: delete the Milvus collection for params_kw['db_type'].

    Delegates to the engine configured on ServerEnv, wrapped with
    awaitify so the blocking call runs off the event loop.
    Raises ValueError when db_type is missing.
    """
    debug(f'{params_kw=}')
    se = ServerEnv()
    engine = se.engine
    f = awaitify(engine.delete_collection)
    db_type = params_kw.get('db_type')
    if db_type is None:
        # Fixed: original did `raise exception(...)`, but exception() is a
        # logging helper — its return value is presumably not an Exception,
        # so the raise itself would fail. Log, then raise a real error.
        exception('db_type is None')
        raise ValueError('db_type is None')
    result = await f(db_type)
    debug(f'{result=}')
    return result
def main():
    """Service entry: map model_path to a registered deletion class and
    start the web server."""
    parser = argparse.ArgumentParser(prog="Milvus Collection Delete Service")
    parser.add_argument('-w', '--workdir')
    parser.add_argument('-p', '--port')
    parser.add_argument('model_path')
    args = parser.parse_args()
    # Resolve the backend implementation from the registry by path substring.
    Klass = get_collection_delete_class(args.model_path)
    if Klass is None:
        e = Exception(f'{args.model_path} has not mapping to a model class')
        exception(f'{e}, {format_exc()}')
        raise e
    se = ServerEnv()
    # Shared engine instance consumed by the request handlers.
    se.engine = Klass(args.model_path)
    workdir = args.workdir or os.getcwd()
    port = args.port
    debug(f'{args=}')
    webserver(init, workdir, port)

if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,144 @@
import os
import logging
from typing import Dict
from pymilvus import connections, utility, Collection, CollectionSchema, FieldSchema, DataType
from llmengine.base_collection import BaseCollection, collection_register
logger = logging.getLogger(__name__)
# YAML config holding the Milvus Lite DB path; overridable via env var.
CONFIG_PATH = os.getenv('CONFIG_PATH', '/share/wangmeihua/rag/conf/milvusconfig.yaml')
try:
    with open(CONFIG_PATH, 'r', encoding='utf-8') as f:
        import yaml
        config = yaml.safe_load(f)
    # Fail fast at import time: the service cannot run without a DB path.
    MILVUS_DB_PATH = config['database']['milvus_db_path']
except Exception as e:
    logger.error(f"加载配置文件 {CONFIG_PATH} 失败: {str(e)}")
    raise RuntimeError(f"无法加载配置文件: {str(e)}")
class MilvusCollection(BaseCollection):
    """Milvus Lite backend that creates the per-db_type RAG collections."""

    def __init__(self, model_id):
        self.model_id = model_id
        self.db_path = MILVUS_DB_PATH

    def ensure_milvus_directory(self) -> None:
        """Ensure the Milvus database directory exists and is writable."""
        db_dir = os.path.dirname(self.db_path)
        if not os.path.exists(db_dir):
            os.makedirs(db_dir, exist_ok=True)
            logger.debug(f"创建 Milvus 目录: {db_dir}")
        if not os.access(db_dir, os.W_OK):
            raise RuntimeError(f"Milvus 目录 {db_dir} 不可写")

    def initialize_milvus_connection(self) -> None:
        """Open the default Milvus connection if one is not already open."""
        try:
            if not connections.has_connection("default"):
                connections.connect("default", uri=self.db_path)
                logger.debug(f"已连接到 Milvus Lite路径: {self.db_path}")
            else:
                logger.debug("已存在 Milvus 连接,跳过重复连接")
        except Exception as e:
            logger.error(f"连接 Milvus 失败: {str(e)}")
            raise RuntimeError(f"连接 Milvus 失败: {str(e)}")

    def cleanup_milvus_connection(self) -> None:
        """Best-effort disconnect of the default Milvus connection."""
        try:
            if connections.has_connection("default"):
                connections.disconnect("default")
                logger.debug("已断开 Milvus 连接")
        except Exception as e:
            logger.warning(f"断开 Milvus 连接失败: {str(e)}")

    def create_collection(self, db_type: str) -> Dict:
        """Create (or load, if already present) the 'ragdb_<db_type>' collection.

        Returns a status dict with status / collection_name / message;
        failures are reported in the returned dict rather than raised.
        """
        # Computed before the try so the except branch can always reference
        # it — originally a validation failure raised before collection_name
        # was assigned, turning the error report into a NameError.
        collection_name = f"ragdb_{db_type}" if db_type else ""
        try:
            # Parameter validation.
            if not db_type:
                raise ValueError("db_type 不能为空")
            if "_" in db_type:
                raise ValueError("db_type 不能包含下划线")
            if len(db_type) > 100:
                raise ValueError("db_type 的长度应小于 100")
            self.ensure_milvus_directory()
            self.initialize_milvus_connection()
            if len(collection_name) > 255:
                raise ValueError(f"集合名称 {collection_name} 超过 255 个字符")
            logger.debug(f"集合名称: {collection_name}")
            # If the collection already exists, load it and report success.
            if utility.has_collection(collection_name):
                try:
                    collection = Collection(collection_name)
                    collection.load()
                    logger.debug(f"集合 {collection_name} 已存在并加载成功")
                    return {
                        "status": "success",
                        "collection_name": collection_name,
                        "message": f"集合 {collection_name} 已存在"
                    }
                except Exception as e:
                    logger.error(f"加载集合 {collection_name} 失败: {str(e)}")
                    raise RuntimeError(f"加载集合失败: {str(e)}")
            # Schema definition; vector dim 1024 must match the embedder output.
            fields = [
                FieldSchema(name="pk", dtype=DataType.VARCHAR, is_primary=True, max_length=36, auto_id=True),
                FieldSchema(name="userid", dtype=DataType.VARCHAR, max_length=100),
                FieldSchema(name="document_id", dtype=DataType.VARCHAR, max_length=36),
                FieldSchema(name="text", dtype=DataType.VARCHAR, max_length=65535),
                FieldSchema(name="vector", dtype=DataType.FLOAT_VECTOR, dim=1024),
                FieldSchema(name="filename", dtype=DataType.VARCHAR, max_length=255),
                FieldSchema(name="file_path", dtype=DataType.VARCHAR, max_length=1024),
                FieldSchema(name="upload_time", dtype=DataType.VARCHAR, max_length=64),
                FieldSchema(name="file_type", dtype=DataType.VARCHAR, max_length=64),
            ]
            schema = CollectionSchema(
                fields=fields,
                description=f"{db_type} 数据集合,跨用户使用,包含 document_id 和元数据字段",
                auto_id=True,
                primary_field="pk",
            )
            # Create the collection, its vector index, and scalar indexes.
            try:
                collection = Collection(collection_name, schema)
                collection.create_index(
                    field_name="vector",
                    index_params={"index_type": "AUTOINDEX", "metric_type": "COSINE"}
                )
                # Inverted indexes enable metadata filtering on scalar fields.
                for field in ["userid", "document_id", "filename", "file_path", "upload_time", "file_type"]:
                    collection.create_index(
                        field_name=field,
                        index_params={"index_type": "INVERTED"}
                    )
                collection.load()
                logger.debug(f"成功创建并加载集合: {collection_name}")
                return {
                    "status": "success",
                    "collection_name": collection_name,
                    "message": f"集合 {collection_name} 创建成功"
                }
            except Exception as e:
                logger.error(f"创建集合 {collection_name} 失败: {str(e)}")
                raise RuntimeError(f"创建集合失败: {str(e)}")
        except Exception as e:
            logger.error(f"创建集合失败: {str(e)}")
            return {
                "status": "error",
                "collection_name": collection_name,
                "message": str(e)
            }
        finally:
            self.cleanup_milvus_connection()

collection_register('Milvus', MilvusCollection)

View File

@@ -0,0 +1,107 @@
import os
import logging
from typing import Dict
from pymilvus import connections, utility
from llmengine.base_collection_delete import BaseCollectionDelete, collection_delete_register
logger = logging.getLogger(__name__)
# YAML config holding the Milvus Lite DB path; overridable via env var.
CONFIG_PATH = os.getenv('CONFIG_PATH', '/share/wangmeihua/rag/conf/milvusconfig.yaml')
try:
    with open(CONFIG_PATH, 'r', encoding='utf-8') as f:
        import yaml
        config = yaml.safe_load(f)
    # Fail fast at import time: the service cannot run without a DB path.
    MILVUS_DB_PATH = config['database']['milvus_db_path']
except Exception as e:
    logger.error(f"加载配置文件 {CONFIG_PATH} 失败: {str(e)}")
    raise RuntimeError(f"无法加载配置文件: {str(e)}")
class MilvusCollectionDelete(BaseCollectionDelete):
    """Milvus Lite backend that deletes the per-db_type RAG collections."""

    def __init__(self, model_id):
        self.model_id = model_id
        self.db_path = MILVUS_DB_PATH

    def ensure_milvus_directory(self) -> None:
        """Ensure the Milvus database directory exists and is writable."""
        db_dir = os.path.dirname(self.db_path)
        if not os.path.exists(db_dir):
            os.makedirs(db_dir, exist_ok=True)
            logger.debug(f"创建 Milvus 目录: {db_dir}")
        if not os.access(db_dir, os.W_OK):
            raise RuntimeError(f"Milvus 目录 {db_dir} 不可写")

    def initialize_milvus_connection(self) -> None:
        """Open the default Milvus connection if one is not already open."""
        try:
            if not connections.has_connection("default"):
                connections.connect("default", uri=self.db_path)
                logger.debug(f"已连接到 Milvus Lite路径: {self.db_path}")
            else:
                logger.debug("已存在 Milvus 连接,跳过重复连接")
        except Exception as e:
            logger.error(f"连接 Milvus 失败: {str(e)}")
            raise RuntimeError(f"连接 Milvus 失败: {str(e)}")

    def cleanup_milvus_connection(self) -> None:
        """Best-effort disconnect of the default Milvus connection."""
        try:
            if connections.has_connection("default"):
                connections.disconnect("default")
                logger.debug("已断开 Milvus 连接")
        except Exception as e:
            logger.warning(f"断开 Milvus 连接失败: {str(e)}")

    def delete_collection(self, db_type: str) -> Dict:
        """Drop the 'ragdb_<db_type>' collection if it exists.

        Returns a status dict with status / collection_name / message;
        failures are reported in the returned dict rather than raised.
        """
        # Computed before the try so the except branch can always reference
        # it — originally a validation failure raised before collection_name
        # was assigned, turning the error report into a NameError.
        collection_name = f"ragdb_{db_type}" if db_type else ""
        try:
            # Parameter validation.
            if not db_type:
                raise ValueError("db_type 不能为空")
            if "_" in db_type:
                raise ValueError("db_type 不能包含下划线")
            if len(db_type) > 100:
                raise ValueError("db_type 的长度应小于 100")
            self.ensure_milvus_directory()
            self.initialize_milvus_connection()
            logger.debug(f"集合名称: {collection_name}")
            # A missing collection is treated as a successful no-op.
            if not utility.has_collection(collection_name):
                logger.debug(f"集合 {collection_name} 不存在")
                return {
                    "status": "success",
                    "collection_name": collection_name,
                    "message": f"集合 {collection_name} 不存在,无需删除"
                }
            # Drop the collection.
            try:
                utility.drop_collection(collection_name)
                logger.debug(f"成功删除集合: {collection_name}")
                return {
                    "status": "success",
                    "collection_name": collection_name,
                    "message": f"集合 {collection_name} 删除成功"
                }
            except Exception as e:
                logger.error(f"删除集合 {collection_name} 失败: {str(e)}")
                raise RuntimeError(f"删除集合失败: {str(e)}")
        except Exception as e:
            logger.error(f"删除集合失败: {str(e)}")
            return {
                "status": "error",
                "collection_name": collection_name,
                "message": str(e)
            }
        finally:
            self.cleanup_milvus_connection()

collection_delete_register('Milvus', MilvusCollectionDelete)

View File

@@ -0,0 +1,50 @@
{
"filesroot":"$[workdir]$/files",
"logger":{
"name":"llmengine",
"levelname":"info",
"logfile":"$[workdir]$/logs/llmengine.log"
},
"website":{
"paths":[
["$[workdir]$/wwwroot",""]
],
"client_max_size":10000,
"host":"0.0.0.0",
"port":9992,
"coding":"utf-8",
"indexes":[
"index.html",
"index.ui"
],
"startswiths":[
{
"leading":"/idfile",
"registerfunction":"idfile"
},{
"leading": "/v1/collections",
"registerfunction": "collections"
},{
"leading": "/docs",
"registerfunction": "docs"
}
],
"processors":[
[".tmpl","tmpl"],
[".app","app"],
[".ui","bui"],
[".dspy","dspy"],
[".md","md"]
],
"rsakey_oops":{
"privatekey":"$[workdir]$/conf/rsa_private_key.pem",
"publickey":"$[workdir]$/conf/rsa_public_key.pem"
},
"session_max_time":3000,
"session_issue_time":2500,
"session_redis_notuse":{
"url":"redis://127.0.0.1:6379"
}
}
}

View File

3
test/collections/start.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Point the service at the shared Milvus YAML configuration.
export CONFIG_PATH=/share/wangmeihua/rag/conf/milvusconfig.yaml
# Start the collection-creation service on port 8889 against the Milvus Lite DB.
CUDA_VISIBLE_DEVICES=7 /share/vllm-0.8.5/bin/python -m llmengine.collection -p 8889 /share/wangmeihua/rag/conf/Milvus/milvus.db

View File

@@ -0,0 +1,50 @@
{
"filesroot":"$[workdir]$/files",
"logger":{
"name":"llmengine",
"levelname":"info",
"logfile":"$[workdir]$/logs/llmengine.log"
},
"website":{
"paths":[
["$[workdir]$/wwwroot",""]
],
"client_max_size":10000,
"host":"0.0.0.0",
"port":8888,
"coding":"utf-8",
"indexes":[
"index.html",
"index.ui"
],
"startswiths":[
{
"leading":"/idfile",
"registerfunction":"idfile"
},{
"leading": "/v1/collections/delete",
"registerfunction": "collections/delete"
},{
"leading": "/docs",
"registerfunction": "docs"
}
],
"processors":[
[".tmpl","tmpl"],
[".app","app"],
[".ui","bui"],
[".dspy","dspy"],
[".md","md"]
],
"rsakey_oops":{
"privatekey":"$[workdir]$/conf/rsa_private_key.pem",
"publickey":"$[workdir]$/conf/rsa_public_key.pem"
},
"session_max_time":3000,
"session_issue_time":2500,
"session_redis_notuse":{
"url":"redis://127.0.0.1:6379"
}
}
}

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Point the service at the shared Milvus YAML configuration.
export CONFIG_PATH=/share/wangmeihua/rag/conf/milvusconfig.yaml
# Start the collection-deletion service on port 8888 against the Milvus Lite DB.
CUDA_VISIBLE_DEVICES=7 /share/vllm-0.8.5/bin/python -m llmengine.collection_delete -p 8888 /share/wangmeihua/rag/conf/Milvus/milvus.db