diff --git a/llmengine/__pycache__/ahserver.cpython-310.pyc b/llmengine/__pycache__/ahserver.cpython-310.pyc index 4bc5873..6416a7a 100644 Binary files a/llmengine/__pycache__/ahserver.cpython-310.pyc and b/llmengine/__pycache__/ahserver.cpython-310.pyc differ diff --git a/llmengine/ahserver.py b/llmengine/ahserver.py deleted file mode 100644 index 6068fde..0000000 --- a/llmengine/ahserver.py +++ /dev/null @@ -1,5 +0,0 @@ -from ahserver.configuredServer import ConfiguredServer - -if __name__ == '__main__': - server = ConfiguredServer() - server.run() \ No newline at end of file diff --git a/llmengine/gemma3_it.py b/llmengine/gemma3_it.py index ac5613f..3936fd2 100644 --- a/llmengine/gemma3_it.py +++ b/llmengine/gemma3_it.py @@ -24,6 +24,10 @@ class Gemma3LLM(MMChatLLM): llm_register("gemma-3", Gemma3LLM) if __name__ == '__main__': + def get_stream_text(chunk): + chunk = chunk[6:] + d = json.loads(chunk) + return d['choices'][0]['delta']['content'] gemma3 = Gemma3LLM('/share/models/google/gemma-3-4b-it') session = {} while True: @@ -35,7 +39,7 @@ if __name__ == '__main__': print('input image path') imgpath=input() for d in gemma3.stream_generate(session, p, image_path=imgpath): - if not d['done']: + if not d['done']: print(d['text'], end='', flush=True) else: x = {k:v for k,v in d.items() if k != 'text'}