From d4ff002c16fc9e1357ed16fc676a0eca15753338 Mon Sep 17 00:00:00 2001 From: yumoqing Date: Thu, 4 Sep 2025 11:00:45 +0800 Subject: [PATCH] bugfix --- llmage/__init__.py | 2 ++ llmage/llmclient.py | 9 +++++++++ wwwroot/llm_dialog.ui | 2 +- 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/llmage/__init__.py b/llmage/__init__.py index 4589d21..7769c28 100644 --- a/llmage/__init__.py +++ b/llmage/__init__.py @@ -1,4 +1,5 @@ from llmage.llmclient import ( + b64media, get_llm, inference, get_llmcatelogs, @@ -16,6 +17,7 @@ from ahserver.serverenv import ServerEnv def load_llmage(): env = ServerEnv() env.get_llm = get_llm + env.b64media = b64media env.inference = inference env.get_llms_by_catelog = get_llms_by_catelog env.get_llmcatelogs = get_llmcatelogs diff --git a/llmage/llmclient.py b/llmage/llmclient.py index 2ad5531..4388ebb 100644 --- a/llmage/llmclient.py +++ b/llmage/llmclient.py @@ -83,6 +83,15 @@ async def uapi_request(request, sor, caller_orgid, callerid, uapi, llm, params): yield l debug(f'{d=}, {txt=}') +def b64media(meidafile): + if meidafile.startswith('data:'): + return meidafile + fs = FileStorage() + fn = fs.realPath(meidafile) + with open(fn, 'rb') as f: + b = f.read() + return base64.b64encode(b).decode('iso-8859-1') + async def inference(request, *args, **kw): env = request._run_ns caller_orgid = await env.get_userorgid() diff --git a/wwwroot/llm_dialog.ui b/wwwroot/llm_dialog.ui index b2f317d..e4ecc4d 100644 --- a/wwwroot/llm_dialog.ui +++ b/wwwroot/llm_dialog.ui @@ -28,7 +28,7 @@ { "widgettype":"Text", "options":{ - "otype": "没找到模型", + "otext": "没找到模型", "i18n":true } }