bugfix
This commit is contained in:
parent: 0399ca60dd
commit: cb8c4d44c3
@@ -150,14 +150,14 @@ class BaseChatLLM:
        for d in self.output_generator(streamer):
            if i == 0:
                i = 1
                t1 = time()
            t2 = time()
            if d['choices'][0]['finish_reason'] != 'stop':
                txt += d['choices'][0]['delta']['content']
            else:
                o_tokens = d.get('output_tokens', 0)
                i_tokens = input_len

        t2 = time()
        t3 = time()
        return {
            'id': f'chatcmpl-{getID()}',
            "object":"chat.completion",

@@ -8,7 +8,7 @@ from llmengine.gemma3_it import Gemma3LLM
from llmengine.medgemma3_it import MedgemmaLLM
from llmengine.qwen3 import Qwen3LLM
from llmengine.qwen3coder import Qwen3CoderLLM
- from llmengine.baiduanm2 import BaichuanM2LLM
+ from llmengine.baichuanm2 import BaichuanM2LLM

from appPublic.registerfunction import RegisterFunction
from appPublic.log import debug, exception
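
For context, the first hunk sits inside a streaming loop that accumulates delta chunks, records timing, and builds an OpenAI-style chat.completion dict. Below is a minimal, runnable sketch of that pattern, assuming an OpenAI-style chunk layout; fake_stream(), collect_completion(), the uuid-based id, and the usage/latency fields are illustrative stand-ins for the repository's output_generator(), getID(), and actual response schema, not its real code.

# Sketch of the accumulation pattern touched by the first hunk.
# Chunk layout and every name not visible in the diff are assumptions.
import uuid
from time import time

def fake_stream():
    # Stand-in for self.output_generator(streamer): yields delta chunks,
    # then a final chunk whose finish_reason is 'stop'.
    for word in ("Hello", " world", "!"):
        yield {'choices': [{'finish_reason': None, 'delta': {'content': word}}]}
    yield {'choices': [{'finish_reason': 'stop', 'delta': {}}], 'output_tokens': 3}

def collect_completion(input_len=10):
    txt, i = '', 0
    o_tokens, i_tokens = 0, input_len
    t0 = time()
    t1 = t2 = t0
    for d in fake_stream():
        if i == 0:
            i = 1
            t1 = time()          # time of the first streamed chunk
        t2 = time()              # time of the most recent chunk
        if d['choices'][0]['finish_reason'] != 'stop':
            txt += d['choices'][0]['delta']['content']
        else:
            o_tokens = d.get('output_tokens', 0)
            i_tokens = input_len
    return {
        'id': f'chatcmpl-{uuid.uuid4().hex}',   # the diff uses getID(); uuid is a stand-in
        'object': 'chat.completion',
        'usage': {'prompt_tokens': i_tokens, 'completion_tokens': o_tokens},
        'choices': [{'message': {'role': 'assistant', 'content': txt},
                     'finish_reason': 'stop'}],
        'first_chunk_latency': t1 - t0,
        'total_latency': t2 - t0,
    }

if __name__ == '__main__':
    print(collect_completion())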