bugfix: token counting in the streamed chat response
parent b8c52aa77a
commit 0399ca60dd
@@ -145,6 +145,7 @@ class BaseChatLLM:
                               kwargs=kwargs)
         thread.start()
         txt = ''
+        i_tokens = o_tokens = 0
         i = 0
         for d in self.output_generator(streamer):
             if i == 0:
@@ -153,8 +154,8 @@ class BaseChatLLM:
             if d['choices'][0]['finish_reason'] != 'stop':
                 txt += d['choices'][0]['delta']['content']
             else:
-                i_tokens = d['input_tokens']
-                o_tokens = d['output_tokens']
+                o_tokens = d.get('output_tokens', 0)
+                i_tokens = input_len

         t2 = time()
         return {
@@ -164,7 +165,6 @@ class BaseChatLLM:
             "model":self.model_id,
             "response_time": t2 - t1,
             "finish_time": t3 - t1,
-            "output_tokens": output_tokens,
             "choices":[
                 {
                     "index":0,
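For context, below is a minimal runnable sketch of what the streaming-collection path looks like after this commit. Only the lines flagged in the hunks above come from the actual change; the collect_stream wrapper, the stubbed output_generator, the model_id value, the t3 timestamp, the i counter update, and the response keys not visible in the diff (input_tokens, output_tokens, message) are assumptions for illustration.

from time import time


class BaseChatLLM:
    """Illustrative stand-in: only the marked lines mirror this commit."""

    model_id = "example-model"

    def output_generator(self, streamer):
        # Stub: the real method yields OpenAI-style chunks produced by the
        # streamer thread started just before the loop in the diff above.
        yield {"choices": [{"finish_reason": None, "delta": {"content": "Hi"}}]}
        yield {"choices": [{"finish_reason": "stop", "delta": {}}], "output_tokens": 1}

    def collect_stream(self, streamer, input_len, t1):
        txt = ''
        i_tokens = o_tokens = 0   # added by this commit: safe defaults if no 'stop' chunk arrives
        i = 0
        for d in self.output_generator(streamer):
            if i == 0:
                pass              # first-chunk handling is not shown in the diff
            if d['choices'][0]['finish_reason'] != 'stop':
                txt += d['choices'][0]['delta']['content']
            else:
                # changed by this commit: read the output count defensively and
                # take the input count from the known prompt length
                o_tokens = d.get('output_tokens', 0)
                i_tokens = input_len
            i += 1                # assumed; only `i = 0` and `if i == 0:` are visible
        t2 = time()
        t3 = time()               # assumed; t3 appears only in the returned dict
        return {
            "model": self.model_id,
            "response_time": t2 - t1,
            "finish_time": t3 - t1,
            "input_tokens": i_tokens,     # assumed keys: the diff drops the old
            "output_tokens": o_tokens,    # output_tokens entry but its new home is not shown
            "choices": [
                {"index": 0, "message": {"role": "assistant", "content": txt}},
            ],
        }


# Usage of the sketch:
# BaseChatLLM().collect_stream(streamer=None, input_len=3, t1=time())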