Skip to content

Commit a6085c2

Browse files
authored
Bugfix: streaming mode of openai_api.py
1 parent 11e0087 commit a6085c2

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

openai_api.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -484,7 +484,7 @@ async def predict(
484484
stop_words_ids = [tokenizer.encode(s)
485485
for s in stop_words] if stop_words else None
486486

487-
delay_token_num = max([len(x) for x in stop_words])
487+
delay_token_num = max([len(x) for x in stop_words]) if stop_words_ids else 0
488488
response_generator = model.chat_stream(tokenizer,
489489
query,
490490
history=history,
@@ -493,8 +493,8 @@ async def predict(
493493
**gen_kwargs)
494494
for _new_response in response_generator:
495495
if len(_new_response) <= delay_token_num:
496-
continue
497-
new_response = _new_response[:-delay_token_num]
496+
continue
497+
new_response = _new_response[:-delay_token_num] if delay_token_num else _new_response
498498

499499
if len(new_response) == current_length:
500500
continue

0 commit comments

Comments (0)