Bugfix: streaming mode of openai_api.py

main
Jianxin Ma committed via GitHub, 10 months ago
parent 11e00874a9
commit a6085c2a91

@@ -484,7 +484,7 @@ async def predict(
     stop_words_ids = [tokenizer.encode(s)
                       for s in stop_words] if stop_words else None
-    delay_token_num = max([len(x) for x in stop_words])
+    delay_token_num = max([len(x) for x in stop_words]) if stop_words_ids else 0
     response_generator = model.chat_stream(tokenizer,
                                            query,
                                            history=history,
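
Note: the guard in the first hunk matters because `stop_words` can be empty or absent, in which case the old line calls `max()` on an empty sequence. A minimal sketch of that failure mode, assuming an empty `stop_words` list (hypothetical values, not the actual request payload):

```python
# Minimal sketch of the failure the first hunk fixes, assuming the
# endpoint can be called with no stop words (stop_words = [] or None).
stop_words = []          # hypothetical request without stop words
stop_words_ids = None    # what `[...] if stop_words else None` yields here

# Old behavior: max() over an empty sequence raises ValueError,
# so the streaming handler crashes before yielding anything.
try:
    delay_token_num = max([len(x) for x in stop_words])
except ValueError:
    print("old code: max() raises ValueError on an empty sequence")

# Fixed behavior: fall back to 0 when there is nothing to delay on.
delay_token_num = max([len(x) for x in stop_words]) if stop_words_ids else 0
print("fixed code:", delay_token_num)  # -> fixed code: 0
```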
@@ -494,7 +494,7 @@ async def predict(
     for _new_response in response_generator:
         if len(_new_response) <= delay_token_num:
             continue
-        new_response = _new_response[:-delay_token_num]
+        new_response = _new_response[:-delay_token_num] if delay_token_num else _new_response
         if len(new_response) == current_length:
             continue
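
Note: the second hunk fixes a related slicing pitfall. When `delay_token_num` is 0, `_new_response[:-0]` is the same as `_new_response[:0]`, so every chunk would collapse to an empty string and the stream would never emit text. A sketch under that assumption, using a hypothetical response string in place of the model's streamed output:

```python
# Sketch of the slicing pitfall the second hunk fixes; the string below
# is a hypothetical chunk standing in for chat_stream output.
_new_response = "Hello, how can I help?"
delay_token_num = 0  # no stop words, so nothing needs to be held back

# Old behavior: s[:-0] is s[:0], i.e. always the empty string,
# so every streamed chunk collapses to "".
old_new_response = _new_response[:-delay_token_num]
assert old_new_response == ""

# Fixed behavior: only trim the trailing window when there is one to trim.
new_response = _new_response[:-delay_token_num] if delay_token_num else _new_response
assert new_response == "Hello, how can I help?"
```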
