Commit ab6c9c5 by Tuchuanhuhuhu
Parent: 9e43466

Fix the flickering issue and reduce output latency
Files changed:
- ChuanhuChatbot.py +2 -1
- modules/chat_func.py +1 -2
ChuanhuChatbot.py
CHANGED
@@ -272,7 +272,8 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
         reset_state,
         outputs=[chatbot, history, token_count, status_display],
         show_progress=True,
-    )
+    )
+    emptyBtn.click(**reset_textbox_args)
 
     retryBtn.click(**start_outputing_args).then(
         retry,
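In this hunk the textbox reset (reset_textbox_args) is registered as its own click listener on emptyBtn rather than being tied to the listener that runs reset_state, presumably so the input box can clear without waiting on the chat reset to re-render. A minimal, self-contained sketch of that pattern, assuming hypothetical stand-in functions reset_chat and reset_textbox (the real project passes prepared kwargs dicts such as reset_textbox_args):

# Sketch only: two independent click handlers on one button.
# Names (reset_chat, reset_textbox, empty_btn) are illustrative,
# not the project's actual reset_state / reset_textbox_args wiring.
import gradio as gr

def reset_chat():
    # clear the conversation display
    return []

def reset_textbox():
    # clear the user input box
    return ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    user_input = gr.Textbox()
    empty_btn = gr.Button("Clear")
    # Each click() call registers a separate listener; the second one
    # updates only the textbox, so it is not chained behind the chat reset.
    empty_btn.click(reset_chat, outputs=[chatbot])
    empty_btn.click(reset_textbox, outputs=[user_input])

if __name__ == "__main__":
    demo.launch()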
modules/chat_func.py
CHANGED
@@ -268,6 +268,7 @@ def predict(
     should_check_token_count=True,
 ):  # repetition_penalty, top_k
     logging.info("输入为:" + colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL)
+    yield chatbot+[(inputs, "")], history, "开始生成回答……", all_token_counts
     if reply_language == "跟随问题语言(不稳定)":
         reply_language = "the same language as the question, such as English, 中文, 日本語, Español, Français, or Deutsch."
     if files:
@@ -320,8 +321,6 @@ def predict(
         yield chatbot+[(inputs, "")], history, status_text, all_token_counts
         return
 
-    yield chatbot+[(inputs, "")], history, "开始生成回答……", all_token_counts
-
     if stream:
         logging.info("使用流式传输")
         iter = stream_predict(
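The modules/chat_func.py change does not add a new status message; it moves the existing "开始生成回答……" ("starting to generate the answer…") yield from after the early-return validation branches up to right after the input is logged, so the UI gets a frame to paint before any further setup runs. A standalone sketch of the idea, using an invented predict_sketch generator rather than the project's predict signature:

# Sketch only: yielding a placeholder frame before slow work lets the
# front end repaint immediately instead of waiting for the first real chunk.
import time

def predict_sketch(question):
    yield "开始生成回答……"   # placeholder shown as soon as the call starts
    time.sleep(1.0)          # stands in for request setup / token counting
    for chunk in ("Hello", ", ", "world"):
        yield chunk          # streamed answer frames replace the placeholder

if __name__ == "__main__":
    for frame in predict_sketch("hi"):
        print(frame)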