Tuchuanhuhuhu committed
Commit 883ed13 · 1 parent: dbdf4db
Commit message: 修复token占用过高的问题 (Fix the issue of excessive token usage)
File changed: llama_func.py (+4 -4)
llama_func.py
CHANGED
@@ -107,7 +107,7 @@ def chat_ai(

     logging.info(f"Question: {question}")

-    response, status_text = ask_ai(
+    response, chatbot_display, status_text = ask_ai(
         api_key,
         index,
         question,
@@ -123,7 +123,7 @@ def chat_ai(
     response = response

     context.append({"role": "user", "content": question})
-    context.append({"role": "assistant", "content":
+    context.append({"role": "assistant", "content": chatbot_display})
     chatbot.append((question, response))

     os.environ["OPENAI_API_KEY"] = ""
@@ -174,12 +174,12 @@ def ask_ai(
             nodes.append(
                 f"<details><summary>[{index+1}]\t{brief}...</summary><p>{node.source_text}</p></details>"
             )
-        ret_text
+        new_response = ret_text + "\n\n".join(nodes)
         logging.info(
             f"Response: {colorama.Fore.BLUE}{ret_text}{colorama.Style.RESET_ALL}"
         )
         os.environ["OPENAI_API_KEY"] = ""
-        return ret_text, f"查询消耗了{llm_predictor.last_token_usage} tokens"
+        return ret_text, new_response, f"查询消耗了{llm_predictor.last_token_usage} tokens"
     else:
         logging.warning("No response found, returning None")
         os.environ["OPENAI_API_KEY"] = ""
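For context, a minimal standalone sketch of the pattern the four changed lines set up. The names ret_text, nodes, new_response, response, chatbot_display, and status_text mirror the diff above; the literal values and the flat layout are assumptions made purely for illustration and are not code from the repository.

# Illustrative sketch only; values are made up, names mirror the diff above.
# ask_ai now leaves ret_text untouched and builds the source-annotated display
# string in a separate variable instead of folding the sources back into ret_text.
ret_text = "The plain answer."
nodes = [
    "<details><summary>[1]\tfirst source...</summary><p>full text A</p></details>",
    "<details><summary>[2]\tsecond source...</summary><p>full text B</p></details>",
]
# Concatenates the answer with the collapsible source blocks, joined by blank lines.
new_response = ret_text + "\n\n".join(nodes)

# ask_ai therefore returns three values instead of two, and chat_ai unpacks them as
# response (plain answer), chatbot_display (answer plus sources), status_text (token usage):
response, chatbot_display, status_text = (
    ret_text,
    new_response,
    "查询消耗了123 tokens",  # placeholder for f"查询消耗了{llm_predictor.last_token_usage} tokens"
)

How these two strings are then wired into context and chatbot is exactly as the changed lines of chat_ai above read; the sketch only illustrates the new three-value return of ask_ai.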