Upload folder using huggingface_hub
Files changed:
- app.py +22 -31
- app3.py +15 -0
- graph_helper.py +8 -1
app.py
CHANGED
@@ -95,17 +95,18 @@ thinking_verbs = [
 
 async def chat_wrapper(message, history, thread_id, debug):
     if debug:
-        async for chunk in chat_streaming(message, history, thread_id):
+        async for chunk in chat_streaming(debug, message, history, thread_id):
            yield chunk
     else:
-        response = chat(message, history, thread_id)
+        response = chat(debug, message, history, thread_id)
         yield response
 
 
-def chat(message, history, thread_id):
+def chat(debug_mode, message, history, thread_id):
     config = {"configurable": {"thread_id": thread_id}}
     response = graph.invoke(
-        {"messages": [{"role": "user", "content": message}]},
+        {"debug_mode": debug_mode, "messages": [{"role": "user", "content": message}]},
+        config=config,
     )
     return response["messages"][-1].content
 
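The non-streaming path now writes debug_mode into the initial graph state and, unlike before, actually passes the thread config to graph.invoke. For orientation, here is a minimal, self-contained sketch of why that config matters when the graph is compiled with a LangGraph checkpointer; the toy ToyState/echo graph below is illustrative and not taken from this repo:

```python
# Illustrative sketch only: per-thread memory in LangGraph via a checkpointer.
from typing import Annotated, TypedDict

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages


class ToyState(TypedDict):
    debug_mode: bool
    messages: Annotated[list, add_messages]


def echo(state: ToyState):
    # Stand-in for the real LLM node: echo the latest user message.
    return {"messages": [{"role": "assistant", "content": "echo: " + state["messages"][-1].content}]}


builder = StateGraph(ToyState)
builder.add_node("echo", echo)
builder.add_edge(START, "echo")
builder.add_edge("echo", END)
graph = builder.compile(checkpointer=MemorySaver())  # per-thread conversation memory

config = {"configurable": {"thread_id": "thread-42"}}  # one history per thread_id
result = graph.invoke(
    {"debug_mode": False, "messages": [{"role": "user", "content": "Namaskaram"}]},
    config=config,
)
print(result["messages"][-1].content)  # "echo: Namaskaram"
```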
@@ -139,8 +140,11 @@ def get_args_for_toolcall(tool_calls_buffer: dict, tool_call_id: str):
 )
 
 
-async def chat_streaming(message, history, thread_id):
-    state = {
+async def chat_streaming(debug_mode: bool, message, history, thread_id):
+    state = {
+        "debug_mode": debug_mode,
+        "messages": (history or []) + [{"role": "user", "content": message}],
+    }
     config = {"configurable": {"thread_id": thread_id}}
     start_time = time.time()
     streamed_response = ""
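chat_streaming now seeds the graph with debug_mode plus the prior history. The streaming loop itself lies outside this hunk; the sketch below shows the usual LangGraph token-streaming shape such a loop would have, assuming stream_mode="messages" (an assumption, not something this diff shows):

```python
# Assumed continuation of chat_streaming (not part of this diff): stream LLM tokens
# from the graph and yield the accumulated text back to the Gradio chat component.
async for chunk, metadata in graph.astream(state, config=config, stream_mode="messages"):
    text = getattr(chunk, "content", "")
    if text:
        streamed_response += text
        yield streamed_response
```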
@@ -330,31 +334,11 @@ supported_scriptures = "\n - ".join(
     ]
 )
 
-intro_messages = [
-    {
-        "role": "assistant",
-        "content": f"Namaskaram 🙏 I am Sanatan-Bot and I can help you explore the following scriptures:\n\n - {supported_scriptures}",
-    },
-    {
-        "role": "assistant",
-        "content": """
-#### Example questions you can ask:
-- How is the form of Vishnu described across the scriptures?
-- What teachings did Krishna share in the Gita?
-- How did Arjuna respond upon witnessing Krishna’s Vishwarupa?
-- What are some names of Vishnu from the Sahasranamam?
-- Give me a pasuram by Andal
-- explain sakrudeva prapannaaya shlokam in ramayana
-- give the shlokam in ramayanam that vibheeshana uses to perform sharanagathi to rama, give the sanskrit shlokam and its meaning
-- give one name in vishnu sahasranamam related to "cause"
-- explain name of vishnu in sahasranamam related to wheel
-""",
-    },
-]
-
 init()
 
-message_textbox = gr.Textbox(
+message_textbox = gr.Textbox(
+    placeholder="Search the scriptures ...", submit_btn=True, stop_btn=True
+)
 
 with gr.Blocks(
     theme=gr.themes.Citrus(),
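The hard-coded intro_messages list is removed and the chat input becomes a Textbox with built-in submit and stop buttons, created before the gr.Blocks context. In Gradio, a component instantiated outside a Blocks scope is attached to the layout later, typically with .render(); a small self-contained sketch of that pattern follows (where the textbox is actually rendered is not shown in this diff, so the placement here is an assumption):

```python
import gradio as gr

# Mirrors the textbox above; the render location inside the layout is assumed.
message_textbox = gr.Textbox(
    placeholder="Search the scriptures ...", submit_btn=True, stop_btn=True
)

with gr.Blocks() as demo:
    gr.Markdown("## Chat")
    message_textbox.render()  # attach the pre-built component at this point in the layout

demo.launch()
```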
@@ -424,7 +408,10 @@ with gr.Blocks(
     }
     """,
 ) as app:
-
+    show_sidebar = gr.State(True)
+
+    # with gr.Column(scale=1, visible=show_sidebar.value) as sidebar_container:
+    with gr.Sidebar(open=show_sidebar.value) as sidebar:
         # session_id = gr.Textbox(value=f"Thread: {thread_id}")
         # gr.Markdown(value=f"{'\n'.join([msg['content'] for msg in intro_messages])}")
         gr.Markdown(
@@ -439,6 +426,10 @@ with gr.Blocks(
             await asyncio.sleep(0.05)
         return
 
+    def close_side_bar():
+        print("close_side_bar invoked")
+        yield gr.update(open=False)
+
     for scripture in sorted(SanatanConfig.scriptures, key=lambda d: d.get("title")):
         with gr.Accordion(label=f"{scripture['title']}", open=False):
             gr.Markdown(f"* Source: [🔗 click here]({scripture['source']})")
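close_side_bar is written as a generator that yields a single gr.update(open=False). Gradio also accepts a plain return for a one-shot update, which is the form the standalone demo in app3.py uses:

```python
import gradio as gr

# Equivalent one-shot form (as in app3.py below): return the update instead of yielding it.
def close_side_bar():
    return gr.update(open=False)
```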
@@ -449,7 +440,7 @@ with gr.Blocks(
                 scripture["example_labels"], scripture["examples"]
             ):
                 btn = gr.Button(value=f"{example_label}", size="sm")
-                btn.click(
+                btn.click(close_side_bar,outputs=[sidebar]).then(
                     populate_chat_input,
                     inputs=[gr.State(example_text)],
                     outputs=[message_textbox],
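Each example button now chains two events: the click handler collapses the sidebar first, and .then() schedules the next handler only after the previous one finishes. A minimal self-contained sketch of that chaining pattern (component names here are illustrative):

```python
import gradio as gr

with gr.Blocks() as demo:
    status = gr.Textbox(label="status")
    result = gr.Textbox(label="result")
    go = gr.Button("Go")

    # .then() runs its handler only after the preceding event handler completes.
    go.click(lambda: "working...", outputs=status).then(
        lambda: "done", outputs=result
    )

demo.launch()
```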
app3.py
ADDED
@@ -0,0 +1,15 @@
+import gradio as gr
+
+def collapse_sidebar():
+    # return a config change for the Sidebar component
+    return gr.update(open=False)
+
+with gr.Blocks() as demo:
+    with gr.Sidebar() as sidebar:
+        gr.Markdown("Sidebar content")
+        close_btn = gr.Button("Close sidebar")
+
+    # clicking the button tells Gradio to update the Sidebar's 'open' prop
+    close_btn.click(fn=collapse_sidebar, inputs=None, outputs=sidebar)
+
+demo.launch()
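Usage note on the demo above: passing the sidebar component as outputs is what tells Gradio where to apply the returned gr.update(open=False); only the Sidebar's open property changes, the rest of its configuration stays as created.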
graph_helper.py
CHANGED
@@ -25,15 +25,21 @@ logger.setLevel(logging.INFO)
 
 
 class ChatState(TypedDict):
+    debug_mode : bool = True
     messages: Annotated[list[str], add_messages]
 
+def check_debug_condition(state: ChatState) -> str:
+    if state["debug_mode"]:
+        return "validator"
+    else:
+        return "__end__"
 
 def branching_condition(state: ChatState) -> str:
     last_message = state["messages"][-1]
     if hasattr(last_message, "tool_calls") and last_message.tool_calls:
         return "tools"
     else:
-        return
+        return check_debug_condition(state)
 
 
 def generate_graph() -> CompiledStateGraph:
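branching_condition now defers to check_debug_condition when the last message carries no tool calls, so debug runs continue to the validator node while normal runs end immediately. A small sketch of that routing, assuming the messages are LangChain message objects (which the tool_calls check implies) and that this runs next to graph_helper.py:

```python
from langchain_core.messages import AIMessage

from graph_helper import branching_condition  # the router defined above

reply = AIMessage(content="final answer")  # an assistant reply with no tool calls

print(branching_condition({"debug_mode": True, "messages": [reply]}))   # -> "validator"
print(branching_condition({"debug_mode": False, "messages": [reply]}))  # -> "__end__"
```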
@@ -220,6 +226,7 @@ Where:
         {
             "tools": "tools",
             "validator": "validator",
+            "__end__" : END
         },
     )
     graph.add_edge("tools", "llm")
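The mapping above is the path map of a conditional edge, and langgraph's END constant is the string "__end__", which is why the new return value and the new key line up. A sketch of the full wiring these two hunks imply, inside generate_graph; the "llm" source node and the use of branching_condition as the router are assumptions based on the surrounding lines:

```python
from langgraph.graph import END

# Assumed wiring: route out of the LLM node based on branching_condition's label.
graph.add_conditional_edges(
    "llm",                         # source node (assumption, not shown in this diff)
    branching_condition,           # returns "tools", "validator", or "__end__"
    {
        "tools": "tools",          # the model asked for a tool call
        "validator": "validator",  # debug_mode on: validate the answer first
        "__end__": END,            # debug_mode off: finish the run (END == "__end__")
    },
)
```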