import json
import os
import random
import asyncio
import logging
import threading
import time
import traceback
from html import escape
import gradio as gr
from dotenv import load_dotenv
from langchain_core.messages.ai import AIMessageChunk, AIMessage
from langchain_core.messages.system import SystemMessage
from langchain_core.messages.tool import ToolMessage
from chat_utils import (
MAX_MESSAGES_IN_CONVERSATION,
chat_wrapper,
init_session,
limited_chat_wrapper,
)
from config import SanatanConfig
from db import SanatanDatabase
from drive_downloader import ZipDownloader
from graph_helper import generate_graph
from modules.youtube_metadata.app import initialize_youtube_metadata_and_poll
from nalayiram_helper import delete_taniyan
import pycountry
# Logging
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Silence noisy third-party loggers (openai, httpx, httpcore)
logging.getLogger("openai").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)
graph = generate_graph()
def get_all_languages():
"""
Returns a sorted list of all languages by their English names.
Uses ISO 639 data from pycountry.
"""
languages = [lang.name for lang in pycountry.languages if hasattr(lang, "name")]
return sorted(set(languages)) # remove duplicates and sort alphabetically
def init():
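    """
    Bootstrap the app: load environment variables, sanity-check the local
    database (downloading and unzipping it from Google Drive if the check
    fails), clean up taniyan records, rebuild the global index, and start
    YouTube metadata polling in a background thread.
    """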
load_dotenv(override=True)
try:
SanatanDatabase().test_sanity()
except Exception as e:
logger.warning("Sanity Test Failed - %s", e)
logger.info("Downloading database ...")
downloader = ZipDownloader(
service_account_json=os.getenv("GOOGLE_SERVICE_ACCOUNT_JSON")
)
zip_path = downloader.download_zip_from_drive(
file_id=os.getenv("CHROMADB_FILE_ID"),
output_path=SanatanConfig.dbStorePath,
)
downloader.unzip(zip_path, extract_to="./")
# delete taniyan records, then rebuild the global index for all scriptures
SanatanDatabase().delete_taniyans_in_divya_prabandham()
SanatanDatabase().build_global_index_for_all_scriptures()
# Launch YouTube metadata initialization and polling in a background thread
yt_init_thread = threading.Thread(target=initialize_youtube_metadata_and_poll, daemon=True)
yt_init_thread.start()
def render_message_with_tooltip(content: str, max_chars=200):
    short = escape(content[:max_chars]) + ("…" if len(content) > max_chars else "")
    # Truncated preview; the full, escaped text is exposed via the title attribute as a hover tooltip.
    return f'<span class="node-label" title="{escape(content)}">{short}</span>'
# UI Elements
message_count = gr.State(0)
thread_id = gr.State(init_session)
supported_scriptures = "\n - ".join(
[
f"๐ **{scripture['title']}** [source]({scripture['source']})"
for scripture in SanatanConfig.scriptures
]
)
init()
message_textbox = gr.Textbox(
placeholder="Search the scriptures ...", submit_btn=True, stop_btn=True
)
with gr.Blocks(
theme=gr.themes.Citrus(),
title="Sanatan-AI | Chat",
css="""
/* hide the additional inputs row under the textbox */
.gr-chat-interface .gr-form {
display: none !important;
}
table {
border-collapse: collapse;
width: 90%;
}
table, th, td {
border: 1px solid #ddd;
padding: 6px;
font-size: small;
}
td {
word-wrap: break-word;
white-space: pre-wrap; /* preserves line breaks but wraps long lines */
max-width: 300px; /* control width */
vertical-align: top;
}
.spinner {
display: inline-block;
width: 1em;
height: 1em;
border: 2px solid transparent;
border-top: 2px solid #333;
border-radius: 50%;
animation: spin 0.8s linear infinite;
vertical-align: middle;
margin-left: 0.5em;
}
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
.thinking-bubble {
opacity: 0.5;
font-style: italic;
animation: pulse 1.5s infinite;
margin-bottom: 5px;
}
@keyframes pulse {
0% { opacity: 0.3; }
50% { opacity: 1; }
100% { opacity: 0.3; }
}
.node-label {
cursor: help;
border-bottom: 1px dotted #aaa;
}
.intermediate-output {
opacity: 0.4;
font-style: italic;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
""",
) as gradio_app:
show_sidebar = gr.State(True)
# with gr.Column(scale=1, visible=show_sidebar.value) as sidebar_container:
with gr.Sidebar(open=show_sidebar.value) as sidebar:
# session_id = gr.Textbox(value=f"Thread: {thread_id}")
# gr.Markdown(value=f"{'\n'.join([msg['content'] for msg in intro_messages])}")
gr.Markdown(
value="Namaskaram ๐ I am Sanatan-Bot and I can help you explore the following scriptures:\n\n"
)
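# Stream an example prompt into the chat textbox one character at a time (simple typing effect).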
async def populate_chat_input(text: str):
buffer = ""
for c in text:
buffer += c
yield buffer
await asyncio.sleep(0.05)
return
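# Collapse the sidebar so the populated example and the chat area stay in view.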
def close_side_bar():
print("close_side_bar invoked")
yield gr.update(open=False)
for scripture in sorted(SanatanConfig.scriptures, key=lambda d: d.get("title")):
with gr.Accordion(label=f"{scripture['title']}", open=False):
gr.Markdown(f"* Source: [๐ click here]({scripture['source']})")
gr.Markdown(f"* Language: {scripture['language']}")
gr.Markdown(f"* Examples :")
with gr.Row():
for example_label, example_text in zip(
scripture["example_labels"], scripture["examples"]
):
btn = gr.Button(value=f"{example_label}", size="sm")
btn.click(close_side_bar, outputs=[sidebar]).then(
populate_chat_input,
inputs=[gr.State(example_text)],
outputs=[message_textbox],
)
gr.Markdown(value="------")
debug_checkbox = gr.Checkbox(label="Debug (Streaming)", value=True)
preferred_language = gr.Dropdown(
choices=get_all_languages(), value="English", label="Preferred Language"
)
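# Canned follow-up prompts: keys are the button labels shown in the Examples panels,
# values are the prompts placed into the chat textbox when a label is clicked.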
navigation_followup_shortcuts = {
    "⬅️ Prev verse": "show me the previous verse",
    "➡️ Next verse": "show me the next verse",
    "⬅️ Prev Chapter": "From the same prabandham as above, show the first pasuram from the previous chapter of the same decade",
    "➡️ Next Chapter": "From the same prabandham as above, show the first pasuram from the next chapter of the same decade",
    "⬅️ Prev Decade": "From the same prabandham as above, show the first pasuram from the previous decade",
    "➡️ Next Decade": "From the same prabandham as above, show the first pasuram from the next decade",
}
further_questions_followup_shortcuts = {
    "🧹 Sanitize": "sanitize the native verses",
    "Explain": "provide explanatory notes if available for the above verses",
    "Detailed Meaning": "provide word by word meaning if available for the above verses",
    "Explore Divya Desam": "From the same divya desam as the above pasuram, show me other pasurams",
    "Explore Azhwar": "From the same azhwar as the above pasuram, show me other pasurams",
    "Another divya desam (same āzhwār)": "show pasuram from another divya desam by the same azhwar",
    "Another āzhwār (same divya desam)": "show pasuram from the same divya desam by another azhwar",
    "❓ Quiz": "Pick any pasuram. Frame a question to ask me related to that pasuram based on its explanatory notes and word by word meanings. Output ONLY the pasuram title, the verse number, the pasuram native lyrics, the question you framed and the answer to that question.",
}
chatbot = gr.Chatbot(
elem_id="chatbot",
avatar_images=("assets/avatar_user.png", "assets/adiyen_bot.png"),
# value=intro_messages,
label="Sanatan-AI-Bot",
show_copy_button=True,
show_copy_all_button=True,
type="messages",
height=600,
render_markdown=True,
placeholder="Search the scriptures ...",
)
additional_inputs = gr.Accordion(
label="Additional Inputs", open=False, visible=False
)
chatInterface = gr.ChatInterface(
title="Sanatan-AI",
fn=limited_chat_wrapper,
additional_inputs=[
thread_id,
debug_checkbox,
preferred_language,
message_count,
],
additional_inputs_accordion=additional_inputs,
additional_outputs=[thread_id, message_count],
chatbot=chatbot,
textbox=message_textbox,
type="messages",
)
with gr.Column(visible=False) as followup_examples:
with gr.Row():
followup_count_textbox = gr.Markdown(container=False, show_label=False)
with gr.Row():
gr.Examples(
label="Quick Navigation Follow-ups",
example_labels=[key for key in navigation_followup_shortcuts.keys()],
examples=[value for value in navigation_followup_shortcuts.values()],
inputs=[message_textbox],
examples_per_page=len(navigation_followup_shortcuts),  # Show all examples on the same page.
)
with gr.Row():
gr.Examples(
label="Further Questions Follow-ups",
example_labels=[
key for key in further_questions_followup_shortcuts.keys()
],
examples=[
value for value in further_questions_followup_shortcuts.values()
],
inputs=[message_textbox],
examples_per_page=len(further_questions_followup_shortcuts),  # Show all examples on the same page.
)
# Function to toggle visibility once chat has started
def toggle_examples(history):
return gr.update(visible=len(history) > 0)
def hide_examples_while_processing(is_processing: bool):
return gr.update(visible=not is_processing)
# Whenever the chatbot updates → toggle the examples row
# chatbot.change(toggle_examples, chatbot, followup_examples)
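# Hide the follow-up shortcuts while a reply is being generated; show them again once the chatbot history updates.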
message_textbox.submit(
hide_examples_while_processing,
inputs=[gr.State(True)],
outputs=[followup_examples],
)
chatbot.change(
hide_examples_while_processing,
inputs=[gr.State(False)],
outputs=[followup_examples],
)
def update_followup_counter(count):
    remaining_followups = MAX_MESSAGES_IN_CONVERSATION - count
    no_more_followups = False
    if remaining_followups > 1:
        text = f"✨ `{remaining_followups}` more follow-ups to go."
    elif remaining_followups == 1:
        text = "Just one more follow-up to go!"
    else:
        text = "✅ That was the last follow-up."
        no_more_followups = True
    return gr.update(value=text), gr.update(visible=not no_more_followups)
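# Refresh the remaining-follow-ups banner, and hide the shortcut panels once the quota is exhausted.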
message_count.change(
update_followup_counter,
inputs=[message_count],
outputs=[followup_count_textbox, followup_examples],
)