Upload folder using huggingface_hub
README.md CHANGED
@@ -1,6 +1,6 @@
 ---
 title: sanatan_ai
-app_file:
+app_file: main.py
 sdk: gradio
 sdk_version: 5.38.0
 python_version: 3.12
app.py CHANGED
@@ -342,7 +342,7 @@ message_textbox = gr.Textbox(
 
 with gr.Blocks(
     theme=gr.themes.Citrus(),
-    title="Sanatan-
+    title="Sanatan-AI",
     css="""
     table {
         border-collapse: collapse;
@@ -407,7 +407,7 @@ with gr.Blocks(
         text-overflow: ellipsis;
     }
     """,
-) as
+) as gradio_app:
     show_sidebar = gr.State(True)
 
     # with gr.Column(scale=1, visible=show_sidebar.value) as sidebar_container:
@@ -468,4 +468,4 @@ with gr.Blocks(
     textbox=message_textbox,
 )
 
-app.launch()
+# app.launch()
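This change renames the Blocks context variable to gradio_app and comments out the module-level app.launch() call: the UI no longer serves itself, because main.py mounts the same Blocks object under /web. For quick local testing of the UI on its own, one could still launch the exported Blocks directly. A minimal sketch, assuming only the gradio_app name introduced in this diff; the run_web_only.py filename is hypothetical and not part of the commit:

# run_web_only.py - hypothetical local-dev helper, not part of this commit.
# Launches the Gradio UI by itself, bypassing the FastAPI wrapper in main.py.
from app import gradio_app  # Blocks object exported by app.py

if __name__ == "__main__":
    # launch() starts Gradio's built-in server; app.py no longer calls it
    # because main.py mounts the same Blocks under /web via gr.mount_gradio_app.
    gradio_app.launch()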
main.py CHANGED
@@ -1,22 +1,34 @@
-from
-
-from
-from
+from fastapi.responses import RedirectResponse
+import uvicorn
+from fastapi import FastAPI
+from server import router as mobile_router
+from app import gradio_app # your Blocks object
+import gradio as gr
+import logging
+from fastapi.middleware import Middleware
+from fastapi import Request
 
-
+logging.basicConfig(level=logging.INFO)
 
-
+app = FastAPI(title="Sanatan AI Unified Server")
 
+# Mount mobile endpoints
+app.include_router(mobile_router, prefix="/api")
 
-
-
-llm = ChatOpenAI()
-llm_response = llm.invoke("Generate a simple question to ask the user about geography.")
-# print(response)
-response = tool_search_web.invoke(llm_response.content)
-message = "Question: " + llm_response.content + "\nAnswer: " + response
-tool_push.invoke(message)
+# Convert Gradio Blocks to ASGI app
+app = gr.mount_gradio_app(app, gradio_app, "/web")
 
+# Redirect root URL to /web/
+@app.get("/")
+async def redirect_to_web():
+    return RedirectResponse(url="/web/")
+
+@app.middleware("http")
+async def log_requests(request: Request, call_next):
+    logging.info(f"Request: {request.method} {request.url}")
+    response = await call_next(request)
+    logging.info(f"Response status: {response.status_code}")
+    return response
 
 if __name__ == "__main__":
-    main
+    uvicorn.run("main:app", host="0.0.0.0", port=7860)
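With this change, main.py builds a single FastAPI application that serves the mobile JSON API under /api and the Gradio UI under /web, redirecting / to /web/. A minimal way to exercise both once uvicorn is running; the snippet below is an illustrative sketch that assumes the server is reachable on localhost:7860 (the address from the uvicorn.run call) and uses the third-party requests library, which is an assumption rather than a declared dependency of this repo:

# smoke_test.py - hypothetical manual check against a locally running server.
import requests

BASE = "http://localhost:7860"

# The root URL should redirect to the mounted Gradio UI at /web/.
resp = requests.get(BASE + "/", allow_redirects=False)
print(resp.status_code, resp.headers.get("location"))  # RedirectResponse defaults to 307 -> /web/

# The mobile endpoint defined in server.py, mounted under the /api prefix.
resp = requests.post(BASE + "/api/chat", json={"text": "Hello"})
print(resp.json())  # expected shape: {"reply": "..."}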
server.py ADDED
@@ -0,0 +1,29 @@
+# server.py
+import traceback
+import uuid
+from fastapi import APIRouter, Request
+from fastapi.responses import JSONResponse
+from gradio_client import Client
+from pydantic import BaseModel
+from app import chat
+
+router = APIRouter()
+
+
+class Message(BaseModel):
+    text: str
+
+
+@router.post("/chat")
+async def handle_chat(msg: Message):
+    try:
+        thread_id = uuid.uuid4()
+        reply_text = chat(
+            debug_mode=False, message=msg.text, history=None, thread_id=thread_id
+        )
+
+        return {"reply": reply_text}
+
+    except Exception as e:
+        traceback.print_exc()
+        return {"reply": f"Error: {e}"}
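server.py exposes the existing chat function from app.py as a stateless POST endpoint: each request gets a fresh uuid4 thread_id, and any exception is caught and returned as a reply string with HTTP 200. A small test sketch using FastAPI's TestClient against the unified app from main.py; the test file name is hypothetical, and TestClient requires httpx to be installed:

# test_chat_route.py - illustrative only; the route and payload shape come from server.py.
from fastapi.testclient import TestClient
from main import app  # unified FastAPI app that includes the /api router

client = TestClient(app)

def test_chat_returns_reply():
    # The Message model expects a single "text" field.
    resp = client.post("/api/chat", json={"text": "Who wrote the Bhagavad Gita?"})
    assert resp.status_code == 200
    assert "reply" in resp.json()  # the handler always returns {"reply": ...}, even on error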