Eyob-Sol's picture
Upload 41 files
ac1f51b verified
raw
history blame
2.98 kB
# app/llm_router.py
from __future__ import annotations
import os
from typing import Any, Dict
from utils.config import get_settings
# --- Existing rule-based pieces kept as a fallback ---
def small_router(text: str) -> dict:
    """Route a user utterance to a tool via keyword matching.

    Returns a dict of the form ``{"tool": <name or None>, "args": {...}}``.
    Intended as a cheap fallback when the LLM router is disabled.
    """
    lowered = (text or "").lower()

    def _mentions(*words: str) -> bool:
        # True if any of the given keywords occurs in the utterance.
        return any(w in lowered for w in words)

    if _mentions("hour", "open", "close", "address", "location"):
        return {"tool": "get_hours", "args": {}}

    if _mentions("menu", "vegan", "gluten", "pizza", "salad", "special"):
        # Only exact filter terms are forwarded; note "gluten" alone triggers
        # the branch but only "gluten-free" appears as a filter.
        filters = [w for w in ("vegan", "gluten-free", "pizza", "salad") if w in lowered]
        return {"tool": "menu_lookup", "args": {"filters": filters}}

    if _mentions("reserve", "reservation", "book", "table"):
        # Very rough slot extraction: party size and time default to None.
        size = 2 if _mentions("2", "two") else None
        when = "19:00" if "7" in lowered else None
        return {"tool": "create_reservation", "args": {"party_size": size, "datetime_str": when}}

    if _mentions("order", "buy"):
        return {"tool": "create_order", "args": {"items": []}}

    return {"tool": None, "args": {}}
def nlg(intent: str, tool_result: dict, user_text: str) -> str:
    """Render a tool result as a short natural-language reply.

    Args:
        intent: The tool name chosen by the router (e.g. "get_hours").
        tool_result: The dict returned by that tool; expected keys vary
            per intent (assumed, not validated here — TODO confirm against
            the tool implementations).
        user_text: The original user utterance (currently unused).

    Returns:
        A one-sentence reply string; a generic greeting for unknown intents.
    """
    if intent == "get_hours":
        h = tool_result
        # BUGFIX: open/close times were concatenated with no separator
        # ("9:0021:00"); restore the en dash between them.
        return f"We’re open {h['open']}–{h['close']} daily at {h['address']}."
    if intent == "menu_lookup":
        items = (tool_result or {}).get("items") or []
        if not items:
            return "We have a variety of options—anything specific you’d like?"
        # Show at most the first three items.
        tops = ", ".join(f"{it['name']} (${it['price']})" for it in items[:3])
        return f"Popular picks: {tops}."
    if intent == "create_reservation":
        if tool_result.get("ok"):
            return f"Reservation confirmed for {tool_result['party_size']} at {tool_result['when']}. Code {tool_result['reservation_id']}."
        return "I couldn't confirm that reservation—want me to try again?"
    if intent == "create_order":
        if tool_result.get("ok"):
            items = ", ".join(f"{it['qty']}× {it['name']}" for it in tool_result.get("items", []))
            return f"Got it: {items}. Total ${tool_result.get('total', 0)}."
        return "I couldn't place that order—want me to try again?"
    return "Hello, this is Marta, an AI agent for FutureCafe. How can I help you today?"
# --- Router mode switch (env-controlled) ---
# ROUTER_MODE = "rules" | "llm"
# - rules: use small_router (current behavior)
# - llm: return no tool; the chat LLM handles everything in text/voice flows
def _router_mode() -> str:
    """Return the active router mode, normalized to lowercase.

    Precedence: process environment ROUTER_MODE, then the settings
    object's ROUTER_MODE attribute, then the literal default "rules".
    """
    settings = get_settings()
    configured = getattr(settings, "ROUTER_MODE", "rules")
    # A process-level env var overrides whatever .env/settings provides.
    mode = os.getenv("ROUTER_MODE", configured)
    return mode.strip().lower()
def respond(user_text: str) -> Dict[str, Any]:
    """Dispatch an utterance to a tool based on the configured router mode.

    In "llm" mode no tool is pre-selected (the downstream chat model
    decides); otherwise the rule-based ``small_router`` handles it.
    """
    if _router_mode() == "llm":
        # Pure LLM flow: defer all tool selection to the chat model.
        return {"tool": None, "args": {}}
    return small_router(user_text)