# app/orchestrator.py
from __future__ import annotations
import re
from typing import Dict, Any, Callable, Optional
from utils.config import get_settings
from utils.phone import extract_phone, looks_valid
from app.tools import dispatch_tool
# -----------------------------
# Resolve router/NLG per BACKEND
# -----------------------------
_s = get_settings()
_route_fn: Optional[Callable[[str], Dict[str, Any]]] = None
_nlg_fn: Optional[Callable[[str, Dict[str, Any], str], str]] = None
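# Expected backend router interface (per the annotations above and the call sites
# in llm_route_and_execute below):
#   respond(user_text) -> {"tool": <tool name or None>, "args": {...}}
#   nlg(tool, tool_result, user_text) -> reply string
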
def _load_router():
    global _route_fn, _nlg_fn
    backend = (_s.BACKEND_LLM or "").lower()
    try:
        if backend == "llamacpp":
            from models.llm_router import respond as route, nlg as nlg_impl
            _route_fn, _nlg_fn = route, nlg_impl
        elif backend == "openai":
            from models.openai_router import respond as route, nlg as nlg_impl
            _route_fn, _nlg_fn = route, nlg_impl
        elif backend == "groq":
            from models.groq_router import respond as route, nlg as nlg_impl
            _route_fn, _nlg_fn = route, nlg_impl
        else:
            # Unknown backend → safe fallbacks
            _route_fn = lambda _: {"tool": None, "args": {}}
            _nlg_fn = _fallback_nlg
    except Exception:
        # If import fails, still keep app running with safe fallbacks
        _route_fn = lambda _: {"tool": None, "args": {}}
        _nlg_fn = _fallback_nlg

def _fallback_nlg(tool: str, tool_result: Dict[str, Any] | None, user_text: str) -> str:
"""Minimal reply if no NLG provided by the chosen backend."""
tr = tool_result or {}
if tool in (None, "", "smalltalk"):
return "Hello! How can I help with FutureCafe—menu, hours, reservations, or orders?"
if tool == "get_hours":
hours = tr.get("hours") or "11:00–22:00 daily"
address = tr.get("address") or "123 Main St"
return f"We’re open {hours} at {address}. What else can I do for you?"
if tool == "menu_lookup":
items = tr.get("items") or []
if items:
names = ", ".join(i.get("name", "item") for i in items[:6])
return f"Here are some popular items: {names}. Would you like to order any of these?"
return "I can look up menu items—any dietary needs or a specific dish?"
if tool == "create_reservation":
when = tr.get("when") or tr.get("datetime") or "your requested time"
code = tr.get("reservation_id") or tr.get("code") or "a confirmation code"
return f"Reservation confirmed for {when}. Code {code}. Anything else I can help with?"
if tool == "create_order":
items = tr.get("items") or []
if items:
summary = ", ".join(f"{i.get('qty','1')}× {i.get('name','item')}" for i in items)
total = tr.get("total")
return f"Order placed: {summary}" + (f". Total ${total:.2f}" if isinstance(total, (int,float)) else "") + "."
return "Your order is noted. Anything to add?"
# Generic fallback
return "Done. Anything else I can help you with?"
# Load router once at import
_load_router()
# -----------------------------
# Public API
# -----------------------------
def llm_route_and_execute(user_text: str) -> Dict[str, Any]:
"""
1) Route the user_text to a tool (model-dependent)
2) Enrich args (e.g., reservation phone/name)
3) Execute tool (dispatch_tool)
4) Generate reply (NLG if available, else fallback)
Returns a single dict suitable for the UI diagnostics panel.
"""
text = (user_text or "").strip()
if not text:
return {
"intent": "smalltalk",
"slots": {},
"tool_selected": None,
"tool_result": None,
"response": "Hello! How can I help with FutureCafe today?",
}
# --- 1) Route ---
try:
route = _route_fn(text) if _route_fn else {"tool": None, "args": {}}
if not isinstance(route, dict):
route = {"tool": None, "args": {}}
except Exception:
route = {"tool": None, "args": {}}
tool = route.get("tool")
args = dict(route.get("args") or {})
# --- 2) Enrich args for reservation ---
if tool == "create_reservation":
phone = extract_phone(text)
if looks_valid(phone) and not args.get("phone"):
args["phone"] = phone
# lightweight name inference: “my name is X”, “I am X”, “I’m X”
if not args.get("name"):
m = re.search(r"(?:my name is|i am|i'm)\s+([A-Z][a-z]+)", text, re.I)
if m:
args["name"] = m.group(1)
# --- 3) Execute tool (optional) ---
tool_result = None
if tool:
try:
tool_result = dispatch_tool(tool, args)
except Exception as e:
tool_result = {"ok": False, "error": f"tool_error: {e!s}"}
# --- 4) NLG (or fallback) ---
try:
reply = _nlg_fn(tool or "", tool_result or {}, text) if _nlg_fn else _fallback_nlg(tool or "", tool_result or {}, text)
except Exception:
reply = _fallback_nlg(tool or "", tool_result or {}, text)
return {
"intent": tool or "smalltalk",
"slots": args,
"tool_selected": tool,
"tool_result": tool_result,
"response": reply,
}
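

if __name__ == "__main__":
    # Hypothetical local smoke test (illustrative only, not part of the module's
    # public surface): exercise the full route → enrich → execute → reply pipeline
    # on a couple of sample utterances. Assumes the repo's utils/, models/, and
    # app/ packages are importable, as the imports at the top of this file require.
    import json

    for sample in (
        "What are your hours?",
        "I'd like to book a table at 7pm, my name is Dana, call me at 555-123-4567.",
    ):
        print(json.dumps(llm_route_and_execute(sample), indent=2, default=str))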