# app/orchestrator.py
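"""Orchestrate one chat turn for FutureCafe: route, enrich, execute, reply.

The active backend (llamacpp / openai / groq) is chosen from settings; any
import or routing failure degrades to a no-op router plus the rule-based
fallback NLG below, so the app keeps answering.
"""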
from __future__ import annotations
import re
from typing import Dict, Any, Callable, Optional

from utils.config import get_settings
from utils.phone import extract_phone, looks_valid
from app.tools import dispatch_tool

# -----------------------------
# Resolve router/NLG per BACKEND
# -----------------------------
_s = get_settings()

_route_fn: Optional[Callable[[str], Dict[str, Any]]] = None
_nlg_fn:   Optional[Callable[[str, Dict[str, Any], str], str]] = None

def _load_router():
    global _route_fn, _nlg_fn
    backend = (_s.BACKEND_LLM or "").strip().lower()

    try:
        if backend == "llamacpp":
            from models.llm_router import respond as route, nlg as nlg_impl
            _route_fn, _nlg_fn = route, nlg_impl
        elif backend == "openai":
            from models.openai_router import respond as route, nlg as nlg_impl
            _route_fn, _nlg_fn = route, nlg_impl
        elif backend == "groq":
            from models.groq_router import respond as route, nlg as nlg_impl
            _route_fn, _nlg_fn = route, nlg_impl
        else:
            # Unknown backend → safe fallbacks
            _route_fn = lambda _: {"tool": None, "args": {}}
            _nlg_fn = _fallback_nlg
    except Exception:
        # If the backend import fails, keep the app running on safe fallbacks.
        _route_fn = lambda _: {"tool": None, "args": {}}
        _nlg_fn = _fallback_nlg

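# Assumed contract for the models.*_router backends (an illustrative sketch,
# not verified against those modules): `respond` maps raw user text to a
# routing dict, and `nlg` renders the final reply string. Roughly:
#
#     def respond(user_text: str) -> Dict[str, Any]:
#         return {"tool": "menu_lookup", "args": {"query": "vegan"}}
#
#     def nlg(tool: str, tool_result: Dict[str, Any], user_text: str) -> str:
#         return "Here are our vegan options: ..."
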
def _fallback_nlg(tool: str, tool_result: Dict[str, Any] | None, user_text: str) -> str:
    """Minimal reply if no NLG provided by the chosen backend."""
    tr = tool_result or {}
    if tool in (None, "", "smalltalk"):
        return "Hello! How can I help with FutureCafe—menu, hours, reservations, or orders?"
    if tool == "get_hours":
        hours = tr.get("hours") or "11:00–22:00 daily"
        address = tr.get("address") or "123 Main St"
        return f"We’re open {hours} at {address}. What else can I do for you?"
    if tool == "menu_lookup":
        items = tr.get("items") or []
        if items:
            names = ", ".join(i.get("name", "item") for i in items[:6])
            return f"Here are some popular items: {names}. Would you like to order any of these?"
        return "I can look up menu items—any dietary needs or a specific dish?"
    if tool == "create_reservation":
        when = tr.get("when") or tr.get("datetime") or "your requested time"
        code = tr.get("reservation_id") or tr.get("code") or "a confirmation code"
        return f"Reservation confirmed for {when}. Code {code}. Anything else I can help with?"
    if tool == "create_order":
        items = tr.get("items") or []
        if items:
            summary = ", ".join(f"{i.get('qty','1')}× {i.get('name','item')}" for i in items)
            total = tr.get("total")
            total_note = f". Total ${total:.2f}" if isinstance(total, (int, float)) else ""
            return f"Order placed: {summary}{total_note}."
        return "Your order is noted. Anything to add?"
    # Generic fallback
    return "Done. Anything else I can help you with?"

# Load router once at import
_load_router()

# -----------------------------
# Public API
# -----------------------------
def llm_route_and_execute(user_text: str) -> Dict[str, Any]:
    """
    1) Route the user_text to a tool (model-dependent)
    2) Enrich args (e.g., reservation phone/name)
    3) Execute tool (dispatch_tool)
    4) Generate reply (NLG if available, else fallback)
    Returns a single dict suitable for the UI diagnostics panel.
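
    Example shape (illustrative values):
        {"intent": "get_hours", "slots": {}, "tool_selected": "get_hours",
         "tool_result": {"hours": "11:00–22:00 daily"}, "response": "We're open ..."}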
    """
    text = (user_text or "").strip()
    if not text:
        return {
            "intent": "smalltalk",
            "slots": {},
            "tool_selected": None,
            "tool_result": None,
            "response": "Hello! How can I help with FutureCafe today?",
        }

    # --- 1) Route ---
    try:
        route = _route_fn(text) if _route_fn else {"tool": None, "args": {}}
        if not isinstance(route, dict):
            route = {"tool": None, "args": {}}
    except Exception:
        route = {"tool": None, "args": {}}

    tool = route.get("tool")
    args = dict(route.get("args") or {})
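    # Example route shape (illustrative; exact keys come from the active backend):
    #   {"tool": "create_reservation", "args": {"name": "Alice", "party_size": 2}}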

    # --- 2) Enrich args for reservation ---
    if tool == "create_reservation":
        phone = extract_phone(text)
        if phone and looks_valid(phone) and not args.get("phone"):
            args["phone"] = phone
        # Lightweight name inference: "my name is X", "I am X", "I'm X".
        # Scope the ignore-case flag to the lead-in phrase so the captured
        # name itself must still be capitalized (re.I on the whole pattern
        # would let e.g. "i am hungry" capture "hungry" as a name).
        if not args.get("name"):
            m = re.search(r"(?i:my name is|i am|i'm)\s+([A-Z][a-z]+)", text)
            if m:
                args["name"] = m.group(1)

    # --- 3) Execute tool (optional) ---
    tool_result = None
    if tool:
        try:
            tool_result = dispatch_tool(tool, args)
        except Exception as e:
            tool_result = {"ok": False, "error": f"tool_error: {e!s}"}

    # --- 4) NLG (or fallback) ---
    try:
        nlg = _nlg_fn or _fallback_nlg
        reply = nlg(tool or "", tool_result or {}, text)
    except Exception:
        reply = _fallback_nlg(tool or "", tool_result or {}, text)

    return {
        "intent": tool or "smalltalk",
        "slots": args,
        "tool_selected": tool,
        "tool_result": tool_result,
        "response": reply,
    }
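

if __name__ == "__main__":
    # Minimal manual smoke test: a sketch, assuming the utils/, models/, and
    # app.tools modules resolve from the current working directory.
    for probe in (
        "hi there",
        "what are your hours?",
        "book a table for two, my name is Alice, 555-123-4567",
    ):
        result = llm_route_and_execute(probe)
        print(f"{probe!r} -> tool={result['tool_selected']} reply={result['response']!r}")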