Spaces:
Sleeping
Sleeping
File size: 3,411 Bytes
ac1f51b 4172aa2 a38f382 e9182c9 a38f382 e9182c9 a38f382 a96e484 e9182c9 a38f382 e9182c9 a38f382 f418bc8 a38f382 0444b68 1c18c9b e9182c9 a96e484 769987b a38f382 0444b68 769987b 1c18c9b 769987b 0444b68 1c18c9b 769987b 0444b68 e9182c9 f418bc8 769987b 0646466 e9182c9 b8510d4 e9182c9 b8510d4 e9182c9 a38f382 e9182c9 1c18c9b f418bc8 e9182c9 8126bac a38f382 0646466 b8510d4 e9182c9 1da78a7 e9182c9 a38f382 4172aa2 1da78a7 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 |
# app.py
from app.gradio_app import build_demo
from models.tts_router import cleanup_old_audio
import os
import shutil
import stat
import huggingface_hub
from shutil import which
# -----------------------------
# Model: ensure GGUF is present
# -----------------------------
def ensure_model() -> str:
    """
    Download the GGUF model into ./models if not already present.

    Reads from the environment:
        - LLAMACPP_MODEL_PATH: target path including filename (required)
        - HF_MODEL_REPO: Hugging Face repo id, e.g. Qwen/Qwen2.5-1.5B-Instruct-GGUF (required)
        - HF_MODEL_FILE: file within the repo; defaults to the basename of
          LLAMACPP_MODEL_PATH when unset

    Returns:
        The local filesystem path of the ready-to-use model file.

    Raises:
        RuntimeError: if the required environment variables are missing.
    """
    model_path = os.getenv("LLAMACPP_MODEL_PATH")
    repo_id = os.getenv("HF_MODEL_REPO")
    # Fall back to the target path's basename when HF_MODEL_FILE is unset.
    filename = os.getenv("HF_MODEL_FILE") or (os.path.basename(model_path) if model_path else None)
    if not model_path or not repo_id or not filename:
        raise RuntimeError("Missing config: set LLAMACPP_MODEL_PATH and HF_MODEL_REPO (optionally HF_MODEL_FILE).")
    if os.path.exists(model_path):
        print(f"[MODEL] Found existing model at {model_path}")
        return model_path
    os.makedirs(os.path.dirname(model_path), exist_ok=True)
    # Fix: log the actual file being fetched (message previously contained a
    # garbled placeholder instead of the filename).
    print(f"[MODEL] Downloading {filename} from {repo_id} …")
    local_path = huggingface_hub.hf_hub_download(
        repo_id=repo_id,
        filename=filename,
        local_dir=os.path.dirname(model_path),
        # NOTE(review): local_dir_use_symlinks is deprecated and ignored by
        # recent huggingface_hub releases; kept for older versions' behavior.
        local_dir_use_symlinks=False,
    )
    # If hf_hub_download wrote elsewhere (e.g. a different basename or a cache
    # location), copy the file into the exact path the runtime expects.
    if os.path.abspath(local_path) != os.path.abspath(model_path):
        shutil.copyfile(local_path, model_path)
    print(f"[MODEL] Ready at {model_path}")
    return model_path
def _ensure_exec(fp: str):
if fp and os.path.exists(fp):
try:
os.chmod(fp, 0o755)
except Exception as e:
print("[WARN] chmod failed for", fp, e)
def log_env_for_audio():
    """Print the audio-related environment variables to help debug Spaces."""
    for label, var in (
        ("[ENV] TTS_ENGINE =", "TTS_ENGINE"),
        ("[ENV] PIPER_MODEL=", "PIPER_MODEL"),
        ("[ENV] PIPER_BIN =", "PIPER_BIN"),
    ):
        print(label, os.getenv(var, "(unset)"))
# (Removed a redundant mid-file "import os, stat" — both modules are already
# imported at the top of the file; PEP 8 keeps imports at the top, one per line.)
def ensure_piper_ready():
    """
    Verify the bundled Piper TTS assets and make the binary executable.

    Reads PIPER_BIN (default "piper"), PIPER_MODEL, and ESPEAK_DATA_PATH from
    the environment and logs whether each path exists. When PIPER_BIN points
    at a regular file shipped with the repo, adds the owner-execute bit so it
    can be launched on Spaces. No downloads are performed here.
    """
    bin_path = os.getenv("PIPER_BIN", "piper")
    model = os.getenv("PIPER_MODEL")
    espeak = os.getenv("ESPEAK_DATA_PATH")
    print(f"[PIPER] BIN={bin_path} exists={os.path.exists(bin_path)}")
    print(f"[PIPER] MODEL={model} exists={os.path.exists(model) if model else None}")
    print(f"[PIPER] ESPEAK_DATA_PATH={espeak} exists={os.path.exists(espeak) if espeak else None}")
    # If PIPER_BIN is an absolute/relative file in repo, make it executable.
    if bin_path and os.path.isfile(bin_path):
        try:
            st = os.stat(bin_path)
            os.chmod(bin_path, st.st_mode | stat.S_IEXEC)
            print("[PIPER] chmod +x applied to binary")
        except Exception as e:
            print("[PIPER] chmod failed:", e)
# -------------
# Application
# -------------
def main():
    """Boot sequence for the Space: log env, clean audio, prep Piper, launch UI."""
    # Surface a few environment variables to ease debugging on Spaces.
    log_env_for_audio()
    # Drop stale audio files left over from a previous run.
    cleanup_old_audio(keep_latest=None)
    # Ensure model exists locally
    # ensure_model()
    # The bundled Piper binary must be runnable (no downloads on Spaces).
    ensure_piper_ready()
    demo = build_demo()
    # share=True is ignored on Spaces but convenient when running locally.
    demo.launch(share=True)
# Run the app only when executed as a script (the stray "|" scrape artifact
# that followed main() was removed — it was a syntax error).
if __name__ == "__main__":
    main()