HAL1993 committed on
Commit
7d8d7b7
·
verified ·
1 Parent(s): 444a0d9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -11
app.py CHANGED
@@ -9,20 +9,20 @@ from diffusers import DiffusionPipeline, AutoencoderTiny
9
  from custom_pipeline import FluxWithCFGPipeline
10
  from libretranslatepy import LibreTranslateAPI
11
 
12
- # --- Start LibreTranslate Server ---
13
  def start_libretranslate():
14
  from libretranslate.main import start
15
  start(host="0.0.0.0", port=5000, no_cache=True)
16
 
17
  threading.Thread(target=start_libretranslate, daemon=True).start()
18
- time.sleep(5) # Wait for server startup
19
  lt = LibreTranslateAPI("http://localhost:5000")
20
 
21
- # --- Torch Optimizations ---
22
  torch.backends.cuda.matmul.allow_tf32 = True
23
  torch.backends.cudnn.benchmark = True
24
 
25
- # --- Constants ---
26
  MAX_SEED = np.iinfo(np.int32).max
27
  DEFAULT_WIDTH = 1024
28
  DEFAULT_HEIGHT = 576
@@ -33,7 +33,7 @@ ASPECT_RATIOS = {
33
  }
34
  INFERENCE_STEPS = 8
35
 
36
- # --- Device and Model Setup ---
37
  dtype = torch.float16
38
  device = "cuda" if torch.cuda.is_available() else "cpu"
39
 
@@ -45,24 +45,24 @@ print("✅ Flux pipeline loaded.")
45
 
46
  @spaces.GPU
47
  def translate_albanian_to_english(text):
48
- """Translate Albanian text to English using local LibreTranslate."""
49
  if not text.strip():
50
  return ""
51
  for attempt in range(2):
52
  try:
53
  translated = lt.translate(text, source_lang="sq", target_lang="en")
54
  if translated:
55
- print(f"LibreTranslate response: {translated}") # Debug
56
  return translated
57
  except Exception as e:
58
  print(f"Translation error (attempt {attempt + 1}): {e}")
59
  if attempt == 1:
60
- return f"Përkthimi dështoi: {str(e)}. Ju lutem provoni përsëri ose përdorni anglisht."
61
- return f"Përkthimi dështoi. Ju lutem provoni përsëri ose përdorni anglisht."
62
 
63
  @spaces.GPU
64
  def generate_image(prompt: str, seed: int = 42, aspect_ratio: str = "16:9", randomize_seed: bool = False):
65
- """Generate an image based on the provided English prompt."""
66
  if pipe is None:
67
  raise gr.Error("Pipeline nuk u ngarkua.")
68
 
@@ -101,7 +101,7 @@ def generate_image(prompt: str, seed: int = 42, aspect_ratio: str = "16:9", rand
101
  torch.cuda.empty_cache()
102
  raise gr.Error(f"Gabim gjatë gjenerimit: {e}")
103
 
104
- # --- UI Layout ---
105
  def create_demo():
106
  with gr.Blocks() as app:
107
  gr.HTML("""
 
9
  from custom_pipeline import FluxWithCFGPipeline
10
  from libretranslatepy import LibreTranslateAPI
11
 
12
+ # Start LibreTranslate Server
13
  def start_libretranslate():
14
  from libretranslate.main import start
15
  start(host="0.0.0.0", port=5000, no_cache=True)
16
 
17
  threading.Thread(target=start_libretranslate, daemon=True).start()
18
+ time.sleep(5)
19
  lt = LibreTranslateAPI("http://localhost:5000")
20
 
21
+ # Torch Optimizations
22
  torch.backends.cuda.matmul.allow_tf32 = True
23
  torch.backends.cudnn.benchmark = True
24
 
25
+ # Constants
26
  MAX_SEED = np.iinfo(np.int32).max
27
  DEFAULT_WIDTH = 1024
28
  DEFAULT_HEIGHT = 576
 
33
  }
34
  INFERENCE_STEPS = 8
35
 
36
+ # Device and Model Setup
37
  dtype = torch.float16
38
  device = "cuda" if torch.cuda.is_available() else "cpu"
39
 
 
45
 
46
  @spaces.GPU
47
  def translate_albanian_to_english(text):
48
+ """Translate Albanian to English using local LibreTranslate."""
49
  if not text.strip():
50
  return ""
51
  for attempt in range(2):
52
  try:
53
  translated = lt.translate(text, source_lang="sq", target_lang="en")
54
  if translated:
55
+ print(f"LibreTranslate response: {translated}")
56
  return translated
57
  except Exception as e:
58
  print(f"Translation error (attempt {attempt + 1}): {e}")
59
  if attempt == 1:
60
+ return f"Përkthimi dështoi: {str(e)}. Provoni përsëri ose përdorni anglisht."
61
+ return f"Përkthimi dështoi. Provoni përsëri ose përdorni anglisht."
62
 
63
  @spaces.GPU
64
  def generate_image(prompt: str, seed: int = 42, aspect_ratio: str = "16:9", randomize_seed: bool = False):
65
+ """Generate image from English prompt."""
66
  if pipe is None:
67
  raise gr.Error("Pipeline nuk u ngarkua.")
68
 
 
101
  torch.cuda.empty_cache()
102
  raise gr.Error(f"Gabim gjatë gjenerimit: {e}")
103
 
104
+ # UI Layout
105
  def create_demo():
106
  with gr.Blocks() as app:
107
  gr.HTML("""