1inkusFace committed on
Commit
c97c7cd
·
verified ·
1 Parent(s): 3d4a02d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -5
app.py CHANGED
@@ -7,6 +7,12 @@
7
 
8
  import spaces
9
  import os
 
 
 
 
 
 
10
  import random
11
  import uuid
12
  import gradio as gr
@@ -79,8 +85,6 @@ styles = {k["name"]: (k["prompt"], k["negative_prompt"]) for k in style_list}
79
  DEFAULT_STYLE_NAME = "Style Zero"
80
  STYLE_NAMES = list(styles.keys())
81
  HF_TOKEN = os.getenv("HF_TOKEN")
82
- os.putenv("HF_HUB_ENABLE_HF_TRANSFER","1")
83
- os.environ["SAFETENSORS_FAST_GPU"] = "1"
84
 
85
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
86
 
@@ -199,7 +203,7 @@ def generate_30(
199
  progress=gr.Progress(track_tqdm=True)
200
  ):
201
  seed = random.randint(0, MAX_SEED)
202
- generator = torch.Generator(device='cuda').manual_seed(seed)
203
  pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
204
  pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
205
  options = {
@@ -257,7 +261,7 @@ def generate_60(
257
  progress=gr.Progress(track_tqdm=True)
258
  ):
259
  seed = random.randint(0, MAX_SEED)
260
- generator = torch.Generator(device='cuda').manual_seed(seed)
261
  pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
262
  pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
263
  options = {
@@ -315,7 +319,7 @@ def generate_90(
315
  progress=gr.Progress(track_tqdm=True)
316
  ):
317
  seed = random.randint(0, MAX_SEED)
318
- generator = torch.Generator(device='cuda').manual_seed(seed)
319
  pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
320
  pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
321
  options = {
 
7
 
8
  import spaces
9
  import os
10
+
11
+ os.putenv('TORCH_LINALG_PREFER_CUSOLVER','1')
12
+ os.putenv('PYTORCH_CUDA_ALLOC_CONF','max_split_size_mb:128')
13
+ os.environ["SAFETENSORS_FAST_GPU"] = "1"
14
+ os.putenv('HF_HUB_ENABLE_HF_TRANSFER','1')
15
+
16
  import random
17
  import uuid
18
  import gradio as gr
 
85
  DEFAULT_STYLE_NAME = "Style Zero"
86
  STYLE_NAMES = list(styles.keys())
87
  HF_TOKEN = os.getenv("HF_TOKEN")
 
 
88
 
89
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
90
 
 
203
  progress=gr.Progress(track_tqdm=True)
204
  ):
205
  seed = random.randint(0, MAX_SEED)
206
+ generator = torch.Generator(device='cpu').manual_seed(seed)
207
  pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
208
  pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
209
  options = {
 
261
  progress=gr.Progress(track_tqdm=True)
262
  ):
263
  seed = random.randint(0, MAX_SEED)
264
+ generator = torch.Generator(device='cpu').manual_seed(seed)
265
  pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
266
  pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
267
  options = {
 
319
  progress=gr.Progress(track_tqdm=True)
320
  ):
321
  seed = random.randint(0, MAX_SEED)
322
+ generator = torch.Generator(device='cpu').manual_seed(seed)
323
  pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
324
  pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
325
  options = {