KingNish committed
Commit ed38e7e · verified · 1 Parent(s): 56b40fc

Update app.py

Files changed (1)
  1. app.py +29 -2
app.py CHANGED
@@ -6,6 +6,7 @@ import torch
 import time
 from diffusers import DiffusionPipeline, AutoencoderTiny
 from custom_pipeline import FluxWithCFGPipeline
+from diffusers.hooks import apply_group_offloading
 
 # --- Torch Optimizations ---
 torch.backends.cuda.matmul.allow_tf32 = True
@@ -28,8 +29,34 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 pipe = FluxWithCFGPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype)
 pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype)
 
-pipe.vae.enable_slicing()
-pipe.vae.enable_tiling()
+apply_group_offloading(
+    pipe.transformer,
+    offload_type="leaf_level",
+    offload_device=torch.device("cpu"),
+    onload_device=torch.device("cuda"),
+    use_stream=True,
+)
+apply_group_offloading(
+    pipe.text_encoder,
+    offload_device=torch.device("cpu"),
+    onload_device=torch.device("cuda"),
+    offload_type="leaf_level",
+    use_stream=True,
+)
+apply_group_offloading(
+    pipe.text_encoder_2,
+    offload_device=torch.device("cpu"),
+    onload_device=torch.device("cuda"),
+    offload_type="leaf_level",
+    use_stream=True,
+)
+apply_group_offloading(
+    pipe.vae,
+    offload_device=torch.device("cpu"),
+    onload_device=torch.device("cuda"),
+    offload_type="leaf_level",
+    use_stream=True,
+)
 
 pipe.to(device)
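Group offloading keeps a component's weights on the CPU and moves each group of layers onto the GPU only for the moment it executes, trading some transfer latency for a much smaller VRAM footprint. Below is a minimal, self-contained sketch of the same pattern this diff applies, but using the stock FluxPipeline rather than the repo's custom FluxWithCFGPipeline; the prompt, step count, and output path are illustrative assumptions, not part of the commit.

import torch
from diffusers import FluxPipeline
from diffusers.hooks import apply_group_offloading

# Load on CPU first; the offloading hooks handle device placement per layer group.
pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
)

# Apply the same leaf-level, stream-backed offloading to every large component.
for module in (pipe.transformer, pipe.text_encoder, pipe.text_encoder_2, pipe.vae):
    apply_group_offloading(
        module,
        offload_device=torch.device("cpu"),
        onload_device=torch.device("cuda"),
        offload_type="leaf_level",  # offload at the granularity of individual leaf modules
        use_stream=True,            # prefetch the next group on a separate CUDA stream
    )

# Weights are streamed onto the GPU group by group during this call.
image = pipe(
    "a photo of a cat",
    num_inference_steps=4,
    guidance_scale=0.0,
    max_sequence_length=256,
).images[0]
image.save("cat.png")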