fffiloni committed on
Commit
4acef03
·
verified ·
1 Parent(s): d713a89

disable xformers memory efficient attention on unet

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -136,16 +136,17 @@ def main(video_path, audio_path, progress=gr.Progress(track_tqdm=True)):
136
  unet, _ = UNet3DConditionModel.from_pretrained(
137
  OmegaConf.to_container(config.model),
138
  inference_ckpt_path, # load checkpoint
139
- #device="cpu",
140
  )
141
 
142
  unet = unet.to(dtype=torch.float16)
143
 
 
144
  # set xformers
145
 
146
  if is_xformers_available():
147
  unet.enable_xformers_memory_efficient_attention()
148
-
149
 
150
  pipeline = LipsyncPipeline(
151
  vae=vae,
 
136
  unet, _ = UNet3DConditionModel.from_pretrained(
137
  OmegaConf.to_container(config.model),
138
  inference_ckpt_path, # load checkpoint
139
+ device="cpu",
140
  )
141
 
142
  unet = unet.to(dtype=torch.float16)
143
 
144
+ """
145
  # set xformers
146
 
147
  if is_xformers_available():
148
  unet.enable_xformers_memory_efficient_attention()
149
+ """
150
 
151
  pipeline = LipsyncPipeline(
152
  vae=vae,