Jinl committed
Commit ce39c0f
1 Parent(s): 343b49a

Add application file

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -61,7 +61,7 @@ class GlobalText:
         self.pipeline = None
         self.torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
         self.lora_model_state_dict = {}
-        self.device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
+        self.device = torch.device("cpu")
 
     def init_source_image_path(self, source_path):
         self.source_paths = sorted(glob(os.path.join(source_path, '*')))
@@ -83,9 +83,9 @@ class GlobalText:
 
         self.scheduler = 'LCM'
         scheduler = LCMScheduler.from_pretrained(model_path, subfolder="scheduler")
-        self.pipeline = ZePoPipeline.from_pretrained(model_path,scheduler=scheduler,torch_dtype=torch.float16,).to('cuda')
-        if is_xformers:
-            self.pipeline.enable_xformers_memory_efficient_attention()
+        self.pipeline = ZePoPipeline.from_pretrained(model_path,scheduler=scheduler,torch_dtype=torch.float16,)
+        # if is_xformers:
+        #     self.pipeline.enable_xformers_memory_efficient_attention()
         time_end = datetime.now()
         print(f'Load {model_path} successful in {time_end-time_start}')
         return gr.Dropdown()
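
For context, the loading step after this commit boils down to the minimal sketch below. It keeps the commit's CPU-only choice; LCMScheduler comes from diffusers, while the import path for ZePoPipeline and the load_pipeline wrapper are assumptions for illustration, not code from this repo.

import torch
from diffusers import LCMScheduler

from pipeline_zepo import ZePoPipeline  # assumed import path; the class lives in this repo's own code

def load_pipeline(model_path):
    # The commit pins the device to CPU, so the pipeline stays where
    # from_pretrained() loads it instead of being moved with .to('cuda').
    device = torch.device("cpu")

    scheduler = LCMScheduler.from_pretrained(model_path, subfolder="scheduler")
    pipeline = ZePoPipeline.from_pretrained(
        model_path,
        scheduler=scheduler,
        torch_dtype=torch.float16,  # kept from the original code; float32 may be safer for CPU inference
    )
    # xformers memory-efficient attention is CUDA-only, hence commented out in the commit.
    # pipeline.enable_xformers_memory_efficient_attention()
    return pipeline.to(device)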