Manjushri committed on
Commit 700b834
1 Parent(s): be04960

Update app.py

Files changed (1): app.py (+2, -1)
app.py CHANGED
@@ -6,7 +6,7 @@ from PIL import Image
 from diffusers import DiffusionPipeline, StableDiffusionLatentUpscalePipeline
 
 device = 'cuda' if torch.cuda.is_available() else 'cpu'
-
+torch.cuda.max_memory_allocated(device=device)
 refiner = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0", use_safetensors=True, torch_dtype=torch.float16, variant="fp16") if torch.cuda.is_available() else DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0")
 refiner.enable_xformers_memory_efficient_attention()
 refiner = refiner.to(device)

@@ -105,6 +105,7 @@ def genie (Model, Prompt, negative_prompt, height, width, scale, steps, seed, up
         return image
 
     if Model == "SDXL 1.0":
+        torch.cuda.max_memory_allocated(device=device)
         sdxl = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
         sdxl = sdxl.to(device)
         sdxl.enable_xformers_memory_efficient_attention()
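
For context, the sketch below shows the pattern this commit touches as a standalone script: picking a device, querying the peak CUDA allocation with torch.cuda.max_memory_allocated (a read-only statistic whose return value the commit discards), and loading the SDXL refiner in fp16 with xformers attention. The model ID and pipeline options are taken from the diff; the CUDA guard around the memory query and the print of the peak value are assumptions added for illustration, not part of app.py.

import torch
from diffusers import DiffusionPipeline

# Device selection, as in app.py.
device = 'cuda' if torch.cuda.is_available() else 'cpu'

if torch.cuda.is_available():
    # max_memory_allocated() only reports the peak number of bytes ever
    # allocated on the device; it does not free or reserve anything.
    peak = torch.cuda.max_memory_allocated(device=device)
    print(f"Peak CUDA memory allocated so far: {peak / 2**20:.1f} MiB")

    # fp16 weights on GPU, matching the commit's CUDA branch.
    refiner = DiffusionPipeline.from_pretrained(
        "stabilityai/stable-diffusion-xl-refiner-1.0",
        use_safetensors=True,
        torch_dtype=torch.float16,
        variant="fp16",
    )
    # Requires the xformers package to be installed.
    refiner.enable_xformers_memory_efficient_attention()
else:
    # Full-precision fallback on CPU, matching the commit's else branch.
    refiner = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0")

refiner = refiner.to(device)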