Manjushri committed on
Commit
3f4749b
1 Parent(s): b32943f

Update app.py

Browse files

Fixed Bells and Whistles by adding Pulleys and Levers

Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -12,12 +12,12 @@ upscaler = upscaler.to(device)
12
  pipe = pipe.to(device)
13
 
14
def genie (Prompt, negative_prompt, height, width, scale, steps, seed, upscale, upscale_prompt, upscale_neg, upscale_scale, upscale_steps):
    """Generate an image from Prompt with the base pipeline.

    When upscale == "Yes", the base pipeline emits latents which are then
    refined by the latent upscaler using the upscale_* parameters.

    Fixes applied:
    - `manual_seed(Seed)` referenced an undefined capital-S name; the
      parameter is lowercase `seed` (was a NameError on every call).
    - `num_inference_steps=upscale_iter` referenced an undefined name;
      the intended parameter is `upscale_steps`.
    - `upscale_prompt`, `upscale_neg`, and `upscale_scale` were accepted
      but ignored (hard-coded '' and 0); they are now forwarded.
    - `height` and `width` were accepted but never passed to the pipeline.
    """
    # Seed the RNG on the same device as the pipelines for reproducibility.
    generator = torch.Generator(device=device).manual_seed(seed)
    if upscale == "Yes":
        # output_type="latent" keeps the base output in latent space so the
        # upscaler can consume it directly.
        low_res_latents = pipe(Prompt, negative_prompt=negative_prompt, height=height, width=width, num_inference_steps=steps, guidance_scale=scale, generator=generator, output_type="latent").images
        image = upscaler(prompt=upscale_prompt, negative_prompt=upscale_neg, image=low_res_latents, num_inference_steps=upscale_steps, guidance_scale=upscale_scale, generator=generator).images[0]
    else:
        image = pipe(Prompt, negative_prompt=negative_prompt, height=height, width=width, num_inference_steps=steps, guidance_scale=scale, generator=generator).images[0]
    return image
22
 
23
  gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 77 Token Limit.'),
 
12
  pipe = pipe.to(device)
13
 
14
def genie (Prompt, negative_prompt, height, width, scale, steps, seed, upscale, upscale_prompt, upscale_neg, upscale_scale, upscale_steps):
    """Run the base text-to-image pipeline; if upscale == "Yes", pass the
    latents through the latent upscaler and return the refined image."""
    # Deterministic RNG seeded on the pipelines' device.
    rng = torch.Generator(device=device).manual_seed(seed)
    # Arguments shared by both base-pipeline invocations.
    base_kwargs = dict(
        negative_prompt=negative_prompt,
        height=height,
        width=width,
        num_inference_steps=steps,
        guidance_scale=scale,
        generator=rng,
    )
    if upscale != "Yes":
        # No upscaling requested: decode straight to an image.
        return pipe(Prompt, **base_kwargs).images[0]
    # Keep the base output in latent space so the upscaler can consume it.
    low_res_latents = pipe(Prompt, output_type="latent", **base_kwargs).images
    return upscaler(
        prompt=upscale_prompt,
        negative_prompt=upscale_neg,
        image=low_res_latents,
        num_inference_steps=upscale_steps,
        guidance_scale=upscale_scale,
        generator=rng,
    ).images[0]
22
 
23
  gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 77 Token Limit.'),