multimodalart (HF staff) committed
Commit c4cae52
1 Parent(s): f1c584e

Update app.py

Files changed (1)
  app.py: +1, -2
app.py CHANGED
@@ -19,7 +19,7 @@ pipe = pipe.to(device)
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
-@spaces.GPU #[uncomment to use ZeroGPU]
+@spaces.GPU(duration=65) #[uncomment to use ZeroGPU]
 def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, true_guidance, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
 
     if randomize_seed:
@@ -64,7 +64,6 @@ with gr.Blocks(css=css) as demo:
         with gr.Row():
             prompt = gr.Text(
                 label="Prompt",
-                show_label=False,
                 max_lines=1,
                 placeholder="Enter your prompt",
                 container=False,
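
For context, here is a minimal sketch of how the decorated infer() typically sits in a ZeroGPU Space's app.py. The model id, the pipeline call arguments, and the return values below are assumptions based on the standard Gradio text-to-image template, not the actual contents of this file; only the decorator, the module-level constants, and the infer() signature come from the diff above.

import random

import gradio as gr
import numpy as np
import spaces
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"

# Hypothetical checkpoint; the real app.py loads its own model.
pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0")
pipe = pipe.to(device)

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1024

@spaces.GPU(duration=65)  # request a ZeroGPU slot for up to 65 s per call
def infer(prompt, negative_prompt, seed, randomize_seed, width, height,
          guidance_scale, true_guidance, num_inference_steps,
          progress=gr.Progress(track_tqdm=True)):
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)
    # true_guidance is part of the UI signature; how the real pipeline consumes
    # it is not shown in this diff, so it is left unused in this sketch.
    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]
    return image, seed

The commit's change is the explicit duration argument: spaces.GPU accepts a duration (in seconds) that extends the per-call GPU reservation beyond the default. The decorator only has an effect when the Space runs on ZeroGPU hardware, which is why the template ships the line with the "#[uncomment to use ZeroGPU]" comment.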