gokaygokay committed on
Commit
54e5a29
1 Parent(s): d50abc1
Files changed (1) hide show
  1. app.py +11 -16
app.py CHANGED
@@ -10,9 +10,6 @@ title = """<h1 align="center">Random Prompt Generator</h1>
10
  </center></p>
11
  """
12
 
13
- # Global variable to store selected prompt type
14
- selected_prompt_type = "Long" # Default value
15
-
16
  def create_interface():
17
  llm_node = LLMInferenceNode()
18
 
@@ -31,15 +28,16 @@ def create_interface():
31
  interactive=True
32
  )
33
 
34
- # Function to update the selected prompt type
35
- def update_prompt_type(value):
36
- global selected_prompt_type
37
- selected_prompt_type = value
38
- print(f"Updated prompt type: {selected_prompt_type}")
39
- return value
 
40
 
41
  # Connect the update_prompt_type function to the prompt_type dropdown
42
- prompt_type.change(update_prompt_type, inputs=[prompt_type], outputs=[prompt_type])
43
 
44
  with gr.Column(scale=2):
45
  with gr.Accordion("LLM Prompt Generation", open=False):
@@ -105,8 +103,7 @@ def create_interface():
105
  )
106
 
107
  # **Unified Function to Generate Prompt and Text**
108
- def generate_random_prompt_with_llm(custom_input, prompt_type, long_talk, compress, compression_level, custom_base_prompt, provider, api_key, model_selected):
109
- global selected_prompt_type # Declare as global
110
  try:
111
  # Step 1: Generate Prompt
112
  dynamic_seed = random.randint(0, 1000000)
@@ -128,7 +125,7 @@ def create_interface():
128
  compress=compress,
129
  compression_level=compression_level,
130
  poster=poster, # Added the missing 'poster' argument
131
- prompt_type=selected_prompt_type,
132
  custom_base_prompt=custom_base_prompt,
133
  provider=provider,
134
  api_key=api_key,
@@ -136,8 +133,6 @@ def create_interface():
136
  )
137
  print(f"Generated Text: {result}")
138
 
139
- # Reset selected_prompt_type if necessary
140
- selected_prompt_type = "Long"
141
  return result
142
 
143
  except Exception as e:
@@ -147,7 +142,7 @@ def create_interface():
147
  # **Connect the Unified Function to the Single Button**
148
  generate_button.click(
149
  generate_random_prompt_with_llm,
150
- inputs=[custom, prompt_type, long_talk, compress, compression_level, custom_base_prompt, llm_provider, api_key, model],
151
  outputs=[text_output],
152
  api_name="generate_random_prompt_with_llm"
153
  )
 
10
  </center></p>
11
  """
12
 
 
 
 
13
  def create_interface():
14
  llm_node = LLMInferenceNode()
15
 
 
28
  interactive=True
29
  )
30
 
31
+ # Add a State component to store the selected prompt type
32
+ prompt_type_state = gr.State("Long")
33
+
34
+ # Update the function to use State
35
+ def update_prompt_type(value, state):
36
+ print(f"Updated prompt type: {value}")
37
+ return value, value
38
 
39
  # Connect the update_prompt_type function to the prompt_type dropdown
40
+ prompt_type.change(update_prompt_type, inputs=[prompt_type, prompt_type_state], outputs=[prompt_type, prompt_type_state])
41
 
42
  with gr.Column(scale=2):
43
  with gr.Accordion("LLM Prompt Generation", open=False):
 
103
  )
104
 
105
  # **Unified Function to Generate Prompt and Text**
106
+ def generate_random_prompt_with_llm(custom_input, prompt_type, long_talk, compress, compression_level, custom_base_prompt, provider, api_key, model_selected, prompt_type_state):
 
107
  try:
108
  # Step 1: Generate Prompt
109
  dynamic_seed = random.randint(0, 1000000)
 
125
  compress=compress,
126
  compression_level=compression_level,
127
  poster=poster, # Added the missing 'poster' argument
128
+ prompt_type=prompt_type_state,
129
  custom_base_prompt=custom_base_prompt,
130
  provider=provider,
131
  api_key=api_key,
 
133
  )
134
  print(f"Generated Text: {result}")
135
 
 
 
136
  return result
137
 
138
  except Exception as e:
 
142
  # **Connect the Unified Function to the Single Button**
143
  generate_button.click(
144
  generate_random_prompt_with_llm,
145
+ inputs=[custom, prompt_type, long_talk, compress, compression_level, custom_base_prompt, llm_provider, api_key, model, prompt_type_state],
146
  outputs=[text_output],
147
  api_name="generate_random_prompt_with_llm"
148
  )