gokaygokay committed
Commit
ec31dc5
1 Parent(s): 4489ce8
Files changed (2)
  1. app.py +3 -2
  2. llm_inference.py +14 -12
app.py CHANGED
@@ -166,8 +166,9 @@ def create_interface():
     # Function to generate image
     def generate_image(text):
         try:
-            image = llm_node.generate_image(text)
-            return image
+            seed = random.randint(0, 1000000)
+            image_path = llm_node.generate_image(text, seed=seed)
+            return image_path
         except Exception as e:
             print(f"An error occurred while generating the image: {e}")
             return None
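
A note on this hunk (not part of the commit): random.randint requires import random to be in scope in app.py, and the handler now returns whatever llm_node.generate_image yields, which after this commit is the output of a gradio_client call (typically a local file path) rather than a PIL.Image. Since gr.Image output components accept file paths as well as PIL images, the existing wiring should still display the result. A minimal, hypothetical sketch of that wiring; component and variable names are illustrative, not taken from app.py:

import random
import gradio as gr
from llm_inference import LLMInferenceNode

llm_node = LLMInferenceNode()  # assumed module-level instance, as app.py appears to use

def create_interface():
    with gr.Blocks() as demo:
        prompt_box = gr.Textbox(label="Prompt")
        image_out = gr.Image(label="Generated Image")  # accepts PIL images and file paths alike

        # Mirrors the updated handler in this commit
        def generate_image(text):
            try:
                seed = random.randint(0, 1000000)  # fresh seed per request
                return llm_node.generate_image(text, seed=seed)  # local file path from gradio_client
            except Exception as e:
                print(f"An error occurred while generating the image: {e}")
                return None

        gr.Button("Generate").click(generate_image, inputs=prompt_box, outputs=image_out)
    return demo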
llm_inference.py CHANGED
@@ -5,6 +5,7 @@ from openai import OpenAI
 import requests
 from PIL import Image
 import io
+from gradio_client import Client

 class LLMInferenceNode:
     def __init__(self):
@@ -21,6 +22,7 @@ class LLMInferenceNode:
             api_key=self.sambanova_api_key,
             base_url="https://api.sambanova.ai/v1",
         )
+        self.flux_client = Client("KingNish/Realtime-FLUX", hf_token=self.huggingface_token)

     def generate_prompt(self, dynamic_seed, prompt_type, custom_input):
         """
@@ -242,15 +244,15 @@ Your output is only the caption itself, no comments or extra formatting. The cap
             print(f"An error occurred: {e}")
             return f"Error occurred while processing the request: {str(e)}"

-    def generate_image(self, prompt):
-        API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
-        headers = {"Authorization": f"Bearer {self.huggingface_token}"}
-
-        response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
-
-        if response.status_code != 200:
-            raise Exception(f"Error generating image: {response.text}")
-
-        image_bytes = response.content
-        image = Image.open(io.BytesIO(image_bytes))
-        return image
+    def generate_image(self, prompt, seed=42, width=1024, height=1024):
+        try:
+            result = self.flux_client.predict(
+                prompt=prompt,
+                seed=seed,
+                width=width,
+                height=height,
+                api_name="/generate_image"
+            )
+            return result
+        except Exception as e:
+            raise Exception(f"Error generating image: {str(e)}")
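
For reference (not part of this commit), the rewritten generate_image delegates image generation to the KingNish/Realtime-FLUX Space via gradio_client instead of calling the HF Inference API directly. A standalone sketch of that call path, assuming the Space's /generate_image endpoint accepts these keyword arguments and returns a single image output; client.view_api() prints the actual endpoint signatures:

from gradio_client import Client

# hf_token is only needed for private Spaces or to use your account's quota; "hf_..." is a placeholder
client = Client("KingNish/Realtime-FLUX", hf_token="hf_...")
client.view_api()  # lists endpoint names, parameters, and return types

result = client.predict(
    prompt="a watercolor painting of a lighthouse at dusk",
    seed=42,
    width=1024,
    height=1024,
    api_name="/generate_image",
)

# For a single image output, gradio_client downloads the file and returns its local path;
# endpoints with multiple outputs return a tuple instead.
print(result)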