gokaygokay committed
Commit f4cfcff (1 parent: c23432a)

Update llm_inference.py

Files changed (1):
  llm_inference.py (+1, -17)
llm_inference.py CHANGED
@@ -22,7 +22,6 @@ class LLMInferenceNode:
             api_key=self.sambanova_api_key,
             base_url="https://api.sambanova.ai/v1",
         )
-        self.flux_client = Client("KingNish/Realtime-FLUX", hf_token=self.huggingface_token)
 
     def generate_prompt(self, dynamic_seed, prompt_type, custom_input):
         """
@@ -242,19 +241,4 @@ Your output is only the caption itself, no comments or extra formatting. The cap
 
         except Exception as e:
             print(f"An error occurred: {e}")
-            return f"Error occurred while processing the request: {str(e)}"
-
-    def generate_image(self, prompt, seed=42, width=1024, height=1024):
-        try:
-            result = self.flux_client.predict(
-                prompt=prompt,
-                seed=seed,
-                width=width,
-                height=height,
-                api_name="/generate_image"
-            )
-            # Extract the image path from the result tuple
-            image_path = result[0]
-            return image_path
-        except Exception as e:
-            raise Exception(f"Error generating image: {str(e)}")
+            return f"Error occurred while processing the request: {str(e)}"
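For reference, the deleted generate_image helper called the KingNish/Realtime-FLUX Space through gradio_client. The sketch below replays that same call as a standalone script; it assumes the Space still exposes the /generate_image endpoint with the parameters used in the removed method, and the prompt string is only a placeholder.

# Standalone sketch of the removed image-generation call (assumption: the
# KingNish/Realtime-FLUX Space still exposes /generate_image with these parameters).
from gradio_client import Client

client = Client("KingNish/Realtime-FLUX")  # add hf_token=... if the Space requires auth

result = client.predict(
    prompt="a sample prompt",  # placeholder prompt for illustration
    seed=42,
    width=1024,
    height=1024,
    api_name="/generate_image",
)

# As in the removed code, the first element of the returned tuple is the image path.
image_path = result[0]
print(image_path)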