Yhhxhfh committed on
Commit
5b21cd9
1 Parent(s): 4b2e116

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -61,7 +61,7 @@ class ModelManager:
61
  model = Llama.from_pretrained(repo_id=config['repo_id'], filename=config['filename'], use_auth_token=HUGGINGFACE_TOKEN)
62
  models.append(model)
63
  except Exception as e:
64
- pass
65
  self.model = models
66
 
67
  model_manager = ModelManager()
@@ -90,15 +90,15 @@ def remove_duplicates(text):
90
  @spaces.GPU()
91
  async def generate_combined_response(inputs):
92
  combined_response = ""
93
- top_p = round(random.uniform(0.1, 1.0), 2)
94
  top_k = random.randint(1, 100)
95
- temperature = round(random.uniform(0.1, 2.0), 2)
96
  for model in global_data['model']:
97
  try:
98
  response = model(inputs, top_p=top_p, top_k=top_k, temperature=temperature)
99
  combined_response += remove_duplicates(response['choices'][0]['text']) + "\n"
100
  except Exception as e:
101
- pass
102
  return combined_response
103
 
104
  async def process_message(message):
 
61
  model = Llama.from_pretrained(repo_id=config['repo_id'], filename=config['filename'], use_auth_token=HUGGINGFACE_TOKEN)
62
  models.append(model)
63
  except Exception as e:
64
+ continue
65
  self.model = models
66
 
67
  model_manager = ModelManager()
 
90
  @spaces.GPU()
91
  async def generate_combined_response(inputs):
92
  combined_response = ""
93
+ top_p = round(random.uniform(0.01, 1.00), 2)
94
  top_k = random.randint(1, 100)
95
+ temperature = round(random.uniform(0.01, 2.00), 2)
96
  for model in global_data['model']:
97
  try:
98
  response = model(inputs, top_p=top_p, top_k=top_k, temperature=temperature)
99
  combined_response += remove_duplicates(response['choices'][0]['text']) + "\n"
100
  except Exception as e:
101
+ continue
102
  return combined_response
103
 
104
  async def process_message(message):