AIdeaText committed
Commit ca32070
Parent(s): 5780733

Update app.py

Files changed (1):
  app.py (+19 -9)
app.py CHANGED
@@ -95,22 +95,32 @@ st.set_page_config(
     layout="wide",
     page_icon="random"
 )
-
+#####################################################################################################
 @st.cache_resource
 def load_chatbot_model():
-    tokenizer = BlenderbotTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
-    model = BlenderbotForConditionalGeneration.from_pretrained("facebook/blenderbot-400M-distill")
-    return tokenizer, model
+    try:
+        from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
+        tokenizer = BlenderbotTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
+        model = BlenderbotForConditionalGeneration.from_pretrained("facebook/blenderbot-400M-distill")
+        return tokenizer, model
+    except Exception as e:
+        logger.error(f"Error al cargar el modelo del chatbot: {str(e)}")
+        return None, None
 
 # Load the chatbot model
 chatbot_tokenizer, chatbot_model = load_chatbot_model()
 
 def get_chatbot_response(input_text):
-    inputs = chatbot_tokenizer(input_text, return_tensors="pt")
-    reply_ids = chatbot_model.generate(**inputs)
-    response = chatbot_tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]
-    return response
-
+    if chatbot_tokenizer is None or chatbot_model is None:
+        return "Lo siento, el chatbot no está disponible en este momento."
+    try:
+        inputs = chatbot_tokenizer(input_text, return_tensors="pt")
+        reply_ids = chatbot_model.generate(**inputs)
+        return chatbot_tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]
+    except Exception as e:
+        logger.error(f"Error al generar respuesta del chatbot: {str(e)}")
+        return "Lo siento, hubo un error al procesar tu mensaje."
+########################################################################################################
 def load_spacy_models():
     return {
         'es': spacy.load("es_core_news_lg"),
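
For context, a minimal sketch of how the hardened get_chatbot_response() might be wired into a Streamlit chat UI. This wiring is not part of the commit: the st.chat_input/st.chat_message calls, the chat_history session key, and the prompt text are assumptions, and logger is assumed to be configured elsewhere in app.py. The point of the pattern is that a failed model load now degrades to a fallback reply instead of raising inside the page.

import streamlit as st

# Hypothetical usage, not in the diff: a simple chat loop around
# get_chatbot_response(), which returns a fallback string on any failure.
if "chat_history" not in st.session_state:            # assumed session key
    st.session_state.chat_history = []

user_message = st.chat_input("Escribe tu mensaje")     # assumed prompt text
if user_message:
    st.session_state.chat_history.append(("user", user_message))
    reply = get_chatbot_response(user_message)          # defined in app.py above
    st.session_state.chat_history.append(("assistant", reply))

for role, text in st.session_state.chat_history:
    with st.chat_message(role):
        st.write(text)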