tangzhy committed on
Commit
292a3d4
1 Parent(s): eeb8739

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -5
app.py CHANGED
@@ -35,7 +35,7 @@ model.config.sliding_window = 4096
35
  model.eval()
36
 
37
 
38
- @spaces.GPU(duration=120)
39
  def generate(
40
  message: str,
41
  chat_history: list[tuple[str, str]],
@@ -46,7 +46,7 @@ def generate(
46
  repetition_penalty: float = 1.2,
47
  ) -> Iterator[str]:
48
  if chat_history != []:
49
- yield "Sorry, I am an instruction-tuned model and currently do not support chatting. Please try clearing the chat history or refreshing the page to ask a new question."
50
 
51
  tokenized_example = tokenizer(message, return_tensors='pt', max_length=MAX_INPUT_TOKEN_LENGTH, truncation=True)
52
  input_ids = tokenized_example.input_ids
@@ -70,9 +70,10 @@ def generate(
70
  outputs = []
71
  for text in streamer:
72
  outputs.append(text)
73
- to_yield = "".join(outputs)
74
- to_yield += "\n\nI have now attempted to solve the optimization modeling task! Please try executing the code in your environment, making sure it is equipped with `coptpy`."
75
- yield to_yield
 
76
 
77
 
78
  chat_interface = gr.ChatInterface(
 
35
  model.eval()
36
 
37
 
38
+ @spaces.GPU(duration=720)
39
  def generate(
40
  message: str,
41
  chat_history: list[tuple[str, str]],
 
46
  repetition_penalty: float = 1.2,
47
  ) -> Iterator[str]:
48
  if chat_history != []:
49
+ return "Sorry, I am an instruction-tuned model and currently do not support chatting. Please try clearing the chat history or refreshing the page to ask a new question."
50
 
51
  tokenized_example = tokenizer(message, return_tensors='pt', max_length=MAX_INPUT_TOKEN_LENGTH, truncation=True)
52
  input_ids = tokenized_example.input_ids
 
70
  outputs = []
71
  for text in streamer:
72
  outputs.append(text)
73
+ yield "".join(outputs)
74
+
75
+ outputs.append("\n\nI have now attempted to solve the optimization modeling task! Please try executing the code in your environment, making sure it is equipped with `coptpy`.")
76
+ yield "".join(outputs)
77
 
78
 
79
  chat_interface = gr.ChatInterface(