hysts HF staff committed on
Commit
14f59ea
1 Parent(s): 8d96483
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -88,8 +88,9 @@ def process_example(message: str) -> tuple[str, list[tuple[str, str]]]:
88
  def check_prompt_length(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> None:
89
  prompt = get_prompt(message, chat_history, system_prompt)
90
  input_ids = tokenizer([prompt], return_tensors='np')['input_ids']
91
- if input_ids.shape[-1] > MAX_INPUT_TOKEN_LENGTH:
92
- raise gr.Error('The accumulated input is too long. Clear your chat history and try again.')
 
93
 
94
 
95
  with gr.Blocks(css='style.css') as demo:
 
88
  def check_prompt_length(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> None:
89
  prompt = get_prompt(message, chat_history, system_prompt)
90
  input_ids = tokenizer([prompt], return_tensors='np')['input_ids']
91
+ input_token_length = input_ids.shape[-1]
92
+ if input_token_length > MAX_INPUT_TOKEN_LENGTH:
93
+ raise gr.Error(f'The accumulated input is too long ({input_token_length} > {MAX_INPUT_TOKEN_LENGTH}). Clear your chat history and try again.')
94
 
95
 
96
  with gr.Blocks(css='style.css') as demo: