jeremyarancio committed on
Commit 6dec8ee
1 Parent(s): 2487e56

Update handler

Files changed (1)
handler.py +4 -1
handler.py CHANGED
@@ -25,9 +25,12 @@ class EndpointHandler():
         # Get inputs
         inputs = data.pop("inputs", data)
         parameters = data.pop("parameters", None)
+        LOGGER.info("Data extracted.")
         # Preprocess
+        LOGGER.info(f"Start tokenizer: {inputs}")
         inputs_ids = self.tokenizer(inputs, return_tensors="pt").inputs_ids
         # Forward
+        LOGGER.info(f"Start generation.")
         if parameters is not None:
             outputs = self.model.generate(inputs_ids, **parameters)
         else:
@@ -35,4 +38,4 @@ class EndpointHandler():
         # Postprocess
         prediction = self.tokenizer.decode(outputs[0])
         LOGGER.info(f"Generated text: {prediction}")
-        return [{"generated_text": prediction}]
+        return {"generated_text": prediction}