TiberiuCristianLeon committed
Commit 326c762 · verified · 1 Parent(s): 99c354e

Update app.py

Files changed (1): app.py +3 -2
app.py CHANGED
@@ -41,12 +41,13 @@ def model_to_cuda(model):
     return model
 
 def flan(model_name, sl, tl, input_text):
-    tokenizer = T5Tokenizer.from_pretrained(model_name)
+    tokenizer = T5Tokenizer.from_pretrained(model_name, legacy=False)
     model = T5ForConditionalGeneration.from_pretrained(model_name)
     input_text = f"translate {sl} to {tl}: {input_text}?"
     input_ids = tokenizer(input_text, return_tensors="pt").input_ids
     outputs = model.generate(input_ids)
-    return tokenizer.decode(outputs[0])
+    return tokenizer.decode(outputs[0], skip_special_tokens=True).strip()
+    # return tokenizer.decode(outputs[0], skip_special_tokens=True).replace('<pad>', '').replace('</s>', '').strip()
 
 def teuken(model_name, sl, tl, input_text):
     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
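
For context, here is a minimal, self-contained sketch of how the updated flan() helper reads after this commit, assuming transformers and torch are installed. The checkpoint "google/flan-t5-small" and the sample language pair are placeholders for illustration only; in app.py the actual model_name, sl and tl are passed in by the caller.

from transformers import T5Tokenizer, T5ForConditionalGeneration

def flan(model_name, sl, tl, input_text):
    # legacy=False opts into the corrected T5 tokenizer behaviour and silences
    # the legacy-tokenizer warning emitted by recent transformers releases.
    tokenizer = T5Tokenizer.from_pretrained(model_name, legacy=False)
    model = T5ForConditionalGeneration.from_pretrained(model_name)
    input_text = f"translate {sl} to {tl}: {input_text}?"
    input_ids = tokenizer(input_text, return_tensors="pt").input_ids
    outputs = model.generate(input_ids)
    # skip_special_tokens=True drops <pad> and </s> from the decoded string,
    # so the manual .replace() chain kept in the commented-out line above is
    # no longer needed.
    return tokenizer.decode(outputs[0], skip_special_tokens=True).strip()

if __name__ == "__main__":
    # placeholder checkpoint and language pair, for illustration only
    print(flan("google/flan-t5-small", "English", "German", "Hello, how are you?"))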