File size: 663 Bytes
79a132f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
# This file contains the translation logic
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
# Process-wide cache: loading a checkpoint from disk (or the network) on every
# call is by far the dominant cost; keep one (tokenizer, model) pair per name.
_SEQ2SEQ_CACHE: dict[str, tuple] = {}


def _load_seq2seq(model_name):
    """Return a cached ``(tokenizer, model)`` pair for *model_name*, loading it once."""
    if model_name not in _SEQ2SEQ_CACHE:
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
        model.eval()  # inference only — disable dropout etc.
        _SEQ2SEQ_CACHE[model_name] = (tokenizer, model)
    return _SEQ2SEQ_CACHE[model_name]


def translate_text(text, model_name="t5-small", task_prefix="translate English to German: "):
    """Translate *text* with a seq2seq model (default: ``t5-small``).

    Args:
        text: The source text to translate. An empty string short-circuits
            to ``""`` without loading the model.
        model_name: Hugging Face checkpoint to use. Defaults to ``"t5-small"``
            for backward compatibility with the original hard-coded value.
        task_prefix: Instruction prepended to *text*. T5 checkpoints are
            multi-task and only translate when given such a prefix — the
            previous code passed the raw text, so no translation happened.

    Returns:
        The decoded translation as a plain string.
    """
    if not text:
        return ""
    tokenizer, model = _load_seq2seq(model_name)
    # Calling the tokenizer directly is the current API (encode_plus is deprecated).
    inputs = tokenizer(
        task_prefix + text,
        max_length=512,
        truncation=True,
        return_tensors="pt",
    )
    # No gradients are needed for generation; saves memory and time.
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=512)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)