mateuo committed
Commit 6a60d72 · 1 Parent(s): c55bd22

first handler

Files changed (2)
  1. hanler.py +35 -0
  2. model-00001-of-00002.safetensors +0 -3
hanler.py ADDED
@@ -0,0 +1,35 @@
+ import json
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ from typing import Dict, List, Any
+
+ class EndpointHandler:
+     def __init__(self, path=""):
+         # Load the model and tokenizer from the repository path
+         self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+         self.model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.bfloat16).to(self.device).eval()
+         self.tokenizer = AutoTokenizer.from_pretrained(path)
+
+     def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
+         # Handle the incoming request
+         input_text = data["inputs"]["text"]
+         template = data["inputs"]["template"]
+
+         # Use the predict function
+         output = self.predict_NuExtract([input_text], template)
+         return [{"extracted_information": output}]
+
+     def predict_NuExtract(self, texts, template, batch_size=1, max_length=10_000, max_new_tokens=4_000):
+         # Generate prompts based on the (pretty-printed) JSON template
+         template = json.dumps(json.loads(template), indent=4)
+         prompts = [f"""<|input|>\n### Template:\n{template}\n### Text:\n{text}\n\n<|output|>""" for text in texts]
+         outputs = []
+
+         with torch.no_grad():
+             for i in range(0, len(prompts), batch_size):
+                 batch_prompts = prompts[i:i + batch_size]
+                 batch_encodings = self.tokenizer(batch_prompts, return_tensors="pt", truncation=True, padding=True, max_length=max_length).to(self.device)
+                 pred_ids = self.model.generate(**batch_encodings, max_new_tokens=max_new_tokens)
+                 outputs += self.tokenizer.batch_decode(pred_ids, skip_special_tokens=True)
+
+         return [output.split("<|output|>")[1] for output in outputs]
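
For reference, a minimal local smoke test of this handler might look like the sketch below. It is not part of the commit: the path, example text, and template fields are illustrative assumptions, and the import follows the committed filename hanler.py.

import json
from hanler import EndpointHandler

# Instantiate the handler from the repository root
# (assumes the model weights and hanler.py live in the current directory).
handler = EndpointHandler(path=".")

# Payload shape expected by __call__: {"inputs": {"text": ..., "template": ...}}
payload = {
    "inputs": {
        "text": "John Smith joined Acme Corp as CTO in March 2021.",
        "template": json.dumps({"name": "", "company": "", "role": "", "start_date": ""}),
    }
}

result = handler(payload)
print(result[0]["extracted_information"])

Note that "extracted_information" holds a list (one decoded completion per input text), since predict_NuExtract accepts a list of texts and returns the part of each generation after the <|output|> marker.
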
model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:6210785e5d4ce6a8ab2a874d8367cba7100b76fd14cf7350f5def3db2834c46f
- size 4972489328