from dataclasses import dataclass
from typing import Any, Dict, List

import torch


@dataclass
class MyCollator:
    pad_token_id: int
    attention_pad_value: int = 0
    label_pad_value: int = -100

    def __call__(self, features: List[Dict[str, Any]]) -> Dict[str, Any]:
        # * extract data from features, and format them from dict to list
        input_ids = [f["input_ids"] for f in features]  # List[List[int]]
        placeholder_indices = [f["placeholder_indices"] for f in features]  # List[List[int]]
        super_input_ids = [f["super_input_ids"] for f in features]  # List[List[List[int]]]
        super_token_indices = [f["super_token_indices"] for f in features]  # List[List[List[int]]]
        labels = [f["labels"] for f in features] if "labels" in features[0] else None  # List[List[int]]

        # * process model input
        (
            input_ids,
            attention_mask,
            placeholder_indices,
            labels,
        ) = self.process_model_inputs(
            input_ids,
            placeholder_indices,
            labels,
        )

        # * process super_tokenizer input
        (
            super_input_ids,
            super_attention_mask,
            super_token_indices,
        ) = self.process_super_tokenizer_inputs(
            super_input_ids,
            super_token_indices,
        )

        # * to torch tensor
        input_ids = torch.tensor(input_ids)
        attention_mask = torch.tensor(attention_mask)
        super_input_ids = torch.tensor(super_input_ids)
        super_attention_mask = torch.tensor(super_attention_mask)
        labels = torch.tensor(labels) if labels else None

        # * format
        res = {
            "input_ids": input_ids,
            "attention_mask": attention_mask,
            "super_input_ids": super_input_ids,
            "super_attention_mask": super_attention_mask,
            "placeholder_indices": placeholder_indices,
            "super_token_indices": super_token_indices,
            "labels": labels,
        }
        return res

    def process_model_inputs(self, input_ids, placeholder_indices, labels):
        # * get attention mask
        max_len = get_max_length_in_nested_lists(input_ids)
        attention_mask = get_attention_mask_from_nested_lists(input_ids)

        # * shift placeholder_indices, since padding is applied on the left
        placeholder_indices = [
            [idx + max_len - len(input_ids[i]) for idx in placeholder_indices[i]]
            for i in range(len(placeholder_indices))
        ]

        # * pad
        input_ids = pad_nested_lists(input_ids, max_len, self.pad_token_id, "left")
        attention_mask = pad_nested_lists(attention_mask, max_len, self.attention_pad_value, "left")
        if labels:
            labels = pad_nested_lists(labels, max_len, self.label_pad_value, "left")

        return input_ids, attention_mask, placeholder_indices, labels

    def process_super_tokenizer_inputs(self, input_ids, super_token_indices):
        # * 3D -> 2D
        input_ids = sum(input_ids, [])  # List[List[int]]
        super_token_indices = sum(super_token_indices, [])  # List[List[int]]

        # * filter out items with empty super_token_indices
        new_input_ids = []
        new_super_token_indices = []
        for i in range(len(input_ids)):
            if len(super_token_indices[i]) != 0:
                new_input_ids.append(input_ids[i])
                new_super_token_indices.append(super_token_indices[i])
        input_ids = new_input_ids
        super_token_indices = new_super_token_indices

        if len(input_ids) == 0:
            return [], [], []

        # * get attention mask and pad (right-padded, unlike the model inputs)
        max_len = get_max_length_in_nested_lists(input_ids)
        attention_mask = get_attention_mask_from_nested_lists(input_ids)
        input_ids = pad_nested_lists(input_ids, max_len, self.pad_token_id)
        attention_mask = pad_nested_lists(attention_mask, max_len, self.attention_pad_value)

        return input_ids, attention_mask, super_token_indices


def get_max_length_in_nested_lists(lst):
    """Return the length of the longest innermost list in an arbitrarily nested list."""
    if isinstance(lst[0], list):
        lengths = []
        for elem in lst:
            length = get_max_length_in_nested_lists(elem)
            lengths.append(length)
        return max(lengths)
    else:
        return len(lst)


def get_attention_mask_from_nested_lists(lst):
    """Build an attention mask with the same nesting structure: 1 for every real token."""
    if isinstance(lst[0], list):
        attention_mask = []
        for elem in lst:
            mask = get_attention_mask_from_nested_lists(elem)
            attention_mask.append(mask)
        return attention_mask
    else:
        return [1] * len(lst)


def pad_nested_lists(lst, max_length, padding_value, padding_side="right"):
    """Pad every innermost list to max_length with padding_value on the given side."""
    if isinstance(lst, list) and len(lst) and isinstance(lst[0], list):
        for i, elem in enumerate(lst):
            lst[i] = pad_nested_lists(elem, max_length, padding_value, padding_side)
        return lst
    elif isinstance(lst, list):
        if padding_side == "right":
            return lst + [padding_value for _ in range(max_length - len(lst))]
        else:
            return [padding_value for _ in range(max_length - len(lst))] + lst
    else:
        raise NotImplementedError(f"Unrecognized type: {type(lst)}")
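

# --- Usage sketch (illustrative only, not part of the original file) ---
# A minimal example of how MyCollator might be called on a toy batch, e.g. as the
# `collate_fn` of a torch DataLoader. The token ids, indices, and pad_token_id=0
# below are made-up values for demonstration.
if __name__ == "__main__":
    collator = MyCollator(pad_token_id=0)
    features = [
        {
            "input_ids": [101, 7, 8, 102],
            "placeholder_indices": [2],
            "super_input_ids": [[101, 5, 6, 102]],
            "super_token_indices": [[1, 2]],
            "labels": [-100, 7, 8, -100],
        },
        {
            "input_ids": [101, 9, 102],
            "placeholder_indices": [1],
            "super_input_ids": [[101, 3, 102]],
            "super_token_indices": [[1]],
            "labels": [-100, 9, -100],
        },
    ]
    batch = collator(features)
    # input_ids / attention_mask / labels are left-padded to the longest sequence
    # in the batch; the super_* tensors are right-padded.
    print(batch["input_ids"].shape)        # torch.Size([2, 4]) for this toy batch
    print(batch["super_input_ids"].shape)  # torch.Size([2, 4]) for this toy batch
    print(batch["placeholder_indices"])    # indices shifted to account for left padding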