import torch
import torch.nn as nn
from transformers import LlamaModel
from transformers.models.llama.configuration_llama import LlamaConfig


class SuperTokenizer(LlamaModel):
    def __init__(self, config: LlamaConfig):
        super().__init__(config)

        # Number of sequences pushed through the base model per chunk;
        # kept at 1 so long inputs are encoded one sequence at a time.
        self.batch_size = 1
        # Linear projection of hidden states to a 4096-dim space
        # (defined here but not applied in forward below).
        self.my_pooler = nn.Linear(config.hidden_size, 4096)

    def forward(self, input_ids, attention_mask, super_token_indices, **kwargs):
        # Encode the inputs in chunks of `self.batch_size` sequences, then
        # gather the hidden states at each sequence's super-token positions.
        hidden_state = []
        for i in range(0, input_ids.shape[0], self.batch_size):
            end_index = min(i + self.batch_size, input_ids.shape[0])

            # Run the underlying LlamaModel on the current chunk.
            output = super().forward(
                input_ids[i:end_index],
                attention_mask[i:end_index],
                **kwargs,
            )

            # Collect the hidden states at the super-token positions
            # of every sequence in the chunk.
            for j in range(end_index - i):
                hidden_state.append(
                    output.last_hidden_state[j][super_token_indices[i + j]]
                )

        # Concatenate the gathered super-token embeddings along the first dim.
        embedding = torch.cat(hidden_state, dim=0).contiguous()

        return embedding
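

# --- Usage sketch (illustrative, not part of the original file) ---
# Shows how the class above might be called. The config sizes, the random
# weights, and the `super_token_indices` values below are assumptions made
# for demonstration only.
if __name__ == "__main__":
    config = LlamaConfig(
        vocab_size=1000,
        hidden_size=64,
        intermediate_size=128,
        num_hidden_layers=2,
        num_attention_heads=4,
    )
    model = SuperTokenizer(config).eval()  # randomly initialised weights

    input_ids = torch.randint(0, config.vocab_size, (2, 16))
    attention_mask = torch.ones_like(input_ids)
    # One super-token position per sequence (here: the last token of each).
    super_token_indices = [torch.tensor([15]), torch.tensor([15])]

    with torch.no_grad():
        embeddings = model(input_ids, attention_mask, super_token_indices)

    # One embedding row per gathered super-token index: (2, hidden_size).
    print(embeddings.shape)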