File size: 2,868 Bytes
9b0d6c2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
from models.frame_passt.fpasst import get_model
from models.frame_passt.preprocess import AugmentMelSTFT
from models.transformer_wrapper import BaseModelWrapper


class FPaSSTWrapper(BaseModelWrapper):
    """Frame-level PaSST (fPaSST) model plus its mel-spectrogram front end.

    Exposes the ``BaseModelWrapper`` interface: ``mel_forward`` turns raw
    16 kHz waveforms into log-mel spectrograms, ``forward`` runs the
    transformer, and ``separate_params`` groups parameters by depth for
    layer-wise learning-rate schedules.
    """

    # Number of transformer blocks in the backbone; also the number of
    # parameter groups produced by separate_params().
    _NUM_BLOCKS = 12

    # Stem parameters (tokens, positional embeddings, input convs, patch
    # projection) are trained together with transformer block 0.
    _STEM_KEYS = frozenset([
        'cls_token',
        'dist_token',
        'new_pos_embed',
        'freq_new_pos_embed',
        'time_new_pos_embed',
        'conv_in_1.weight',
        'conv_in_1.bias',
        'conv_in_2.weight',
        'conv_in_2.bias',
        'conv_in_3.weight',
        'conv_in_3.bias',
        'patch_embed.proj.weight',
        'patch_embed.proj.bias',
    ])

    def __init__(self):
        super().__init__()
        # Mel front end: 16 kHz audio, 25 ms window (400 samples),
        # 10 ms hop (160 samples), 128 mel bins. Augmentation (freqm/timem)
        # is disabled here; fmin/fmax aug ranges are used at train time.
        self.mel = AugmentMelSTFT(
            n_mels=128,
            sr=16_000,
            win_length=400,
            hopsize=160,
            n_fft=512,
            freqm=0,
            timem=0,
            htk=False,
            fmin=0.0,
            fmax=None,
            norm=1,
            fmin_aug_range=10,
            fmax_aug_range=2000,
            fast_norm=True,
            preamp=True,
        )
        # AudioSet-style head (527 classes); in_channels=16 matches the
        # conv stem configured above — TODO confirm against get_model docs.
        self.fpasst = get_model(
            arch="passt_deit_bd_p16_384",
            n_classes=527,
            pos_embed_length=250,
            frame_patchout=0,
            in_channels=16
        )

    def mel_forward(self, x):
        """Convert a raw waveform batch into log-mel spectrograms."""
        return self.mel(x)

    def forward(self, x):
        """Run the fPaSST backbone on a (pre-computed) spectrogram batch."""
        return self.fpasst(x)

    def separate_params(self):
        """Group backbone parameters by depth for layer-wise LR decay.

        Returns a list of ``_NUM_BLOCKS`` parameter lists ordered from the
        DEEPEST layer (final norm + block 11) to the SHALLOWEST (stem +
        block 0), i.e. the reverse of block index order.

        Raises:
            ValueError: if a parameter name matches neither the stem keys,
                the final norm, nor a ``blocks.<i>.`` prefix.
        """
        groups = [[] for _ in range(self._NUM_BLOCKS)]
        for name, param in self.fpasst.named_parameters():
            if name in self._STEM_KEYS:
                # Stem trains with the first transformer block.
                groups[0].append(param)
                continue
            if name in ('norm.weight', 'norm.bias'):
                # Final norm trains with the last transformer block.
                groups[-1].append(param)
                continue
            parts = name.split('.')
            if (len(parts) >= 2 and parts[0] == 'blocks'
                    and parts[1].isdigit() and int(parts[1]) < self._NUM_BLOCKS):
                groups[int(parts[1])].append(param)
            else:
                raise ValueError(f"Check separate params for frame-passt! Unexpected key: {name}")
        return list(reversed(groups))