Detected Pickle imports (240)
- "__torch__.torch.nn.modules.linear.___torch_mangle_13.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_162.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_77.GELUActivation",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_54.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_131.GELUActivation",
- "__torch__.torch.nn.modules.linear.___torch_mangle_197.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_88.BertSelfAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_90.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_12.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_50.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_116.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_129.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_5.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_178.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_97.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_150.BertIntermediate",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_37.Dropout",
- "__torch__.transformers.activations.___torch_mangle_149.GELUActivation",
- "__torch__.torch.nn.modules.linear.___torch_mangle_179.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_86.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_193.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_135.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_163.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_4.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_57.BertAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_52.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_157.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_11.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_130.Linear",
- "__torch__.transformers.activations.GELUActivation",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_46.BertOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_55.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.BertEmbeddings",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_182.BertSelfOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_100.BertOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_114.BertIntermediate",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_60.BertIntermediate",
- "__torch__.transformers.activations.___torch_mangle_185.GELUActivation",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_106.BertSelfAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_70.BertSelfAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_18.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_110.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_192.Linear",
- "__torch__.transformers.models.bert.modeling_bert.BertModel",
- "__torch__.torch.nn.modules.linear.___torch_mangle_125.Linear",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_0.Embedding",
- "__torch__.torch.nn.modules.linear.___torch_mangle_31.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_79.Linear",
- "__torch__.transformers.models.bert.modeling_bert.BertOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_15.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_139.Linear",
- "__torch__.transformers.models.bert.modeling_bert.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_138.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_180.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_133.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_147.BertAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_33.Dropout",
- "__torch__.transformers.activations.___torch_mangle_59.GELUActivation",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_183.BertAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_204.BertIntermediate",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_101.BertLayer",
- "__torch__.transformers.models.bert.modeling_bert.BertLayer",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_105.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_81.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_27.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_174.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_22.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_34.BertSelfAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_65.BertLayer",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_134.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_154.BertOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_164.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_3.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_159.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_99.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_109.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_207.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_28.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_9.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_62.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_43.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_119.BertLayer",
- "__torch__.transformers.activations.___torch_mangle_23.GELUActivation",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_111.BertAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_136.BertOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_165.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_66.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_123.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_80.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_95.GELUActivation",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_145.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_102.Linear",
- "__torch__.transformers.activations.___torch_mangle_167.GELUActivation",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_1.Embedding",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_124.BertSelfAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_42.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_158.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_177.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_16.BertSelfAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_29.BertLayer",
- "__torch__.torch.nn.modules.activation.Tanh",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_56.BertSelfOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_191.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_89.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_98.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_53.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_87.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_45.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_201.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_35.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_14.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_58.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_76.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_169.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_140.Linear",
- "__torch__.transformers.models.bert.modeling_bert.BertIntermediate",
- "__torch__.transformers.models.bert.modeling_bert.BertPooler",
- "__torch__.torch.nn.modules.linear.___torch_mangle_121.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_68.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_127.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_208.BertOutput",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.torch.nn.modules.linear.Linear",
- "torch.LongStorage",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_20.BertSelfOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_47.BertLayer",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_72.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_132.BertIntermediate",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_155.BertLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_166.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_160.BertSelfAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_141.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_200.BertSelfOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_168.BertIntermediate",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_173.BertLayer",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_142.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_210.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_10.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_187.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_25.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_103.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_189.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_69.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_126.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_24.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_30.Linear",
- "__torch__.torch.nn.modules.container.ModuleList",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_117.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.BertSelfAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_143.Linear",
- "__torch__.torch.nn.modules.dropout.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_32.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_61.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_92.BertSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_153.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_184.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_181.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_38.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_107.Linear",
- "__torch__.transformers.activations.___torch_mangle_41.GELUActivation",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_170.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_203.GELUActivation",
- "__torch__.torch.nn.modules.linear.___torch_mangle_205.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_93.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_122.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_196.BertSelfAttention",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_82.BertOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_120.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_190.BertOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_73.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_8.Linear",
- "torch.FloatStorage",
- "__torch__.torch.nn.modules.linear.___torch_mangle_148.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_84.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_161.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_118.BertOutput",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_26.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_36.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_188.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_209.BertLayer",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_75.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_202.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_83.BertLayer",
- "__torch__.transformers.activations.___torch_mangle_113.GELUActivation",
- "__torch__.torch.nn.modules.linear.___torch_mangle_104.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_78.BertIntermediate",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_152.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_199.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_171.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_194.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_51.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_156.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_195.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_112.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_21.BertAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_91.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_94.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_49.Linear",
- "collections.OrderedDict",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_63.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_96.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_115.Linear",
- "__torch__.transformers.models.bert.modeling_bert.BertAttention",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_172.BertOutput",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_198.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_206.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_17.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_7.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_137.BertLayer",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_39.BertAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_71.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_108.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_146.BertSelfOutput",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_44.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_175.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_48.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_85.Linear",
- "__torch__.torch.nn.modules.normalization.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_19.Dropout",
- "__torch__.transformers.models.bert.modeling_bert.BertEncoder",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_6.LayerNorm",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_128.BertSelfOutput",
- "__torch__.torch.nn.modules.linear.___torch_mangle_67.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_144.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_40.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_151.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_176.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_186.BertIntermediate",
- "__torch__.torch.nn.modules.linear.___torch_mangle_2.Linear",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_74.BertSelfOutput",
- "__torch__.transformers.models.bert.modeling_bert.___torch_mangle_64.BertOutput",
- "collections.OrderedDict",
- "torch.DoubleStorage",
- "torch.LongStorage",
- "torch._utils._rebuild_tensor_v2",
- "torch.LongStorage",
- "collections.OrderedDict",
- "torch.jit._pickle.restore_type_tag",
- "torch._utils._rebuild_tensor_v2"
- SHA256: c2a1c0ddc92609defb8e9dad2281539f377198f638b29e97d15244399f9b70d6
- Pointer size: 134 Bytes
- Size of remote file: 438 MB
- Xet backed hash: df119e9fb77f1e56b450486249db757c1f717cb2da83ae446a3a2babcd13f020
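The SHA256 above can be checked against a local copy before the file is used. A short sketch follows, assuming the download was saved as pytorch_model.pt (a placeholder name):

```python
import hashlib

# Placeholder path; substitute the locally downloaded file.
PATH = "pytorch_model.pt"
EXPECTED = "c2a1c0ddc92609defb8e9dad2281539f377198f638b29e97d15244399f9b70d6"

digest = hashlib.sha256()
with open(PATH, "rb") as f:
    # Stream in 1 MiB blocks so the 438 MB file is never fully in memory.
    for block in iter(lambda: f.read(1 << 20), b""):
        digest.update(block)

if digest.hexdigest() != EXPECTED:
    raise ValueError("SHA256 mismatch: refuse to load this file")
print("SHA256 verified")
```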
Xet efficiently stores large files inside Git by splitting them into unique chunks, which accelerates uploads and downloads.
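As a toy illustration of that idea only (not Xet's actual chunking algorithm, parameters, or hash choices), the sketch below splits data at content-defined boundaries and stores each distinct chunk once, so a small edit to a large file adds only a few new chunks:

```python
import hashlib
import os

def chunk(data: bytes, window: int = 32, mask: int = (1 << 13) - 1):
    """Split data where a hash of the trailing window hits a boundary
    condition, so identical regions yield identical chunks regardless of
    where they sit in the file."""
    chunks, start = [], 0
    for i in range(window, len(data)):
        h = hashlib.blake2b(data[i - window:i], digest_size=8).digest()
        if int.from_bytes(h, "big") & mask == 0:  # ~8 KiB average chunk size
            chunks.append(data[start:i])
            start = i
    chunks.append(data[start:])
    return chunks

# Content-addressed store: a chunk is kept only if its hash is unseen.
store = {}
def add(data: bytes) -> None:
    for c in chunk(data):
        store.setdefault(hashlib.sha256(c).hexdigest(), len(c))

original = os.urandom(200_000)
edited = original[:100_000] + os.urandom(1_000) + original[100_000:]

add(original)
before = len(store)
add(edited)
print(f"original: {before} chunks; the edit added only {len(store) - before} new chunk(s)")
```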