#!/bin/bash

# In this example, we show how to train unsupervised SimCSE on 1M Norwegian
# news sentences (1998-2019) with NB-BERT. To train with multiple GPU cards
# instead, see "run_sup_example.sh" for how to use PyTorch's distributed
# data parallel.
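
# GCP zone and PyTorch/XLA (XRT) runtime settings for a 4-core TPU VM; these
# are needed here because this example runs on TPU rather than GPU.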
export ZONE=us-central2-b
export XRT_TPU_CONFIG="localservice;0;localhost:51011"
export TPU_NUM_DEVICES=4
export ALLOW_MULTIPLE_LIBTPU_LOAD=1

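# Launch SimCSE training: CLS pooling with the MLP head applied only during
# training, contrastive temperature 0.05, and checkpoint selection by STS-B
# Spearman correlation evaluated every 125 steps.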
python3 ../../SimCSE/train.py \
    --model_name_or_path NbAiLab/nb-bert-base \
    --train_file data/nor_news_1998_2019_sentences_1M.txt \
    --output_dir result/unsup-simcse-nb-bert-base \
    --num_train_epochs 1 \
    --per_device_train_batch_size 64 \
    --learning_rate 3e-5 \
    --max_seq_length 32 \
    --evaluation_strategy steps \
    --metric_for_best_model stsb_spearman \
    --load_best_model_at_end \
    --eval_steps 125 \
    --pooler_type cls \
    --mlp_only_train \
    --overwrite_output_dir \
    --temp 0.05 \
    --do_train \
    --do_eval \
    "$@"