#!/bin/bash
# Run LongBench evaluation across 8 GPUs.
export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7

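# Backbone model to evaluate; alternative checkpoints are left commented out.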
model_name_or_path="lmsys/vicuna-7b-v1.5-16k"
# model_name_or_path="meta-llama/Llama-2-7b-chat-hf"
# model_name_or_path="Yukang/LongAlpaca-7B-16k"
# model_name_or_path="lmsys/longchat-7b-v1.5-32k"
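# Trained super-tokenizer checkpoint; these paths are machine-specific.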
super_tokenizer_name_or_path="/share/ninglu_shao/code/PluginTransformer/data/outputs/90k_0104+8-longalpaca_0106/super_tokenizer"
# super_tokenizer_name_or_path="/home/baaiks/ninglu/code/PluginTransformer/data/outputs/90k_0111+8-longalpaca_0111/super_tokenizer"
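# Evaluation results and the console log are written here.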
output_dir="data/results/longbench/test"

mkdir -p "${output_dir}"

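# Launch distributed evaluation with one process per GPU and tee the console output to a log file.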
torchrun --nproc_per_node 8 --master_port=12345 -m main.eval_longbench \
    --model_name_or_path "${model_name_or_path}" \
    --super_tokenizer_name_or_path "${super_tokenizer_name_or_path}" \
    --super_tokenizer_num_hidden_layers 8 \
    --dataset_list "gov_report,qmsum,multi_news" \
    --output_dir "${output_dir}" \
    --max_length 3500 \
    --batch_size 2 \
    --compression_ratio 32 \
    --down_scale_method "uniform" \
    | tee "${output_dir}/eval_longbench.log"

# Full LongBench subset list for --dataset_list:
# narrativeqa,qasper,multifieldqa_en,hotpotqa,2wikimqa,musique,gov_report,qmsum,multi_news