# NOTE: for minhash and duplicates, downloading with aria2c results in
# HTTP 429 (too many requests), so plain wget is used for those components.
# Similarly, for documents and quality signals some files were not completely
# downloaded due to rate limiting by the RPv2 servers (data.together.xyz).
CC_SNAPSHOT="2023-14"
LANG="en"
BASE_URL="https://data.together.xyz/redpajama-data-v2/v1.0.0"
BASE_DIR="/mnt/weka/peacock/enfm-dataprocessing/RedPajamaV2/data"
listings_tag="${LANG}-${CC_SNAPSHOT}-head_middle"
mkdir -p ${BASE_DIR}/listings
# wget "${BASE_URL}/listings/${listings_tag}.txt" -O "${BASE_DIR}/listings/${listings_tag}.txt" # already downloaded
listings_file="${BASE_DIR}/listings/${listings_tag}.txt"
# Build the aria2c input file for one component.
# Uncomment exactly one component / component_extension pair below before running:
# # 1. signals
# component="quality_signals"
# component_extension="signals.json.gz"
# # 2. docs
# component="documents"
# component_extension="json.gz"
# # 3. minhash
# component="minhash"
# component_extension="minhash.parquet"
# # 4. duplicates
# component="duplicates"
# component_extension="duplicates.parquet"
aria2c_inputfile="${BASE_DIR}/listings/${component}-${listings_tag}.txt"
out_dir="${BASE_DIR}/${component}" # destination directory for downloaded files
truncate -s 0 "${aria2c_inputfile}"
# aria2c input-file format: a URL line followed by an indented " out=<path>" line
while read -r line; do
    url="${BASE_URL}/${component}/${line}.${component_extension}"
    dest="${line}.${component_extension}" # path relative to the --dir option of aria2c
    echo "${url}" >> "${aria2c_inputfile}"
    echo " out=${dest}" >> "${aria2c_inputfile}"
done < "${listings_file}"
# download
aria2c -x 8 --dir="${out_dir}" --input-file="${aria2c_inputfile}" --continue=true
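# Minimal sketch of the wget fallback mentioned in the NOTE above: for the
# minhash and duplicates components, aria2c triggers HTTP 429, so a plain wget
# loop over the same listings file can be used instead. The function name and
# the --tries/--wait values here are illustrative assumptions, not part of the
# original script.
download_with_wget() {
    # $1 = component (e.g. "minhash"), $2 = component_extension (e.g. "minhash.parquet")
    local component="$1"
    local extension="$2"
    local out_dir="${BASE_DIR}/${component}"
    while read -r line; do
        local url="${BASE_URL}/${component}/${line}.${extension}"
        local dest="${out_dir}/${line}.${extension}"
        mkdir -p "$(dirname "${dest}")"
        # --continue resumes partial files; --tries/--wait back off on rate limits
        wget --continue --tries=5 --wait=2 "${url}" -O "${dest}"
    done < "${listings_file}"
}
# Example usage (component / extension pairs from the comments above):
# download_with_wget "minhash" "minhash.parquet"
# download_with_wget "duplicates" "duplicates.parquet"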