"""
General utilities
"""
import os
def listdir_fullpath(d):
    """Return the entries directly inside directory *d* as full paths."""
    entries = os.listdir(d)
    return [os.path.join(d, entry) for entry in entries]
def get_chunks(
    items,
    num_chunks
):
    """
    Slice *items* into consecutive chunks of size
    len(items) // num_chunks (at least 1).

    The final chunk absorbs any remainder, so an uneven
    split can produce num_chunks + 1 chunks. An empty
    *items* yields an empty list.
    """
    assert num_chunks > 0, "ERROR: zero chunks"
    total = len(items)
    size = max(1, total // num_chunks)
    # The last start in this range is always within `size` of the end,
    # so its slice runs to the end of `items` — same as the original
    # while-loop plus tail-append formulation.
    return [items[start:start + size] for start in range(0, total, size)]
def check_minhash_ids(
    minhash_df
):
    """
    Verify that each row's "id" suffix (the segment after the final
    "/") equals that row's dataframe index. Holding this invariant
    allows faster filtering downstream.
    """
    # Note: local names are part of the runtime assertion message
    # via the {name=} f-string form, so they are kept as-is.
    for index, row in minhash_df.iterrows():
        id_number = int(row["id"].rsplit("/", 1)[-1])
        assert id_number == index, \
            f"ERROR: check_minhash_ids with {id_number=} and {index=}"
def check_shard_of_filtered_docids(
    filtered_docids,
    expected_shard_number
):
    """
    Verify that every docid produced by quality filtering carries the
    expected shard number in its second "/"-separated path segment.
    """
    # `shard_number` must keep its name: it appears in the runtime
    # assertion message through the {name=} f-string form.
    for docid in filtered_docids:
        shard_number = docid.split("/", 2)[1]
        assert shard_number == expected_shard_number, \
            f"ERROR: check_shard_of_filtered_docids with {shard_number=} and {expected_shard_number=}"