"""
General utilities
"""
import os

def listdir_fullpath(d):
    """Return the full paths of the entries in directory d."""
    return [os.path.join(d, f) for f in os.listdir(d)]

def get_chunks(
    items,
    num_chunks
):
    """
    Split items into at most num_chunks contiguous chunks.

    Each chunk holds len(items) // num_chunks items; any remainder is
    folded into the final chunk so that no more than num_chunks chunks
    are returned.
    """
    assert num_chunks > 0, "ERROR: num_chunks must be positive"
    item_chunks = []
    num_items = len(items)
    # Base chunk size; at least 1 so short inputs still produce chunks.
    len_chunk = max(1, num_items // num_chunks)
    i = 0
    # Emit up to num_chunks - 1 equal-sized chunks, then put whatever
    # remains into the final chunk.
    while len(item_chunks) < num_chunks - 1 and i + len_chunk < num_items:
        item_chunks.append(items[i: i + len_chunk])
        i += len_chunk
    if i < num_items:
        item_chunks.append(items[i:])
    return item_chunks
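
# Illustrative example (assumes the remainder-folding behavior documented
# above): get_chunks(list(range(10)), 3) returns
# [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]].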

def check_minhash_ids(
    minhash_df
):
    """
    Check that each row id in the minhash dataframe ends with its own
    line number (the dataframe index). This invariant enables faster
    filtering later on.
    """
    for index, row in minhash_df.iterrows():
        # Ids are expected to end in "/<line_number>".
        id_number = int(row["id"].split("/")[-1])
        assert id_number == index, \
            f"ERROR: check_minhash_ids with {id_number=} and {index=}"

def check_shard_of_filtered_docids(
    filtered_docids,
    expected_shard_number
):
    """
    Sanity-check that every docid kept by quality filtering belongs to
    the expected shard.
    """
    for filtered_docid in filtered_docids:
        # The shard number is expected to be the second "/"-separated
        # field of the docid.
        shard_number = filtered_docid.split("/")[1]
        assert shard_number == expected_shard_number, \
            f"ERROR: check_shard_of_filtered_docids with {shard_number=} and {expected_shard_number=}"