# organization-logos / chunk-dataset.py
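"""Split the organization-logos image dataset into chunks of 1000 files.

Reads ./data/metadata.jsonl, copies each referenced image into a
./data/chunk_<n>/ subdirectory, rewrites the record's file_name to the new
relative path, and appends the updated records to ./data/metadata-chunked.jsonl.
"""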
import json
import os
import shutil
def file_exists(file_path):
    return os.path.isfile(file_path)


def read_jsonl(file_path):
    data = []
    with open(file_path, "r") as f:
        for line in f:
            data.append(json.loads(line))
    return data
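
# Each metadata record is expected to carry at least a "file_name" field with a
# path relative to ./data/; any other fields are passed through unchanged.
# Illustrative example (not taken from the actual dataset):
#   {"file_name": "logo_0001.png", "name": "Example Org"}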
if __name__ == "__main__":
    count = 0
    chunk_nb = 0
    data = read_jsonl("./data/metadata.jsonl")
    os.makedirs(f"./data/chunk_{chunk_nb}/", exist_ok=True)
    print(len(data))
    for d in data:
        # skip records whose image file is missing
        if not file_exists("./data/" + d.get("file_name")):
            print("File not found: " + d.get("file_name"))
            continue
        # copy the image into the current chunk directory
        new_path = f"./data/chunk_{chunk_nb}/" + d.get("file_name")
        shutil.copyfile("./data/" + d.get("file_name"), new_path)
        # update the file name in the JSONL record to point inside the chunk
        d["file_name"] = f"./chunk_{chunk_nb}/" + d.get("file_name")
        count += 1
        # start a new chunk directory every 1000 files
        if count >= 1000:
            chunk_nb += 1
            count = 0
            os.makedirs(f"./data/chunk_{chunk_nb}/", exist_ok=True)
        # append the updated record to the chunked metadata file
        with open("./data/metadata-chunked.jsonl", "a") as f:
            f.write(json.dumps(d) + "\n")
    print(len(data))
    print(count)
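
# Expected layout after running (paths follow from the code above):
#   ./data/chunk_0/, ./data/chunk_1/, ...   # copied images, 1000 per chunk
#   ./data/metadata-chunked.jsonl           # file_name rewritten to ./chunk_<n>/<file>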