import os
import re
import multiprocessing

import lmdb
from tqdm import tqdm


def sort_key(filename):
    # Extract frame number and cow id from filenames like:
    # "pmfeed_4_3_16_frame_10000_cow_1.jpg"
    match = re.search(r'frame_(\d+)_cow_(\d+)', filename)
    if match:
        frame_number = int(match.group(1))
        cow_id = int(match.group(2))
        return (frame_number, cow_id)
    # Files that don't match the pattern sort to the end
    return (float('inf'), float('inf'))


def read_image(args):
    # Read one image file from disk and return (filename, raw bytes)
    image_folder, image_name = args
    image_path = os.path.join(image_folder, image_name)
    try:
        with open(image_path, 'rb') as f:
            image_data = f.read()
        return (image_name, image_data)
    except Exception as e:
        print(f"Error reading {image_name}: {e}")
        return None


def main():
    # Define your paths
    image_folder = 'all_crops_pmfeed_4_3_16'
    lmdb_path = 'lmdb_all_crops_pmfeed_4_3_16'

    # Create the LMDB directory if it doesn't exist
    if not os.path.exists(lmdb_path):
        os.makedirs(lmdb_path)

    # List and sort JPEG files by (frame number, cow id)
    image_files = [f for f in os.listdir(image_folder) if f.endswith('.jpg')]
    sorted_files = sorted(image_files, key=sort_key)

    # Prepare arguments for multiprocessing (all sorted files are processed)
    args = [(image_folder, image_name) for image_name in sorted_files]

    # Use a multiprocessing Pool to read images concurrently
    with multiprocessing.Pool(processes=multiprocessing.cpu_count()) as pool:
        results = list(tqdm(pool.imap(read_image, args),
                            total=len(args), desc="Reading images"))

    # Filter out any failed reads
    results = [res for res in results if res is not None]

    # Open the LMDB environment with an appropriate map size (e.g., 10 GB)
    map_size = 10 * 1024 * 1024 * 1024  # 10 GB in bytes
    env = lmdb.open(lmdb_path, map_size=map_size)

    # Write the results into LMDB using a single write transaction,
    # keyed by the original filename (UTF-8 encoded)
    with env.begin(write=True) as txn:
        for key, value in tqdm(results, desc="Writing to LMDB"):
            txn.put(key.encode('utf-8'), value)

    env.close()
    print("LMDB database creation complete for all images!")


if __name__ == '__main__':
    main()
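
# Optional read-back check: a minimal sketch, left commented out so the script's
# behavior is unchanged. It assumes the database above was written successfully
# and that keys are the original filenames; the function name is illustrative.
#
# def verify_first_entry(lmdb_path):
#     env = lmdb.open(lmdb_path, readonly=True, lock=False)
#     with env.begin() as txn:
#         cursor = txn.cursor()
#         if cursor.first():
#             key, value = cursor.item()
#             print(f"First key: {key.decode('utf-8')} ({len(value)} bytes)")
#         else:
#             print("Database is empty.")
#     env.close()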