dataset_info:
  features:
  - name: label
    dtype: string
  - name: latent
    sequence:
      sequence:
        sequence:
          sequence: float32
  splits:
  - name: train
    num_bytes: 10743176571
    num_examples: 1281167
  - name: test
    num_bytes: 419229818
    num_examples: 50000
  download_size: 2961362265
  dataset_size: 11162406389
configs:
- config_name: default
  data_files:
  - split: train
    path: data/train.*
  - split: test
    path: data/validation.*
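For orientation, here is a minimal sketch of pulling one example and checking the nested latent feature. The Hub repository id is a placeholder, and the per-example latent shape of [num_aug, 32, 3, 3] is inferred from the collate function further below.

from datasets import load_dataset
import numpy as np

# Placeholder repo id; substitute the actual Hub path of this dataset.
ds = load_dataset("your-username/imagenet-96-latents", split="test", streaming=True)

example = next(iter(ds))
print(example["label"])  # class label as a string
latent = np.asarray(example["latent"], dtype=np.float32)
print(latent.shape)      # expected (num_aug, 32, 3, 3); see collate() below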
import torch
from torch.utils.data import DataLoader, DistributedSampler, RandomSampler


class ImageNet96Dataset(torch.utils.data.Dataset):
    # Wraps a Hugging Face dataset plus a frozen text encoder; iterate over it directly
    # (see __iter__) to get batches of latents and prompt embeddings.
    def __init__(
        self, hf_ds, text_enc, tokenizer, bs, ddp, col_label="label", col_latent="latent"
    ):
        self.hf_ds = hf_ds
        self.col_label, self.col_latent = col_label, col_latent
        self.text_enc, self.tokenizer = text_enc, tokenizer
        self.tokenizer.padding_side = "right"
        self.prompt_len = 50
        # `seed` is a module-level constant; under DDP each rank samples its own shard.
        if ddp:
            self.sampler = DistributedSampler(hf_ds, shuffle=True, seed=seed)
        else:
            self.sampler = RandomSampler(hf_ds, generator=torch.Generator().manual_seed(seed))
        self.dataloader = DataLoader(
            hf_ds, sampler=self.sampler, collate_fn=self.collate,
            batch_size=bs, num_workers=4, prefetch_factor=2,
        )
    def collate(self, items):
        labels = [i[self.col_label] for i in items]
        # latents shape [B, num_aug, 32, 3, 3]
        latents = torch.tensor([i[self.col_latent] for i in items])
        B, num_aug, _, _, _ = latents.shape
        # pick one random augmentation per item -> latents shape [B, 32, 3, 3]
        aug_idx = torch.randint(0, num_aug, (B,))  # random index in [0, num_aug) per batch item
        batch_idx = torch.arange(B)
        latents = latents[batch_idx, aug_idx]
        return labels, latents
    def __iter__(self):
        for labels, latents in self.dataloader:
            label_embs, label_atnmasks = self.encode_prompts(labels)
            # `dtype` and `device` are module-level globals, e.g. torch.bfloat16 and "cuda"
            latents = latents.to(dtype).to(device)
            yield labels, latents, label_embs, label_atnmasks
    def encode_prompts(self, prompts):
        prompts_tok = self.tokenizer(
            prompts, padding="max_length", truncation=True,
            max_length=self.prompt_len, return_tensors="pt",
        )
        with torch.no_grad():
            prompts_encoded = self.text_enc(**prompts_tok.to(self.text_enc.device))
        return prompts_encoded.last_hidden_state, prompts_tok.attention_mask
    def __len__(self):
        return len(self.dataloader)
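A rough end-to-end usage sketch, under a few assumptions not stated above: the text encoder/tokenizer pair is CLIP ViT-L/14 from transformers, the module-level `seed`, `dtype`, and `device` globals are set as shown, the Hub repository id is a placeholder, and the batch size is arbitrary.

import torch
from datasets import load_dataset
from transformers import CLIPTextModel, CLIPTokenizer

# Assumed module-level globals referenced inside ImageNet96Dataset.
seed, dtype, device = 0, torch.bfloat16, "cuda"

# Assumed text encoder; the class only needs last_hidden_state and an attention mask.
tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")
text_enc = CLIPTextModel.from_pretrained("openai/clip-vit-large-patch14").to(device).eval()

hf_ds = load_dataset("your-username/imagenet-96-latents", split="train")  # placeholder repo id
ds = ImageNet96Dataset(hf_ds, text_enc, tokenizer, bs=256, ddp=False)

labels, latents, label_embs, label_atnmasks = next(iter(ds))
print(latents.shape)     # torch.Size([256, 32, 3, 3])
print(label_embs.shape)  # torch.Size([256, 50, 768]) with the CLIP-L text encoder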