pytorch学习笔记(4)--dataloader

batch_size: 每个批次(batch)包含多少张图片

shuffle=True: 每个 epoch 开始时打乱数据顺序（False 则不打乱）

num_workers: 进程数

drop_last:最后不够64张是否舍去

import torchvision
from torch.utils.data import DataLoader


# 1
from torch.utils.tensorboard import SummaryWriter

# Load the CIFAR-10 test split, converting each PIL image to a tensor.
# download=False assumes the dataset already exists under ./dataset.
test_data = torchvision.datasets.CIFAR10("./dataset", train=False, transform=torchvision.transforms.ToTensor(), download=False)

# batch_size=64: images per batch; shuffle=True: reshuffle order each epoch;
# num_workers=0: load data in the main process; drop_last=True: discard the
# final partial batch when the dataset size is not a multiple of 64.
test_loader = DataLoader(dataset=test_data, batch_size=64, shuffle=True, num_workers=0, drop_last=True)

# Preview a single sample: a CHW image tensor and its integer class label.
img, target = test_data[0]
print(img.shape)
print(target)

# Log every batch of two epochs to TensorBoard (event files under ./dataloader).
writer = SummaryWriter("dataloader")
try:
    for epoch in range(2):
        # enumerate replaces the manual `step` counter from the original notes
        for step, (imgs, targets) in enumerate(test_loader):
            writer.add_images("epoch: {}".format(epoch), imgs, step)
finally:
    # Ensure buffered events are flushed even if logging raises mid-loop.
    writer.close()

 

posted @ 2022-08-01 09:37  helloWorldhelloWorld  阅读(34)  评论(0)    收藏  举报