dataloader.py
import os

import torch
import torchvision
from torchvision import datasets
from torchvision.transforms import transforms
def DataLoader(args):
    """Build distributed train/test loaders for the task selected in ``args``."""
    # Split the global batch size evenly across the distributed workers.
    args.batch_size //= args.world_size
    if args.task == 'mnist':
        transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.1307,), (0.3081,)),
        ])
        trainset = datasets.MNIST(args.dataset, download=True, train=True, transform=transform)
        # DistributedSampler shards (and shuffles) the data per rank, so the loader itself keeps shuffle=False.
        train_sampler = torch.utils.data.distributed.DistributedSampler(trainset)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, shuffle=False,
                                                  sampler=train_sampler, pin_memory=True)
        testset = datasets.MNIST(args.dataset, download=True, train=False, transform=transform)
        test_sampler = torch.utils.data.distributed.DistributedSampler(testset, shuffle=False, drop_last=True)
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch_size, shuffle=False,
                                                 sampler=test_sampler, pin_memory=True)
        return trainloader, testloader
    if args.task == 'cifar10':
        # Standard CIFAR augmentation: padded random crop, horizontal flip, and color jitter.
        transform_train = transforms.Compose([
            transforms.RandomCrop(args.img_width, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ColorJitter(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        ])
        transform_test = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        ])
        trainset = torchvision.datasets.CIFAR10(root=args.dataset, train=True, download=True, transform=transform_train)
        train_sampler = torch.utils.data.distributed.DistributedSampler(trainset)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, shuffle=False,
                                                  num_workers=args.num_worker, sampler=train_sampler, pin_memory=True)
        testset = torchvision.datasets.CIFAR10(root=args.dataset, train=False, download=True, transform=transform_test)
        test_sampler = torch.utils.data.distributed.DistributedSampler(testset, shuffle=False, drop_last=True)
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch_size, shuffle=False,
                                                 num_workers=args.num_worker, sampler=test_sampler, pin_memory=True)
        return trainloader, testloader
    if args.task == 'cifar100':
        transform_train = transforms.Compose([
            transforms.RandomCrop(args.img_width, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ColorJitter(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        ])
        transform_test = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        ])
        trainset = torchvision.datasets.CIFAR100(root=args.dataset, train=True, download=True, transform=transform_train)
        train_sampler = torch.utils.data.distributed.DistributedSampler(trainset)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, shuffle=False,
                                                  num_workers=args.num_worker, sampler=train_sampler, pin_memory=True)
        testset = torchvision.datasets.CIFAR100(root=args.dataset, train=False, download=True, transform=transform_test)
        test_sampler = torch.utils.data.distributed.DistributedSampler(testset, shuffle=False, drop_last=True)
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch_size, shuffle=False,
                                                 num_workers=args.num_worker, sampler=test_sampler, pin_memory=True)
        return trainloader, testloader
    if args.task == 'tiny':
        # Tiny-ImageNet is read from an ImageFolder layout; the CIFAR normalization statistics are reused here.
        transform_train = transforms.Compose([
            transforms.RandomCrop(args.img_width, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ColorJitter(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        ])
        transform_test = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        ])
        trainset = datasets.ImageFolder(os.path.join(args.dataset, "train"), transform=transform_train)
        train_sampler = torch.utils.data.distributed.DistributedSampler(trainset)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, shuffle=False,
                                                  num_workers=args.num_worker, sampler=train_sampler, pin_memory=True)
        testset = datasets.ImageFolder(os.path.join(args.dataset, "val"), transform=transform_test)
        test_sampler = torch.utils.data.distributed.DistributedSampler(testset, shuffle=False, drop_last=True)
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch_size, shuffle=False,
                                                 num_workers=args.num_worker, sampler=test_sampler, pin_memory=True)
        return trainloader, testloader
    if args.task == 'imagenet' or args.task == 'imagenet100':
        normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
        transform_train = transforms.Compose([
            transforms.RandomResizedCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ])
        transform_test = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            normalize,
        ])
        trainset = datasets.ImageFolder(os.path.join(args.dataset, "train"), transform=transform_train)
        train_sampler = torch.utils.data.distributed.DistributedSampler(trainset)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, shuffle=False,
                                                  num_workers=args.num_worker, sampler=train_sampler, pin_memory=True)
        testset = datasets.ImageFolder(os.path.join(args.dataset, "val"), transform=transform_test)
        test_sampler = torch.utils.data.distributed.DistributedSampler(testset, shuffle=False, drop_last=True)
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch_size, shuffle=False,
                                                 num_workers=args.num_worker, sampler=test_sampler, pin_memory=True)
        return trainloader, testloader
raise "Unknown task"