forked from khanrc/pt.darts
-
Notifications
You must be signed in to change notification settings - Fork 1
/
preproc.py
66 lines (54 loc) · 1.93 KB
/
preproc.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
import torch
import torch.nn as nn
import numpy as np
import torchvision.transforms as transforms
class Cutout(object):
    """Randomly zero out a square patch of a CHW image tensor (in place).

    Standard "cutout" regularization: a square of side `length`, centred at a
    uniformly random pixel and clipped to the image bounds, is set to zero.
    """

    def __init__(self, length):
        # Side length of the square patch to erase.
        self.length = length

    def __call__(self, img):
        """Apply cutout to *img* (a C x H x W tensor) and return it."""
        height, width = img.size(1), img.size(2)
        half = self.length // 2

        # Pick a random centre (same RNG call order as before: row, then col),
        # then clamp the square to the image bounds.
        center_y = np.random.randint(height)
        center_x = np.random.randint(width)
        top = np.clip(center_y - half, 0, height)
        bottom = np.clip(center_y + half, 0, height)
        left = np.clip(center_x - half, 0, width)
        right = np.clip(center_x + half, 0, width)

        # Float mask: zero inside the patch, one everywhere else,
        # broadcast across all channels.
        patch_mask = np.ones((height, width), np.float32)
        patch_mask[top:bottom, left:right] = 0.
        patch_mask = torch.from_numpy(patch_mask).expand_as(img)

        img *= patch_mask  # in-place multiply, matching the original contract
        return img
def data_transforms(dataset, cutout_length):
    """Build (train, valid) torchvision transform pipelines for a dataset.

    Args:
        dataset: dataset name, one of 'cifar10', 'mnist', 'fashionmnist'
            (case-insensitive).
        cutout_length: side length of the Cutout square appended to the
            training pipeline; values <= 0 disable cutout.

    Returns:
        A ``(train_transform, valid_transform)`` pair of
        ``transforms.Compose`` objects. The validation pipeline only
        converts to tensor and normalizes.

    Raises:
        ValueError: if ``dataset`` is not one of the supported names.
    """
    dataset = dataset.lower()
    if dataset == 'cifar10':
        MEAN = [0.49139968, 0.48215827, 0.44653124]
        STD = [0.24703233, 0.24348505, 0.26158768]
        transf = [
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip()
        ]
    elif dataset == 'mnist':
        MEAN = [0.13066051707548254]
        STD = [0.30810780244715075]
        transf = [
            transforms.RandomAffine(degrees=15, translate=(0.1, 0.1), scale=(0.9, 1.1), shear=0.1)
        ]
    elif dataset == 'fashionmnist':
        MEAN = [0.28604063146254594]
        STD = [0.35302426207299326]
        transf = [
            transforms.RandomAffine(degrees=15, translate=(0.1, 0.1), scale=(0.9, 1.1), shear=0.1),
            transforms.RandomVerticalFlip()
        ]
    else:
        raise ValueError('not expected dataset = {}'.format(dataset))

    normalize = [
        transforms.ToTensor(),
        transforms.Normalize(MEAN, STD)
    ]

    # Fix: assemble the full training op list BEFORE composing, instead of
    # mutating Compose.transforms after construction — that relied on an
    # undocumented internal attribute of transforms.Compose.
    train_ops = transf + normalize
    if cutout_length > 0:
        train_ops.append(Cutout(cutout_length))

    train_transform = transforms.Compose(train_ops)
    valid_transform = transforms.Compose(normalize)
    return train_transform, valid_transform