utils.py (forked from kekmodel/MPL-pytorch)
import logging
import os
import shutil
from collections import OrderedDict

import torch
from torch import distributed as dist
from torch import nn
from torch.nn import functional as F

logger = logging.getLogger(__name__)

def reduce_tensor(tensor, n):
    """Sum a tensor across all distributed workers, then divide by n."""
    rt = tensor.clone()
    dist.all_reduce(rt, op=dist.ReduceOp.SUM)
    rt /= n
    return rt
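
# Usage sketch (an assumption, not from the original file): average a scalar
# metric across workers once torch.distributed is initialized, e.g.
#   loss_avg = reduce_tensor(loss.detach(), dist.get_world_size())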

def create_loss_fn(args):
    """Build the training criterion: label-smoothed CE if requested, else plain CE."""
    if args.label_smoothing > 0:
        criterion = SmoothCrossEntropy(alpha=args.label_smoothing)
    else:
        criterion = nn.CrossEntropyLoss()
    return criterion.to(args.device)

def module_load_state_dict(model, state_dict):
    """Load a checkpoint saved from nn.DataParallel/DDP by stripping the `module.` prefix."""
    new_state_dict = OrderedDict()
    for k, v in state_dict.items():
        name = k[7:]  # remove the 7-character `module.` prefix
        new_state_dict[name] = v
    model.load_state_dict(new_state_dict)

def model_load_state_dict(model, state_dict):
    """Load a state dict, falling back to prefix-stripping for wrapped models."""
    try:
        model.load_state_dict(state_dict)
    except RuntimeError:
        module_load_state_dict(model, state_dict)

def save_checkpoint(args, state, is_best, finetune=False):
    """Save the latest checkpoint and copy it to `*_best` when it is the best so far."""
    os.makedirs(args.save_path, exist_ok=True)
    if finetune:
        name = f'{args.name}_finetune'
    else:
        name = args.name
    filename = f'{args.save_path}/{name}_last.pth.tar'
    torch.save(state, filename, _use_new_zipfile_serialization=False)
    if is_best:
        # use `name` so a finetune run does not overwrite the pretraining best checkpoint
        shutil.copyfile(filename, f'{args.save_path}/{name}_best.pth.tar')

def accuracy(output, target, topk=(1,)):
    """Compute top-k accuracy (in percent) for the given logits and targets."""
    output = output.to(torch.device('cpu'))
    target = target.to(torch.device('cpu'))
    maxk = max(topk)
    batch_size = target.shape[0]

    _, idx = output.sort(dim=1, descending=True)
    pred = idx.narrow(1, 0, maxk).t()
    correct = pred.eq(target.reshape(1, -1).expand_as(pred))

    res = []
    for k in topk:
        correct_k = correct[:k].reshape(-1).float().sum(dim=0, keepdim=True)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
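
# Usage sketch (shapes are illustrative assumptions): with `logits` of shape
# (batch, num_classes) and integer `labels` of shape (batch,),
#   top1, top5 = accuracy(logits, labels, topk=(1, 5))
# returns one-element tensors holding percentages in [0, 100].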

class SmoothCrossEntropy(nn.Module):
    """Cross-entropy with label smoothing: the target keeps 1 - alpha mass on
    the true class and spreads alpha uniformly over all classes."""

    def __init__(self, alpha=0.1):
        super().__init__()
        self.alpha = alpha

    def forward(self, logits, labels):
        num_classes = logits.shape[-1]
        alpha_div_k = self.alpha / num_classes
        target_probs = F.one_hot(labels, num_classes=num_classes).float() * \
            (1. - self.alpha) + alpha_div_k
        loss = -(target_probs * torch.log_softmax(logits, dim=-1)).sum(dim=-1)
        return loss.mean()
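
# Worked example (assumed values, for illustration): with alpha=0.1 and 10
# classes, the true class gets (1 - 0.1) + 0.1/10 = 0.91 probability mass and
# every other class gets 0.01, so the target still sums to 0.91 + 9 * 0.01 = 1.0.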

class AverageMeter(object):
    """Computes and stores the average and current value.

    Imported from https://github.com/pytorch/examples/blob/master/imagenet/main.py#L247-L262
    """

    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
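

if __name__ == '__main__':
    # Minimal smoke test, a sketch with random data (not part of the original repo).
    logits = torch.randn(8, 10)
    labels = torch.randint(0, 10, (8,))

    criterion = SmoothCrossEntropy(alpha=0.1)
    print('smoothed loss:', criterion(logits, labels).item())

    top1, top5 = accuracy(logits, labels, topk=(1, 5))
    print(f'top-1: {top1.item():.1f}%, top-5: {top5.item():.1f}%')

    meter = AverageMeter()
    for v in (1.0, 2.0, 3.0):
        meter.update(v)
    print('running average:', meter.avg)  # 2.0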