-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathutil.py
111 lines (88 loc) · 3.38 KB
/
util.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
from __future__ import print_function
import math
import torch
import numpy as np
# NOTE: epochs are 1-based (the first epoch is epoch 1)
def adjust_learning_rate(epoch, opt, optimizer):
    """Set the learning rate of every param group for the given (1-based) epoch.

    Two schedules, selected by `opt.cos`:
      * cosine annealing from `opt.learning_rate` down to 0 over `opt.epochs`;
      * otherwise step decay: multiply by `opt.lr_decay_rate` once for each
        milestone in `opt.lr_decay_epochs` that the epoch has passed.
    In the step schedule nothing is changed before the first milestone.
    """
    if getattr(opt, 'cos', False):
        # epoch - 1 because the schedule is 1-based
        new_lr = opt.learning_rate * 0.5 * (1. + math.cos(math.pi * (epoch - 1) / opt.epochs))
    else:
        num_decays = np.sum(epoch > np.asarray(opt.lr_decay_epochs))
        if num_decays <= 0:
            return
        new_lr = opt.learning_rate * (opt.lr_decay_rate ** num_decays)
    print('LR: {}'.format(new_lr))
    for group in optimizer.param_groups:
        group['lr'] = new_lr
class AverageMeter(object):
    """Computes and stores the average and current value.

    Tracks the most recent value (`val`), the running `sum`, the number of
    samples seen (`count`), and the running mean (`avg`).
    """
    def __init__(self):
        # reset() initializes every statistic; the duplicate assignments
        # that used to precede this call were dead code and were removed.
        self.reset()

    def reset(self):
        """Zero all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times (e.g. n = batch size for a batch mean)."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
class AverageMeterV2(object):
    """Computes and stores the average and current value.

    Like AverageMeter, but carries a display `name` and a format spec `fmt`
    so the meter can be pretty-printed via str().
    """
    def __init__(self, name, fmt=':f'):
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        """Zero all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running mean."""
        self.val = val
        self.count = self.count + n
        self.sum = self.sum + val * n
        self.avg = self.sum / self.count

    def __str__(self):
        # e.g. fmt=':.2f' renders as "name 1.23 (4.56)" — latest (average)
        template = ''.join(['{name} {val', self.fmt, '} ({avg', self.fmt, '})'])
        return template.format(**self.__dict__)
def accuracy(output, target, topk=(1,)):
    """Computes the accuracy over the k top predictions for the specified values of k.

    Args:
        output: (batch, num_classes) tensor of class scores.
        target: (batch,) tensor of ground-truth class indices.
        topk: ks to evaluate, e.g. (1, 5).

    Returns:
        List of 1-element tensors, one per k, each the top-k accuracy in percent.
    """
    with torch.no_grad():
        k_max = max(topk)
        batch = target.size(0)
        # (batch, k_max) best-class indices, transposed to (k_max, batch) so
        # row r holds every sample's r-th guess
        preds = output.topk(k_max, 1, True, True)[1].t()
        hits = preds.eq(target.view(1, -1).expand_as(preds))
        scores = []
        for k in topk:
            n_correct = hits[:k].reshape(-1).float().sum(0, keepdim=True)
            scores.append(n_correct.mul_(100.0 / batch))
        return scores
def subset_classes(dataset, num_classes=10):
    """Restrict `dataset` in place to a random subset of `num_classes` classes.

    The subset is chosen with a fixed seed so it is reproducible across runs.
    Updates `dataset.classes`, `dataset.class_to_idx`, and `dataset.samples`,
    relabeling the kept classes with contiguous indices 0..num_classes-1 and
    dropping samples of discarded classes.

    NOTE: seeds NumPy's *global* RNG as a side effect (kept for backward
    compatibility with existing callers).
    """
    np.random.seed(1234)
    all_classes = sorted(dataset.class_to_idx.items(), key=lambda x: x[1])
    subset = [all_classes[i] for i in np.random.permutation(len(all_classes))[:num_classes]]
    subset = sorted(subset, key=lambda x: x[1])
    # BUG FIX: this used to write `dataset.classes_to_idx` (typo), leaving the
    # real `class_to_idx` attribute stale and inconsistent with `classes`.
    dataset.class_to_idx = {c: i for i, (c, _) in enumerate(subset)}
    dataset.classes = [c for c, _ in subset]
    orig_to_new = {orig: new for new, (_, orig) in enumerate(subset)}
    dataset.samples = [(p, orig_to_new[i]) for p, i in dataset.samples if i in orig_to_new]
def get_shuffle_ids(bsz):
    """Build index pairs for ShuffleBN: a random permutation and its inverse.

    Returns (forward_inds, backward_inds), both CUDA LongTensors of length
    `bsz`; indexing with `backward_inds` after `forward_inds` restores the
    original batch order.
    """
    shuffled_order = torch.randperm(bsz).long().cuda()
    restore_order = torch.zeros(bsz).long().cuda()
    # write each original position into the slot it was shuffled to,
    # which yields the inverse permutation
    restore_order.index_copy_(0, shuffled_order, torch.arange(bsz).long().cuda())
    return shuffled_order, restore_order
if __name__ == '__main__':
    # Minimal smoke check when run as a script: constructing an AverageMeter
    # verifies the module imports and the meter's __init__/reset path runs.
    meter = AverageMeter()