
Commit eb1d690: first commit
shaoshitong committed Oct 24, 2023
Showing 850 changed files with 46,263 additions and 0 deletions.
676 changes: 676 additions & 0 deletions Branch_CIFAR_10/recover/baseline.py

Large diffs are not rendered by default.

138 changes: 138 additions & 0 deletions Branch_CIFAR_10/recover/models/ShuffleNetv1.py
'''ShuffleNet in PyTorch.
See the paper "ShuffleNet: An Extremely Efficient Convolutional Neural Network for Mobile Devices" for more details.
'''
import torch
import torch.nn as nn
import torch.nn.functional as F


class ShuffleBlock(nn.Module):
def __init__(self, groups):
super(ShuffleBlock, self).__init__()
self.groups = groups

def forward(self, x):
        '''Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,W] -> [N,C,H,W]'''
N,C,H,W = x.size()
g = self.groups
return x.view(N,g,C//g,H,W).permute(0,2,1,3,4).reshape(N,C,H,W)


class Bottleneck(nn.Module):
def __init__(self, in_planes, out_planes, stride, groups, is_last=False):
super(Bottleneck, self).__init__()
self.is_last = is_last
self.stride = stride

        mid_planes = out_planes // 4
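        # Per the ShuffleNet paper, the first stage's 24-channel input is too
        # thin to split into groups, so its first 1x1 conv stays dense (g=1).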
g = 1 if in_planes == 24 else groups
self.conv1 = nn.Conv2d(in_planes, mid_planes, kernel_size=1, groups=g, bias=False)
self.bn1 = nn.BatchNorm2d(mid_planes)
self.shuffle1 = ShuffleBlock(groups=g)
self.conv2 = nn.Conv2d(mid_planes, mid_planes, kernel_size=3, stride=stride, padding=1, groups=mid_planes, bias=False)
self.bn2 = nn.BatchNorm2d(mid_planes)
self.conv3 = nn.Conv2d(mid_planes, out_planes, kernel_size=1, groups=groups, bias=False)
self.bn3 = nn.BatchNorm2d(out_planes)

self.shortcut = nn.Sequential()
if stride == 2:
self.shortcut = nn.Sequential(nn.AvgPool2d(3, stride=2, padding=1))

def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.shuffle1(out)
out = F.relu(self.bn2(self.conv2(out)))
out = self.bn3(self.conv3(out))
res = self.shortcut(x)
preact = torch.cat([out, res], 1) if self.stride == 2 else out+res
out = F.relu(preact)
# out = F.relu(torch.cat([out, res], 1)) if self.stride == 2 else F.relu(out+res)
if self.is_last:
return out, preact
else:
return out


class ShuffleNet(nn.Module):
def __init__(self, cfg, num_classes=10):
super(ShuffleNet, self).__init__()
out_planes = cfg['out_planes']
num_blocks = cfg['num_blocks']
groups = cfg['groups']

self.conv1 = nn.Conv2d(3, 24, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(24)
self.in_planes = 24
self.layer1 = self._make_layer(out_planes[0], num_blocks[0], groups)
self.layer2 = self._make_layer(out_planes[1], num_blocks[1], groups)
self.layer3 = self._make_layer(out_planes[2], num_blocks[2], groups)
self.linear = nn.Linear(out_planes[2], num_classes)

def _make_layer(self, out_planes, num_blocks, groups):
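        # The first block of each stage downsamples with stride 2 and
        # concatenates its average-pooled shortcut, so it only needs to emit
        # out_planes - in_planes new channels; the remaining blocks keep the
        # channel count fixed and use a residual add instead.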
layers = []
for i in range(num_blocks):
stride = 2 if i == 0 else 1
cat_planes = self.in_planes if i == 0 else 0
layers.append(Bottleneck(self.in_planes, out_planes-cat_planes,
stride=stride,
groups=groups,
is_last=(i == num_blocks - 1)))
self.in_planes = out_planes
return nn.Sequential(*layers)

def get_feat_modules(self):
feat_m = nn.ModuleList([])
feat_m.append(self.conv1)
feat_m.append(self.bn1)
feat_m.append(self.layer1)
feat_m.append(self.layer2)
feat_m.append(self.layer3)
return feat_m

def get_bn_before_relu(self):
        raise NotImplementedError('ShuffleNet is currently not supported as an "Overhaul" teacher')

def forward(self, x, is_feat=False, preact=False):
out = F.relu(self.bn1(self.conv1(x)))
f0 = out
out, f1_pre = self.layer1(out)
f1 = out
out, f2_pre = self.layer2(out)
f2 = out
out, f3_pre = self.layer3(out)
f3 = out
out = F.avg_pool2d(out, 4)
out = out.view(out.size(0), -1)
f4 = out
out = self.linear(out)

if is_feat:
if preact:
return [f0, f1_pre, f2_pre, f3_pre, f4], out
else:
return [f0, f1, f2, f3, f4], out
else:
return out


def ShuffleV1(**kwargs):
cfg = {
'out_planes': [240, 480, 960],
'num_blocks': [4, 8, 4],
'groups': 3
}
return ShuffleNet(cfg, **kwargs)


if __name__ == '__main__':

x = torch.randn(2, 3, 32, 32)
net = ShuffleV1(num_classes=100)
import time
a = time.time()
feats, logit = net(x, is_feat=True, preact=True)
b = time.time()
print(b - a)
for f in feats:
print(f.shape, f.min().item())
print(logit.shape)
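
    # Illustrative sanity check (not in the original script): channel shuffle
    # with g groups reorders channels by reading the (g, C/g) grid column-wise.
    sb = ShuffleBlock(groups=3)
    t = torch.arange(6, dtype=torch.float32).view(1, 6, 1, 1)
    # expected order for C=6, g=3: [0., 2., 4., 1., 3., 5.]
    print(sb(t).view(-1))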
218 changes: 218 additions & 0 deletions Branch_CIFAR_10/recover/models/ShuffleNetv2.py
'''ShuffleNetV2 in PyTorch.
See the paper "ShuffleNet V2: Practical Guidelines for Efficient CNN Architecture Design" for more details.
'''
import torch
import torch.nn as nn
import torch.nn.functional as F


class ShuffleBlock(nn.Module):
def __init__(self, groups=2):
super(ShuffleBlock, self).__init__()
self.groups = groups

def forward(self, x):
        '''Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,W] -> [N,C,H,W]'''
N, C, H, W = x.size()
g = self.groups
return x.view(N, g, C//g, H, W).permute(0, 2, 1, 3, 4).reshape(N, C, H, W)


class SplitBlock(nn.Module):
def __init__(self, ratio):
super(SplitBlock, self).__init__()
self.ratio = ratio

def forward(self, x):
c = int(x.size(1) * self.ratio)
return x[:, :c, :, :], x[:, c:, :, :]


class BasicBlock(nn.Module):
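    # ShuffleNet v2 unit: split the channels, transform one half with a
    # 1x1 -> depthwise 3x3 -> 1x1 stack, concatenate with the untouched half,
    # then channel-shuffle so information mixes across the split.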
def __init__(self, in_channels, split_ratio=0.5, is_last=False):
super(BasicBlock, self).__init__()
self.is_last = is_last
self.split = SplitBlock(split_ratio)
in_channels = int(in_channels * split_ratio)
self.conv1 = nn.Conv2d(in_channels, in_channels,
kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(in_channels)
self.conv2 = nn.Conv2d(in_channels, in_channels,
kernel_size=3, stride=1, padding=1, groups=in_channels, bias=False)
self.bn2 = nn.BatchNorm2d(in_channels)
self.conv3 = nn.Conv2d(in_channels, in_channels,
kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(in_channels)
self.shuffle = ShuffleBlock()

def forward(self, x):
x1, x2 = self.split(x)
out = F.relu(self.bn1(self.conv1(x2)))
out = self.bn2(self.conv2(out))
preact = self.bn3(self.conv3(out))
out = F.relu(preact)
# out = F.relu(self.bn3(self.conv3(out)))
preact = torch.cat([x1, preact], 1)
out = torch.cat([x1, out], 1)
out = self.shuffle(out)
if self.is_last:
return out, preact
else:
return out


class DownBlock(nn.Module):
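    # Spatial downsampling unit: both branches contain a stride-2 depthwise
    # conv, and their outputs are concatenated, so each branch produces
    # mid_channels = out_channels // 2 of the output.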
def __init__(self, in_channels, out_channels):
super(DownBlock, self).__init__()
mid_channels = out_channels // 2
# left
self.conv1 = nn.Conv2d(in_channels, in_channels,
kernel_size=3, stride=2, padding=1, groups=in_channels, bias=False)
self.bn1 = nn.BatchNorm2d(in_channels)
self.conv2 = nn.Conv2d(in_channels, mid_channels,
kernel_size=1, bias=False)
self.bn2 = nn.BatchNorm2d(mid_channels)
# right
self.conv3 = nn.Conv2d(in_channels, mid_channels,
kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(mid_channels)
self.conv4 = nn.Conv2d(mid_channels, mid_channels,
kernel_size=3, stride=2, padding=1, groups=mid_channels, bias=False)
self.bn4 = nn.BatchNorm2d(mid_channels)
self.conv5 = nn.Conv2d(mid_channels, mid_channels,
kernel_size=1, bias=False)
self.bn5 = nn.BatchNorm2d(mid_channels)

self.shuffle = ShuffleBlock()

def forward(self, x):
# left
out1 = self.bn1(self.conv1(x))
out1 = F.relu(self.bn2(self.conv2(out1)))
# right
out2 = F.relu(self.bn3(self.conv3(x)))
out2 = self.bn4(self.conv4(out2))
out2 = F.relu(self.bn5(self.conv5(out2)))
# concat
out = torch.cat([out1, out2], 1)
out = self.shuffle(out)
return out


class ShuffleNetV2(nn.Module):
def __init__(self, net_size, num_classes=10, img_size=32):
super(ShuffleNetV2, self).__init__()
out_channels = configs[net_size]['out_channels']
num_blocks = configs[net_size]['num_blocks']

# self.conv1 = nn.Conv2d(3, 24, kernel_size=3,
# stride=1, padding=1, bias=False)
self.conv1 = nn.Conv2d(3, 24, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(24)
self.in_channels = 24
self.layer1 = self._make_layer(out_channels[0], num_blocks[0])
self.layer2 = self._make_layer(out_channels[1], num_blocks[1])
self.layer3 = self._make_layer(out_channels[2], num_blocks[2])
self.conv2 = nn.Conv2d(out_channels[2], out_channels[3],
kernel_size=1, stride=1, padding=0, bias=False)
self.bn2 = nn.BatchNorm2d(out_channels[3])
self.linear = nn.Linear(out_channels[3], num_classes)

def _make_layer(self, out_channels, num_blocks):
layers = [DownBlock(self.in_channels, out_channels)]
for i in range(num_blocks):
layers.append(BasicBlock(out_channels, is_last=(i == num_blocks - 1)))
self.in_channels = out_channels
return nn.Sequential(*layers)

def get_feat_modules(self):
feat_m = nn.ModuleList([])
feat_m.append(self.conv1)
feat_m.append(self.bn1)
feat_m.append(self.layer1)
feat_m.append(self.layer2)
feat_m.append(self.layer3)
return feat_m

def get_bn_before_relu(self):
        raise NotImplementedError('ShuffleNetV2 is currently not supported as an "Overhaul" teacher')

def forward(self, x, is_feat=False, preact=False):
out = F.relu(self.bn1(self.conv1(x)))
# out = F.max_pool2d(out, 3, stride=2, padding=1)
f0 = out
out, f1_pre = self.layer1(out)
f1 = out
out, f2_pre = self.layer2(out)
f2 = out
out, f3_pre = self.layer3(out)
f3 = out
out = F.relu(self.bn2(self.conv2(out)))
# out = F.avg_pool2d(out, 4)
out = F.avg_pool2d(out, out.size(3))
out = out.view(out.size(0), -1)
f4 = out
out = self.linear(out)
if is_feat:
if preact:
return [f0, f1_pre, f2_pre, f3_pre, f4], out
else:
return [f0, f1, f2, f3, f4], out
else:
return out


configs = {
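    # Keys are width multipliers (net_size). 'out_channels' lists the three
    # stage widths plus the final 1x1 conv width; 'num_blocks' counts the
    # BasicBlocks that follow each stage's leading DownBlock.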
0.2: {
'out_channels': (40, 80, 160, 512),
'num_blocks': (3, 3, 3)
},

0.3: {
'out_channels': (40, 80, 160, 512),
'num_blocks': (3, 7, 3)
},

0.5: {
'out_channels': (48, 96, 192, 1024),
'num_blocks': (3, 7, 3)
},

1: {
'out_channels': (116, 232, 464, 1024),
'num_blocks': (3, 7, 3)
},
1.5: {
'out_channels': (176, 352, 704, 1024),
'num_blocks': (3, 7, 3)
},
2: {
'out_channels': (224, 488, 976, 2048),
'num_blocks': (3, 7, 3)
}
}


def ShuffleV2(**kwargs):
model = ShuffleNetV2(net_size=1, **kwargs)
return model

def ShuffleV2_0_3(**kwargs):
model = ShuffleNetV2(net_size=0.3, **kwargs)
return model

def ShuffleV2_0_5(**kwargs):
model = ShuffleNetV2(net_size=0.5, **kwargs)
return model

if __name__ == '__main__':
net = ShuffleV2(num_classes=100)
x = torch.randn(3, 3, 32, 32)
import time
a = time.time()
feats, logit = net(x, is_feat=True, preact=True)
b = time.time()
print(b - a)
for f in feats:
print(f.shape, f.min().item())
print(logit.shape)
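
    # Illustrative extra check (not in the original script): smaller width
    # multipliers should yield strictly fewer parameters.
    count = lambda m: sum(p.numel() for p in m.parameters())
    for ctor in (ShuffleV2_0_3, ShuffleV2_0_5, ShuffleV2):
        print(ctor.__name__, count(ctor(num_classes=100)))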