Remove dead code
parent a7b08c786e
commit b8b3255eeb
@@ -7,7 +7,7 @@ nc: 38 # number of classes
 # YOLOv8.0n backbone
 backbone:
   # [from, repeats, module, args]
-  - [-1, 1, ResNet18, []] # 0-P1/2
+  - [-1, 1, resnet18, [False]] # 0-P1/2

 # YOLOv8.0n head
 head:
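For context: in a YOLOv8 model YAML, the third field of a backbone entry names the module to instantiate and the fourth is its argument list, so this hunk swaps the `ResNet18` class (deleted below) for a `resnet18` module taking one boolean argument. That replacement module is not part of this commit; the following is only a minimal sketch of what it could look like, assuming the `False` argument is a `pretrained` flag and that torchvision backs the implementation (both are assumptions, not confirmed here):

import torch.nn as nn
from torchvision.models import resnet18 as tv_resnet18  # assumed backing implementation

class resnet18(nn.Module):
    # Hypothetical wrapper; assumes the YAML arg [False] means pretrained=False.
    def __init__(self, pretrained=False):
        super().__init__()
        m = tv_resnet18(weights='IMAGENET1K_V1' if pretrained else None)
        # Keep the convolutional stages; drop avgpool/fc so feature maps survive.
        self.stem = nn.Sequential(m.conv1, m.bn1, m.relu, m.maxpool)
        self.stages = nn.Sequential(m.layer1, m.layer2, m.layer3, m.layer4)

    def forward(self, x):
        return self.stages(self.stem(x))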
@@ -1,182 +0,0 @@
import torch
import torch.nn as nn
import torch.nn.functional as F

__all__ = ['ResNet18', 'ResNet34', 'ResNet50']

'''------------- 1. BasicBlock -----------------------------'''
# Basic residual block used by ResNet18 and ResNet34
class BasicBlock(nn.Module):
    def __init__(self, inchannel, outchannel, stride=1):
        super(BasicBlock, self).__init__()
        self.left = nn.Sequential(
            nn.Conv2d(inchannel, outchannel, kernel_size=3, stride=stride, padding=1, bias=False),
            nn.BatchNorm2d(outchannel),
            nn.ReLU(inplace=True),  # inplace=True modifies the tensor in place; the default False allocates a new one
            nn.Conv2d(outchannel, outchannel, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(outchannel)
        )
        self.shortcut = nn.Sequential()
        # The dashed shortcuts in the paper: project with a 1x1 conv when the shape changes
        if stride != 1 or inchannel != outchannel:
            self.shortcut = nn.Sequential(
                nn.Conv2d(inchannel, outchannel, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(outchannel)
            )

    def forward(self, x):
        out = self.left(x)       # main branch; the input x is kept for the residual connection
        out += self.shortcut(x)  # the core of ResNet: add the (possibly projected) input to the output
        out = F.relu(out)
        return out
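
# Illustrative example (added, not in the original file): a stride-2 BasicBlock halves
# the spatial size and widens the channels through the 1x1 shortcut projection, e.g.
#   BasicBlock(64, 128, stride=2)(torch.randn(1, 64, 32, 32)).shape == (1, 128, 16, 16)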

'''------------- 2. Bottleneck -----------------------------'''
# Bottleneck residual block used by ResNet50 and deeper variants
class Bottleneck(nn.Module):
    def __init__(self, inchannel, outchannel, stride=1):
        super(Bottleneck, self).__init__()
        self.left = nn.Sequential(
            nn.Conv2d(inchannel, int(outchannel / 4), kernel_size=1, stride=stride, padding=0, bias=False),
            nn.BatchNorm2d(int(outchannel / 4)),
            nn.ReLU(inplace=True),
            nn.Conv2d(int(outchannel / 4), int(outchannel / 4), kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(int(outchannel / 4)),
            nn.ReLU(inplace=True),
            nn.Conv2d(int(outchannel / 4), outchannel, kernel_size=1, stride=1, padding=0, bias=False),
            nn.BatchNorm2d(outchannel),
        )
        self.shortcut = nn.Sequential()
        if stride != 1 or inchannel != outchannel:
            self.shortcut = nn.Sequential(
                nn.Conv2d(inchannel, outchannel, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(outchannel)
            )

    def forward(self, x):
        out = self.left(x)
        out += self.shortcut(x)
        out = F.relu(out)
        return out
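
# Illustrative example (added, not in the original file): the 1x1-3x3-1x1 stack runs
# at outchannel/4 mid-channels, so Bottleneck(256, 512, stride=2) computes through
# 128 channels before expanding back to 512, far cheaper than two full 3x3 convs.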

'''------------- ResNet18 ---------------'''
class ResNet_18(nn.Module):
    def __init__(self, ResidualBlock, num_classes=10):  # num_classes is accepted but unused
        super(ResNet_18, self).__init__()
        self.inchannel = 64
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
        )
        self.layer1 = self.make_layer(ResidualBlock, 64, 2, stride=1)
        self.layer2 = self.make_layer(ResidualBlock, 128, 2, stride=2)
        self.layer3 = self.make_layer(ResidualBlock, 256, 2, stride=2)
        self.layer4 = self.make_layer(ResidualBlock, 512, 2, stride=2)
        # Probe the output channel count with a dummy forward pass
        self.channel = [self.forward(torch.randn(1, 3, 224, 224)).size(1)]

    def make_layer(self, block, channels, num_blocks, stride):
        strides = [stride] + [1] * (num_blocks - 1)  # only the first block of a stage downsamples
        layers = []
        for stride in strides:
            layers.append(block(self.inchannel, channels, stride))
            self.inchannel = channels
        return nn.Sequential(*layers)
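
    # Illustrative example (added, not in the original file): with self.inchannel == 64,
    # make_layer(BasicBlock, 128, 2, stride=2) yields strides == [2, 1] and builds
    # BasicBlock(64, 128, 2) followed by BasicBlock(128, 128, 1).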

    def forward(self, x):           # 3*32*32 (shape comments assume a 32*32 input)
        out = self.conv1(x)         # 64*32*32
        out = self.layer1(out)      # 64*32*32
        out = self.layer2(out)      # 128*16*16
        out = self.layer3(out)      # 256*8*8
        out = self.layer4(out)      # 512*4*4
        out = F.avg_pool2d(out, 4)  # 512*1*1
        return out

'''------------- ResNet34 ---------------'''
class ResNet_34(nn.Module):
    def __init__(self, ResidualBlock, num_classes=10):  # num_classes is accepted but unused
        super(ResNet_34, self).__init__()
        self.inchannel = 64
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
        )
        self.layer1 = self.make_layer(ResidualBlock, 64, 3, stride=1)
        self.layer2 = self.make_layer(ResidualBlock, 128, 4, stride=2)
        self.layer3 = self.make_layer(ResidualBlock, 256, 6, stride=2)
        self.layer4 = self.make_layer(ResidualBlock, 512, 3, stride=2)
        # Probe the output feature width with a dummy forward pass
        self.channel = [self.forward(torch.randn(1, 3, 224, 224)).size(1)]

    def make_layer(self, block, channels, num_blocks, stride):
        strides = [stride] + [1] * (num_blocks - 1)  # only the first block of a stage downsamples
        layers = []
        for stride in strides:
            layers.append(block(self.inchannel, channels, stride))
            self.inchannel = channels
        return nn.Sequential(*layers)

    def forward(self, x):                # 3*32*32 (shape comments assume a 32*32 input)
        out = self.conv1(x)              # 64*32*32
        out = self.layer1(out)           # 64*32*32
        out = self.layer2(out)           # 128*16*16
        out = self.layer3(out)           # 256*8*8
        out = self.layer4(out)           # 512*4*4
        out = F.avg_pool2d(out, 4)       # 512*1*1
        out = out.view(out.size(0), -1)  # flatten to 512
        return out

'''------------- ResNet50 ---------------'''
class ResNet_50(nn.Module):
    def __init__(self, ResidualBlock, num_classes=10):  # num_classes is accepted but unused
        super(ResNet_50, self).__init__()
        self.inchannel = 64
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
        )
        self.layer1 = self.make_layer(ResidualBlock, 256, 3, stride=1)
        self.layer2 = self.make_layer(ResidualBlock, 512, 4, stride=2)
        self.layer3 = self.make_layer(ResidualBlock, 1024, 6, stride=2)
        self.layer4 = self.make_layer(ResidualBlock, 2048, 3, stride=2)
        # Probe the output channel count with a dummy forward pass
        self.channel = [self.forward(torch.randn(1, 3, 224, 224)).size(1)]

    def make_layer(self, block, channels, num_blocks, stride):
        strides = [stride] + [1] * (num_blocks - 1)  # only the first block of a stage downsamples
        layers = []
        for stride in strides:
            layers.append(block(self.inchannel, channels, stride))
            self.inchannel = channels
        return nn.Sequential(*layers)

    def forward(self, x):           # 3*32*32 (shape comments assume a 32*32 input)
        out = self.conv1(x)         # 64*32*32
        out = self.layer1(out)      # 256*32*32
        out = self.layer2(out)      # 512*16*16
        out = self.layer3(out)      # 1024*8*8
        out = self.layer4(out)      # 2048*4*4
        out = F.avg_pool2d(out, 4)  # 2048*1*1
        return out


def ResNet18():
    return ResNet_18(BasicBlock)


def ResNet34():
    return ResNet_34(BasicBlock)


def ResNet50():
    return ResNet_50(Bottleneck)


if __name__ == '__main__':
    model = ResNet18()
    inputs = torch.randn((1, 3, 224, 224))
    res = model(inputs)
    print(res.size())  # torch.Size([1, 512, 7, 7])
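    # Extra sanity checks (illustrative additions, not part of the original file):
    # the constructor-time channel probe should match the forward output width.
    print(model.channel)  # [512]
    blk = BasicBlock(64, 128, stride=2)
    print(blk(torch.randn(1, 64, 32, 32)).size())  # torch.Size([1, 128, 16, 16])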