• Advances in Object Detection and Instance Segmentation in the Post-R-CNN Era


    https://mp.weixin.qq.com/s?__biz=MzA3MzI4MjgzMw==&mid=2650736740&idx=3&sn=cdce446703e69b47cf48f12b3d451afc&chksm=871acc1ab06d450ccde3148df96436c98adb2de3b6a34559b95af322c5186513460329dc20bd&pass_ticket=fRFENbG47o6E12opTV0zxlHKhCFDxvRrZMSQpTw%2BcZ9h0Z38WqvICgwk5ynPYCBm#rd
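
    The linked article covers object detection and instance segmentation after R-CNN; models in that line (Faster R-CNN, Mask R-CNN and their successors) typically sit on top of a ResNet backbone like the one implemented below. As a quick illustration, not taken from the article, torchvision ships a COCO-pretrained Mask R-CNN with a ResNet-50 FPN backbone that can be run off the shelf:

    import torch
    import torchvision

    # COCO-pretrained Mask R-CNN; the backbone is a ResNet-50 with an FPN neck
    model = torchvision.models.detection.maskrcnn_resnet50_fpn(pretrained=True)
    model.eval()

    # Inputs are a list of 3xHxW float tensors with values in [0, 1];
    # the random image here is only a placeholder
    image = torch.rand(3, 600, 800)
    with torch.no_grad():
        outputs = model([image])
    # Each output dict holds 'boxes', 'labels', 'scores' and per-instance 'masks'
    print(outputs[0]['boxes'].shape, outputs[0]['masks'].shape)

    The rest of the post builds a small ResNet from scratch in PyTorch: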

    import torch.nn as nn

    # 3x3 convolution that keeps the spatial size (padding=1); stride controls downsampling
    def conv3x3(in_channels, out_channels, stride=1):
        return nn.Conv2d(in_channels, out_channels, kernel_size=3,
                         stride=stride, padding=1, bias=False)
    
    class ResidualBlock(nn.Module):
        def __init__(self, in_channels, out_channels, stride=1, downsample=None):
            super(ResidualBlock, self).__init__()
            self.conv1 = conv3x3(in_channels, out_channels, stride)
            self.bn1 = nn.BatchNorm2d(out_channels)
            self.relu = nn.ReLU(inplace=True)
            self.conv2 = conv3x3(out_channels, out_channels)  # second conv in the block always uses stride 1
            self.bn2 = nn.BatchNorm2d(out_channels)
            self.downsample = downsample
        
        def forward(self, x):
            residual = x
            out = self.conv1(x)
            out = self.bn1(out)
            out = self.relu(out)
            out = self.conv2(out)
            out = self.bn2(out)
            if self.downsample is not None:
                residual = self.downsample(residual)
            
            out += residual
            out = self.relu(out)
            return out
    
    class ResNet(nn.Module):
        def __init__(self, block, layers, num_classes=10):
            super(ResNet, self).__init__()
            self.in_channels = 16
            self.conv = conv3x3(1, 16)   # single-channel (grayscale) input
            self.bn = nn.BatchNorm2d(16)
            self.relu = nn.ReLU(inplace=True)
            self.layers1 = self.make_layers(block, 16, layers[0])
            # stride 2 halves the feature map in the second and third stages,
            # so a 32x32 input reaches the 8x8 average pool below as 64x8x8
            self.layers2 = self.make_layers(block, 32, layers[1], 2)
            self.layers3 = self.make_layers(block, 64, layers[2], 2)
            self.avg_pool = nn.AvgPool2d(8)
            self.fc = nn.Linear(64, num_classes)
            
        def make_layers(self, block, out_channels, blocks, stride=1):
            # Project the shortcut whenever the spatial size or channel count changes
            downsample = None
            if (stride != 1) or (self.in_channels != out_channels):
                downsample = nn.Sequential(
                    conv3x3(self.in_channels, out_channels, stride=stride),
                    nn.BatchNorm2d(out_channels))

            # Only the first block of a stage downsamples and changes channels;
            # the remaining blocks keep the shape and need no projection
            layers = [block(self.in_channels, out_channels, stride, downsample)]
            self.in_channels = out_channels
            for _ in range(1, blocks):
                layers.append(block(out_channels, out_channels))

            return nn.Sequential(*layers)
        
        def forward(self, x):
            out = self.conv(x)
            out = self.bn(out)
            out = self.relu(out)
            out = self.layers1(out)
            out = self.layers2(out)
            out = self.layers3(out)
            out = self.avg_pool(out)
            out = out.view(out.size(0), -1)  # flatten to (batch, 64) for the classifier
            out = self.fc(out)
            
            return out
    
    resnet = ResNet(ResidualBlock, layers=[2, 2, 2])  # three stages of two residual blocks each
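
    A quick shape check of the model above (a minimal sketch; the single-channel 32x32 input size is an assumption, e.g. MNIST digits zero-padded to 32x32):

    import torch

    x = torch.randn(4, 1, 32, 32)   # batch of 4 assumed 1x32x32 images
    logits = resnet(x)
    print(logits.shape)             # torch.Size([4, 10])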
  • Original article: https://www.cnblogs.com/573177885qq/p/8417152.html