• S3Pool in PyTorch


    Stochastic pooling (S3Pool): a stride-1 max pool followed by stochastic spatial downsampling at training time, with deterministic average pooling at inference.

    import torch
    import torch.nn as nn

    class StochasticPool2DLayer(nn.Module):
        def __init__(self, pool_size=2, maxpool=True, training=False, grid_size=None, **kwargs):
            super(StochasticPool2DLayer, self).__init__(**kwargs)
            self.pool_size = pool_size
            self.maxpool_flag = maxpool
            # nn.Module already defines self.training; assigning it here does
            # the same thing as calling self.train(training).
            self.training = training
            self.grid_size = grid_size if grid_size else pool_size

            # Stage 1: stride-1 max pooling shrinks each spatial dim by
            # pool_size-1; the (0,1,0,1) zero pad below restores the original
            # size when pool_size=2.
            self.Maxpool = nn.MaxPool2d(kernel_size=self.pool_size, stride=1)
            # Inference path: deterministic average pooling instead of sampling.
            self.Avgpool = nn.AvgPool2d(kernel_size=self.pool_size,
                                        stride=self.pool_size,
                                        padding=self.pool_size // 2)
            self.padding = nn.ConstantPad2d((0, 1, 0, 1), 0)

        def forward(self, x, **kwargs):
            if self.maxpool_flag:
                x = self.Maxpool(x)
                x = self.padding(x)
            if not self.training:
                return self.Avgpool(x)
            else:
                # Stage 2 (training): stochastic spatial sampling. Split each
                # spatial dim into grids of grid_size and keep
                # grid_size // pool_size randomly chosen rows/columns per grid.
                w, h = x.shape[2:]
                n_w, n_h = w // self.grid_size, h // self.grid_size
                n_sample_per_grid = self.grid_size // self.pool_size
                idx_w = []
                idx_h = []
                if w > 2 and h > 2:
                    for i in range(n_w):
                        offset = self.grid_size * i
                        # The last grid absorbs any leftover rows.
                        this_n = self.grid_size if i < n_w - 1 else w - offset
                        this_idx, _ = torch.sort(torch.randperm(this_n)[:n_sample_per_grid])
                        idx_w.append(offset + this_idx)
                    for i in range(n_h):
                        offset = self.grid_size * i
                        this_n = self.grid_size if i < n_h - 1 else h - offset
                        this_idx, _ = torch.sort(torch.randperm(this_n)[:n_sample_per_grid])
                        idx_h.append(offset + this_idx)
                    idx_w = torch.cat(idx_w, dim=0)
                    idx_h = torch.cat(idx_h, dim=0)
                else:
                    idx_w = torch.LongTensor([0])
                    idx_h = torch.LongTensor([0])

                # Index on the input's own device rather than hard-coding
                # .cuda(), so the layer also works on CPU.
                output = x[:, :, idx_w.to(x.device)][:, :, :, idx_h.to(x.device)]
                return output

    if __name__ == '__main__':
        torch.manual_seed(123)  # reproducible sampling
        a = torch.randn(1, 3, 4, 4)
        print(a)
        layer = StochasticPool2DLayer(pool_size=2, maxpool=True, training=True)
        b = layer(a)
        print(b)
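
    With the 1×3×4×4 input above and pool_size=2, the stride-1 max pool shrinks the feature map to 3×3, the (0,1,0,1) zero pad restores it to 4×4, and the stochastic sampling stage then draws one row and one column from each 2×2 grid, so b has shape 1×3×2×2.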
        
    

    Stochastic pooling
    I am not keen on this kind of pooling; it is even harder to interpret than ordinary pooling, and what do you do about a trained model exhibiting random behavior?
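
    Worth noting, though: in this implementation the randomness is confined to training mode. With training=False the forward pass routes through plain average pooling, so a deployed model is deterministic. A minimal sketch of that check (assuming the StochasticPool2DLayer class above is in scope; the 8×8 input size is an arbitrary choice):

    import torch

    # Eval mode: training=False routes forward() through Avgpool, not sampling.
    layer = StochasticPool2DLayer(pool_size=2, maxpool=True, training=False)

    x = torch.randn(1, 3, 8, 8)
    out1 = layer(x)
    out2 = layer(x)

    # Two forward passes on the same input give identical outputs at inference.
    assert torch.equal(out1, out2)
    print(out1.shape)  # torch.Size([1, 3, 5, 5])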

  • Original post: https://www.cnblogs.com/o-v-o/p/9975420.html