[Semantic Segmentation] PyTorch implementations of the GCN module from "Large Kernel Matters"


The GCN (Global Convolutional Network) module is fairly simple to implement. I found two slightly different implementations on GitHub.

Implementation one: https://github.com/ycszen/pytorch-segmentation/blob/master/gcn.py

import torch.nn as nn


class GCN(nn.Module):
    """GCN block: a large ks x ks convolution is approximated by two parallel
    branches of separable 1D convolutions (ks x 1 then 1 x ks, and 1 x ks then ks x 1)."""

    def __init__(self, inplanes, planes, ks=7):
        super(GCN, self).__init__()
        # ks // 2 (integer division) keeps the spatial size unchanged for odd ks;
        # the original ks / 2 is a float in Python 3 and Conv2d rejects it.
        self.conv_l1 = nn.Conv2d(inplanes, planes, kernel_size=(ks, 1),
                                 padding=(ks // 2, 0))
        self.conv_l2 = nn.Conv2d(planes, planes, kernel_size=(1, ks),
                                 padding=(0, ks // 2))
        self.conv_r1 = nn.Conv2d(inplanes, planes, kernel_size=(1, ks),
                                 padding=(0, ks // 2))
        self.conv_r2 = nn.Conv2d(planes, planes, kernel_size=(ks, 1),
                                 padding=(ks // 2, 0))

    def forward(self, x):
        # Left branch: ks x 1 followed by 1 x ks
        x_l = self.conv_l1(x)
        x_l = self.conv_l2(x_l)

        # Right branch: 1 x ks followed by ks x 1
        x_r = self.conv_r1(x)
        x_r = self.conv_r2(x_r)

        # Element-wise sum of the two branches
        x = x_l + x_r

        return x
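A small usage check (the sizes here are chosen arbitrarily, e.g. a ResNet-style feature map and 21 output classes) shows that the block changes the channel count while preserving spatial resolution:

import torch

gcn = GCN(inplanes=2048, planes=21, ks=7)
x = torch.randn(1, 2048, 16, 16)
print(gcn(x).shape)   # torch.Size([1, 21, 16, 16])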

Implementation two: https://github.com/ogvalt/large_kernel_matters/blob/master/scripts/model.py

class GCN(nn.Module):
    # Note: Conv2D here is presumably the repository's own wrapper around
    # nn.Conv2d that emulates 'same' padding; it is not a built-in PyTorch class.
    def __init__(self, inchannels, channels=21, k=3):
        super(GCN, self).__init__()

        self.conv_l1 = Conv2D(in_channels=inchannels, out_channels=channels, kernel_size=(k, 1), padding='same')
        self.conv_l2 = Conv2D(in_channels=channels, out_channels=channels, kernel_size=(1, k), padding='same')

        self.conv_r1 = Conv2D(in_channels=inchannels, out_channels=channels, kernel_size=(1, k), padding='same')
        self.conv_r2 = Conv2D(in_channels=channels, out_channels=channels, kernel_size=(k, 1), padding='same')

    def forward(self, x):
        x1 = self.conv_l1(x)
        x1 = self.conv_l2(x1)

        x2 = self.conv_r1(x)
        x2 = self.conv_r2(x2)

        out = x1 + x2

        return out
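For reference, a 'same'-padding wrapper like the one implementation two appears to rely on could be sketched as follows. This is only a minimal sketch under the assumption of odd kernel sizes and stride 1; the class name and behaviour are my assumptions, not the repository's actual code:

import torch.nn as nn


class Conv2D(nn.Conv2d):
    """Minimal sketch of a 'same'-padding wrapper: for odd kernel sizes and
    stride 1, padding k // 2 along each dimension preserves the spatial size."""

    def __init__(self, in_channels, out_channels, kernel_size, padding='same', **kwargs):
        if padding == 'same':
            kh, kw = (kernel_size, kernel_size) if isinstance(kernel_size, int) else kernel_size
            padding = (kh // 2, kw // 2)
        super(Conv2D, self).__init__(in_channels, out_channels, kernel_size,
                                     padding=padding, **kwargs)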

The two implementations differ only in how padding is handled: the first sets explicit values, the second relies on automatic 'same' padding. Note, however, that PyTorch 0.4.0 does not accept a string for the padding keyword argument (native support for padding='same' was only added in PyTorch 1.9, and only for stride 1). I also wrote a 3D version myself, given below; I am not sure whether it is correct.
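Before moving on to the 3D version, a quick shape check illustrates why padding = ks // 2 is equivalent to 'same' padding for odd kernel sizes (the padding='same' form assumes PyTorch >= 1.9):

import torch
import torch.nn as nn

x = torch.randn(1, 64, 32, 32)

explicit = nn.Conv2d(64, 21, kernel_size=(7, 1), padding=(7 // 2, 0))
same = nn.Conv2d(64, 21, kernel_size=(7, 1), padding='same')  # PyTorch >= 1.9 only

print(explicit(x).shape)  # torch.Size([1, 21, 32, 32])
print(same(x).shape)      # torch.Size([1, 21, 32, 32])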

class GCN(nn.Module):
    """3D version for volumetric input of shape (N, C, D, H, W): each of the
    three branches decomposes a ks x ks x ks kernel into three 1D convolutions
    applied along the axes in a different order."""

    def __init__(self, inplanes, planes, ks=7):
        super(GCN, self).__init__()
        # As in the 2D version, padding ks // 2 keeps each spatial dimension unchanged.
        self.conv_l1 = nn.Conv3d(inplanes, planes, kernel_size=(ks, 1, 1),
                                 padding=(ks // 2, 0, 0))
        self.conv_l2 = nn.Conv3d(planes, planes, kernel_size=(1, ks, 1),
                                 padding=(0, ks // 2, 0))
        self.conv_l3 = nn.Conv3d(planes, planes, kernel_size=(1, 1, ks),
                                 padding=(0, 0, ks // 2))

        self.conv_c1 = nn.Conv3d(inplanes, planes, kernel_size=(1, ks, 1),
                                 padding=(0, ks // 2, 0))
        self.conv_c2 = nn.Conv3d(planes, planes, kernel_size=(1, 1, ks),
                                 padding=(0, 0, ks // 2))
        self.conv_c3 = nn.Conv3d(planes, planes, kernel_size=(ks, 1, 1),
                                 padding=(ks // 2, 0, 0))

        self.conv_r1 = nn.Conv3d(inplanes, planes, kernel_size=(1, 1, ks),
                                 padding=(0, 0, ks // 2))
        self.conv_r2 = nn.Conv3d(planes, planes, kernel_size=(ks, 1, 1),
                                 padding=(ks // 2, 0, 0))
        self.conv_r3 = nn.Conv3d(planes, planes, kernel_size=(1, ks, 1),
                                 padding=(0, ks // 2, 0))

    def forward(self, x):
        x_l = self.conv_l1(x)
        x_l = self.conv_l2(x_l)
        x_l = self.conv_l3(x_l)

        x_c = self.conv_c1(x)
        x_c = self.conv_c2(x_c)
        x_c = self.conv_c3(x_c)

        x_r = self.conv_r1(x)
        x_r = self.conv_r2(x_r)
        x_r = self.conv_r3(x_r)

        # Sum of the three branches
        x = x_l + x_c + x_r

        return x
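A quick shape check (with arbitrary sizes) at least confirms that the 3D module preserves the spatial dimensions while mapping inplanes to planes channels:

import torch

gcn3d = GCN(inplanes=32, planes=21, ks=7)
x = torch.randn(1, 32, 16, 64, 64)   # (N, C, D, H, W)
print(gcn3d(x).shape)                 # torch.Size([1, 21, 16, 64, 64])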

  

