import torch
import torch.nn as nn
import torch.nn.functional as F
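# NOTE: the class below calls an `add_conv` helper that is not included in this
# snippet. In the original ASFF repository it is a Conv2d + BatchNorm2d + LeakyReLU
# block; the definition below is a minimal stand-in under that assumption so the
# file runs on its own.
def add_conv(in_ch, out_ch, ksize, stride):
    """Conv2d + BatchNorm2d + LeakyReLU block (stand-in for the ASFF repo helper)."""
    pad = (ksize - 1) // 2
    return nn.Sequential(
        nn.Conv2d(in_ch, out_ch, kernel_size=ksize, stride=stride, padding=pad, bias=False),
        nn.BatchNorm2d(out_ch),
        nn.LeakyReLU(0.1, inplace=True),
    )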
class ASFF(nn.Module):
    def __init__(self, level, rfb=False, vis=False):
        super(ASFF, self).__init__()
        self.level = level
        # channel dims of the three input levels; level 0 is the smallest feature map
        self.dim = [512, 256, 256]
        self.inter_dim = self.dim[self.level]
        if level == 0:
            self.stride_level_1 = add_conv(256, self.inter_dim, 3, 2)
            self.stride_level_2 = add_conv(256, self.inter_dim, 3, 2)
            self.expand = add_conv(self.inter_dim, 1024, 3, 1)
        elif level == 1:
            self.compress_level_0 = add_conv(512, self.inter_dim, 1, 1)
            self.stride_level_2 = add_conv(256, self.inter_dim, 3, 2)
            self.expand = add_conv(self.inter_dim, 512, 3, 1)
        elif level == 2:
            self.compress_level_0 = add_conv(512, self.inter_dim, 1, 1)
            self.expand = add_conv(self.inter_dim, 256, 3, 1)

        # channels of the per-level weight branches (fewer when the RFB variant is used)
        compress_c = 8 if rfb else 16

        self.weight_level_0 = add_conv(self.inter_dim, compress_c, 1, 1)
        self.weight_level_1 = add_conv(self.inter_dim, compress_c, 1, 1)
        self.weight_level_2 = add_conv(self.inter_dim, compress_c, 1, 1)

        self.weight_levels = nn.Conv2d(compress_c * 3, 3, kernel_size=1, stride=1, padding=0)
        self.vis = vis
    def forward(self, x_level_0, x_level_1, x_level_2):
        # bring all three inputs to the spatial size and channel width of self.level
        if self.level == 0:
            level_0_resized = x_level_0
            level_1_resized = self.stride_level_1(x_level_1)
            level_2_downsampled_inter = F.max_pool2d(x_level_2, 3, stride=2, padding=1)
            level_2_resized = self.stride_level_2(level_2_downsampled_inter)
        elif self.level == 1:
            level_0_compressed = self.compress_level_0(x_level_0)
            level_0_resized = F.interpolate(level_0_compressed, scale_factor=2, mode='nearest')
            level_1_resized = x_level_1
            level_2_resized = self.stride_level_2(x_level_2)
        elif self.level == 2:
            level_0_compressed = self.compress_level_0(x_level_0)
            level_0_resized = F.interpolate(level_0_compressed, scale_factor=4, mode='nearest')
            level_1_resized = F.interpolate(x_level_1, scale_factor=2, mode='nearest')
            level_2_resized = x_level_2

        # predict a 3-channel spatial weight map and normalize it across levels
        level_0_weight_v = self.weight_level_0(level_0_resized)
        level_1_weight_v = self.weight_level_1(level_1_resized)
        level_2_weight_v = self.weight_level_2(level_2_resized)
        levels_weight_v = torch.cat((level_0_weight_v, level_1_weight_v, level_2_weight_v), 1)
        levels_weight = self.weight_levels(levels_weight_v)
        levels_weight = F.softmax(levels_weight, dim=1)

        # weighted sum of the three resized feature maps
        fused_out_reduced = level_0_resized * levels_weight[:, 0:1, :, :] + \
                            level_1_resized * levels_weight[:, 1:2, :, :] + \
                            level_2_resized * levels_weight[:, 2:, :, :]

        out = self.expand(fused_out_reduced)

        if self.vis:
            return out, levels_weight, fused_out_reduced.sum(dim=1)
        else:
            return out
if __name__ == '__main__':
    model = ASFF(level=1)
    l1 = torch.ones(1, 512, 10, 10)
    l2 = torch.ones(1, 256, 20, 20)
    l3 = torch.ones(1, 256, 40, 40)

    out = model(l1, l2, l3)
    print(out.shape)
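    # Expected output for these level=1 inputs: torch.Size([1, 512, 20, 20]);
    # all three maps are brought to 256 x 20 x 20, fused with the learned softmax
    # weights, then expanded back to 512 channels.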