@@ -47,14 +47,14 @@ def __init__(self, in_channels, feats_channels, out_channels, dilations, pooling
         self.global_branch = nn.Sequential(
             nn.Conv2d(in_channels, feats_channels, kernel_size=1, stride=1, padding=0, bias=False),
             BuildNormalization(placeholder=feats_channels, norm_cfg=norm_cfg),
-            BuildActivation(act_cfg=act_cfg),
+            nn.LeakyReLU(0.01),
             nn.Conv2d(feats_channels, feats_channels, kernel_size=1, stride=1, padding=0, bias=False),
         )
         # output project
         self.bottleneck_conv = nn.Conv2d(feats_channels * len(dilations), out_channels, kernel_size=1, stride=1, padding=0, bias=False)
         self.bottleneck_bn = nn.Sequential(
             BuildNormalization(placeholder=out_channels, norm_cfg=norm_cfg),
-            BuildActivation(act_cfg=act_cfg),
+            nn.LeakyReLU(0.01),
         )
         # initialize parameters
         assert norm_cfg['activation'] == 'identity'
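Note on the first hunk: the activation in global_branch and bottleneck_bn is now pinned to LeakyReLU with negative_slope=0.01 instead of being built from act_cfg, so act_cfg no longer affects these two blocks. A minimal sketch of the intended equivalence, assuming the repo's BuildActivation factory would have produced the same module when configured for LeakyReLU (the factory's exact behavior is not shown in this diff):

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

# nn.LeakyReLU(0.01) fixes negative_slope=0.01, matching the functional call
# used later in forward(). The act_cfg-driven construction it replaces is
# assumed to have built an equivalent module when configured for LeakyReLU.
fixed_act = nn.LeakyReLU(0.01)
x = torch.randn(2, 8, 4, 4)
assert torch.equal(fixed_act(x), F.leaky_relu(x, negative_slope=0.01))
```

If any config relied on act_cfg to select a different activation for these layers, that flexibility is lost with this change.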
@@ -77,9 +77,9 @@ def forward(self, x):
         input_size = x.shape
         # feed to parallel convolutions branch1 and branch2
         outputs_branch1 = torch.cat([conv(x) for conv in self.parallel_convs_branch1], dim=1)
-        outputs_branch1 = self.parallel_bn_branch1[0](outputs_branch1)
+        outputs_branch1 = self.parallel_bn_branch1(outputs_branch1)
         outputs_branch2 = torch.cat([conv(x) for conv in self.parallel_convs_branch2], dim=1)
-        outputs_branch2 = self.parallel_bn_branch2[0](outputs_branch2)
+        outputs_branch2 = self.parallel_bn_branch2(outputs_branch2)
         # merge
         r = torch.rand(1, outputs_branch1.shape[1], 1, 1, dtype=torch.float32)
         if not self.training: r[:, :, :, :] = 1.0
@@ -91,7 +91,7 @@ def forward(self, x):
         weight_branch2[(r < 0.66) & (r >= 0.33)] = 2.
         weight_branch2[r >= 0.66] = 1.
         outputs = outputs_branch1 * weight_branch1.type_as(outputs_branch1) * 0.5 + outputs_branch2 * weight_branch2.type_as(outputs_branch2) * 0.5
-        outputs = self.parallel_bn_branch1[1](outputs)
+        outputs = F.leaky_relu(outputs, negative_slope=0.01)
         outputs = self.bottleneck_conv(outputs)
         # feed to global branch
         global_feats = self.globalpooling(x)
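Note on the second and third hunks: each concatenated branch now goes through its full parallel_bn_branch Sequential (previously only element [0] was applied here, with element [1] applied after the merge), so every sub-module of that Sequential, presumably the norm plus its activation, runs before the merge, and the post-merge activation is applied functionally via F.leaky_relu; this requires `import torch.nn.functional as F` at the top of the file if it is not already present. For reference, a minimal self-contained sketch of the channel-wise stochastic merge; the r < 0.33 bucket and the weight_branch1 buckets are defined on lines hidden from this diff, so they are left as labeled placeholders, and only the visible weight_branch2 buckets and the eval-mode override (r forced to 1.0) come from the patch:

```python
import torch
import torch.nn.functional as F

def stochastic_merge(outputs_branch1, outputs_branch2, training=True):
    """Sketch of the channel-wise random weighting of the two parallel branches."""
    # One random value per channel, broadcast over batch and spatial dims.
    r = torch.rand(1, outputs_branch1.shape[1], 1, 1, dtype=torch.float32)
    if not training:
        r[:, :, :, :] = 1.0  # eval mode: deterministic weighting, no randomness
    # Placeholder: the real buckets for weight_branch1 (and the r < 0.33 case of
    # weight_branch2) are hidden above this hunk, so they are left at 1.0 here.
    weight_branch1 = torch.ones_like(r)
    weight_branch2 = torch.ones_like(r)
    # Buckets visible in the diff.
    weight_branch2[(r < 0.66) & (r >= 0.33)] = 2.
    weight_branch2[r >= 0.66] = 1.
    merged = (outputs_branch1 * weight_branch1.type_as(outputs_branch1) * 0.5
              + outputs_branch2 * weight_branch2.type_as(outputs_branch2) * 0.5)
    # The patch applies the post-merge activation functionally instead of
    # reusing a sub-module of parallel_bn_branch1.
    return F.leaky_relu(merged, negative_slope=0.01)

b1, b2 = torch.randn(2, 16, 8, 8), torch.randn(2, 16, 8, 8)
out = stochastic_merge(b1, b2, training=False)  # shape: (2, 16, 8, 8)
```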