Change activation default to leaky ReLU in conv block
parent 6e48905cc0
commit 01ff0aca37
@@ -43,7 +43,7 @@ class ConvBlock(nn.Module):
             #return nn.InstanceNorm3d(self.bn_channels, affine=True, track_running_stats=True)
             return nn.InstanceNorm3d(self.bn_channels)
         elif l == 'A':
-            return Swish()
+            return nn.LeakyReLU()
         else:
             raise NotImplementedError('layer type {} not supported'.format(l))
 
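For context, the Swish activation being removed here is commonly defined as x * sigmoid(x) (also known as SiLU); the repository's own Swish class is assumed to follow that standard form. A minimal sketch comparing the old and new defaults, with PyTorch's stock nn.LeakyReLU (negative_slope=0.01 by default):

import torch
import torch.nn as nn

class Swish(nn.Module):
    # Standard Swish/SiLU: x * sigmoid(x) (assumed to match the removed Swish class)
    def forward(self, x):
        return x * torch.sigmoid(x)

x = torch.linspace(-2.0, 2.0, steps=5)
print(Swish()(x))         # smooth, non-monotonic, slightly negative for x < 0
print(nn.LeakyReLU()(x))  # piecewise linear: 0.01 * x for x < 0, x otherwise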
@@ -84,7 +84,7 @@ class ResBlock(ConvBlock):
                                       'not supported yet')
 
         if seq[-1] == 'A':
-            self.act = Swish()
+            self.act = nn.LeakyReLU()
         else:
             self.act = None
 
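The same swap is applied to the optional final activation in ResBlock. Note that nn.LeakyReLU() with no arguments uses PyTorch's default negative_slope=0.01; if the slope should be configurable, it can be passed explicitly. A small, hypothetical usage sketch (the argument values are illustrative, not taken from this commit):

import torch.nn as nn

act = nn.LeakyReLU(negative_slope=0.2, inplace=True)  # e.g. the steeper slope often used in GAN discriminators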