Source code for easycv.models.utils.norm

# Copyright (c) Alibaba, Inc. and its affiliates.
import torch
import torch.nn as nn

from easycv.framework.errors import KeyError, NotImplementedError


class SyncIBN(nn.Module):
    r"""Instance-Batch Normalization layer from
    `"Two at Once: Enhancing Learning and Generalization Capacities via
    IBN-Net" <https://arxiv.org/pdf/1807.09441.pdf>`_.

    Args:
        planes (int): Number of channels for the input tensor.
        ratio (float): Ratio of instance normalization in the IBN layer.
    """

    def __init__(self, planes, ratio=0.5, eps=1e-5):
        super(SyncIBN, self).__init__()
        self.half = int(planes * ratio)
        self.IN = nn.InstanceNorm2d(self.half, affine=True)
        self.BN = nn.SyncBatchNorm(planes - self.half, eps=eps)

    def forward(self, x):
        # Split channel-wise: the first ``self.half`` channels go through
        # IN, the remaining channels through BN.
        split = torch.split(x, [self.half, x.size(1) - self.half], 1)
        out1 = self.IN(split[0].contiguous())
        out2 = self.BN(split[1].contiguous())
        out = torch.cat((out1, out2), 1)
        return out
class IBN(nn.Module):
    r"""Instance-Batch Normalization layer from
    `"Two at Once: Enhancing Learning and Generalization Capacities via
    IBN-Net" <https://arxiv.org/pdf/1807.09441.pdf>`_.

    Args:
        planes (int): Number of channels for the input tensor.
        ratio (float): Ratio of instance normalization in the IBN layer.
    """

    def __init__(self, planes, ratio=0.5, eps=1e-5):
        super(IBN, self).__init__()
        self.half = int(planes * ratio)
        self.IN = nn.InstanceNorm2d(self.half, affine=True)
        self.BN = nn.BatchNorm2d(planes - self.half, eps=eps)

    def forward(self, x):
        # Split channel-wise: the first ``self.half`` channels go through
        # IN, the remaining channels through BN.
        split = torch.split(x, [self.half, x.size(1) - self.half], 1)
        out1 = self.IN(split[0].contiguous())
        out2 = self.BN(split[1].contiguous())
        out = torch.cat((out1, out2), 1)
        return out
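
A minimal usage sketch (illustration only, not part of this module): with planes=64 and the default ratio=0.5, the first 32 channels are normalized per-sample by InstanceNorm2d and the remaining 32 by BatchNorm2d, so the output shape matches the input. SyncIBN is used the same way, but its batch-norm half only synchronizes statistics across GPUs once a distributed process group is initialized.

import torch

from easycv.models.utils.norm import IBN

ibn = IBN(planes=64, ratio=0.5)  # self.half == 32
x = torch.randn(4, 64, 56, 56)   # (batch, channels, height, width)
out = ibn(x)
assert out.shape == x.shape      # split-normalize-concat preserves the shape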
norm_cfg = {
    # format: layer_type: (abbreviation, module)
    'BN': ('bn', nn.BatchNorm2d),
    'SyncBN': ('bn', nn.SyncBatchNorm),
    'GN': ('gn', nn.GroupNorm),
    # and potentially 'SN'
    'IBN': ('ibn', IBN),
    'SyncIBN': ('ibn', SyncIBN),
    'IN': ('in', nn.InstanceNorm2d),
    'LN': ('ln', nn.LayerNorm)
}
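
For illustration (a hedged sketch, not library code): a config's ``type`` string indexes this table to obtain both the abbreviation used to name the layer and the module class to instantiate.

abbr, norm_layer = norm_cfg['SyncBN']
assert abbr == 'bn' and norm_layer is nn.SyncBatchNorm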
def build_norm_layer(cfg, num_features, postfix=''):
    """Build a normalization layer.

    Args:
        cfg (dict): cfg should contain:
            type (str): identifies the norm layer type.
            layer args: args needed to instantiate a norm layer.
            requires_grad (bool): [optional] whether to stop gradient updates.
        num_features (int): number of channels from the input.
        postfix (int, str): appended to the norm abbreviation to create a
            named layer.

    Returns:
        name (str): abbreviation + postfix
        layer (nn.Module): created norm layer
    """
    assert isinstance(cfg, dict) and 'type' in cfg
    cfg_ = cfg.copy()

    layer_type = cfg_.pop('type')
    if layer_type not in norm_cfg:
        raise KeyError('Unrecognized norm type {}'.format(layer_type))
    else:
        abbr, norm_layer = norm_cfg[layer_type]
        if norm_layer is None:
            raise NotImplementedError

    assert isinstance(postfix, (int, str))
    name = abbr + str(postfix)

    requires_grad = cfg_.pop('requires_grad', True)
    cfg_.setdefault('eps', 1e-5)
    if layer_type != 'GN':
        layer = norm_layer(num_features, **cfg_)
        if layer_type == 'SyncBN' and hasattr(layer, '_specify_ddp_gpu_num'):
            layer._specify_ddp_gpu_num(1)
        elif layer_type == 'SyncIBN' and hasattr(layer.BN,
                                                 '_specify_ddp_gpu_num'):
            # For SyncIBN, the SyncBatchNorm lives on the ``BN`` submodule.
            layer.BN._specify_ddp_gpu_num(1)
    else:
        # GroupNorm takes ``num_channels`` instead of a positional
        # feature count.
        assert 'num_groups' in cfg_
        layer = norm_layer(num_channels=num_features, **cfg_)

    for param in layer.parameters():
        param.requires_grad = requires_grad

    return name, layer
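
A hedged usage sketch of ``build_norm_layer`` (the call sites below are assumptions for illustration, not taken from this module):

# Plain batch norm; ``eps`` defaults to 1e-5 via the setdefault above.
name, bn = build_norm_layer(dict(type='BN'), num_features=64)
# name == 'bn'; bn is nn.BatchNorm2d(64, eps=1e-05)

# GroupNorm requires ``num_groups`` in the config and is built with
# ``num_channels`` rather than a positional feature count.
name, gn = build_norm_layer(
    dict(type='GN', num_groups=32), num_features=64, postfix=1)
# name == 'gn1'; gn is nn.GroupNorm(32, 64, eps=1e-05)

# ``requires_grad=False`` freezes the affine parameters of the built layer.
name, frozen = build_norm_layer(
    dict(type='BN', requires_grad=False), num_features=64)
assert all(not p.requires_grad for p in frozen.parameters())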