Commit

[C] Refactorization
kunkunlin1221 committed Dec 20, 2024
1 parent e3a2550 commit c3cc3f6
Showing 93 changed files with 2,456 additions and 4,293 deletions.
8 changes: 2 additions & 6 deletions chameleon/__init__.py
@@ -1,10 +1,6 @@
from .backbone import *
from .efficientdet import *
from .base import *
from .metrics import *
from .neck import *
from .nn import *
from .optim import *
from .modules import *
from .tools import *
from .transformers import *

__version__ = '0.1.0'
8 changes: 8 additions & 0 deletions chameleon/base/__init__.py
@@ -0,0 +1,8 @@
from .blocks import build_block, list_blocks
from .components import build_component, list_components
from .layers import build_layer, list_layers
from .optim import (build_lr_scheduler, build_optimizer, list_lr_schedulers,
list_optimizers)
from .power_module import PowerModule
from .utils import (has_children, initialize_weights_, replace_module,
replace_module_attr_value)
21 changes: 21 additions & 0 deletions chameleon/base/blocks/__init__.py
@@ -0,0 +1,21 @@
import fnmatch

from .conv_block import Conv2dBlock, SeparableConv2dBlock

# from .mamba_block import build_mamba_block
# from .vit_block import build_vit_block


def build_block(name, **kwargs):
cls = globals().get(name, None)
if cls is None:
        raise ValueError(f'Block named {name} is not supported.')
return cls(**kwargs)


def list_blocks(filter=''):
block_list = [k for k in globals().keys() if 'Block' in k]
if len(filter):
        return fnmatch.filter(block_list, filter)  # keep only names matching the glob
else:
return block_list
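
A minimal usage sketch of the two helpers above (assuming the package is importable as chameleon.base.blocks; shapes are illustrative):

    import torch
    from chameleon.base.blocks import build_block, list_blocks

    # list_blocks reports every name in the module globals containing 'Block';
    # the optional argument is an fnmatch-style glob.
    print(list_blocks())              # ['Conv2dBlock', 'SeparableConv2dBlock']
    print(list_blocks('Separable*'))  # ['SeparableConv2dBlock']

    # build_block looks the class up by name and forwards the kwargs.
    block = build_block('Conv2dBlock', in_channels=3, out_channels=16)
    y = block(torch.randn(1, 3, 32, 32))  # torch.Size([1, 16, 32, 32])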
192 changes: 192 additions & 0 deletions chameleon/base/blocks/conv_block.py
@@ -0,0 +1,192 @@
from typing import Optional, Tuple, Union

import torch
import torch.nn as nn

from ..components import build_component
from ..power_module import PowerModule


class SeparableConv2dBlock(PowerModule):

def __init__(
self,
in_channels: int,
        out_channels: Optional[int] = None,
kernel: Union[int, Tuple[int, int]] = 3,
stride: Union[int, Tuple[int, int]] = 1,
padding: Union[int, Tuple[int, int]] = 1,
bias: bool = False,
inner_norm: Optional[Union[dict, nn.Module]] = None,
inner_act: Optional[Union[dict, nn.Module]] = None,
norm: Optional[Union[dict, nn.Module]] = None,
act: Optional[Union[dict, nn.Module]] = None,
init_type: str = 'normal',
):
"""
A separable convolution block consisting of a depthwise convolution and a pointwise convolution.
Args:
in_channels (int):
Number of input channels.
out_channels (int, optional):
Number of output channels. If not provided, defaults to `in_channels`.
kernel (int or Tuple[int, int], optional):
Size of the convolution kernel. Defaults to 3.
stride (int or Tuple[int, int], optional):
Stride of the convolution. Defaults to 1.
padding (int or Tuple[int, int], optional):
Padding added to all four sides of the input. Defaults to 1.
            bias (bool, optional):
                Whether to include a bias term in the convolutional layer.
                Note: if a normalization layer is provided, bias is always set
                to False. Defaults to False.
            inner_norm (dict or nn.Module, optional):
                Normalization layer between the depthwise and pointwise
                convolutions, or a dictionary of arguments for building one.
                Defaults to None.
            inner_act (dict or nn.Module, optional):
                Activation layer between the depthwise and pointwise
                convolutions, or a dictionary of arguments for building one.
                Defaults to None.
            norm (dict or nn.Module, optional):
                Normalization layer after the pointwise convolution, or a
                dictionary of arguments for building one. Defaults to None.
            act (dict or nn.Module, optional):
                Activation layer after the pointwise convolution, or a
                dictionary of arguments for building one. Defaults to None.
init_type (str, optional):
Initialization method for the model parameters. Defaults to 'normal'.
"""
super().__init__()
out_channels = in_channels if out_channels is None else out_channels

bias = False if norm is not None else bias

self.block = nn.ModuleDict()

self.block['dw_conv'] = nn.Conv2d(
in_channels,
in_channels,
kernel_size=kernel,
stride=stride,
padding=padding,
groups=in_channels,
bias=False,
)
self.block['pw_conv'] = nn.Conv2d(
in_channels,
out_channels,
kernel_size=1,
stride=1,
padding=0,
bias=bias,
)
if inner_norm is not None:
self.block['inner_norm'] = build_component(**inner_norm) if isinstance(inner_norm, dict) else inner_norm
if inner_act is not None:
self.block['inner_act'] = build_component(**inner_act) if isinstance(inner_act, dict) else inner_act
if norm is not None:
self.block['norm'] = build_component(**norm) if isinstance(norm, dict) else norm
if act is not None:
self.block['act'] = build_component(**act) if isinstance(act, dict) else act
self.initialize_weights_(init_type)

def forward(self, x: torch.Tensor) -> torch.Tensor:
for _, m in self.block.items():
x = m(x)
return x


class Conv2dBlock(PowerModule):

def __init__(
self,
in_channels: Union[float, int],
out_channels: Union[float, int],
kernel: Union[int, Tuple[int, int]] = 3,
stride: Union[int, Tuple[int, int]] = 1,
padding: Union[int, Tuple[int, int]] = 1,
dilation: int = 1,
groups: int = 1,
bias: bool = False,
padding_mode: str = 'zeros',
        norm: Optional[Union[dict, nn.Module]] = None,
        act: Optional[Union[dict, nn.Module]] = None,
init_type: str = 'normal',
):
"""
        A 2D convolutional block consisting of a convolution with optional
        normalization and activation layers.
Args:
in_channels (int or float):
Number of input channels.
out_channels (int or float):
Number of output channels.
kernel (int or tuple, optional):
Size of the convolutional kernel. Defaults to 3.
stride (int or tuple, optional):
Stride size. Defaults to 1.
padding (int or tuple, optional):
Padding size. Defaults to 1.
dilation (int, optional):
Spacing between kernel elements. Defaults to 1.
groups (int, optional):
Number of blocked connections from input channels to output
channels. Defaults to 1.
            bias (bool, optional):
                Whether to include a bias term in the convolutional layer.
                Note: if a normalization layer is provided, bias is always set
                to False. Defaults to False.
padding_mode (str, optional):
Options = {'zeros', 'reflect', 'replicate', 'circular'}.
Defaults to 'zeros'.
            norm (Union[dict, nn.Module], optional):
                Normalization layer or a dictionary of arguments for building
                a normalization layer. Defaults to None.
            act (Union[dict, nn.Module], optional):
                Activation function or a dictionary of arguments for building
                an activation function. Defaults to None.
            init_type (str, optional):
                Method for initializing model parameters. Defaults to 'normal'.
                Options = {'normal', 'uniform'}
        Examples for using norm and act:
            1. conv_block = Conv2dBlock(in_channels=3,
                                        out_channels=12,
                                        norm=nn.BatchNorm2d(12),
                                        act=nn.ReLU())
            2. conv_block = Conv2dBlock(in_channels=3,
                                        out_channels=12,
                                        norm={'name': 'BatchNorm2d', 'num_features': 12},
                                        act={'name': 'ReLU', 'inplace': True})
        Attributes:
            block (nn.ModuleDict): the conv/norm/act modules applied in order by forward.
"""
super().__init__()
self.block = nn.ModuleDict()

bias = False if norm is not None else bias

self.block['conv'] = nn.Conv2d(
int(in_channels),
int(out_channels),
kernel_size=kernel,
stride=stride,
padding=padding,
dilation=dilation,
groups=groups,
bias=bias,
padding_mode=padding_mode,
)
if norm is not None:
self.block['norm'] = build_component(**norm) if isinstance(norm, dict) else norm
if act is not None:
self.block['act'] = build_component(**act) if isinstance(act, dict) else act

self.initialize_weights_(init_type)

def forward(self, x: torch.Tensor) -> torch.Tensor:
for _, m in self.block.items():
x = m(x)
return x
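
A hedged sketch of both blocks in use (illustrative shapes; the dict configs are routed through build_component as in the docstring examples):

    import torch
    from chameleon.base.blocks import Conv2dBlock, SeparableConv2dBlock

    # Conv -> BatchNorm -> ReLU; bias is dropped automatically because norm is set.
    conv = Conv2dBlock(
        in_channels=3,
        out_channels=12,
        norm={'name': 'BatchNorm2d', 'num_features': 12},
        act={'name': 'ReLU', 'inplace': True},
    )
    print(conv(torch.randn(2, 3, 64, 64)).shape)  # torch.Size([2, 12, 64, 64])

    # The separable variant swaps a dense 3x3 kernel for a depthwise 3x3 plus a
    # pointwise 1x1: 64*9 + 64*128 = 8,768 weights vs. 64*128*9 = 73,728 dense.
    sep = SeparableConv2dBlock(in_channels=64, out_channels=128)
    print(sum(p.numel() for p in sep.parameters()))  # 8768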
2 changes: 2 additions & 0 deletions chameleon/base/blocks/mamba_block.py
@@ -0,0 +1,2 @@
def build_mamba_block(**kwargs):
    raise NotImplementedError('build_mamba_block is not implemented yet.')
2 changes: 2 additions & 0 deletions chameleon/base/blocks/vit_block.py
@@ -0,0 +1,2 @@
def build_vit_block(**kwargs):
    raise NotImplementedError('build_vit_block is not implemented yet.')
29 changes: 29 additions & 0 deletions chameleon/base/components/__init__.py
@@ -0,0 +1,29 @@
import fnmatch

from .activation import *
from .dropout import *
from .loss import *
from .norm import *
from .pooling import *


def build_component_cls(name):
cls = globals().get(name, None)
if cls is None:
        raise ValueError(f'Component named {name} is not supported.')
return cls


def build_component(name, **options):
cls = globals().get(name, None)
if cls is None:
        raise ValueError(f'Component named {name} is not supported.')
return cls(**options)


def list_components(filter=''):
component_list = [k for k in globals().keys() if 'Component' in k]
if len(filter):
        return fnmatch.filter(component_list, filter)  # keep only names matching the glob
else:
return component_list
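
A short sketch of the unified registry (the names come from the torch.nn re-exports and custom components imported above):

    from chameleon.base.components import build_component, build_component_cls

    act = build_component('ReLU', inplace=True)             # instantiate by name
    norm = build_component('BatchNorm2d', num_features=64)
    drop = build_component('Dropout', p=0.1)

    # build_component_cls returns the class itself, for deferred construction.
    GELU = build_component_cls('GELU')
    gelu = GELU()

    # Caveat: list_components matches names containing the literal substring
    # 'Component'; none of the re-exported classes do, so as written it appears
    # to return an empty list.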
chameleon/base/components/activation.py
@@ -13,7 +13,7 @@
Tanhshrink, Threshold)

__all__ = [
'Swish', 'Hsigmoid', 'Hswish', 'build_activation', 'StarReLU', 'SquaredReLU',
'Swish', 'Hsigmoid', 'Hswish', 'StarReLU', 'SquaredReLU',
]

__all__ += ['CELU', 'ELU', 'GELU', 'GLU', 'LeakyReLU', 'LogSigmoid',
@@ -95,10 +95,3 @@ def forward(self, x):

# Ref: https://pytorch.org/docs/stable/generated/torch.nn.SiLU.html
Swish = nn.SiLU


def build_activation(name, **options) -> Union[nn.Module, None]:
cls = globals().get(name, None)
if cls is None:
raise ValueError(f'Activation named {name} is not supported.')
return cls(**options)
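
The per-family builders (build_activation here, plus build_loss, build_norm, and build_pool below) are removed in favor of the single registry in chameleon/base/components/__init__.py; presumably the equivalent call now reads:

    # before (removed in this commit)
    # act = build_activation('Hswish')

    # after (unified registry)
    from chameleon.base.components import build_component
    act = build_component('Hswish')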
6 changes: 6 additions & 0 deletions chameleon/base/components/dropout.py
@@ -0,0 +1,6 @@
import torch.nn as nn
from torch.nn import AlphaDropout, Dropout, Dropout2d, Dropout3d

__all__ = [
'Dropout', 'Dropout2d', 'Dropout3d', 'AlphaDropout',
]
chameleon/base/components/loss.py
@@ -8,7 +8,7 @@
L1Loss, MSELoss, SmoothL1Loss)

__all__ = [
'build_loss', 'AWingLoss', 'WeightedAWingLoss',
'AWingLoss', 'WeightedAWingLoss',
'BCELoss', 'BCEWithLogitsLoss', 'CrossEntropyLoss',
'CTCLoss', 'KLDivLoss', 'L1Loss', 'MSELoss', 'SmoothL1Loss',
'ArcFace', 'CosFace', 'LogCoshDiceLoss',
@@ -83,14 +83,6 @@ def forward(self, preds, targets, weight_map=None):
return weighted.mean()


def build_loss(name: str, **options) -> Union[nn.Module, None]:
"""Build a loss func layer given the name and options."""
cls = globals().get(name, None)
if cls is None:
raise KeyError(f'Unsupported loss func: {name}')
return cls(**options)


class ArcFace(nn.Module):

def __init__(self, s=64.0, m=0.5):
chameleon/base/components/norm.py
@@ -13,7 +13,7 @@
__all__ = [
'BatchNorm1d', 'BatchNorm2d', 'BatchNorm3d', 'SyncBatchNorm', 'InstanceNorm1d',
'InstanceNorm2d', 'InstanceNorm3d', 'CrossMapLRN2d', 'GroupNorm', 'LayerNorm',
'LocalResponseNorm', 'build_norm', 'LayerNorm2d',
'LocalResponseNorm', 'LayerNorm2d',
]


@@ -42,11 +42,3 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
self.weight, self.bias, self.eps)
x = x.permute(0, 3, 1, 2)
return x


def build_norm(name: str, **options) -> Union[nn.Module, None]:
cls = globals().get(name, None)
if cls is None:
raise ValueError(
f'Normalization named {name} is not supported. Available options: {__all__}')
return cls(**options)
chameleon/base/components/pooling.py
@@ -9,7 +9,7 @@
MaxPool1d, MaxPool2d, MaxPool3d)

__all__ = [
'build_pool', 'AvgPool1d', 'AvgPool2d', 'AvgPool3d', 'MaxPool1d',
'AvgPool1d', 'AvgPool2d', 'AvgPool3d', 'MaxPool1d',
'MaxPool2d', 'MaxPool3d', 'AdaptiveAvgPool1d', 'AdaptiveAvgPool2d',
'AdaptiveAvgPool3d', 'AdaptiveMaxPool1d', 'AdaptiveMaxPool2d',
'AdaptiveMaxPool3d', 'GAP', 'GMP',
@@ -44,11 +44,3 @@ def __init__(self):
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""Apply global max pooling on the input tensor."""
return self.pool(x)


def build_pool(name: str, **options) -> Union[nn.Module, None]:
"""Build a pooling layer given the name and options."""
cls = globals().get(name, None)
if cls is None:
raise KeyError(f'Unsupported pooling layer: {name}')
return cls(**options)
22 changes: 22 additions & 0 deletions chameleon/base/layers/__init__.py
@@ -0,0 +1,22 @@
import fnmatch

from .aspp import ASPP
from .grl import GradientReversalLayer
from .selayer import SELayer
from .vae import VAE
from .weighted_sum import WeightedSum


def build_layer(name, **options):
cls = globals().get(name, None)
if cls is None:
        raise ValueError(f'Layer named {name} is not supported.')
return cls(**options)


def list_layers(filter=''):
layer_list = [k for k in globals().keys() if 'Layer' in k]
if len(filter):
        return fnmatch.filter(layer_list, filter)  # keep only names matching the glob
else:
return layer_list
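
As with blocks and components, a brief sketch (the SELayer kwargs are an illustrative assumption; constructor signatures are not shown in this diff):

    from chameleon.base.layers import build_layer, list_layers

    # Only names containing 'Layer' are listed, so ASPP, VAE, and WeightedSum
    # are importable but will not show up here.
    print(list_layers())  # ['GradientReversalLayer', 'SELayer']

    se = build_layer('SELayer', in_channels=64)  # hypothetical kwargs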