Skip to content

resnet

Classes:

  • ResBlock

    Residual block: conv => BN => act. => conv => BN => residual link => (optional) act.

  • Resnet

    Implementation of the Resnet architecture.

ResBlock

ResBlock(
    in_ch: int,
    out_ch: int,
    kernel_size: int,
    pad_mode: str = "replicate",
    last_block: bool = False,
    bias: bool = True,
)

Bases: Module

Residual block: conv => BN => act. => conv => BN => residual link => (optional) act.

Source code in src/autoden/models/resnet.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
def __init__(
    self,
    in_ch: int,
    out_ch: int,
    kernel_size: int,
    pad_mode: str = "replicate",
    last_block: bool = False,
    bias: bool = True,
):
    """Build the residual block: conv => BN => LeakyReLU => conv => BN => shortcut.

    Parameters
    ----------
    in_ch : int
        Number of input channels.
    out_ch : int
        Number of output channels.
    kernel_size : int
        Size of the (square) convolution kernels.
    pad_mode : str, optional
        Padding mode passed to the convolutions, by default "replicate".
    last_block : bool, optional
        If True, skip the activation after the residual sum, by default False.
    bias : bool, optional
        Whether the convolutions carry a bias term, by default True.
    """
    super().__init__()
    # "Same" padding for odd kernel sizes
    half_kernel = (kernel_size - 1) // 2
    conv_opts = dict(kernel_size=kernel_size, padding=half_kernel, padding_mode=pad_mode, bias=bias)
    self.main_seq = nn.ModuleList(
        [
            nn.Conv2d(in_ch, out_ch, **conv_opts),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(out_ch, out_ch, **conv_opts),
            nn.BatchNorm2d(out_ch),
        ]
    )
    # 1x1 conv matches channel counts on the shortcut path, only when they differ
    if in_ch != out_ch:
        self.scale_inp = nn.Conv2d(in_ch, out_ch, kernel_size=1, bias=bias)
    else:
        self.scale_inp = None
    # The final block of the network omits the post-residual activation
    self.post_res = None if last_block else nn.LeakyReLU(0.2, inplace=True)

Resnet

Resnet(
    n_channels_in: int,
    n_channels_out: int,
    n_layers: int = 10,
    n_features: int = 32,
    kernel_size: int = 3,
    pad_mode: str = "replicate",
    device: str = "cuda" if pt.cuda.is_available() else "cpu",
)

Bases: Sequential

Implementation of the Resnet architecture.

Source code in src/autoden/models/resnet.py
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
def __init__(
    self,
    n_channels_in: int,
    n_channels_out: int,
    n_layers: int = 10,
    n_features: int = 32,
    kernel_size: int = 3,
    pad_mode: str = "replicate",
    device: str = "cuda" if pt.cuda.is_available() else "cpu",
):
    """Assemble the Resnet as a sequential chain of residual blocks.

    The first block maps `n_channels_in` to `n_features`, intermediate blocks
    keep `n_features`, and the final block maps to `n_channels_out` without a
    trailing activation.

    Parameters
    ----------
    n_channels_in : int
        Number of input channels.
    n_channels_out : int
        Number of output channels.
    n_layers : int, optional
        Number of residual blocks, by default 10.
    n_features : int, optional
        Number of feature channels in the intermediate blocks, by default 32.
    kernel_size : int, optional
        Convolution kernel size, by default 3.
    pad_mode : str, optional
        Padding mode for the convolutions, by default "replicate".
    device : str, optional
        Device the model is moved to, by default "cuda" when available.
    """
    # Snapshot the constructor arguments (must happen before any new locals
    # are bound); drop the entries that are not real parameters.
    init_params = locals()
    del init_params["self"]
    del init_params["__class__"]

    last = n_layers - 1
    blocks = []
    for lyr in range(n_layers):
        blocks.append(
            ResBlock(
                n_channels_in if lyr == 0 else n_features,
                n_channels_out if lyr == last else n_features,
                kernel_size=kernel_size,
                pad_mode=pad_mode,
                last_block=lyr == last,
            )
        )

    super().__init__(*blocks)
    self.init_params = init_params
    self.device = device

    self.to(self.device)