Contents


import torch
import torch.nn as nn
import torch.nn.functional as F


class SampleLayer(nn.Module):
    """Sample layer to check recursive traversal."""

    def __init__(self) -> None:
        """Initialize."""
        super(SampleLayer, self).__init__()
        self.conv_s = nn.Conv2d(16, 16, 3, bias=False)
        self.bn_s = nn.BatchNorm2d(16)
        self.relu_s = nn.ReLU(inplace=True)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward."""
        return self.relu_s(self.bn_s(self.conv_s(x)))

class SimpleNet(nn.Module):
    """SimpleNet architecture."""

    def __init__(self, num_classes: int) -> None:
        """Initialize."""
        super(SimpleNet, self).__init__()
        self.conv1 = nn.Conv2d(3, 6, 3, bias=False)
        self.bn1 = nn.BatchNorm2d(6)
        self.relu1 = nn.ReLU(inplace=True)

        self.conv2 = nn.Conv2d(6, 6, 3, bias=False)
        self.bn2 = nn.BatchNorm2d(6)
        self.relu2 = nn.ReLU(inplace=True)

        self.conv3 = nn.Conv2d(6, 16, 3, bias=False)
        self.bn3 = nn.BatchNorm2d(16)
        self.relu3 = nn.ReLU(inplace=True)

        self.samplelayer = SampleLayer()

        self.fc1 = nn.Linear(16 * 5 * 5, num_classes)  # 5x5 image dimension

    def _forward_impl(self, x: torch.Tensor) -> torch.Tensor:
        """Actual forward procedures."""
        x = self.relu1(self.bn1(self.conv1(x)))
        x = self.relu2(self.bn2(self.conv2(x)))
        x = F.max_pool2d(x, (2, 2))
        x = self.relu3(self.bn3(self.conv3(x)))
        x = self.samplelayer(x)
        x = F.max_pool2d(x, 2)
        x = torch.flatten(x, 1)
        x = self.fc1(x)
        return x

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward."""
        return self._forward_impl(x)

1) state_dict()


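The examples below assume the model has been instantiated; the value num_classes=10 is an arbitrary choice used only for illustration.

model = SimpleNet(num_classes=10)  # assumed instance used in 1)-3)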
for n, m in model.state_dict().items():
    print(n, m)

conv1.weight tensor(~)
bn1.weight tensor(~)
bn1.bias tensor(~)
bn1.running_mean tensor(~)
bn1.running_var tensor(~)
bn1.num_batches_tracked tensor(~)
...
samplelayer.conv_s.weight tensor(~)
...
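
Note that the parameters and buffers of the nested SampleLayer appear under dotted names such as samplelayer.conv_s.weight: state_dict() walks registered submodules recursively and prefixes each entry with the submodule's attribute name.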

2) parameters / named_parameters


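A minimal sketch for 2), reusing the model instance assumed above. named_parameters() yields only trainable nn.Parameter objects, so the BatchNorm buffers (running_mean, running_var, num_batches_tracked) seen in the state_dict() output do not appear, while the nested samplelayer is still traversed recursively; parameters() returns the same tensors without their names. The output is abbreviated and shows shapes instead of values.

for n, p in model.named_parameters():
    print(n, p.shape)

conv1.weight torch.Size([6, 3, 3, 3])
bn1.weight torch.Size([6])
bn1.bias torch.Size([6])
...
samplelayer.conv_s.weight torch.Size([16, 16, 3, 3])
...
fc1.weight torch.Size([10, 400])
fc1.bias torch.Size([10])
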
3) modules / named_modules
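
A minimal sketch for 3), again assuming the same model instance. named_modules() yields the top-level module itself under the empty name '' and then every registered submodule depth-first, so the layers inside samplelayer appear as samplelayer.conv_s, samplelayer.bn_s, and samplelayer.relu_s; modules() is the same traversal without names, and named_children() would stop at the direct children. The output is abbreviated and printed with class names for readability.

for n, m in model.named_modules():
    print(n, type(m).__name__)

 SimpleNet
conv1 Conv2d
bn1 BatchNorm2d
relu1 ReLU
...
samplelayer SampleLayer
samplelayer.conv_s Conv2d
samplelayer.bn_s BatchNorm2d
samplelayer.relu_s ReLU
fc1 Linear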