
D2L 5.4 Custom Layer

By Jingnan Huang · January 19, 2025 · 353 Words

Last Edit: 1/19/25

A full network is built from different layers, each specialized for a different job. Sometimes none of the built-in layers does what we need, which is what makes custom layers necessary.

5.4.1 Layers without parameters

import torch
import torch.nn.functional as F
from torch import nn

class CenteredLayer(nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, X):
        # Subtract the mean so the output is centered at zero
        return X - X.mean()
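
As a quick sanity check (a minimal sketch; the input values and layer sizes here are arbitrary), the layer can be called on a tensor directly or composed with built-in layers inside an nn.Sequential:

layer = CenteredLayer()
print(layer(torch.tensor([1.0, 2, 3, 4, 5])))  # tensor([-2., -1., 0., 1., 2.])

net = nn.Sequential(nn.Linear(8, 128), CenteredLayer())
Y = net(torch.rand(4, 8))
print(Y.mean())  # close to 0, up to floating-point error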

5.4.2 Layers with parameters

class MyLinear(nn.Module):
    def __init__(self, in_units, units):
        super().__init__()
        # Register weight and bias as trainable parameters
        self.weight = nn.Parameter(torch.randn(in_units, units))
        self.bias = nn.Parameter(torch.randn(units,))

    def forward(self, X):
        # Use the parameters directly (not .data) so autograd tracks them
        linear = torch.matmul(X, self.weight) + self.bias
        return F.relu(linear)
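
A short usage sketch (the dimensions 5 and 3 are arbitrary): instantiate the layer, inspect its parameters, and run a forward pass:

linear = MyLinear(5, 3)
print(linear.weight)             # nn.Parameter of shape (5, 3)
print(linear(torch.rand(2, 5)))  # shape (2, 3); non-negative after ReLU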
The same layer can also be written on top of the built-in nn.Linear, which takes care of creating and initializing the parameters itself:

import torch
import torch.nn as nn
import torch.nn.functional as F

class MyLinearWithBuiltin(nn.Module):
    def __init__(self, in_units, units):
        super().__init__()
        self.linear = nn.Linear(in_units, units)  # built-in linear layer

    def forward(self, X):
        linear = self.linear(X)  # apply the built-in linear layer
        return F.relu(linear)    # ReLU activation
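
The built-in-backed version can be exercised the same way (again with arbitrary dimensions); one difference to keep in mind is that nn.Linear stores its weight transposed, as (units, in_units):

net = MyLinearWithBuiltin(5, 3)
print(net.linear.weight.shape)  # torch.Size([3, 5]), i.e. (out_features, in_features)
print(net(torch.rand(2, 5)))    # shape (2, 3); non-negative after ReLU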