Layer Normalization
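Layer normalization normalizes each sample over its feature (last) dimension and then applies a learnable scale and shift:

$$\mathrm{LN}(x) = \gamma \odot \frac{x - \mu}{\sqrt{\sigma^{2} + \epsilon}} + \beta$$

Here $\mu$ and $\sigma^{2}$ are the mean and population variance of $x$ along the last dimension, $\epsilon$ is a small constant for numerical stability, and $\gamma$, $\beta$ are learned parameters initialized to ones and zeros. The implementation below follows this formula directly.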
# Imports
import torch
from torch import nn


class LayerNormal(nn.Module):
    def __init__(self, d_model, eps=1e-12):
        """
        d_model: feature dimension of the input
        eps: small constant for numerical stability
        """
        super(LayerNormal, self).__init__()
        # Tensors wrapped in nn.Parameter are optimized automatically during training.
        # gamma is initialized to all ones, beta to all zeros.
        self.gamma = nn.Parameter(torch.ones(d_model))
        self.beta = nn.Parameter(torch.zeros(d_model))
        self.eps = eps

    def forward(self, x):
        # Mean over the last dimension; keepdim=True preserves the number of dimensions.
        mean = x.mean(-1, keepdim=True)
        # Variance over the last dimension; unbiased=False gives the population variance.
        var = x.var(-1, unbiased=False, keepdim=True)
        # var is a tensor, so use torch.sqrt (math.sqrt only accepts scalars).
        out = (x - mean) / torch.sqrt(var + self.eps)
        out = self.gamma * out + self.beta
        return out
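A minimal sanity check, comparing the module against PyTorch's built-in nn.LayerNorm. The feature dimension and input shape here are illustrative assumptions, not from the original post; with the default initialization (gamma all ones, beta all zeros) and matching eps, the two outputs should agree closely.

# Demo: compare against torch.nn.LayerNorm (shapes chosen for illustration)
d_model = 8                        # assumed feature dimension
x = torch.randn(2, 4, d_model)     # (batch, seq_len, d_model), arbitrary example

ln = LayerNormal(d_model, eps=1e-12)
ref = nn.LayerNorm(d_model, eps=1e-12)

print(torch.allclose(ln(x), ref(x), atol=1e-6))  # expected: True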


