Replacing softmax in PyTorch
import torch
import torch.nn.functional as F
x = torch.randn(2, 5)      # x is assumed to be some input tensor; shape is just an example
y = F.softmax(x, dim=-1)   # softmax over the last dimension
In the snippet above, y = F.softmax(x, dim=-1) is mathematically equivalent to y = x.exp() / torch.exp(x).sum(dim=-1, keepdim=True).
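A minimal sketch checking that equivalence, assuming a small random tensor x as input. Note that the built-in F.softmax subtracts the per-row maximum before exponentiating, so it stays finite for large inputs, while the naive exp/sum version can overflow; the max-subtraction variant at the end shows the numerically stable form of the manual computation.

import torch
import torch.nn.functional as F

x = torch.randn(2, 5)                                        # example input
y_builtin = F.softmax(x, dim=-1)
y_manual = x.exp() / torch.exp(x).sum(dim=-1, keepdim=True)  # naive replacement
print(torch.allclose(y_builtin, y_manual))                   # True

# With large values the naive version overflows to inf and yields nan,
# while subtracting the row-wise max keeps the computation stable.
x_big = x + 1000.0
y_naive_big = x_big.exp() / torch.exp(x_big).sum(dim=-1, keepdim=True)
print(torch.isnan(y_naive_big).any())                        # True (overflow)

z = (x_big - x_big.max(dim=-1, keepdim=True).values).exp()
y_stable = z / z.sum(dim=-1, keepdim=True)
print(torch.allclose(F.softmax(x_big, dim=-1), y_stable))    # True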