# 生成随机矩阵

import numpy as np

# Draw a 2x3 matrix of uniform [0, 1) samples.
# NOTE: the generator is not seeded, so values differ on every run — the
# array shown below is just one example draw.
# (Fix: `np` was used here without any preceding import; the only visible
# `import numpy as np` appeared much later in the post.)
x = np.random.rand(2, 3)


array([[0.10786477, 0.56611762, 0.10557245], [0.4596513 , 0.13174377, 0.82373043]])

# 计算softmax

## 在numpy中

# Row-wise softmax in plain NumPy: exponentiate, then normalize each row
# so it sums to 1 (keepdims keeps the row sums broadcastable for the divide).
exp_x = np.exp(x)
y = exp_x / exp_x.sum(axis=1, keepdims=True)


array([[0.27940617, 0.44182742, 0.27876641], [0.31649398, 0.22801164, 0.45549437]])

## 在pytorch中

# Same softmax in PyTorch.
# (Fix: the two statements were fused onto one physical line, which is a
# SyntaxError in Python — they must be on separate lines or ';'-separated.)
torch_x = torch.from_numpy(x)            # tensor sharing memory with the NumPy array
torch_y = nn.Softmax(dim=-1)(torch_x)    # softmax over the last axis


tensor([[0.2794, 0.4418, 0.2788], [0.3165, 0.2280, 0.4555]], dtype=torch.float64)

# 计算log_softmax

## 在numpy中

# Log-softmax in NumPy: elementwise log of the softmax result.
# NOTE(review): the original snippet re-imported numpy and redefined x to a
# (1, 3) array here, but every result displayed afterwards — including the
# NLL loss of 1.0307 — is computed from the (2, 3) matrix generated above,
# so keep using that x.  (Subtracting the row max before exponentiating
# would be the numerically stable form; the plain formula matches the
# tutorial's derivation.)
y = np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)
y = np.log(y)


array([[-1.27508877, -0.81683591, -1.27738109], [-1.15045104, -1.47835858, -0.78637192]])

## 在pytorch中

# PyTorch log-softmax of the same matrix, taken over the last dimension.
torch_x = torch.from_numpy(x)
log_softmax = nn.LogSoftmax(dim=-1)
torch_y = log_softmax(torch_x)


tensor([[-1.2751, -0.8168, -1.2774], [-1.1505, -1.4784, -0.7864]], dtype=torch.float64)

# 计算NLLLoss

## 在numpy中

targets = np.array([0, 2])
# Negative log-likelihood: pick each row's log-probability at its target
# class, average over the rows, and negate.  np.choose(targets, y.T)
# selects y[i, targets[i]] for every row i.
picked = np.choose(targets, y.T)
nll_loss = -(picked.sum() / y.shape[0])


1.0307303437846973

## 在pytorch中

Example from the `nn.NLLLoss` documentation:

>>> m = nn.LogSoftmax(dim=1)
>>> loss = nn.NLLLoss()
>>> # input is of size N x C = 3 x 5
>>> input = torch.randn(3, 5, requires_grad=True)
>>> # each element in target has to have 0 <= value < C
>>> target = torch.tensor([1, 0, 4])
>>> output = loss(m(input), target)
>>> output.backward()


# PyTorch version: NLLLoss consumes log-probabilities (torch_y) plus the
# integer class targets, and averages over the batch by default.
torch_targets = torch.tensor([0, 2])
criterion = nn.NLLLoss()
torch_nll_loss = criterion(torch_y, torch_targets)


tensor(1.0307, dtype=torch.float64)

import torch.nn.functional as F

# Functional form: compose log_softmax and nll_loss explicitly; 'mean'
# reduction reproduces NLLLoss's default batch averaging.
log_probs = F.log_softmax(torch_x, dim=1)
output = F.nll_loss(log_probs, torch_targets, reduction='mean')


tensor(1.0307, dtype=torch.float64)

posted @ 2021-07-19 11:00  西西嘛呦  阅读(538)  评论(0编辑  收藏  举报