2026.1.16 Summary

Took a first look at PyTorch. I couldn't really follow much of it yet, but it seems like it will be useful.

1: PyTorch Basics

```python
import torch
import torch.nn as nn
import torch.optim as optim

# 1.1 Tensor basics
print("PyTorch version:", torch.__version__)
print("CUDA available:", torch.cuda.is_available())

# Create tensors
x = torch.tensor([1.0, 2.0, 3.0])
y = torch.tensor([4.0, 5.0, 6.0])

# Tensor arithmetic
z = x + y
print(f"Tensor addition: {z}")

# Autograd: gradient of y = x**2 + 3*x + 1 at x = 2
x = torch.tensor(2.0, requires_grad=True)
y = x**2 + 3*x + 1
y.backward()
print(f"dy/dx at x=2: {x.grad}")  # 2*x + 3 = 7

# 1.2 A simple neural network
class SimpleNN(nn.Module):
    def __init__(self):
        super(SimpleNN, self).__init__()
        self.layer1 = nn.Linear(10, 5)  # 10 inputs, 5 outputs
        self.layer2 = nn.Linear(5, 2)   # 5 inputs, 2 outputs
        self.relu = nn.ReLU()
    
    def forward(self, x):
        x = self.relu(self.layer1(x))
        x = self.layer2(x)
        return x

# Instantiate the model
model = SimpleNN()
print("Model structure:")
print(model)
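
# Quick sanity check (hypothetical dummy batch): 4 random samples with 10
# features each, matching layer1's input size.
dummy_input = torch.randn(4, 10)
print("Output shape:", model(dummy_input).shape)  # expected: torch.Size([4, 2])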

# 1.3 Data loading
from torch.utils.data import Dataset, DataLoader
import numpy as np

class CustomDataset(Dataset):
    def __init__(self, data, labels):
        self.data = torch.FloatTensor(data)
        self.labels = torch.LongTensor(labels)
    
    def __len__(self):
        return len(self.data)
    
    def __getitem__(self, idx):
        return self.data[idx], self.labels[idx]

# Create synthetic data
data = np.random.randn(1000, 10)  # 1000 samples, 10 features each
labels = np.random.randint(0, 2, 1000)  # binary classification labels

dataset = CustomDataset(data, labels)
dataloader = DataLoader(dataset, batch_size=32, shuffle=True)
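
# Peek at one batch to confirm shapes (illustrative check, reusing the
# dataloader defined just above)
first_data, first_labels = next(iter(dataloader))
print(first_data.shape, first_labels.shape)  # torch.Size([32, 10]) torch.Size([32])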

# 1.4 Training loop
def train_one_epoch(model, dataloader, criterion, optimizer):
    model.train()
    total_loss = 0
    
    for batch_data, batch_labels in dataloader:
        # Forward pass
        outputs = model(batch_data)
        loss = criterion(outputs, batch_labels)
        
        # Backward pass and parameter update
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        
        total_loss += loss.item()
    
    return total_loss / len(dataloader)

# Training configuration: loss function and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)

# Train for 5 epochs
for epoch in range(5):
    avg_loss = train_one_epoch(model, dataloader, criterion, optimizer)
    print(f"Epoch {epoch+1}, Loss: {avg_loss:.4f}")
```
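
After training, a quick evaluation pass shows whether the model actually learned anything. Below is a minimal sketch, reusing the `model` and `dataloader` defined above; since the labels were generated at random, accuracy should hover around 50%.

```python
# Evaluation sketch: fraction of correct predictions over the dataloader
def evaluate(model, dataloader):
    model.eval()  # switch to inference mode (no effect for this model, but a good habit)
    correct, total = 0, 0
    with torch.no_grad():  # no gradients needed for evaluation
        for batch_data, batch_labels in dataloader:
            preds = model(batch_data).argmax(dim=1)  # predicted class per sample
            correct += (preds == batch_labels).sum().item()
            total += batch_labels.size(0)
    return correct / total

print(f"Accuracy: {evaluate(model, dataloader):.2%}")
```
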
posted @ 2026-01-19 23:47  臧博涛