Building a fully connected network with PyTorch
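The script below reads a small tabular dataset from an Excel file ('1.xlsx'), uses four of its columns as input features and one column as the regression target, and fits a two-layer fully connected network (4 inputs, 37 sigmoid hidden units, 1 output) with SGD on an MSE loss, first in small mini-batches and then in long full-batch passes.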

import torch
import random
import numpy as np
import torch.optim as optim
import pandas as pd

nethid = 37  # number of hidden units

# Load the tabular data from Excel and work with the raw numpy values.
df = pd.read_excel('1.xlsx')
w = df.values
def data_iter(batch_size, features, labels):
    """Yield shuffled mini-batches of (features, labels)."""
    num_example = len(features)
    indices = list(range(num_example))
    random.shuffle(indices)
    for i in range(0, num_example, batch_size):
        j = torch.LongTensor(indices[i: min(i + batch_size, num_example)])
        yield features.index_select(0, j), labels.index_select(0, j)
class simple_net(torch.nn.Module):
    """Two-layer fully connected network: linear -> sigmoid -> linear."""
    def __init__(self, imn, hid, out):
        super(simple_net, self).__init__()
        self.layer = torch.nn.Linear(imn, hid)
        self.layer1 = torch.nn.Linear(hid, out)

    def forward(self, x):
        x = self.layer(x)
        x = torch.sigmoid(x)  # torch.sigmoid replaces the deprecated torch.nn.functional.sigmoid
        x = self.layer1(x)
        return x
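As a quick sanity check of the model definition (a minimal sketch with hypothetical names, not part of the original training script), a forward pass on a random batch should produce one output per sample:

# Hypothetical smoke test: 5 random samples with 4 features each.
demo_net = simple_net(4, 37, 1)
demo_x = torch.randn(5, 4)
print(demo_net(demo_x).shape)  # expected: torch.Size([5, 1])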
net = simple_net(4, nethid, 1)

# Four feature columns and one target column are taken from the spreadsheet.
# The .astype(np.float32) cast is an assumption: df.values can be an object
# array when the sheet mixes column types, and torch.tensor needs numeric data.
feature = w[:, 1:5].astype(np.float32)
feature = np.reshape(feature, (-1, 4))
feature = torch.tensor(feature, dtype=torch.float)
label = w[:, 6].astype(np.float32)
label = torch.tensor(label, dtype=torch.float)

batch_size = 3
loss = torch.nn.MSELoss()
optimizer = optim.SGD(net.parameters(), lr=0.000003)
# Stage 1: mini-batch SGD.
for epoch in range(10000):
    for X, y in data_iter(batch_size, feature, label):
        output = net(X)
        l = loss(output, y.view(-1, 1))
        optimizer.zero_grad()  # zero the gradients; equivalent to net.zero_grad()
        l.backward()
        optimizer.step()
    print('epoch %d, loss: %f' % (epoch, l.item()))
# Stage 2: continue training with a batch size larger than the dataset,
# i.e. full-batch gradient descent.
for epoch in range(155128):
    for X, y in data_iter(1000, feature, label):
        output = net(X)
        l = loss(output, y.view(-1, 1))
        optimizer.zero_grad()  # zero the gradients; equivalent to net.zero_grad()
        l.backward()
        optimizer.step()
    print('epoch %d, loss: %f' % (epoch, l.item()))
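After training, the fit can be checked by running the whole dataset through the network. This is a minimal sketch that assumes the feature and label tensors defined above; it was not part of the original post:

# Hypothetical evaluation: predict on the full training data and report the MSE.
with torch.no_grad():
    pred = net(feature)
    final_mse = loss(pred, label.view(-1, 1))
    print('final MSE on the training data: %f' % final_mse.item())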