
A Gluon implementation of a multilayer perceptron (MLP) for Fashion-MNIST classification

 

MLP_Gluon

 

 

In [2]:
import gluonbook as gb                      # helper package from the "Dive into Deep Learning" book
from mxnet import gluon, init
from mxnet.gluon import loss as gloss, nn
In [4]:
net = nn.Sequential()
# one hidden layer with 256 units and ReLU, followed by a 10-way output layer
net.add(nn.Dense(256, activation='relu'), nn.Dense(10))
net.initialize(init.Normal(sigma=0.01))     # weights drawn from N(0, 0.01^2)
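Gluon defers parameter initialization until the first forward pass, so a quick way to sanity-check the architecture is to feed a dummy batch shaped like Fashion-MNIST images (the Dense layers flatten the trailing dimensions automatically). A minimal sketch:

from mxnet import nd

X = nd.random.uniform(shape=(2, 1, 28, 28))  # dummy batch: 2 grayscale 28x28 images
print(net(X).shape)                          # expected (2, 10): one score per class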
In [5]:
batch_size = 256
train_iter, test_iter = gb.load_data_fashion_mnist(batch_size)
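Each mini-batch from these iterators is a pair of NDArrays: images as float32 tensors of shape (batch_size, 1, 28, 28) and integer class labels of shape (batch_size,). A small sketch to peek at one batch:

for X, y in train_iter:
    print(X.shape, X.dtype, y.shape)  # e.g. (256, 1, 28, 28) float32 (256,)
    break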
 

Loss function and training

In [6]:
loss = gloss.SoftmaxCrossEntropyLoss()      # softmax and cross-entropy combined in one numerically stable op
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.5})
num_epochs = 5
gb.train_ch3(net, train_iter, test_iter, loss, num_epochs, batch_size, None, None, trainer)
 
epoch 1, loss 0.8074, train acc 0.700, test acc 0.829
epoch 2, loss 0.4819, train acc 0.823, test acc 0.852
epoch 3, loss 0.4306, train acc 0.840, test acc 0.855
epoch 4, loss 0.3935, train acc 0.856, test acc 0.856
epoch 5, loss 0.3714, train acc 0.863, test acc 0.865
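After training, the same net can be used directly for prediction. A short sketch that compares true and predicted labels for a few test images, assuming the gluonbook helpers get_fashion_mnist_labels and show_fashion_mnist are available in the installed version:

for X, y in test_iter:
    break  # take the first test batch
true_labels = gb.get_fashion_mnist_labels(y.asnumpy())
pred_labels = gb.get_fashion_mnist_labels(net(X).argmax(axis=1).asnumpy())
titles = [t + '\n' + p for t, p in zip(true_labels, pred_labels)]
gb.show_fashion_mnist(X[0:9], titles[0:9])  # title line 1: true label, line 2: prediction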
 