
TensorFlow2_200729 Series---11. Test (Tensor) Example


I. Summary

One-sentence summary:

Simply reuse the trained w and b to compute the accuracy on the test set; nothing more is needed.
# test/evaluation
# [w1, b1, w2, b2, w3, b3]
total_correct, total_num = 0, 0
for step, (x,y) in enumerate(test_db):

    # [b, 28, 28] => [b, 28*28]
    x = tf.reshape(x, [-1, 28*28])

    # [b, 784] => [b, 256] => [b, 128] => [b, 10]
    h1 = tf.nn.relu(x@w1 + b1)
    h2 = tf.nn.relu(h1@w2 + b2)
    out = h2@w3 + b3

    # out: [b, 10] ~ R
    # prob: [b, 10] ~ [0, 1]
    prob = tf.nn.softmax(out, axis=1)
    # [b, 10] => [b]
    # int64!!!
    pred = tf.argmax(prob, axis=1)
    pred = tf.cast(pred, dtype=tf.int32)
    # y: [b]
    # [b], int32
    # print(pred.dtype, y.dtype)
    # tf.cast: convert bool values to integer 0/1
    correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)
    # sum up the number of correct predictions
    correct = tf.reduce_sum(correct)

    total_correct += int(correct)
    total_num += x.shape[0]

acc = total_correct / total_num
print('test acc:', acc)
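
A minimal sketch of why that cast is needed, using made-up logits and labels: tf.argmax returns int64, the MNIST labels here are int32, and tf.equal refuses to compare mismatched dtypes.

import tensorflow as tf

out = tf.random.normal([3, 10])             # stand-in for the network's output logits
y = tf.constant([2, 7, 7], dtype=tf.int32)  # stand-in labels

prob = tf.nn.softmax(out, axis=1)
pred = tf.argmax(prob, axis=1)              # dtype is int64
pred = tf.cast(pred, dtype=tf.int32)        # match y's int32 before tf.equal
correct = tf.reduce_sum(tf.cast(tf.equal(pred, y), tf.int32))
print(int(correct), '/', int(y.shape[0]))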

II. Test (Tensor) Example

Video location in the corresponding course:


import  tensorflow as tf
from    tensorflow import keras
from    tensorflow.keras import datasets
import  os

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
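# '2' silences TensorFlow's INFO and WARNING log messages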

# x: [60k, 28, 28], [10k, 28, 28]
# y: [60k], [10k]
(x, y), (x_test, y_test) = datasets.mnist.load_data()
# x: [0~255] => [0~1.]
x = tf.convert_to_tensor(x, dtype=tf.float32) / 255.
y = tf.convert_to_tensor(y, dtype=tf.int32)
x_test = tf.convert_to_tensor(x_test, dtype=tf.float32) / 255.
y_test = tf.convert_to_tensor(y_test, dtype=tf.int32)

print(x.shape, y.shape, x.dtype, y.dtype)
print(tf.reduce_min(x), tf.reduce_max(x))
print(tf.reduce_min(y), tf.reduce_max(y))


train_db = tf.data.Dataset.from_tensor_slices((x,y)).batch(128)
test_db = tf.data.Dataset.from_tensor_slices((x_test,y_test)).batch(128)
train_iter = iter(train_db)
sample = next(train_iter)
print('batch:', sample[0].shape, sample[1].shape)
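# (60k/128 ≈ 469 training batches per epoch, hence the steps 0..468 below; 10k/128 ≈ 79 test batches)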


# [b, 784] => [b, 256] => [b, 128] => [b, 10]
# [dim_in, dim_out], [dim_out]
w1 = tf.Variable(tf.random.truncated_normal([784, 256], stddev=0.1))
b1 = tf.Variable(tf.zeros([256]))
w2 = tf.Variable(tf.random.truncated_normal([256, 128], stddev=0.1))
b2 = tf.Variable(tf.zeros([128]))
w3 = tf.Variable(tf.random.truncated_normal([128, 10], stddev=0.1))
b3 = tf.Variable(tf.zeros([10]))
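# (stddev=0.1 keeps the initial weights small; with the default stddev=1.0 this
# network's loss easily blows up to NaN)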

lr = 1e-3

for epoch in range(100): # iterate over the dataset for 100 epochs
    for step, (x, y) in enumerate(train_db): # for every batch
        # x:[128, 28, 28]
        # y: [128]

        # [b, 28, 28] => [b, 28*28]
        x = tf.reshape(x, [-1, 28*28])

        with tf.GradientTape() as tape: # the tape automatically tracks tf.Variables
            # x: [b, 28*28]
            # h1 = x@w1 + b1
            # [b, 784]@[784, 256] + [256] => [b, 256] + [256] => [b, 256] + [b, 256]
            h1 = x@w1 + tf.broadcast_to(b1, [x.shape[0], 256])
            h1 = tf.nn.relu(h1)
            # [b, 256] => [b, 128]
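            # (b2 is broadcast implicitly here, unlike the explicit broadcast_to for b1 above)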
            h2 = h1@w2 + b2
            h2 = tf.nn.relu(h2)
            # [b, 128] => [b, 10]
            out = h2@w3 + b3

            # compute loss
            # out: [b, 10]
            # y: [b] => [b, 10]
            y_onehot = tf.one_hot(y, depth=10)

            # mse = mean((y - out)^2)
            # [b, 10]
            loss = tf.square(y_onehot - out)
            # mean: scalar
            loss = tf.reduce_mean(loss)
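            # (note: this is plain MSE on one-hot labels rather than cross-entropy)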

        # compute gradients
        grads = tape.gradient(loss, [w1, b1, w2, b2, w3, b3])
        # print(grads)
        # w1 = w1 - lr * w1_grad
        w1.assign_sub(lr * grads[0])
        b1.assign_sub(lr * grads[1])
        w2.assign_sub(lr * grads[2])
        b2.assign_sub(lr * grads[3])
        w3.assign_sub(lr * grads[4])
        b3.assign_sub(lr * grads[5])
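        # assign_sub updates each tf.Variable in place; a plain reassignment like
        # w1 = w1 - lr * grads[0] would replace the Variable with a Tensor that the
        # tape no longer tracks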


        if step % 100 == 0:
            print(epoch, step, 'loss:', float(loss))


    # test/evaluation
    # [w1, b1, w2, b2, w3, b3]
    total_correct, total_num = 0, 0
    for step, (x,y) in enumerate(test_db):

        # [b, 28, 28] => [b, 28*28]
        x = tf.reshape(x, [-1, 28*28])

        # [b, 784] => [b, 256] => [b, 128] => [b, 10]
        h1 = tf.nn.relu(x@w1 + b1)
        h2 = tf.nn.relu(h1@w2 + b2)
        out = h2@w3 + b3

        # out: [b, 10] ~ R
        # prob: [b, 10] ~ [0, 1]
        prob = tf.nn.softmax(out, axis=1)
        # [b, 10] => [b]
        # int64!!!
        pred = tf.argmax(prob, axis=1)
        pred = tf.cast(pred, dtype=tf.int32)
        # y: [b]
        # [b], int32
        # print(pred.dtype, y.dtype)
        # tf.cast: convert bool values to integer 0/1
        correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)
        # sum up the number of correct predictions
        correct = tf.reduce_sum(correct)

        total_correct += int(correct)
        total_num += x.shape[0]

    acc = total_correct / total_num
    print('test acc:', acc)
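
Output from a sample run (the test accuracy climbs steadily to about 0.85 after 100 epochs):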
(60000, 28, 28) (60000,) <dtype: 'float32'> <dtype: 'int32'>
tf.Tensor(0.0, shape=(), dtype=float32) tf.Tensor(1.0, shape=(), dtype=float32)
tf.Tensor(0, shape=(), dtype=int32) tf.Tensor(9, shape=(), dtype=int32)
batch: (128, 28, 28) (128,)
0 0 loss: 0.38571780920028687
0 100 loss: 0.2173786610364914
0 200 loss: 0.1917204111814499
0 300 loss: 0.1711275279521942
0 400 loss: 0.15989932417869568
test acc: 0.1125
1 0 loss: 0.15362629294395447
1 100 loss: 0.15366746485233307
1 200 loss: 0.1535947322845459
1 300 loss: 0.1428387463092804
1 400 loss: 0.1365879327058792
test acc: 0.1662
2 0 loss: 0.13212983310222626
2 100 loss: 0.13453000783920288
2 200 loss: 0.13448789715766907
2 300 loss: 0.12588170170783997
2 400 loss: 0.12246117740869522
test acc: 0.2297
3 0 loss: 0.11838757991790771
3 100 loss: 0.12203065305948257
3 200 loss: 0.12162841856479645
3 300 loss: 0.11454033851623535
3 400 loss: 0.11295472085475922
test acc: 0.2932
4 0 loss: 0.10881924629211426
4 100 loss: 0.11313991248607635
4 200 loss: 0.11218862235546112
4 300 loss: 0.10627762973308563
4 400 loss: 0.10603668540716171
test acc: 0.3477
5 0 loss: 0.10164723545312881
5 100 loss: 0.1064164862036705
5 200 loss: 0.10499069839715958
5 300 loss: 0.0998879224061966
5 400 loss: 0.10066087543964386
test acc: 0.3942
6 0 loss: 0.09599001705646515
6 100 loss: 0.10103683173656464
6 200 loss: 0.0991998165845871
6 300 loss: 0.09476562589406967
6 400 loss: 0.09629850089550018
test acc: 0.4323
7 0 loss: 0.09137346595525742
7 100 loss: 0.09660382568836212
7 200 loss: 0.09442339837551117
7 300 loss: 0.09055226296186447
7 400 loss: 0.09264139831066132
test acc: 0.4651
8 0 loss: 0.08751724660396576
8 100 loss: 0.0928950160741806
8 200 loss: 0.09039851278066635
8 300 loss: 0.08697732537984848
8 400 loss: 0.08951622247695923
test acc: 0.4931
9 0 loss: 0.08421923220157623
9 100 loss: 0.08972366899251938
9 200 loss: 0.08691062033176422
9 300 loss: 0.08391834795475006
9 400 loss: 0.0867990031838417
test acc: 0.5196
10 0 loss: 0.08135680854320526
10 100 loss: 0.08697109669446945
10 200 loss: 0.08386717736721039
10 300 loss: 0.08123292773962021
10 400 loss: 0.08437168598175049
test acc: 0.5398
11 0 loss: 0.07881790399551392
11 100 loss: 0.08452443778514862
11 200 loss: 0.0811881572008133
11 300 loss: 0.07886318862438202
11 400 loss: 0.08217452466487885
test acc: 0.5569
12 0 loss: 0.0765639990568161
12 100 loss: 0.08232416212558746
12 200 loss: 0.07880674302577972
12 300 loss: 0.07675238698720932
12 400 loss: 0.08019421249628067
test acc: 0.5748
13 0 loss: 0.07454421371221542
13 100 loss: 0.08034920692443848
13 200 loss: 0.07666034251451492
13 300 loss: 0.0748479962348938
13 400 loss: 0.07839646935462952
test acc: 0.589
14 0 loss: 0.07271402329206467
14 100 loss: 0.07857382297515869
14 200 loss: 0.07471559941768646
14 300 loss: 0.07312760502099991
14 400 loss: 0.0767514556646347
test acc: 0.6044
15 0 loss: 0.07105317711830139
15 100 loss: 0.07696118205785751
15 200 loss: 0.07294266670942307
15 300 loss: 0.07157392054796219
15 400 loss: 0.07523997128009796
test acc: 0.6189
16 0 loss: 0.06952624022960663
16 100 loss: 0.0754871815443039
16 200 loss: 0.07133008539676666
16 300 loss: 0.0701526403427124
16 400 loss: 0.07383577525615692
test acc: 0.631
17 0 loss: 0.06811363995075226
17 100 loss: 0.07411835342645645
17 200 loss: 0.06983362138271332
17 300 loss: 0.06885457038879395
17 400 loss: 0.07253213226795197
test acc: 0.6425
18 0 loss: 0.066804900765419
18 100 loss: 0.072852224111557
18 200 loss: 0.06844879686832428
18 300 loss: 0.06764169782400131
18 400 loss: 0.07132522761821747
test acc: 0.6534
19 0 loss: 0.06558557599782944
19 100 loss: 0.07167867571115494
19 200 loss: 0.0671733021736145
19 300 loss: 0.06650887429714203
19 400 loss: 0.07019434869289398
test acc: 0.6629
20 0 loss: 0.06445295363664627
20 100 loss: 0.07058436423540115
20 200 loss: 0.06599204987287521
20 300 loss: 0.06546109914779663
20 400 loss: 0.06913967430591583
test acc: 0.671
21 0 loss: 0.06340227276086807
21 100 loss: 0.0695638656616211
21 200 loss: 0.06488761305809021
21 300 loss: 0.0644821897149086
21 400 loss: 0.06815551221370697
test acc: 0.6788
22 0 loss: 0.06241794675588608
22 100 loss: 0.06861011683940887
22 200 loss: 0.06385044753551483
22 300 loss: 0.06356855481863022
22 400 loss: 0.06722912937402725
test acc: 0.6848
23 0 loss: 0.06149246171116829
23 100 loss: 0.06771349906921387
23 200 loss: 0.06287343800067902
23 300 loss: 0.06270618736743927
23 400 loss: 0.0663640946149826
test acc: 0.6918
24 0 loss: 0.06062353402376175
24 100 loss: 0.06687076389789581
24 200 loss: 0.06195928901433945
24 300 loss: 0.061889875680208206
24 400 loss: 0.06555217504501343
test acc: 0.698
25 0 loss: 0.05980183929204941
25 100 loss: 0.06607715040445328
25 200 loss: 0.06109970808029175
25 300 loss: 0.06111394241452217
25 400 loss: 0.06478184461593628
test acc: 0.7038
26 0 loss: 0.059017110615968704
26 100 loss: 0.06532399356365204
26 200 loss: 0.06028430536389351
26 300 loss: 0.0603770986199379
26 400 loss: 0.06404663622379303
test acc: 0.7099
27 0 loss: 0.05826854705810547
27 100 loss: 0.06460431218147278
27 200 loss: 0.059507906436920166
27 300 loss: 0.05968160554766655
27 400 loss: 0.06334567815065384
test acc: 0.7154
28 0 loss: 0.057557571679353714
28 100 loss: 0.06392316520214081
28 200 loss: 0.05877317115664482
28 300 loss: 0.05902203917503357
28 400 loss: 0.062675341963768
test acc: 0.7205
29 0 loss: 0.05687836557626724
29 100 loss: 0.06327170878648758
29 200 loss: 0.058074962347745895
29 300 loss: 0.058392882347106934
29 400 loss: 0.062033139169216156
test acc: 0.7237
30 0 loss: 0.056225549429655075
30 100 loss: 0.06264326721429825
30 200 loss: 0.05740489810705185
30 300 loss: 0.05779348686337471
30 400 loss: 0.061418019235134125
test acc: 0.7282
31 0 loss: 0.0556040033698082
31 100 loss: 0.062039006501436234
31 200 loss: 0.05676410347223282
31 300 loss: 0.05722089856863022
31 400 loss: 0.060826193541288376
test acc: 0.7342
32 0 loss: 0.0550076849758625
32 100 loss: 0.061461757868528366
32 200 loss: 0.05614975839853287
32 300 loss: 0.05667402595281601
32 400 loss: 0.06025956943631172
test acc: 0.7379
33 0 loss: 0.05443425849080086
33 100 loss: 0.060914743691682816
33 200 loss: 0.055560629814863205
33 300 loss: 0.056152939796447754
33 400 loss: 0.059715963900089264
test acc: 0.7414
34 0 loss: 0.05388679355382919
34 100 loss: 0.06039080768823624
34 200 loss: 0.05499520152807236
34 300 loss: 0.05565224960446358
34 400 loss: 0.059191275388002396
test acc: 0.744
35 0 loss: 0.05336226895451546
35 100 loss: 0.05988597124814987
35 200 loss: 0.054453153163194656
35 300 loss: 0.055168915539979935
35 400 loss: 0.05868368223309517
test acc: 0.7481
36 0 loss: 0.0528588704764843
36 100 loss: 0.059394218027591705
36 200 loss: 0.05393476411700249
36 300 loss: 0.054706085473299026
36 400 loss: 0.05818701907992363
test acc: 0.7519
37 0 loss: 0.05237654969096184
37 100 loss: 0.05891808122396469
37 200 loss: 0.053437985479831696
37 300 loss: 0.05426173657178879
37 400 loss: 0.05770976468920708
test acc: 0.7569
38 0 loss: 0.05191376805305481
38 100 loss: 0.05845411866903305
38 200 loss: 0.05295874923467636
38 300 loss: 0.053832851350307465
38 400 loss: 0.05725214630365372
test acc: 0.7605
39 0 loss: 0.05146925523877144
39 100 loss: 0.058003224432468414
39 200 loss: 0.05249810218811035
39 300 loss: 0.053421951830387115
39 400 loss: 0.056813664734363556
test acc: 0.7633
40 0 loss: 0.05103911831974983
40 100 loss: 0.05756411701440811
40 200 loss: 0.052052091807127
40 300 loss: 0.05302652716636658
40 400 loss: 0.056393466889858246
test acc: 0.7666
41 0 loss: 0.05062655732035637
41 100 loss: 0.05713721364736557
41 200 loss: 0.051622163504362106
41 300 loss: 0.052644502371549606
41 400 loss: 0.05598137900233269
test acc: 0.7688
42 0 loss: 0.0502278208732605
42 100 loss: 0.05672335624694824
42 200 loss: 0.051204800605773926
42 300 loss: 0.052278757095336914
42 400 loss: 0.05558023601770401
test acc: 0.7715
43 0 loss: 0.049841053783893585
43 100 loss: 0.05631899833679199
43 200 loss: 0.05080088973045349
43 300 loss: 0.05192501097917557
43 400 loss: 0.05519380420446396
test acc: 0.7742
44 0 loss: 0.04946431145071983
44 100 loss: 0.055924542248249054
44 200 loss: 0.05041190981864929
44 300 loss: 0.051581550389528275
44 400 loss: 0.05481947213411331
test acc: 0.7777
45 0 loss: 0.049099136143922806
45 100 loss: 0.05553946644067764
45 200 loss: 0.05003342777490616
45 300 loss: 0.05124782398343086
45 400 loss: 0.05445672199130058
test acc: 0.7801
46 0 loss: 0.04874817654490471
46 100 loss: 0.05516337230801582
46 200 loss: 0.04966534301638603
46 300 loss: 0.05092122405767441
46 400 loss: 0.05410322546958923
test acc: 0.7828
47 0 loss: 0.048408813774585724
47 100 loss: 0.054797541350126266
47 200 loss: 0.04930717498064041
47 300 loss: 0.050603706389665604
47 400 loss: 0.05376031994819641
test acc: 0.7842
48 0 loss: 0.0480792373418808
48 100 loss: 0.05444105342030525
48 200 loss: 0.048959434032440186
48 300 loss: 0.05029461905360222
48 400 loss: 0.05342777818441391
test acc: 0.7861
49 0 loss: 0.047758717089891434
49 100 loss: 0.05409204959869385
49 200 loss: 0.048620499670505524
49 300 loss: 0.04999549314379692
49 400 loss: 0.05310485512018204
test acc: 0.788
50 0 loss: 0.04744725301861763
50 100 loss: 0.053752340376377106
50 200 loss: 0.04828915745019913
50 300 loss: 0.04970484599471092
50 400 loss: 0.05279148370027542
test acc: 0.7903
51 0 loss: 0.04714208096265793
51 100 loss: 0.05341966077685356
51 200 loss: 0.047963883727788925
51 300 loss: 0.0494220107793808
51 400 loss: 0.05248744413256645
test acc: 0.7918
52 0 loss: 0.046845942735672
52 100 loss: 0.053093887865543365
52 200 loss: 0.047644250094890594
52 300 loss: 0.049146927893161774
52 400 loss: 0.05219132825732231
test acc: 0.7935
53 0 loss: 0.04655710235238075
53 100 loss: 0.05277685075998306
53 200 loss: 0.04733050614595413
53 300 loss: 0.04887847602367401
53 400 loss: 0.0519016869366169
test acc: 0.7952
54 0 loss: 0.04627660661935806
54 100 loss: 0.05246717482805252
54 200 loss: 0.04702361300587654
54 300 loss: 0.04861682280898094
54 400 loss: 0.05161689966917038
test acc: 0.7969
55 0 loss: 0.04600272700190544
55 100 loss: 0.052165042608976364
55 200 loss: 0.046723343431949615
55 300 loss: 0.048362791538238525
55 400 loss: 0.051340095698833466
test acc: 0.7989
56 0 loss: 0.0457344613969326
56 100 loss: 0.05187077447772026
56 200 loss: 0.04642946645617485
56 300 loss: 0.04811382293701172
56 400 loss: 0.05107064172625542
test acc: 0.8012
57 0 loss: 0.045471709221601486
57 100 loss: 0.051583193242549896
57 200 loss: 0.04614195227622986
57 300 loss: 0.04786881059408188
57 400 loss: 0.05080564692616463
test acc: 0.803
58 0 loss: 0.045213907957077026
58 100 loss: 0.05130178853869438
58 200 loss: 0.045861516147851944
58 300 loss: 0.04762851074337959
58 400 loss: 0.05054401606321335
test acc: 0.8039
59 0 loss: 0.04496104270219803
59 100 loss: 0.05102714151144028
59 200 loss: 0.04558849334716797
59 300 loss: 0.04739319533109665
59 400 loss: 0.0502888560295105
test acc: 0.8054
60 0 loss: 0.04471330717206001
60 100 loss: 0.050758250057697296
60 200 loss: 0.04532066732645035
60 300 loss: 0.04716295748949051
60 400 loss: 0.05003789812326431
test acc: 0.8068
61 0 loss: 0.04446972534060478
61 100 loss: 0.05049530416727066
61 200 loss: 0.045057184994220734
61 300 loss: 0.04693887382745743
61 400 loss: 0.049790799617767334
test acc: 0.8088
62 0 loss: 0.04423215612769127
62 100 loss: 0.05023801326751709
62 200 loss: 0.04479958862066269
62 300 loss: 0.046718962490558624
62 400 loss: 0.04954972863197327
test acc: 0.81
63 0 loss: 0.04400014504790306
63 100 loss: 0.049985505640506744
63 200 loss: 0.044548697769641876
63 300 loss: 0.046503446996212006
63 400 loss: 0.049313709139823914
test acc: 0.8102
64 0 loss: 0.043772488832473755
64 100 loss: 0.049738604575395584
64 200 loss: 0.04430307075381279
64 300 loss: 0.04629255086183548
64 400 loss: 0.04908251762390137
test acc: 0.8114
65 0 loss: 0.04354875162243843
65 100 loss: 0.049495942890644073
65 200 loss: 0.04406172037124634
65 300 loss: 0.04608604311943054
65 400 loss: 0.048856597393751144
test acc: 0.8133
66 0 loss: 0.04332948476076126
66 100 loss: 0.04925796389579773
66 200 loss: 0.0438249371945858
66 300 loss: 0.04588426277041435
66 400 loss: 0.04863450303673744
test acc: 0.8145
67 0 loss: 0.04311296343803406
67 100 loss: 0.049024343490600586
67 200 loss: 0.04359283298254013
67 300 loss: 0.04568526893854141
67 400 loss: 0.04841398820281029
test acc: 0.8158
68 0 loss: 0.04289940744638443
68 100 loss: 0.04879366606473923
68 200 loss: 0.0433647520840168
68 300 loss: 0.0454895906150341
68 400 loss: 0.04819738492369652
test acc: 0.8175
69 0 loss: 0.04268815740942955
69 100 loss: 0.04856716841459274
69 200 loss: 0.04314028471708298
69 300 loss: 0.04529682546854019
69 400 loss: 0.04798464477062225
test acc: 0.8186
70 0 loss: 0.04248129203915596
70 100 loss: 0.04834536835551262
70 200 loss: 0.04291931912302971
70 300 loss: 0.04510772228240967
70 400 loss: 0.04777703434228897
test acc: 0.8196
71 0 loss: 0.04227766394615173
71 100 loss: 0.0481267124414444
71 200 loss: 0.042702723294496536
71 300 loss: 0.044922634959220886
71 400 loss: 0.04757494479417801
test acc: 0.8213
72 0 loss: 0.04207731410861015
72 100 loss: 0.0479096993803978
72 200 loss: 0.04249046742916107
72 300 loss: 0.04474009945988655
72 400 loss: 0.047377604991197586
test acc: 0.8227
73 0 loss: 0.041880182921886444
73 100 loss: 0.047696229070425034
73 200 loss: 0.04228126257658005
73 300 loss: 0.044561050832271576
73 400 loss: 0.04718439653515816
test acc: 0.8238
74 0 loss: 0.04168543964624405
74 100 loss: 0.047486014664173126
74 200 loss: 0.04207534343004227
74 300 loss: 0.044385358691215515
74 400 loss: 0.04699423909187317
test acc: 0.8249
75 0 loss: 0.04149235785007477
75 100 loss: 0.04727887362241745
75 200 loss: 0.041871607303619385
75 300 loss: 0.044211845844984055
75 400 loss: 0.04680665582418442
test acc: 0.8262
76 0 loss: 0.041303087025880814
76 100 loss: 0.047074366360902786
76 200 loss: 0.041670359671115875
76 300 loss: 0.04404059424996376
76 400 loss: 0.04662255570292473
test acc: 0.8272
77 0 loss: 0.04111681506037712
77 100 loss: 0.0468735508620739
77 200 loss: 0.04147212207317352
77 300 loss: 0.04387206956744194
77 400 loss: 0.04644252359867096
test acc: 0.8283
78 0 loss: 0.04093288257718086
78 100 loss: 0.04667596518993378
78 200 loss: 0.04127761349081993
78 300 loss: 0.04370657354593277
78 400 loss: 0.04626502841711044
test acc: 0.8295
79 0 loss: 0.04075128585100174
79 100 loss: 0.04648040980100632
79 200 loss: 0.04108574613928795
79 300 loss: 0.0435444600880146
79 400 loss: 0.04608960822224617
test acc: 0.8308
80 0 loss: 0.04057186841964722
80 100 loss: 0.04628724604845047
80 200 loss: 0.040897708386182785
80 300 loss: 0.0433855876326561
80 400 loss: 0.04591671749949455
test acc: 0.8313
81 0 loss: 0.04039503261446953
81 100 loss: 0.04609670117497444
81 200 loss: 0.040712736546993256
81 300 loss: 0.043228499591350555
81 400 loss: 0.045745961368083954
test acc: 0.8318
82 0 loss: 0.04022274166345596
82 100 loss: 0.0459088459610939
82 200 loss: 0.04052998498082161
82 300 loss: 0.04307367652654648
82 400 loss: 0.04557780548930168
test acc: 0.8324
83 0 loss: 0.04005350545048714
83 100 loss: 0.045724399387836456
83 200 loss: 0.040349725633859634
83 300 loss: 0.04292098805308342
83 400 loss: 0.045411963015794754
test acc: 0.8327
84 0 loss: 0.03988692909479141
84 100 loss: 0.045541830360889435
84 200 loss: 0.04017125070095062
84 300 loss: 0.04277034476399422
84 400 loss: 0.04524741694331169
test acc: 0.833
85 0 loss: 0.03972318768501282
85 100 loss: 0.04536179453134537
85 200 loss: 0.0399952232837677
85 300 loss: 0.04262210428714752
85 400 loss: 0.04508556053042412
test acc: 0.8342
86 0 loss: 0.039561014622449875
86 100 loss: 0.04518372192978859
86 200 loss: 0.039821505546569824
86 300 loss: 0.04247588664293289
86 400 loss: 0.04492555931210518
test acc: 0.8351
87 0 loss: 0.03940123692154884
87 100 loss: 0.045007701963186264
87 200 loss: 0.03965107351541519
87 300 loss: 0.042331963777542114
87 400 loss: 0.044767558574676514
test acc: 0.8367
88 0 loss: 0.03924410417675972
88 100 loss: 0.044833969324827194
88 200 loss: 0.03948286175727844
88 300 loss: 0.04218969866633415
88 400 loss: 0.04461175948381424
test acc: 0.8374
89 0 loss: 0.03908901661634445
89 100 loss: 0.044661737978458405
89 200 loss: 0.039316870272159576
89 300 loss: 0.042049478739500046
89 400 loss: 0.04445768520236015
test acc: 0.8381
90 0 loss: 0.03893637657165527
90 100 loss: 0.04449290782213211
90 200 loss: 0.03915274888277054
90 300 loss: 0.04191197082400322
90 400 loss: 0.044304657727479935
test acc: 0.8388
91 0 loss: 0.038787275552749634
91 100 loss: 0.044326573610305786
91 200 loss: 0.03899093717336655
91 300 loss: 0.04177701473236084
91 400 loss: 0.04415366053581238
test acc: 0.8396
92 0 loss: 0.03864089399576187
92 100 loss: 0.04416312649846077
92 200 loss: 0.03883194923400879
92 300 loss: 0.04164421558380127
92 400 loss: 0.044005364179611206
test acc: 0.8404
93 0 loss: 0.03849639371037483
93 100 loss: 0.04400233179330826
93 200 loss: 0.03867446258664131
93 300 loss: 0.04151249676942825
93 400 loss: 0.04385807365179062
test acc: 0.8412
94 0 loss: 0.038353823125362396
94 100 loss: 0.04384317994117737
94 200 loss: 0.03851848095655441
94 300 loss: 0.04138302803039551
94 400 loss: 0.043712861835956573
test acc: 0.8419
95 0 loss: 0.038213036954402924
95 100 loss: 0.04368530958890915
95 200 loss: 0.038364164531230927
95 300 loss: 0.04125489667057991
95 400 loss: 0.04356975108385086
test acc: 0.8425
96 0 loss: 0.03807401657104492
96 100 loss: 0.043529678136110306
96 200 loss: 0.0382118821144104
96 300 loss: 0.041128192096948624
96 400 loss: 0.043428994715213776
test acc: 0.8433
97 0 loss: 0.03793691471219063
97 100 loss: 0.04337584227323532
97 200 loss: 0.03806223347783089
97 300 loss: 0.041003115475177765
97 400 loss: 0.04329059645533562
test acc: 0.8439
98 0 loss: 0.03780188411474228
98 100 loss: 0.04322321340441704
98 200 loss: 0.03791496902704239
98 300 loss: 0.04087929055094719
98 400 loss: 0.0431545190513134
test acc: 0.8447
99 0 loss: 0.03766778111457825
99 100 loss: 0.043072618544101715
99 200 loss: 0.03776995465159416
99 300 loss: 0.040756918489933014
99 400 loss: 0.04302016645669937
test acc: 0.8453
posted @ 2020-08-03 02:20 范仁义