Forward propagation in TensorFlow — MLP
1.how to build layers
-
Manually (layer 1 outputs a 1x3 matrix of activations)
x = np.array([200.0, 17.0]) layer_1 = Dense(units=3, activation='sigmoid') a1 = layer_1(x) layer_2 = Dense(units=1, activation='sigmoid') a2 = layer_2(a1) #a1&a2 are stored in TensorFlow format. Run this to convert it to NumPy: a1.numpy(). -
Automatically
layer_1 = Dense(units=3, activation='sigmoid') layer_2 = Dense(units=1, activation='sigmoid') model = Sequential([layer_1, layer_2]) -
Compile the model
model.compile(...) x = np.array([...]) y = np.array([...]) model.fit(x,y) model.predict(x_test) -
All
model = Sequential([ Dense(units=25, activation='sigmoid'), Dense(units=15, activation='sigmoid'), Dense(units=1, activation='sigmoid')]) model.compile(...) x = np.array([...]) y = np.array([...]) model.fit(x,y) model.predict(x_test) -
def function to return a for next layer
def dense(a_in, W, b, g):
    """Compute one dense layer's forward pass.

    Args:
        a_in: activation vector from the previous layer (length = W.shape[0]).
        W: weight matrix; column j holds the weights of neuron j
           (capital letters refer to a matrix — here W is e.g. 2x3).
        b: bias vector, one entry per neuron.
        g: activation function applied elementwise (e.g. sigmoid).

    Returns:
        a_out: activation vector for this layer (length = W.shape[1]).
    """
    units = W.shape[1]  # number of columns in W = number of neurons in this layer
    a_out = np.zeros(units)
    for j in range(units):
        w = W[:, j]
        z = np.dot(w, a_in) + b[j]
        a_out[j] = g(z)  # fixed: g is a function — call g(z), not index g[z]
    return a_out
Sequential structure (chaining dense layers by hand)
def sequential(x):
    """Forward pass through a 4-layer network built from dense().

    Each layer must consume the PREVIOUS layer's activations — the
    original fed the raw input x to every layer, which is a bug.

    Args:
        x: input feature vector.

    Returns:
        f_x: the final layer's activation vector.
    """
    # W1..W4 / b1..b4 are the per-layer parameters, defined elsewhere.
    # NOTE(review): dense() as defined in these notes also takes an
    # activation g; these calls omit it — confirm the intended signature.
    a1 = dense(x, W1, b1)
    a2 = dense(a1, W2, b2)   # fixed: was dense(x, ...)
    a3 = dense(a2, W3, b3)   # fixed: was dense(x, ...)
    a4 = dense(a3, W4, b4)   # fixed: was dense(x, ...)
    f_x = a4
    return f_x
example

# Build the network layers
import tensorflow as tf                       # fixed: was "import tensorflow as ts" (typo'd alias)
from tensorflow.keras import Sequential       # fixed: was "Squential" (ImportError)
from tensorflow.keras.layers import Dense     # fixed: Dense was never imported (NameError)
model = Sequential([
    Dense(units=25, activation='sigmoid'),
    Dense(units=15, activation='sigmoid'),
    Dense(units=1, activation='sigmoid')])
# Compile the model with binary cross-entropy loss (binary classification)
from tensorflow.keras.losses import BinaryCrossentropy
model.compile(loss=BinaryCrossentropy())
# Train — X, Y are the training features/labels, assumed defined elsewhere
model.fit(X, Y, epochs=100)

浙公网安备 33010602011771号