Forward prop in TensorFlow: MLP

1. How to build layers

  1. Manually (a1 is a 1x3 matrix)

    import numpy as np
    from tensorflow.keras.layers import Dense

    x = np.array([[200.0, 17.0]])    # 1x2 input: one example with two features
    layer_1 = Dense(units=3, activation='sigmoid')
    a1 = layer_1(x)                  # 1x3 output of the first layer
    layer_2 = Dense(units=1, activation='sigmoid')
    a2 = layer_2(a1)                 # 1x1 output of the second layer
    # a1 & a2 are stored as TensorFlow tensors. Run a1.numpy() to convert to NumPy.
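    # Quick check (a sketch): once the layers have been called, Keras has created
    # their parameters, so their shapes can be inspected with get_weights().
    kernel, bias = layer_1.get_weights()
    print(kernel.shape, bias.shape)   # (2, 3) (3,): 2 inputs feeding 3 units
    print(a1.numpy())                 # the activations as a plain NumPy array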
    
  2. Automatically

    from tensorflow.keras import Sequential

    layer_1 = Dense(units=3, activation='sigmoid')
    layer_2 = Dense(units=1, activation='sigmoid')
    model = Sequential([layer_1, layer_2])
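    # Sketch: calling the stacked model on the 1x2 input x from step 1 runs forward
    # prop through both layers in one step, equivalent to layer_2(layer_1(x)).
    out = model(x)
    print(out.numpy())   # 1x1 sigmoid output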
    
  3. Compile, train, and predict

    model.compile(...)        # specify the loss (and optionally the optimizer and metrics)
    x = np.array([...])       # training inputs
    y = np.array([...])       # training targets
    model.fit(x, y)           # train the model
    model.predict(x_test)     # run forward prop (inference) on new data
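    # Sketch of how the elided pieces might be filled in for binary classification;
    # the loss mirrors the full example in step 7, while the optimizer choice and
    # the numbers are assumptions for illustration only.
    from tensorflow.keras.losses import BinaryCrossentropy

    model.compile(loss=BinaryCrossentropy(), optimizer='adam')
    x = np.array([[200.0, 17.0], [120.0, 5.0]])   # two hypothetical training examples
    y = np.array([1, 0])                          # hypothetical binary labels
    model.fit(x, y, epochs=10)
    print(model.predict(np.array([[200.0, 17.0]])))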
    
  4. Putting it all together

    model = Sequential([
      Dense(units=25, activation='sigmoid'),
      Dense(units=15, activation='sigmoid'),
      Dense(units=1, activation='sigmoid')])
    model.compile(...)
    x = np.array([...])
    y = np.array([...])
    model.fit(x, y)
    model.predict(x_test)
    
  5. Define a dense() function that returns the activations for the next layer

    def dense(a_in, W, b, g):      # capital W denotes a matrix; here W is 2x3
        units = W.shape[1]         # columns of W = number of neurons in this layer
        a_out = np.zeros(units)
        for j in range(units):
            w = W[:, j]            # weights of neuron j
            z = np.dot(w, a_in) + b[j]
            a_out[j] = g(z)        # g is the activation function, e.g. sigmoid
        return a_out
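    # Usage sketch: the sigmoid helper and the numbers below are only for illustration.
    def sigmoid(z):
        return 1 / (1 + np.exp(-z))

    W = np.array([[1.0, -3.0, 5.0],
                  [2.0,  4.0, -6.0]])    # 2 inputs feeding 3 neurons
    b = np.array([-1.0, 1.0, 2.0])
    a_in = np.array([-2.0, 4.0])
    print(dense(a_in, W, b, sigmoid))    # three activations, one per neuron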
    
  6. Sequential Structure

    def sequential(x):
        a1 = dense(x,  W1, b1, sigmoid)   # each layer's output feeds the next layer
        a2 = dense(a1, W2, b2, sigmoid)
        a3 = dense(a2, W3, b3, sigmoid)
        a4 = dense(a3, W4, b4, sigmoid)
        f_x = a4
        return f_x
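    # Usage sketch: the layer sizes and random weights are placeholders; W1...b4
    # must exist (here as globals) before calling sequential(), and sigmoid is the
    # helper defined in the step-5 sketch.
    rng = np.random.default_rng(0)
    W1, b1 = rng.normal(size=(2, 3)), np.zeros(3)   # 2 features -> 3 units
    W2, b2 = rng.normal(size=(3, 3)), np.zeros(3)
    W3, b3 = rng.normal(size=(3, 2)), np.zeros(2)
    W4, b4 = rng.normal(size=(2, 1)), np.zeros(1)

    x = np.array([200.0, 17.0])
    print(sequential(x))   # a single sigmoid output between 0 and 1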
    
  7. Example: build, compile, and train


    # Build the network layers
    import tensorflow as tf
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    model = Sequential([
        Dense(units=25, activation='sigmoid'),
        Dense(units=15, activation='sigmoid'),
        Dense(units=1, activation='sigmoid')])

    # Compile the model with the binary cross-entropy loss
    from tensorflow.keras.losses import BinaryCrossentropy
    model.compile(loss=BinaryCrossentropy())

    # Train for 100 epochs
    model.fit(X, Y, epochs=100)
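    # Inference sketch: X_new is a hypothetical batch with the same features as X.
    predictions = model.predict(X_new)        # sigmoid outputs between 0 and 1
    yhat = (predictions >= 0.5).astype(int)   # threshold to get binary labels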