"""
前向传播解释:
https://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250
"""
import math
import random


# Sigmoid activation function
def sigmoid_function(value: float, deriv: bool = False) -> float:
    """Return the sigmoid of a float, or its derivative when deriv is True.

    When deriv is True, `value` is assumed to be a sigmoid output already,
    so the derivative is simply value * (1 - value).

    >>> sigmoid_function(3.5)
    0.9706877692486436
    >>> sigmoid_function(3.5, True)
    -8.75
    """
    if deriv:
        return value * (1 - value)
    return 1 / (1 + math.exp(-value))
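

# The deriv=True branch above expects the *sigmoid output*, not the raw
# input, since d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x)). Below is a
# minimal sanity-check sketch of that convention; the helper name is
# illustrative and not part of the original module.
def _check_sigmoid_derivative(value: float, eps: float = 1e-6) -> float:
    """Return the gap between the analytic and a numeric sigmoid derivative.

    >>> _check_sigmoid_derivative(3.5) < 1e-8
    True
    """
    analytic = sigmoid_function(sigmoid_function(value), deriv=True)
    # Central finite difference of the sigmoid itself.
    numeric = (sigmoid_function(value + eps) - sigmoid_function(value - eps)) / (
        2 * eps
    )
    return abs(analytic - numeric)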


# Initial value
INITIAL_VALUE = 0.02


def forward_propagation(expected: int, number_propagations: int) -> float:
    """Return the value found after the forward propagation training.

    >>> res = forward_propagation(32, 10_000_000)
    >>> res > 31 and res < 33
    True

    >>> res = forward_propagation(32, 1000)
    >>> res > 31 and res < 33
    False
    """
    # Random weight
    weight = float(2 * (random.randint(1, 100)) - 1)

    for _ in range(number_propagations):
        # Forward propagation
        layer_1 = sigmoid_function(INITIAL_VALUE * weight)
        # How much did we miss?
        layer_1_error = (expected / 100) - layer_1
        # Error delta
        layer_1_delta = layer_1_error * sigmoid_function(layer_1, True)
        # Update weight
        weight += INITIAL_VALUE * layer_1_delta

    return layer_1 * 100
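

# The weight update in the loop above is exactly one step of gradient descent
# (with a learning rate of 1) on the half squared error
# 0.5 * (target - layer_1) ** 2: by the chain rule,
# d(loss)/d(weight) = -layer_1_error * layer_1 * (1 - layer_1) * INITIAL_VALUE.
# Below is a minimal sketch spelling that step out; the helper name is
# illustrative and not part of the original module.
def _single_gradient_step(weight: float, target: float) -> float:
    """Return the weight after one explicit gradient-descent step.

    >>> _single_gradient_step(10.0, 0.32) < 10.0
    True
    """
    layer_1 = sigmoid_function(INITIAL_VALUE * weight)
    gradient = -(target - layer_1) * sigmoid_function(layer_1, True) * INITIAL_VALUE
    # Identical to `weight += INITIAL_VALUE * layer_1_delta` in the loop above.
    return weight - gradient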


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    expected = int(input("Expected value: "))
    number_propagations = int(input("Number of propagations: "))
    print(forward_propagation(expected, number_propagations))