// C# neural network demo code

using System;

/// <summary>
/// Minimal feed-forward neural network demo: 2 inputs, 3 hidden neurons,
/// 1 output, sigmoid activations, trained by per-sample gradient descent
/// on a tiny hard-coded training set.
/// </summary>
class SimpleNeuralNetwork
{
    // Hyper-parameters and fixed layer sizes.
    const double LearningRate = 0.1;
    const int InputLayerSize = 2;
    const int HiddenLayerSize = 3;
    const int OutputLayerSize = 1;
    const int Epochs = 1000000;

    // Single shared RNG. The original created a new Random() inside each
    // initializer; on .NET Framework consecutive instances can get the same
    // time-based seed, yielding correlated weights and biases.
    static readonly Random Rand = new Random();

    // Weight matrices, indexed [from, to].
    static double[,] weightsInputHidden;
    static double[,] weightsHiddenOutput;

    // Per-neuron biases.
    static double[] biasHidden;
    static double[] biasOutput;

    static void Main(string[] args)
    {
        // Initialize weights and biases with small random values.
        weightsInputHidden = InitializeWeights(InputLayerSize, HiddenLayerSize);
        weightsHiddenOutput = InitializeWeights(HiddenLayerSize, OutputLayerSize);
        biasHidden = InitializeBias(HiddenLayerSize);
        biasOutput = InitializeBias(OutputLayerSize);

        // Training set (inputs and matching target outputs).
        double[][] trainingInputs = {
            new double[] { 0.1, 0.5 },
            new double[] { 0.9, 0.8 },
            new double[] { 0.4, 0.2 }
        };
        double[][] trainingTargets = {
            new double[] { 0.05 },
            new double[] { 0.72 },
            new double[] { 0.08 }
        };

        // Train: one gradient step per sample, per epoch.
        for (int epoch = 0; epoch < Epochs; epoch++)
        {
            for (int i = 0; i < trainingInputs.Length; i++)
            {
                Train(trainingInputs[i], trainingTargets[i]);
            }
        }

        // Evaluate the trained network on the training inputs.
        Console.WriteLine("训练完成,开始测试:");
        foreach (var input in trainingInputs)
        {
            double[] output = Forward(input);
            Console.WriteLine($"输入: [{input[0]}, {input[1]}] -> 预测输出: {output[0]:F4}");
        }
    }

    /// <summary>
    /// Creates a <paramref name="rows"/> x <paramref name="cols"/> matrix of
    /// uniform random values in [-0.5, 0.5).
    /// </summary>
    static double[,] InitializeWeights(int rows, int cols)
    {
        var weights = new double[rows, cols];
        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < cols; j++)
            {
                weights[i, j] = Rand.NextDouble() - 0.5;
            }
        }
        return weights;
    }

    /// <summary>Creates a bias vector of uniform random values in [-0.5, 0.5).</summary>
    static double[] InitializeBias(int size)
    {
        var bias = new double[size];
        for (int i = 0; i < size; i++)
        {
            bias[i] = Rand.NextDouble() - 0.5;
        }
        return bias;
    }

    /// <summary>Logistic sigmoid activation: 1 / (1 + e^-x), range (0, 1).</summary>
    public static double Sigmoid(double x) => 1 / (1 + Math.Exp(-x));

    /// <summary>
    /// Sigmoid derivative expressed in terms of the sigmoid's OUTPUT:
    /// given y = Sigmoid(x), dy/dx = y * (1 - y). Callers must pass the
    /// already-activated value, not the pre-activation sum.
    /// </summary>
    public static double SigmoidDerivative(double y) => y * (1 - y);

    /// <summary>
    /// Computes the hidden-layer activations for the given inputs.
    /// Shared by <see cref="Forward"/> and <see cref="Backpropagation"/>
    /// (the original duplicated this loop in both places).
    /// </summary>
    static double[] ComputeHidden(double[] inputs)
    {
        double[] hidden = new double[HiddenLayerSize];
        for (int i = 0; i < HiddenLayerSize; i++)
        {
            double sum = biasHidden[i];
            for (int j = 0; j < InputLayerSize; j++)
            {
                sum += inputs[j] * weightsInputHidden[j, i];
            }
            hidden[i] = Sigmoid(sum);
        }
        return hidden;
    }

    /// <summary>Full forward pass: inputs -> hidden -> output activations.</summary>
    static double[] Forward(double[] inputs)
    {
        double[] hidden = ComputeHidden(inputs);

        double[] output = new double[OutputLayerSize];
        for (int i = 0; i < OutputLayerSize; i++)
        {
            double sum = biasOutput[i];
            for (int j = 0; j < HiddenLayerSize; j++)
            {
                sum += hidden[j] * weightsHiddenOutput[j, i];
            }
            output[i] = Sigmoid(sum);
        }

        return output;
    }

    /// <summary>
    /// One gradient-descent step for the squared-error loss: computes output
    /// and hidden deltas, then updates all weights and biases in place.
    /// </summary>
    /// <param name="inputs">The sample fed to <see cref="Forward"/>.</param>
    /// <param name="targets">Desired output values for that sample.</param>
    /// <param name="output">Activations produced by the forward pass.</param>
    static void Backpropagation(double[] inputs, double[] targets, double[] output)
    {
        // Output-layer delta: (target - actual) * sigmoid'(activation).
        double[] outputErrors = new double[OutputLayerSize];
        for (int i = 0; i < OutputLayerSize; i++)
        {
            outputErrors[i] = (targets[i] - output[i]) * SigmoidDerivative(output[i]);
        }

        // Hidden activations are re-derived here; weights have not changed
        // since the forward pass, so this matches what Forward computed.
        double[] hidden = ComputeHidden(inputs);

        // Hidden-layer delta: output error propagated back through the
        // hidden->output weights, scaled by sigmoid'.
        double[] hiddenErrors = new double[HiddenLayerSize];
        for (int i = 0; i < HiddenLayerSize; i++)
        {
            double error = 0;
            for (int j = 0; j < OutputLayerSize; j++)
            {
                error += outputErrors[j] * weightsHiddenOutput[i, j];
            }
            hiddenErrors[i] = error * SigmoidDerivative(hidden[i]);
        }

        // Update hidden->output weights and output biases.
        for (int i = 0; i < HiddenLayerSize; i++)
        {
            for (int j = 0; j < OutputLayerSize; j++)
            {
                weightsHiddenOutput[i, j] += LearningRate * outputErrors[j] * hidden[i];
            }
        }
        for (int i = 0; i < OutputLayerSize; i++)
        {
            biasOutput[i] += LearningRate * outputErrors[i];
        }

        // Update input->hidden weights and hidden biases.
        for (int i = 0; i < InputLayerSize; i++)
        {
            for (int j = 0; j < HiddenLayerSize; j++)
            {
                weightsInputHidden[i, j] += LearningRate * hiddenErrors[j] * inputs[i];
            }
        }
        for (int i = 0; i < HiddenLayerSize; i++)
        {
            biasHidden[i] += LearningRate * hiddenErrors[i];
        }
    }

    /// <summary>One training step: forward pass followed by backpropagation.</summary>
    static void Train(double[] inputs, double[] targets)
    {
        double[] output = Forward(inputs);
        Backpropagation(inputs, targets, output);
    }
}

  

// posted @ 2025-03-16 20:53  China Soft  阅读(36)  评论(0)    收藏  举报