using System;
using System.Linq;

class Mlp
{
    private int[] layers;          // neuron count per layer
    private double[][] inputs;     // activations of each layer (inputs[0] is the input vector)
    private double[][][] weights;  // weights[i][j][k]: layer i, neuron j, connection from neuron k of layer i-1
    private double[][] biases;
    private double lr;             // learning rate
    private int epochs;
    private Random random;

    public Mlp(int[] layers, double lr = 0.01, int epochs = 100)
    {
        this.layers = layers;
        this.lr = lr;
        this.epochs = epochs;
        InitializeNetwork();
    }

    private void InitializeNetwork()
    {
        random = new Random();
        inputs = new double[layers.Length][];
        biases = new double[layers.Length][];
        weights = new double[layers.Length][][];

        for (int i = 0; i < layers.Length; i++)
        {
            inputs[i] = new double[layers[i]];
            biases[i] = new double[layers[i]];

            if (i > 0)
            {
                weights[i] = new double[layers[i]][];
                for (int j = 0; j < layers[i]; j++)
                {
                    weights[i][j] = new double[layers[i - 1]];
                    // Xavier (Glorot) uniform initialization
                    double limit = Math.Sqrt(6.0 / (layers[i - 1] + layers[i]));
                    for (int k = 0; k < layers[i - 1]; k++)
                        weights[i][j][k] = (random.NextDouble() * 2 - 1) * limit;
                    biases[i][j] = (random.NextDouble() * 2 - 1) * 0.01;
                }
            }
        }
    }

    public double[] Forward(double[] input)
    {
        inputs[0] = input.ToArray();
        for (int i = 1; i < layers.Length; i++)
        {
            for (int j = 0; j < layers[i]; j++)
            {
                double sum = 0;
                for (int k = 0; k < layers[i - 1]; k++)
                    sum += inputs[i - 1][k] * weights[i][j][k];
                inputs[i][j] = Activate(sum + biases[i][j]);
            }
        }
        return inputs[layers.Length - 1].ToArray();
    }

    public void Backward(double[] expected)
    {
        double[][] deltas = new double[layers.Length][];

        // The input layer has no incoming weights or usable biases, so iterate down to layer 1 only.
        for (int i = layers.Length - 1; i > 0; i--)
        {
            deltas[i] = new double[layers[i]];

            if (i == layers.Length - 1)
            {
                // Output layer: MSE gradient times activation derivative
                for (int j = 0; j < layers[i]; j++)
                    deltas[i][j] = (inputs[i][j] - expected[j]) * Derivative(inputs[i][j]);
            }
            else
            {
                // Hidden layers: propagate the error back from the layer above
                for (int j = 0; j < layers[i]; j++)
                {
                    double error = 0;
                    for (int k = 0; k < layers[i + 1]; k++)
                        error += deltas[i + 1][k] * weights[i + 1][k][j];
                    deltas[i][j] = error * Derivative(inputs[i][j]);
                }
            }

            // Gradient-descent update of this layer's biases and incoming weights
            for (int j = 0; j < layers[i]; j++)
            {
                biases[i][j] -= lr * deltas[i][j];
                for (int k = 0; k < layers[i - 1]; k++)
                    weights[i][j][k] -= lr * deltas[i][j] * inputs[i - 1][k];
            }
        }
    }

    private double ComputeLoss(double[] output, double[] target)
    {
        // Mean squared error over the output vector
        double loss = 0;
        for (int i = 0; i < output.Length; i++)
            loss += Math.Pow(output[i] - target[i], 2);
        return loss / output.Length;
    }

    public void Fit(double[][] inputs, double[][] outputs)
    {
        for (int epoch = 0; epoch < epochs; epoch++)
        {
            // MlTools.MixIndex is an external helper that returns a shuffled
            // permutation of sample indices, so samples are visited in random order.
            var mix = MlTools.MixIndex(inputs.Length, random);
            double totalLoss = 0;

            for (int i = 0; i < inputs.Length; i++)
            {
                var mi = mix[i];
                var output = Forward(inputs[mi]);
                totalLoss += ComputeLoss(output, outputs[mi]);
                Backward(outputs[mi]);
            }

            if (epoch % 10 == 0)
                Console.WriteLine($"Epoch {epoch}, Loss: {totalLoss / inputs.Length:F6}");
        }
    }

    public double[] Decide(double[] vector)
    {
        return Forward(vector);
    }

    // Hard tanh: identity on (-1, 1), clipped to ±1 outside
    private double Activate(double value)
    {
        return Math.Abs(value) < 1 ? value : Math.Sign(value);
    }

    // Derivative of hard tanh; here it is evaluated on the already-activated output,
    // which gives the same result because the activation is the identity inside (-1, 1).
    private double Derivative(double value)
    {
        return Math.Abs(value) < 1 ? 1 : 0;
    }
}
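The class above compiles only if MlTools.MixIndex is available, and that helper is not shown here. Below is a minimal driver sketch: the MixIndex body (a Fisher-Yates shuffle of sample indices) and the XOR data and hyperparameters are assumptions for illustration, not part of the original code.

using System;

// Assumed stand-in for the MlTools helper referenced in Fit():
// returns the indices 0..n-1 in a random (Fisher-Yates) order.
static class MlTools
{
    public static int[] MixIndex(int n, Random random)
    {
        var idx = new int[n];
        for (int i = 0; i < n; i++) idx[i] = i;
        for (int i = n - 1; i > 0; i--)
        {
            int j = random.Next(i + 1);           // 0 <= j <= i
            (idx[i], idx[j]) = (idx[j], idx[i]);  // swap
        }
        return idx;
    }
}

static class Demo
{
    static void Main()
    {
        // Toy XOR problem: 2 inputs, 4 hidden neurons, 1 output.
        // Learning rate and epoch count are illustrative only.
        var xs = new double[][]
        {
            new double[] { 0, 0 },
            new double[] { 0, 1 },
            new double[] { 1, 0 },
            new double[] { 1, 1 }
        };
        var ys = new double[][]
        {
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 1 },
            new double[] { 0 }
        };

        var net = new Mlp(new[] { 2, 4, 1 }, lr: 0.1, epochs: 2000);
        net.Fit(xs, ys);

        foreach (var x in xs)
            Console.WriteLine($"{x[0]} XOR {x[1]} -> {net.Decide(x)[0]:F3}");
    }
}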