commit 229c5d85c4 (parent ced1c1c291)
Date: 2026-04-28 23:23:57 +07:00
4 changed files with 158 additions and 16 deletions
@@ -0,0 +1,82 @@
#include "core.h"
NeuralNetwork::NeuralNetwork(LayerStructure_t layers[], int count) {
    numLayers = count;
    for (int i = 0; i < count; i++) {
        layerSizes.push_back(layers[i].size);
    }
    // Initialize weights and biases with uniform random values in [-1, 1]
    for (int i = 0; i < count - 1; i++) {
        std::vector<std::vector<double>> layerWeights;
        for (int j = 0; j < layerSizes[i + 1]; j++) {
            std::vector<double> nodeWeights;
            for (int k = 0; k < layerSizes[i]; k++) {
                nodeWeights.push_back(((double)rand() / RAND_MAX) * 2 - 1);
            }
            layerWeights.push_back(nodeWeights);
        }
        weights.push_back(layerWeights);

        std::vector<double> layerBiases;
        for (int j = 0; j < layerSizes[i + 1]; j++) {
            layerBiases.push_back(((double)rand() / RAND_MAX) * 2 - 1);
        }
        biases.push_back(layerBiases);
    }
}
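// Hypothetical construction sketch (the `size` field name is taken from the
// loop above; the topology and seed are purely illustrative):
//
//     LayerStructure_t topology[] = {{2}, {3}, {1}};  // 2 inputs, 3 hidden, 1 output
//     srand(42);   // rand() is never seeded in this file; seed once for reproducibility
//     NeuralNetwork net(topology, 3);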
std::vector<double> NeuralNetwork::feedForward(std::vector<double> input) {
    // Cache every layer's activations in `outputs`; train() reads them back.
    outputs.clear();
    outputs.push_back(input);
    std::vector<double> current = input;
    for (int i = 0; i < numLayers - 1; i++) {
        std::vector<double> next;
        for (int j = 0; j < layerSizes[i + 1]; j++) {
            // Weighted sum of the previous layer's activations plus this node's bias
            double sum = biases[i][j];
            for (int k = 0; k < layerSizes[i]; k++) {
                sum += current[k] * weights[i][j][k];
            }
            next.push_back(sigmoid(sum));
        }
        current = next;
        outputs.push_back(current);
    }
    return current;
}
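// sigmoid() and sigmoidDerivative() are assumed to come from core.h. Note that
// train() below passes already-activated outputs into sigmoidDerivative(), so
// the derivative must be expressed in terms of the activation y = sigmoid(x),
// i.e. sigmoid'(x) = y * (1 - y). A sketch consistent with that convention
// (not necessarily the actual code):
//
//     double NeuralNetwork::sigmoid(double x)            { return 1.0 / (1.0 + exp(-x)); }
//     double NeuralNetwork::sigmoidDerivative(double y)  { return y * (1.0 - y); }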
void NeuralNetwork::train(std::vector<double> input, std::vector<double> target, double lr) {
    // 1. Forward pass (fills `outputs` with every layer's activations)
    feedForward(input);

    // 2. Output-layer error: delta = (target - output) * sigmoid'(output)
    std::vector<std::vector<double>> errors(numLayers);
    errors[numLayers - 1].resize(layerSizes[numLayers - 1]);
    for (int i = 0; i < layerSizes[numLayers - 1]; i++) {
        double output = outputs[numLayers - 1][i];
        errors[numLayers - 1][i] = (target[i] - output) * sigmoidDerivative(output);
    }

    // 3. Backpropagate the error through the hidden layers
    for (int i = numLayers - 2; i > 0; i--) {
        errors[i].resize(layerSizes[i]);
        for (int j = 0; j < layerSizes[i]; j++) {
            double error = 0.0;
            for (int k = 0; k < layerSizes[i + 1]; k++) {
                error += errors[i + 1][k] * weights[i][k][j];
            }
            errors[i][j] = error * sigmoidDerivative(outputs[i][j]);
        }
    }

    // 4. Update weights and biases; the error already carries the
    //    (target - output) sign, so the update is additive.
    for (int i = 0; i < numLayers - 1; i++) {
        for (int j = 0; j < layerSizes[i + 1]; j++) {
            for (int k = 0; k < layerSizes[i]; k++) {
                weights[i][j][k] += lr * errors[i + 1][j] * outputs[i][k];
            }
            biases[i][j] += lr * errors[i + 1][j];
        }
    }
}
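// End-to-end sketch, continuing the hypothetical `net` constructed above:
// training on XOR, the classic test for a network with one hidden layer.
// Epoch count and learning rate are illustrative, not taken from this commit.
//
//     std::vector<std::vector<double>> xs = {{0,0}, {0,1}, {1,0}, {1,1}};
//     std::vector<std::vector<double>> ys = {{0}, {1}, {1}, {0}};
//     for (int epoch = 0; epoch < 10000; epoch++)
//         for (size_t s = 0; s < xs.size(); s++)
//             net.train(xs[s], ys[s], 0.5);
//     // Once training converges, net.feedForward({1, 0})[0] should approach 1.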