#include "core.h"

#include <cmath>
#include <cstdlib>
#include <vector>

// Compile-time switch for OpenMP parallel loops.
#define USE_PARALLEL
// Thread cap for OpenMP; 0 means "leave the OpenMP default alone".
// NOTE(review): with MAX_THREADS fixed at 0 the omp_set_num_threads branch
// below is dead code — presumably a build-time tuning knob; confirm.
#define MAX_THREADS 0

#ifdef USE_PARALLEL
#include <omp.h>
#define OMP_PARALLEL _Pragma("omp parallel for")
#define OMP_SET_THREADS() { if (MAX_THREADS > 0) omp_set_num_threads(MAX_THREADS); }
#else
#define OMP_PARALLEL
#define OMP_SET_THREADS()
#endif

// Builds a fully-connected feed-forward network from `count` layer
// descriptors. weights[i][j][k] is the weight from node k of layer i to
// node j of layer i+1; biases[i][j] is node j of layer i+1.
// Weights are drawn uniformly from [-scale, scale] with
// scale = sqrt(2 / fan_in) (He-style scaling); biases start at zero.
// NOTE(review): rand() is never seeded here — presumably the caller runs
// srand(); confirm, otherwise every run uses the same initial weights.
NeuralNetwork::NeuralNetwork(LayerStructure_t layers[], int count)
    : numLayers(count) {
    for (int i = 0; i < count; i++)
        sizes.push_back(layers[i].size);

    for (int i = 0; i < count - 1; i++) {
        std::vector<std::vector<double>> layerW;
        double scale = sqrt(2.0 / sizes[i]);
        for (int j = 0; j < sizes[i + 1]; j++) {
            std::vector<double> nodeW;
            for (int k = 0; k < sizes[i]; k++)
                nodeW.push_back(((double)rand() / RAND_MAX * 2 - 1) * scale);
            layerW.push_back(nodeW);
        }
        weights.push_back(layerW);
        biases.push_back(std::vector<double>(sizes[i + 1], 0.0));
    }
}

// Runs one forward pass with sigmoid activation on every layer.
// Side effect: rebuilds `outputs`, where outputs[0] is the raw input and
// outputs[i] is the activation of layer i (train() reads this afterwards).
// Returns the activation of the final layer.
std::vector<double> NeuralNetwork::feedForward(const std::vector<double>& input) {
    OMP_SET_THREADS();
    outputs.clear();
    outputs.push_back(input);

    std::vector<double> curr = input;
    for (int i = 0; i < numLayers - 1; i++) {
        std::vector<double> next(sizes[i + 1]);
        // Each output node j is independent, so the j-loop parallelizes
        // without synchronization.
        OMP_PARALLEL
        for (int j = 0; j < sizes[i + 1]; j++) {
            double sum = biases[i][j];
            for (int k = 0; k < (int)curr.size(); k++) {
                sum += curr[k] * weights[i][j][k];
            }
            next[j] = 1.0 / (1.0 + exp(-sum));  // sigmoid
        }
        curr = next;
        outputs.push_back(curr);
    }
    return curr;
}

// One step of online backpropagation (gradient ascent on target - pred,
// i.e. standard descent on squared error) with learning rate `lr`.
// `errors[i]` holds the delta for layer i: error times the sigmoid
// derivative a * (1 - a). Returns the summed squared output error
// *before* the update.
double NeuralNetwork::train(const std::vector<double>& input,
                            const std::vector<double>& target,
                            double lr) {
    OMP_SET_THREADS();
    std::vector<double> pred = feedForward(input);  // also fills `outputs`

    std::vector<std::vector<double>> errors(numLayers);
    errors[numLayers - 1].resize(sizes[numLayers - 1]);
    double totalErr = 0;

    // Output-layer deltas and total squared error.
    for (int i = 0; i < sizes[numLayers - 1]; i++) {
        double e = target[i] - pred[i];
        errors[numLayers - 1][i] = e * pred[i] * (1.0 - pred[i]);
        totalErr += e * e;
    }

    // Backpropagate deltas through the hidden layers (layer 0 is the
    // input, so it needs no delta).
    for (int i = numLayers - 2; i > 0; i--) {
        errors[i].resize(sizes[i]);
        OMP_PARALLEL
        for (int j = 0; j < sizes[i]; j++) {
            double e = 0;
            for (int k = 0; k < sizes[i + 1]; k++) {
                e += errors[i + 1][k] * weights[i][k][j];
            }
            errors[i][j] = e * outputs[i][j] * (1.0 - outputs[i][j]);
        }
    }

    // Apply the weight/bias updates. Parallel over j: each thread owns a
    // disjoint slice of weights[i] and biases[i], so there is no race.
    for (int i = 0; i < numLayers - 1; i++) {
        OMP_PARALLEL
        for (int j = 0; j < sizes[i + 1]; j++) {
            for (int k = 0; k < sizes[i]; k++) {
                weights[i][j][k] += lr * errors[i + 1][j] * outputs[i][k];
            }
            biases[i][j] += lr * errors[i + 1][j];
        }
    }
    return totalErr;
}