diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index ac2abd4..e233d17 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -6,7 +6,6 @@ set(HEADER_FILES
   ./utility.hpp
 )
 set(SOURCE_FILES
-  ./neural_net.cpp
 )
 
 # Check if any source files exist
diff --git a/src/neural_net.hpp b/src/neural_net.hpp
index 820799d..c4f7f48 100644
--- a/src/neural_net.hpp
+++ b/src/neural_net.hpp
@@ -2,18 +2,68 @@
 #define NEURAL_NET_H
 #include "activation_function.hpp"
+#include "utility.hpp"
+#include <numeric>
+#include <random>
 #include <vector>
 
 template <class ActivationFunction> class NeuralNet {
 public:
-  NeuralNet(std::vector<int> &layer_sizes);
+  NeuralNet(std::vector<int> &layer_sizes) : m_sizes(layer_sizes) {
+    int total_neurons = std::accumulate(layer_sizes.begin(), layer_sizes.end(),
+                                        0, std::plus<int>());
+    // Initialize the activation function
+    m_activation_func = ActivationFunction();
+
+    // Create random sampling device
+    std::random_device rd{};
+    std::mt19937 gen{rd()};
+    std::normal_distribution<float> dist{0.0, 1.0};
+
+    // Initialize the weights (resize, not reserve, so indexing is in bounds)
+    m_weights.resize(total_neurons);
+    int start_idx = 0;
+    for (auto size : m_sizes) {
+      for (int i = 0; i < size; i++) {
+        m_weights[i + start_idx] =
+            dist(gen) * m_activation_func.init_stddev(size);
+      }
+      start_idx += size;
+    }
+  };
 
 private:
   ActivationFunction m_activation_func;
   SoftMax m_soft_max;
   std::vector<int> m_sizes;
   std::vector<float> m_weights;
-  std::vector<float> feed_forward(std::vector<float> &x);
-  std::vector<float> feed_layer_forward(size_t layer_start_idx, size_t size,
-                                        std::vector<float> &A);
+
+  /** Pass an input vector through the neural network.
+   * This is a fully connected neural network geometry.
+   * @param x Input vector
+   * @return Output of the feed-forward pass
+   */
+  std::vector<float> feed_forward(std::vector<float> &x) {
+    std::vector<float> A = x;
+    int start_idx = 0;
+
+    // Feed each layer forward except the last layer using the user-specified
+    // activation function
+    for (auto size = m_sizes.begin(); size < m_sizes.end() - 1; size++) {
+      // Get the iterator range for the current layer
+      auto layer_start = m_weights.begin() + start_idx;
+      auto layer_end = m_weights.begin() + start_idx + *size;
+
+      std::vector<float> Anew = Utilities::feed_layer(
+          layer_start, layer_end, A, m_activation_func);
+      A = std::move(Anew);
+      start_idx += *size;
+    }
+
+    // Always use soft max for the final layer
+    auto last_layer_start = m_weights.begin() + start_idx;
+    auto output = Utilities::feed_layer(
+        last_layer_start, m_weights.end(), A, m_soft_max);
+    return output;
+  };
 };
 #endif
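
The rewritten header leans on two interfaces this diff does not show: Utilities::feed_layer from utility.hpp, and an activation functor providing init_stddev plus a call operator. Below is a minimal self-contained sketch of shapes that would be compatible with the calls above. The ReLU name, the one-weight-per-neuron layer semantics, and every signature here are assumptions for illustration, not the actual contents of utility.hpp or activation_function.hpp.

// Sketch only: assumed shapes of the helpers the header above calls.
#include <algorithm>
#include <cmath>
#include <iterator>
#include <numeric>
#include <vector>

// Hypothetical activation functor. init_stddev() mirrors the call made in
// the NeuralNet constructor; operator() maps a layer's pre-activations to
// its activations, a shape that also fits vector-wide functions like SoftMax.
struct ReLU {
  float init_stddev(int fan_in) const {
    return std::sqrt(2.0f / static_cast<float>(fan_in)); // He initialization
  }
  std::vector<float> operator()(const std::vector<float> &z) const {
    std::vector<float> a(z.size());
    std::transform(z.begin(), z.end(), a.begin(),
                   [](float v) { return std::max(v, 0.0f); });
    return a;
  }
};

struct SoftMax {
  std::vector<float> operator()(const std::vector<float> &z) const {
    // Subtract the max before exponentiating for numerical stability
    float zmax = *std::max_element(z.begin(), z.end());
    std::vector<float> e(z.size());
    std::transform(z.begin(), z.end(), e.begin(),
                   [zmax](float v) { return std::exp(v - zmax); });
    float sum = std::accumulate(e.begin(), e.end(), 0.0f);
    for (float &v : e)
      v /= sum;
    return e;
  }
};

namespace Utilities {
// Hypothetical feed_layer matching the one-weight-per-neuron storage used in
// the header (m_weights holds sum-of-layer-sizes entries, not a full matrix):
// each neuron scales the sum of the previous layer's outputs by its weight.
template <class Iter, class Activation>
std::vector<float> feed_layer(Iter w_begin, Iter w_end,
                              const std::vector<float> &A, Activation &act) {
  float input_sum = std::accumulate(A.begin(), A.end(), 0.0f);
  std::vector<float> z;
  z.reserve(std::distance(w_begin, w_end));
  for (Iter w = w_begin; w != w_end; ++w)
    z.push_back(*w * input_sum);
  return act(z); // the functor produces the layer's activations
}
} // namespace Utilities

int main() {
  // Feed a 3-neuron layer's output through a 4-neuron soft-max layer
  std::vector<float> prev{0.5f, -0.2f, 0.1f};
  std::vector<float> weights{0.3f, -0.7f, 0.9f, 0.2f};
  SoftMax sm;
  auto out = Utilities::feed_layer(weights.begin(), weights.end(), prev, sm);
  return out.size() == 4 ? 0 : 1; // out sums to 1: a probability distribution
}

Note that the sum-of-layer-sizes weight allocation in the constructor is what forces the scalar-per-neuron scheme sketched here; a fully connected layer in the usual sense would store prev_size * size weights and compute a distinct weighted sum per neuron, which would change both the allocation and feed_layer's inner loop.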