generated from aselimov/cpp_project_template
Move templates to correct spot
This commit is contained in:
parent c19f58b4b7
commit 0ac3df4e1e
@@ -6,7 +6,6 @@ set(HEADER_FILES
  ./utility.hpp
)
set(SOURCE_FILES
  ./neural_net.cpp
)

# Check if any source files exist
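The hunk above drops one line from SOURCE_FILES (presumably ./neural_net.cpp, going by the commit message and the header changes below): member functions of a class template have to be visible in every translation unit that instantiates them, so they are normally defined in the header rather than in a separately compiled .cpp file. A minimal illustration, hypothetical code that is not from this repository:

// Hypothetical example: a class template defined entirely in its header.
// Every translation unit that includes the header can instantiate it, so no
// separate .cpp implementation needs to be listed in SOURCE_FILES.
template <class T> class Accumulator {
public:
  void add(T value) { m_total += value; }
  T total() const { return m_total; }

private:
  T m_total{};
};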
@@ -2,18 +2,70 @@
#define NEURAL_NET_H

#include "activation_function.hpp"
#include "utility.hpp"

#include <cstddef>
#include <functional>
#include <numeric>
#include <random>
#include <utility>
#include <vector>
template <class ActivationFunction> class NeuralNet {
public:
  NeuralNet(std::vector<size_t> &layer_sizes);
  NeuralNet(std::vector<size_t> &layer_sizes) : m_sizes(layer_sizes) {
    size_t total_neurons = std::accumulate(layer_sizes.begin(), layer_sizes.end(),
                                           size_t{0}, std::plus<size_t>());
    // Initialize the activation function
    m_activation_func = ActivationFunction();

    // Create random sampling device
    std::random_device rd{};
    std::mt19937 gen{rd()};
    std::normal_distribution<float> dist{0.0f, 1.0f};

    // Allocate one weight per neuron, then draw each weight from a normal
    // distribution scaled by the activation function's preferred standard
    // deviation for a layer of this size
    m_weights.resize(total_neurons);
    size_t start_idx = 0;
    for (auto size : m_sizes) {
      for (size_t i = 0; i < size; i++) {
        m_weights[i + start_idx] =
            dist(gen) * m_activation_func.init_stddev(size);
      }
      start_idx += size;
    }
  }

private:
  ActivationFunction m_activation_func;
  SoftMax m_soft_max;
  std::vector<size_t> m_sizes;
  std::vector<float> m_weights;
  std::vector<float> feed_forward(std::vector<float> &x);
  std::vector<float> feed_layer_forward(size_t layer_start_idx, size_t size,
                                        std::vector<float> &A);

  /** Pass input vector through the neural network.
   * This is a fully connected neural network geometry.
   * @param x Input vector
   * @return output of feed forward phase
   */
  std::vector<float> feed_forward(std::vector<float> &x) {
    std::vector<float> A = x;
    size_t start_idx = 0;

    // Feed each layer forward except the last layer using the user specified
    // activation function
    for (auto size = m_sizes.begin(); size < m_sizes.end() - 1; size++) {
      // Get the iterator range for the current layer
      auto layer_start = m_weights.begin() + start_idx;
      auto layer_end = m_weights.begin() + start_idx + *size;

      std::vector<float> Anew = Utilities::feed_layer<ActivationFunction>(
          layer_start, layer_end, A, m_activation_func);
      A = std::move(Anew);
      start_idx += *size;
    }

    // Always use soft max for the final layer
    auto last_layer_start = m_weights.begin() + start_idx;
    auto output = Utilities::feed_layer<SoftMax>(
        last_layer_start, m_weights.end(), A, m_soft_max);
    return output;
  }
};
#endif
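The constructor assumes the ActivationFunction template parameter exposes init_stddev(fan_in), which it uses to scale the normally distributed initial weights, and Utilities::feed_layer presumably also applies the functor during the forward pass. Neither activation_function.hpp nor utility.hpp is part of this diff, so the functor below is only a sketch of that assumed interface; the He-style standard deviation sqrt(2 / fan_in) is an illustrative choice, not necessarily what the repository uses.

#include <algorithm>
#include <cmath>
#include <cstddef>

// Hypothetical activation functor matching the interface NeuralNet appears
// to expect from its template parameter.
struct ReLU {
  // Standard deviation for the initial weights of a layer with `fan_in`
  // inputs (He initialization is a common pairing with ReLU).
  float init_stddev(std::size_t fan_in) const {
    return std::sqrt(2.0f / static_cast<float>(fan_in));
  }

  // Element-wise activation applied during the forward pass.
  float operator()(float z) const { return std::max(0.0f, z); }
};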
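feed_forward delegates the per-layer arithmetic to Utilities::feed_layer, which is also outside this diff. From the call sites, it takes an iterator range over the layer's weights, the previous layer's activations, and a functor, and returns the new activation vector. The version below is only a signature-compatible sketch under that assumption; note that m_weights holds one weight per neuron here rather than a full weight matrix, and a real SoftMax would act on the whole layer rather than element-wise.

#include <cstddef>
#include <numeric>
#include <vector>

namespace Utilities {

// Hypothetical per-layer forward step: one weight per neuron in the layer,
// applied to the summed input activations and passed through the functor.
// The real implementation lives in utility.hpp and may differ substantially.
template <class ActivationFunction, class Iter>
std::vector<float> feed_layer(Iter layer_start, Iter layer_end,
                              const std::vector<float> &A,
                              ActivationFunction &func) {
  const float input_sum = std::accumulate(A.begin(), A.end(), 0.0f);
  std::vector<float> out;
  out.reserve(static_cast<std::size_t>(layer_end - layer_start));
  for (auto w = layer_start; w != layer_end; ++w) {
    out.push_back(func(*w * input_sum));
  }
  return out;
}

} // namespace Utilities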
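Finally, a sketch of how this header might be consumed, assuming the file is neural_net.hpp and that an activation functor like the hypothetical ReLU above is available; feed_forward is private in this revision, so only construction is exercised.

#include "neural_net.hpp"

#include <cstddef>
#include <vector>

int main() {
  // Three layers: 4 inputs, a hidden layer of 8 neurons, 3 outputs.
  std::vector<std::size_t> layer_sizes{4, 8, 3};

  // ReLU stands in for whatever functor activation_function.hpp provides.
  NeuralNet<ReLU> net(layer_sizes);
  return 0;
}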