#ifndef ACTIVATION_FUNCTION_H
#define ACTIVATION_FUNCTION_H

#include <algorithm>
#include <cmath>
#include <vector>

/**
 * Functor to set the activation function as a Sigmoid function
 */
struct Sigmoid {
  // Apply the logistic sigmoid 1 / (1 + e^(-z)) to each element in place
  static void apply(std::vector<float> &z) {
    for (size_t i = 0; i < z.size(); i++) {
      z[i] = 1.0f / (1.0f + std::exp(-z[i]));
    }
  }

  // Standard deviation for weight initialization: sqrt(1/n) for n inputs
  static float init_stddev(int n) { return std::sqrt(1.0 / n); }

  // Derivative of the sigmoid: e^(-x) / (e^(-x) + 1)^2
  static float derivative(float x) {
    float exp_x = std::exp(-x);
    return exp_x / std::pow(exp_x + 1.0f, 2.0f);
  }
};

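/*
 * Usage sketch (hypothetical caller, not part of the original header):
 * apply() transforms a whole pre-activation vector in place.
 *
 *   std::vector<float> z = {-1.0f, 0.0f, 1.0f};
 *   Sigmoid::apply(z);                    // z ~ {0.2689f, 0.5f, 0.7311f}
 *   float g = Sigmoid::derivative(0.0f);  // 0.25
 */
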
/**
 * Functor to set the activation function as Rectified Linear Unit
 */
struct ReLU {
  // Apply max(0, z) to each element in place
  static void apply(std::vector<float> &z) {
    for (size_t i = 0; i < z.size(); i++) {
      z[i] = std::max(0.0f, z[i]);
    }
  }

  // Standard deviation for weight initialization: sqrt(2/n) for n inputs
  // (He initialization, commonly paired with ReLU)
  static float init_stddev(int n) { return std::sqrt(2.0 / n); }

  // Subgradient of ReLU; the value at x == 0 is taken to be 1 here
  static float derivative(float x) {
    if (x < 0) {
      return 0.0f;
    } else {
      return 1.0f;
    }
  }
};

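/*
 * Usage sketch (hypothetical caller): ReLU zeroes negative entries and
 * passes positive ones through unchanged.
 *
 *   std::vector<float> z = {-2.0f, 0.5f};
 *   ReLU::apply(z);                      // z becomes {0.0f, 0.5f}
 *   float g = ReLU::derivative(-2.0f);   // 0.0f
 */
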
/**
 * SoftMax Activation function.
 * This is generally used in the final output layer
 */
struct SoftMax {
  static void apply(std::vector<float> &z) {
    // Subtract the max before exponentiating so exp() cannot overflow;
    // this shifts every exponent equally and leaves the final ratios unchanged.
    float zmax = *std::max_element(z.begin(), z.end());
    float sum = 0.0f;
    for (size_t i = 0; i < z.size(); i++) {
      z[i] = std::exp(z[i] - zmax);
      sum += z[i];
    }
    // Normalize so the outputs sum to 1
    for (size_t i = 0; i < z.size(); i++) {
      z[i] = z[i] / sum;
    }
  }

  // Standard deviation for weight initialization: sqrt(1/n) for n inputs;
  // declared static here to match the other activation functors
  static float init_stddev(int n) { return std::sqrt(1.0 / n); }
};

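/*
 * Usage sketch (hypothetical caller): the outputs are positive and sum
 * to 1, so they can be read as class probabilities.
 *
 *   std::vector<float> z = {1.0f, 2.0f, 3.0f};
 *   SoftMax::apply(z);  // z ~ {0.0900f, 0.2447f, 0.6652f}
 */
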
#endif // ACTIVATION_FUNCTION_H