// neural_net/tests/unit_tests/test_feed_forward.cpp

#include "../../src/activation_function.hpp"
#include "../../src/forward_feed.hpp"
#include "../../src/matrix.hpp"
#include <gtest/gtest.h>
#include <stdexcept>
#include <vector>

class ForwardFeedTest : public ::testing::Test {
protected:
  void SetUp() override {
    // Create simple weights for testing
    weights = {Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5}),
               Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5})};
  }

  std::vector<Matrix<float>> weights;
};
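
// Note: with every weight equal to 0.5 and (assuming the implementation has
// them) no bias terms, each layer maps its input x to 0.5 * sum(x) in every
// unit, so both output logits are equal for any input and the softmax output
// is uniform. Several checks below rely only on this symmetry.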

TEST_F(ForwardFeedTest, BasicForwardFeed) {
  // Create input data
  std::vector<float> input = {1.0, 2.0};
  // Create ForwardFeed with ReLU activation
  ForwardFeed<ReLU> feed(input, weights);
  // Verify output size
  EXPECT_EQ(feed.m_yhat.size(), 2);
  // Verify number of activations stored
  EXPECT_EQ(feed.m_activations.size(), 1); // Only one hidden layer
  // Verify input was stored as first activation
  EXPECT_EQ(feed.m_activations[0].rows(), 2);
  EXPECT_EQ(feed.m_activations[0].cols(), 1);
  EXPECT_FLOAT_EQ(feed.m_activations[0](0, 0), 1.0);
  EXPECT_FLOAT_EQ(feed.m_activations[0](1, 0), 2.0);
}

TEST_F(ForwardFeedTest, DifferentActivationFunctions) {
  std::vector<float> input = {1.0, 2.0};
  // Test with Sigmoid
  ForwardFeed<Sigmoid> sigmoid_feed(input, weights);
  EXPECT_EQ(sigmoid_feed.m_yhat.size(), 2);
  // Test with ReLU
  ForwardFeed<ReLU> relu_feed(input, weights);
  EXPECT_EQ(relu_feed.m_yhat.size(), 2);
  // Test ReLU with negative input values
  std::vector<float> neg_input = {-1.0, -2.0};
  ForwardFeed<ReLU> neg_feed(neg_input, weights);
  EXPECT_EQ(neg_feed.m_yhat.size(), 2);
}

TEST_F(ForwardFeedTest, ActivationStorage) {
  // Test that activations are properly stored
  std::vector<float> input = {1.0, 2.0};
  ForwardFeed<ReLU> feed(input, weights);
  // Verify first activation (input)
  EXPECT_EQ(feed.m_activations[0].rows(), 2);
  EXPECT_EQ(feed.m_activations[0].cols(), 1);
  EXPECT_FLOAT_EQ(feed.m_activations[0](0, 0), 1.0);
  EXPECT_FLOAT_EQ(feed.m_activations[0](1, 0), 2.0);
  // Verify final output (after softmax)
  EXPECT_EQ(feed.m_yhat.size(), 2);
  float sum = 0.0;
  for (float val : feed.m_yhat) {
    sum += val;
  }
  EXPECT_NEAR(sum, 1.0, 1e-6); // Softmax outputs should sum to 1
}
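
// A hand-computed sanity check, sketched under the fixture's symmetry note:
// with input {1, 2} the hidden layer is ReLU(W1 * x) = {1.5, 1.5} and the
// logits are W2 * h = {1.5, 1.5}; softmax over equal logits yields
// {0.5, 0.5}. This assumes no bias terms and the softmax output layer that
// the sum-to-one check above already implies.
TEST_F(ForwardFeedTest, HandComputedOutput) {
  std::vector<float> input = {1.0, 2.0};
  ForwardFeed<ReLU> feed(input, weights);
  ASSERT_EQ(feed.m_yhat.size(), 2);
  EXPECT_NEAR(feed.m_yhat[0], 0.5, 1e-6);
  EXPECT_NEAR(feed.m_yhat[1], 0.5, 1e-6);
}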

TEST_F(ForwardFeedTest, EdgeCases) {
  // Test with zero input
  std::vector<float> zero_input = {0.0, 0.0};
  ForwardFeed<ReLU> zero_feed(zero_input, weights);
  EXPECT_EQ(zero_feed.m_yhat.size(), 2);
  // Test with negative input
  std::vector<float> neg_input = {-1.0, -2.0};
  ForwardFeed<ReLU> neg_feed(neg_input, weights);
  EXPECT_EQ(neg_feed.m_yhat.size(), 2);
  // Test with large input
  std::vector<float> large_input = {100.0, 200.0};
  ForwardFeed<ReLU> large_feed(large_input, weights);
  EXPECT_EQ(large_feed.m_yhat.size(), 2);
}
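
// A numerical-stability sketch: large inputs yield large logits, and a naive
// float softmax overflows around exp(~88). This checks the output is still a
// valid probability distribution; it assumes ForwardFeed's softmax is
// numerically stable (e.g. via max-subtraction), which the class may or may
// not guarantee.
TEST_F(ForwardFeedTest, LargeInputStability) {
  std::vector<float> large_input = {100.0, 200.0};
  ForwardFeed<ReLU> feed(large_input, weights);
  float sum = 0.0;
  for (float val : feed.m_yhat) {
    EXPECT_GE(val, 0.0); // probabilities are non-negative
    EXPECT_LE(val, 1.0); // and never exceed 1
    sum += val;
  }
  EXPECT_NEAR(sum, 1.0, 1e-6);
}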

TEST_F(ForwardFeedTest, DifferentNetworkSizes) {
  // Test with different network architectures
  std::vector<std::vector<Matrix<float>>> test_weights = {
      // Single hidden layer: 2-2-2
      {Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5}),
       Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5})},
      // Multiple hidden layers: 2-3-2-2
      {Matrix<float>(3, 2, {0.5, 0.5, 0.5, 0.5, 0.5, 0.5}),
       Matrix<float>(2, 3, {0.5, 0.5, 0.5, 0.5, 0.5, 0.5}),
       Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5})}};

  for (const auto &w : test_weights) {
    std::vector<float> input(2, 1.0);
    ForwardFeed<ReLU> feed(input, w);
    // Verify number of activations matches number of hidden layers
    EXPECT_EQ(feed.m_activations.size(), w.size() - 1);
    // Verify final output size
    EXPECT_EQ(feed.m_yhat.size(), w.back().rows());
  }
}

TEST_F(ForwardFeedTest, WeightMatrixDimensions) {
  // Test with mismatched weight matrix dimensions: the first matrix cannot
  // multiply a 2-element input vector
  std::vector<float> input = {1.0, 2.0};
  std::vector<Matrix<float>> invalid_weights = {
      Matrix<float>(2, 3, {0.5, 0.5, 0.5, 0.5, 0.5, 0.5}), // 2x3 instead of 2x2
      Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5})};
  EXPECT_THROW(ForwardFeed<ReLU> feed(input, invalid_weights),
               std::invalid_argument);
}
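
// A further dimension check, assuming (not confirmed by the tests above)
// that ForwardFeed validates every layer boundary rather than only the
// input layer: here the first layer produces 2 outputs, but the second
// matrix is 2x3 and so expects a 3-element input.
TEST_F(ForwardFeedTest, InterLayerDimensionMismatch) {
  std::vector<float> input = {1.0, 2.0};
  std::vector<Matrix<float>> mismatched_weights = {
      Matrix<float>(2, 2, {0.5, 0.5, 0.5, 0.5}),
      Matrix<float>(2, 3, {0.5, 0.5, 0.5, 0.5, 0.5, 0.5})};
  EXPECT_THROW(ForwardFeed<ReLU> feed(input, mismatched_weights),
               std::invalid_argument);
}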

int main(int argc, char **argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}