#include "../src/activation_function.hpp"
#include "../src/neural_net.hpp"

#include <gtest/gtest.h>

#include <memory>
#include <stdexcept>
#include <vector>

// Unit tests for NeuralNet<float>::feed_forward.
//
// NOTE(review): the template arguments below (<float>, <size_t>,
// <Matrix<float>>) were reconstructed from the float literals and the
// residual '>' tokens in the damaged source — confirm against
// neural_net.hpp / activation_function.hpp.

// Fixture: builds a small fully-connected network shared by most tests
// (2 input neurons, 2 hidden neurons, 2 output neurons).
class NeuralNetTest : public ::testing::Test {
protected:
  void SetUp() override {
    // Create a simple neural network with 2 input neurons, 2 hidden neurons,
    // and 2 output neurons.
    std::vector<size_t> layer_sizes = {2, 2, 2};
    net = std::make_unique<NeuralNet<float>>(layer_sizes);
  }

  std::unique_ptr<NeuralNet<float>> net;
};

// feed_forward through a 2-2-2 network with known constant weights must match
// a manual computation of softmax(W2 * sigmoid(W1 * x)).
TEST_F(NeuralNetTest, FeedForward_SimpleNetwork) {
  // Test a simple network with known weights and inputs.
  std::vector<float> input = {0.5f, 0.5f};

  // Set known weights for testing (every entry 0.5).
  std::vector<Matrix<float>> weights = {
      Matrix<float>(2, 2, 0.5f), // First layer weights
      Matrix<float>(2, 2, 0.5f)  // Output layer weights
  };

  // Replace the network's weights with our test weights.
  net->set_weights(weights);

  // Calculate expected output manually.
  // First layer: Z1 = W1 * X
  Matrix<float> X(2, 1, 0.0f);
  X(0, 0) = input[0];
  X(1, 0) = input[1];
  Matrix<float> Z1 = weights[0] * X;

  // Apply sigmoid activation (hidden layer).
  Sigmoid<float> sigmoid;
  sigmoid(Z1.data());

  // Second layer: Z2 = W2 * A1, followed by softmax on the output layer.
  Matrix<float> Z2 = weights[1] * Z1;
  SoftMax<float> softmax;
  softmax(Z2.data());

  // Convert to output vector.
  // BUG FIX: the buffer was previously sized by Z2.cols() (== 1 for a column
  // vector) while the loop writes Z2.rows() (== 2) elements — an
  // out-of-bounds write. Size by rows() instead.
  std::vector<float> expected_output(Z2.rows());
  for (size_t i = 0; i < Z2.rows(); i++) {
    expected_output[i] = Z2(i, 0);
  }

  // Get actual output from feed_forward.
  std::vector<float> output = net->feed_forward(input);

  // Compare actual and expected outputs element-wise.
  ASSERT_EQ(output.size(), expected_output.size());
  for (size_t i = 0; i < output.size(); i++) {
    EXPECT_NEAR(output[i], expected_output[i], 1e-6);
  }
}

// A network with non-uniform layer sizes must produce an output whose length
// equals the size of the last layer.
TEST_F(NeuralNetTest, FeedForward_DifferentLayerSizes) {
  // Create a network with different layer sizes.
  std::vector<size_t> layer_sizes = {3, 4, 2};
  NeuralNet<float> net2(layer_sizes);

  std::vector<float> input = {0.1f, 0.2f, 0.3f};
  std::vector<float> output = net2.feed_forward(input);

  // Output should have 2 elements (size of last layer).
  EXPECT_EQ(output.size(), 2);
}

// An input whose size does not match the first layer must be rejected.
TEST_F(NeuralNetTest, FeedForward_InvalidInputSize) {
  std::vector<float> input = {0.1f}; // Only 1 input, but network expects 2

  // This should throw an exception since input size doesn't match first layer
  // size.
  EXPECT_THROW(net->feed_forward(input), std::invalid_argument);
}

// With all-ones weights and a symmetric input, both pre-activations are
// equal, so the softmax output layer must yield equal probabilities.
TEST_F(NeuralNetTest, FeedForward_IdentityTest) {
  // Single weight layer (2 -> 2).
  // NOTE(review): Matrix(2, 2, 1.0f) fills EVERY entry with 1.0 — this is an
  // all-ones matrix, not an identity matrix as the original comment claimed.
  std::vector<size_t> layer_sizes = {2, 2};
  NeuralNet<float> net2(layer_sizes);

  std::vector<Matrix<float>> weights = {Matrix<float>(2, 2, 1.0f)};
  net2.set_weights(weights);

  std::vector<float> input = {0.5f, 0.5f};
  std::vector<float> output = net2.feed_forward(input);

  // Both logits equal 0.5 + 0.5 = 1.0, and softmax of any equal-valued pair
  // is {0.5, 0.5}. Applying softmax to the (also equal-valued) raw input
  // therefore yields the same expected probabilities.
  SoftMax<float> softmax;
  std::vector<float> expected_output = input;
  softmax(expected_output);

  for (float val : output) {
    EXPECT_NEAR(val, expected_output[0], 1e-6);
  }
}

// The output layer applies softmax: outputs must be positive and sum to 1.
TEST_F(NeuralNetTest, FeedForward_SoftmaxOutput) {
  std::vector<float> input = {1.0f, -1.0f};
  std::vector<float> output = net->feed_forward(input);

  // Verify that the output sums to 1 (property of softmax).
  float sum = 0.0f;
  for (float val : output) {
    sum += val;
  }
  EXPECT_NEAR(sum, 1.0f, 1e-6);

  // Verify that all outputs are positive.
  for (float val : output) {
    EXPECT_GT(val, 0.0f);
  }
}