From 66f6a1b77904eb8d6a1b0c543a55006a2479ee42 Mon Sep 17 00:00:00 2001 From: Oghenemano Utomudo Omogha Date: Tue, 17 Sep 2024 20:35:40 +0200 Subject: [PATCH] [fix] modify activation function --- .vscode/settings.json | 4 +- .vscode/tasks.json | 28 ++++++++ CMakeLists.txt | 18 ++++- examples/activation_functions_example.cpp | 29 ++++++++ include/activation/activation_functions.h | 36 ++++------ src/activation/activation_functions.cpp | 83 +++++++++++------------ 6 files changed, 129 insertions(+), 69 deletions(-) create mode 100644 .vscode/tasks.json create mode 100644 examples/activation_functions_example.cpp diff --git a/.vscode/settings.json b/.vscode/settings.json index f857814..5cbf611 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,6 +1,8 @@ { "files.associations": { "iosfwd": "cpp", - "vector": "cpp" + "vector": "cpp", + "iostream": "cpp", + "ostream": "cpp" } } \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..05054c5 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,28 @@ +{ + "tasks": [ + { + "type": "cppbuild", + "label": "C/C++: g++ build active file", + "command": "/usr/bin/g++", + "args": [ + "-fdiagnostics-color=always", + "-g", + "${file}", + "-o", + "${fileDirname}/${fileBasenameNoExtension}" + ], + "options": { + "cwd": "${fileDirname}" + }, + "problemMatcher": [ + "$gcc" + ], + "group": { + "kind": "build", + "isDefault": true + }, + "detail": "Task generated by Debugger." 
+ } ], "version": "2.0.0" +} \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index 270be19..18a064b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,18 +1,30 @@ cmake_minimum_required(VERSION 3.10) project(InfernoML) +# Set C++ standard set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED True) # Include directories include_directories(include) -# Source files +# Define libraries add_library(InfernoML STATIC src/algorithms/linear_regression.cpp ) -# Example executable +add_library(Activations STATIC + src/activation/activation_functions.cpp +) + +# Define executables add_executable(linear_regression_example examples/linear_regression_example.cpp) -target_link_libraries(linear_regression_example PRIVATE InfernoML) +add_executable(activation_functions_example examples/activation_functions_example.cpp) # Link libraries to executables +target_link_libraries(linear_regression_example PRIVATE InfernoML) +target_link_libraries(activation_functions_example PRIVATE Activations) # Ensure that include directories are added for the specific targets if necessary +target_include_directories(linear_regression_example PRIVATE ${PROJECT_SOURCE_DIR}/include) +target_include_directories(activation_functions_example PRIVATE ${PROJECT_SOURCE_DIR}/include) diff --git a/examples/activation_functions_example.cpp b/examples/activation_functions_example.cpp new file mode 100644 index 0000000..341a1f2 --- /dev/null +++ b/examples/activation_functions_example.cpp @@ -0,0 +1,29 @@ +#include <iostream> +#include <vector> +#include "activation/activation_functions.h" // Include your header file + +int main() { + // Define input vector + std::vector<double> inputs = {0.5, -0.3, 0.8, -1.2, 0.0}; + + // Apply sigmoid function to the inputs + std::vector<double> sigmoid_results = activation::ActivationFunctions::apply(inputs, activation::ActivationFunctions::sigmoid); + + // Apply ReLU function to the inputs + std::vector<double> relu_results = activation::ActivationFunctions::apply(inputs, 
activation::ActivationFunctions::relu); + + // Print results + std::cout << "Sigmoid Results:" << std::endl; + for (double result : sigmoid_results) { + std::cout << result << " "; + } + std::cout << std::endl; + + std::cout << "ReLU Results:" << std::endl; + for (double result : relu_results) { + std::cout << result << " "; + } + std::cout << std::endl; + + return 0; +} diff --git a/include/activation/activation_functions.h b/include/activation/activation_functions.h index 6f306ea..3bda961 100644 --- a/include/activation/activation_functions.h +++ b/include/activation/activation_functions.h @@ -2,31 +2,23 @@ #define ACTIVATION_FUNCTIONS_H #include <vector> -#include <cmath> namespace activation { - // Sigmoid activation function - inline double sigmoid(double x); - - // Derivative of sigmoid function - inline double sigmoid_derivative(double x); - - // Tanh activation function - inline double tanh(double x); - - // Derivative of tanh function - inline double tanh_derivative(double x); - - // ReLU activation function - inline double relu(double x); - - // Derivative of ReLU function - inline double relu_derivative(double x); - - // Apply an activation function to a vector - template <typename Func> - std::vector<double> apply(const std::vector<double>& inputs, Func func); + class ActivationFunctions { + public: + // Activation functions + static double sigmoid(double x); + static double sigmoid_derivative(double x); + static double tanh(double x); + static double tanh_derivative(double x); + static double relu(double x); + static double relu_derivative(double x); + + // Apply an activation function to a vector + template <typename Func> + static std::vector<double> apply(const std::vector<double>& inputs, Func func); + }; } // namespace activation diff --git a/src/activation/activation_functions.cpp b/src/activation/activation_functions.cpp index 319c4aa..6e23bbb 100644 --- a/src/activation/activation_functions.cpp +++ b/src/activation/activation_functions.cpp @@ -1,51 +1,48 @@ #include "activation/activation_functions.h" +#include <cmath> +#include <algorithm> 
namespace activation { -// Sigmoid activation function -inline double sigmoid(double x) { - return 1.0 / (1.0 + std::exp(-x)); -} - -// Derivative of sigmoid function -inline double sigmoid_derivative(double x) { - double sig = sigmoid(x); - return sig * (1.0 - sig); -} - -// Tanh activation function -inline double tanh(double x) { - return std::tanh(x); -} - -// Derivative of tanh function -inline double tanh_derivative(double x) { - double tanh_x = tanh(x); - return 1.0 - tanh_x * tanh_x; -} - -// ReLU activation function -inline double relu(double x) { - return std::max(0.0, x); -} - -// Derivative of ReLU function -inline double relu_derivative(double x) { - return (x > 0) ? 1.0 : 0.0; -} - -// Apply an activation function to a vector -template <typename Func> -std::vector<double> apply(const std::vector<double>& inputs, Func func) { - std::vector<double> result; - result.reserve(inputs.size()); - for (double input : inputs) { - result.push_back(func(input)); + // Activation functions + double ActivationFunctions::sigmoid(double x) { + return 1.0 / (1.0 + std::exp(-x)); } - return result; -} -// Explicit template instantiations -template std::vector<double> apply(const std::vector<double>& inputs, double (*func)(double)); + double ActivationFunctions::sigmoid_derivative(double x) { + double sig = sigmoid(x); + return sig * (1.0 - sig); + } + + double ActivationFunctions::tanh(double x) { + return std::tanh(x); + } + + double ActivationFunctions::tanh_derivative(double x) { + double tanh_x = tanh(x); + return 1.0 - tanh_x * tanh_x; + } + + double ActivationFunctions::relu(double x) { + return std::max(0.0, x); + } + + double ActivationFunctions::relu_derivative(double x) { + return (x > 0) ? 
1.0 : 0.0; + } + + // Apply an activation function to a vector + template <typename Func> + std::vector<double> ActivationFunctions::apply(const std::vector<double>& inputs, Func func) { + std::vector<double> result; + result.reserve(inputs.size()); + for (double input : inputs) { + result.push_back(func(input)); + } + return result; + } + + // Explicit template instantiation + template std::vector<double> ActivationFunctions::apply(const std::vector<double>&, double (*)(double)); } // namespace activation