Skip to content

Commit

Permalink
[fix] modify activation function
Browse files Browse the repository at this point in the history
  • Loading branch information
omoghaoghenemano committed Sep 17, 2024
1 parent 5591fb6 commit 66f6a1b
Show file tree
Hide file tree
Showing 6 changed files with 129 additions and 69 deletions.
4 changes: 3 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
{
"files.associations": {
"iosfwd": "cpp",
"vector": "cpp",
"iostream": "cpp",
"ostream": "cpp"
}
}
28 changes: 28 additions & 0 deletions .vscode/tasks.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
{
"tasks": [
{
"type": "cppbuild",
"label": "C/C++: g++ build active file",
"command": "/usr/bin/g++",
"args": [
"-fdiagnostics-color=always",
"-g",
"${file}",
"-o",
"${fileDirname}/${fileBasenameNoExtension}"
],
"options": {
"cwd": "${fileDirname}"
},
"problemMatcher": [
"$gcc"
],
"group": {
"kind": "build",
"isDefault": true
},
"detail": "Task generated by Debugger."
}
],
"version": "2.0.0"
}
18 changes: 15 additions & 3 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,18 +1,30 @@
cmake_minimum_required(VERSION 3.10)
project(InfernoML)

# Set C++ standard
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED True)

# Include directories
include_directories(include)

# Define libraries
add_library(InfernoML STATIC
    src/algorithms/linear_regression.cpp
)

add_library(Activations STATIC
    src/activation/activation_functions.cpp
)

# Define executables
add_executable(linear_regression_example examples/linear_regression_example.cpp)
add_executable(activation_functions_example examples/activation_functions_example.cpp)

# Link libraries to executables (linear_regression_example was linked twice; deduplicated)
target_link_libraries(linear_regression_example PRIVATE InfernoML)
target_link_libraries(activation_functions_example PRIVATE Activations)

# Ensure that include directories are added for the specific targets if necessary
target_include_directories(linear_regression_example PRIVATE ${PROJECT_SOURCE_DIR}/include)
target_include_directories(activation_functions_example PRIVATE ${PROJECT_SOURCE_DIR}/include)
29 changes: 29 additions & 0 deletions examples/activation_functions_example.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
#include <iostream>
#include <vector>
#include "activation/activation_functions.h" // Include your header file

int main() {
    // Sample activations to run through each function.
    const std::vector<double> inputs = {0.5, -0.3, 0.8, -1.2, 0.0};

    // Helper: print a labeled, space-separated result vector.
    const auto print_results = [](const char* label, const std::vector<double>& values) {
        std::cout << label << std::endl;
        for (double value : values) {
            std::cout << value << " ";
        }
        std::cout << std::endl;
    };

    // Apply each activation element-wise and report the results.
    print_results("Sigmoid Results:",
                  activation::ActivationFunctions::apply(inputs, activation::ActivationFunctions::sigmoid));
    print_results("ReLU Results:",
                  activation::ActivationFunctions::apply(inputs, activation::ActivationFunctions::relu));

    return 0;
}
36 changes: 14 additions & 22 deletions include/activation/activation_functions.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,31 +2,23 @@
#define ACTIVATION_FUNCTIONS_H

#include <vector>
#include <cmath>

namespace activation {

// Sigmoid activation function
inline double sigmoid(double x);

// Derivative of sigmoid function
inline double sigmoid_derivative(double x);

// Tanh activation function
inline double tanh(double x);

// Derivative of tanh function
inline double tanh_derivative(double x);

// ReLU activation function
inline double relu(double x);

// Derivative of ReLU function
inline double relu_derivative(double x);

// Apply an activation function to a vector
template <typename Func>
std::vector<double> apply(const std::vector<double>& inputs, Func func);
// Stateless collection of scalar activation functions and their derivatives,
// exposed as static members so callers never need an instance.
class ActivationFunctions {
public:
// Activation functions
// Each maps one double input to one double output; derivatives take the
// same pre-activation input x (not the activated value).
static double sigmoid(double x);
static double sigmoid_derivative(double x);
static double tanh(double x);
static double tanh_derivative(double x);
static double relu(double x);
static double relu_derivative(double x);

// Apply an activation function to a vector
// NOTE(review): this template is defined in the .cpp and explicitly
// instantiated only for double(*)(double); passing lambdas or other
// callables from outside that translation unit will fail to link —
// confirm this restriction is intended.
template <typename Func>
static std::vector<double> apply(const std::vector<double>& inputs, Func func);
};

} // namespace activation

Expand Down
83 changes: 40 additions & 43 deletions src/activation/activation_functions.cpp
Original file line number Diff line number Diff line change
@@ -1,51 +1,48 @@
#include "activation/activation_functions.h"
#include <cmath>
#include <algorithm>

namespace activation {

// Logistic sigmoid: maps any real x into the open interval (0, 1).
inline double sigmoid(double x) {
    const double neg_exp = std::exp(-x);
    return 1.0 / (1.0 + neg_exp);
}

// d/dx sigmoid(x) = s * (1 - s), where s = sigmoid(x).
inline double sigmoid_derivative(double x) {
    const double s = sigmoid(x);
    return s * (1.0 - s);
}

// Tanh activation function
// Thin wrapper over std::tanh; output range is (-1, 1).
// NOTE(review): the unqualified name `tanh` shadows the C library's global
// tanh inside this namespace — intentional here, but confirm callers always
// qualify with activation::.
inline double tanh(double x) {
return std::tanh(x);
}

// d/dx tanh(x) = 1 - tanh(x)^2 (i.e. sech^2 x).
inline double tanh_derivative(double x) {
    const double t = tanh(x);
    return 1.0 - t * t;
}

// ReLU: identity for positive inputs, zero for everything else.
inline double relu(double x) {
    return (x > 0.0) ? x : 0.0;
}

// Subgradient of ReLU: 1 for positive inputs, 0 otherwise (including x == 0).
inline double relu_derivative(double x) {
    if (x > 0.0) {
        return 1.0;
    }
    return 0.0;
}

// Apply an activation function element-wise to a vector.
// (The diff rendering had spliced this template's closing lines around the
// sigmoid definition below, leaving unbalanced braces; both definitions are
// reconstructed intact here.)
template <typename Func>
std::vector<double> apply(const std::vector<double>& inputs, Func func) {
    std::vector<double> result;
    result.reserve(inputs.size());  // one allocation; no reallocs during the fill
    for (double input : inputs) {
        result.push_back(func(input));
    }
    return result;
}

// Activation functions
// Logistic sigmoid: 1 / (1 + e^-x), range (0, 1).
double ActivationFunctions::sigmoid(double x) {
    return 1.0 / (1.0 + std::exp(-x));
}

// Explicit template instantiations
template std::vector<double> apply(const std::vector<double>& inputs, double (*func)(double));
// d/dx sigmoid(x) = s * (1 - s), where s = sigmoid(x).
double ActivationFunctions::sigmoid_derivative(double x) {
    const double s = sigmoid(x);
    return s * (1.0 - s);
}

// Hyperbolic tangent activation; delegates to std::tanh. Output range (-1, 1).
double ActivationFunctions::tanh(double x) {
return std::tanh(x);
}

// d/dx tanh(x) = 1 - tanh(x)^2 (i.e. sech^2 x).
double ActivationFunctions::tanh_derivative(double x) {
    const double t = tanh(x);
    return 1.0 - t * t;
}

// ReLU: identity for positive inputs, zero otherwise.
// (Equivalent to std::max(0.0, x), including the NaN and -0.0 cases.)
double ActivationFunctions::relu(double x) {
    return (x > 0.0) ? x : 0.0;
}

// Subgradient of ReLU: 1 for positive inputs, 0 otherwise (including x == 0).
double ActivationFunctions::relu_derivative(double x) {
    if (x > 0.0) {
        return 1.0;
    }
    return 0.0;
}

// Element-wise application of an activation functor: outputs[i] = func(inputs[i]).
template <typename Func>
std::vector<double> ActivationFunctions::apply(const std::vector<double>& inputs, Func func) {
    std::vector<double> outputs(inputs.size());
    for (std::vector<double>::size_type i = 0; i < inputs.size(); ++i) {
        outputs[i] = func(inputs[i]);
    }
    return outputs;
}

// Explicit template instantiation
template std::vector<double> ActivationFunctions::apply(const std::vector<double>&, double (*)(double));

} // namespace activation

0 comments on commit 66f6a1b

Please sign in to comment.