Flux-openbuild/include/modules/neural_networks/activation_functions/Activation_ReLU.h

#pragma once
#include <cstdint>  // uint64_t, used for the loop indices below
#include "./core/omp_config.h"
#include "./utils/vector.h"
#include "./utils/matrix.h"
namespace neural_networks{

// Rectified Linear Unit (ReLU) activation: f(x) = max(0, x), applied elementwise.
template <typename T>
struct Activation_ReLU{
    utils::Matrix<T> _inputs;  // inputs cached by forward(), needed in backward()
    utils::Matrix<T> outputs;  // forward result: max(0, input) elementwise
    utils::Matrix<T> dinputs;  // backward result: gradient w.r.t. the inputs

    // Cache the raw inputs, then clamp every element below zero up to zero.
    void forward(const utils::Matrix<T>& inputs){
        _inputs = inputs;
        outputs = numerics::matclip_low(inputs, T{0});
    }
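
    // Note: numerics::matclip_low is not defined in this file; judging from its
    // use above, it is assumed to be an elementwise lower clamp, roughly:
    //
    //     utils::Matrix<T> out = m;
    //     for (uint64_t i = 0; i < m.rows(); ++i)
    //         for (uint64_t j = 0; j < m.cols(); ++j)
    //             if (out(i,j) < lo) out(i,j) = lo;
    //     return out;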

    // Gradient of ReLU: pass the upstream gradient through where the cached
    // input was positive; zero it where the input was <= 0 (the subgradient
    // at 0 is taken to be 0 here).
    void backward(const utils::Matrix<T>& dvalues){
        // dvalues is const, so copy it before masking
        dinputs = dvalues;
        // Zero gradients where the forward-pass inputs were non-positive
        for (uint64_t i = 0; i < dinputs.rows(); ++i){
            for (uint64_t j = 0; j < dinputs.cols(); ++j){
                if (_inputs(i,j) <= T{0}){
                    dinputs(i,j) = T{0};
                }
            }
        }
    }
};
} // end namespace neural_networks
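
/*
Usage sketch: one forward/backward round trip. This is a minimal illustration,
not part of the header. Only rows(), cols(), and operator()(i,j) are confirmed
by the code above; the (rows, cols) constructor of utils::Matrix<T> is an
assumption of this example.

    utils::Matrix<double> x(2, 2);
    x(0,0) = -1.0; x(0,1) =  2.0;
    x(1,0) =  0.5; x(1,1) = -3.0;

    neural_networks::Activation_ReLU<double> relu;
    relu.forward(x);        // relu.outputs: [[0.0, 2.0], [0.5, 0.0]]

    utils::Matrix<double> dvalues(2, 2);
    dvalues(0,0) = 1.0; dvalues(0,1) = 1.0;
    dvalues(1,0) = 1.0; dvalues(1,1) = 1.0;
    relu.backward(dvalues); // relu.dinputs: [[0.0, 1.0], [1.0, 0.0]]
*/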