#pragma once

#include "./core/omp_config.h"
#include "./utils/vector.h"
#include "./utils/matrix.h"
#include "./numerics/matmax.h"
#include "./numerics/matsubtract.h"
#include "./numerics/matexp.h"
#include "./numerics/matsum.h"   // needed for numerics::matsum used in forward()
#include "./numerics/matdiv.h"

namespace neural_networks{

template <typename T>
struct Activation_Softmax{
    //utils::Matrix<T> exp_values;
    //utils::Matrix<T> probabilities;
    utils::Matrix<T> outputs;
    utils::Matrix<T> dinputs;

    void forward(const utils::Matrix<T>& inputs){
        // Get unnormalized probabilities; subtracting the row-wise max keeps exp() from overflowing
        utils::Matrix<T> exp_values = numerics::matexp(numerics::matsubtract(inputs, numerics::matmax(inputs, "rows"), "col"));

        // Normalize them for each sample
        utils::Matrix<T> probabilities = numerics::matdiv(exp_values, numerics::matsum(exp_values, "col"), "col");

        outputs = probabilities;
    }

    void backward(const utils::Matrix<T>& dvalues){
        const uint64_t rows = dvalues.rows();
        const uint64_t cols = dvalues.cols();

        if ((dinputs.rows() != rows) || (dinputs.cols() != cols)){
            dinputs.resize(rows, cols);
        }

        // Jacobian-vector product of the softmax, one sample (row) at a time:
        // dinputs(i,j) = outputs(i,j) * (dvalues(i,j) - sum_k outputs(i,k) * dvalues(i,k))
        for (uint64_t i = 0; i < rows; ++i){
            T dot = T{0};
            for (uint64_t j = 0; j < cols; ++j){
                dot += outputs(i,j) * dvalues(i,j);
            }
            for (uint64_t j = 0; j < cols; ++j){
                dinputs(i,j) = outputs(i,j) * (dvalues(i,j) - dot);
            }
        }
    }
};

} // end namespace neural_networks
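
// Minimal usage sketch (illustrative only, assuming utils::Matrix<T> exposes a
// (rows, cols) constructor and operator()(i,j) element access as used above;
// the include path below is hypothetical):
//
//   #include "activation_softmax.h"
//
//   utils::Matrix<double> logits(2, 3);
//   // ... fill logits with the raw outputs of the previous layer ...
//
//   neural_networks::Activation_Softmax<double> softmax;
//   softmax.forward(logits);      // softmax.outputs now holds row-wise probabilities
//
//   utils::Matrix<double> dvalues(2, 3);
//   // ... fill dvalues with gradients flowing back from the next layer ...
//   softmax.backward(dvalues);    // softmax.dinputs now holds the gradient w.r.t. the logits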