Sync public subset from Flux

Gitea CI
2025-10-20 12:24:21 +00:00
parent 9a69d64d79
commit a334b74935
8 changed files with 230 additions and 35 deletions

modules/neural_networks/activation_functions/Activation_Softmax.h

@@ -11,30 +11,47 @@
#include "./numerics/matdiv.h"
namespace neural_networks{
template <typename T>
struct Activation_Softmax{
utils::Matrix<T> exp_values;
utils::Matrix<T> probabilities;
//utils::Matrix<T> exp_values;
//utils::Matrix<T> probabilities;
utils::Matrix<T> outputs;
utils::Matrix<T> dinputs;
void forward(const utils::Matrix<T>& inputs){
// Get unnormalized probabilities
exp_values = numerics::matexp(numerics::matsubtract(inputs, numerics::matmax(inputs, "rows"), "col"));
utils::Matrix<T> exp_values = numerics::matexp(numerics::matsubtract(inputs, numerics::matmax(inputs, "rows"), "col"));
// Normalize them for each sample
probabilities = numerics::matdiv(exp_values, numerics::matsum(exp_values, "col"), "col");
utils::Matrix<T> probabilities = numerics::matdiv(exp_values, numerics::matsum(exp_values, "col"), "col");
outputs = probabilities;
}
+    void backward(const utils::Matrix<T>& dvalues){
+        const uint64_t rows = dvalues.rows();
+        const uint64_t cols = dvalues.cols();
+        if ((dinputs.rows() != rows) || (dinputs.cols() != cols)){
+            dinputs.resize(rows, cols);
+        }
+        for (uint64_t i = 0; i < rows; ++i){
+            // Row-wise dot product of the softmax output with the upstream gradient
+            T dot = T{0};
+            for (uint64_t j = 0; j < cols; ++j){
+                dot += outputs(i,j) * dvalues(i,j);
+            }
+            // Softmax Jacobian-vector product: dL/dz_j = p_j * (g_j - dot)
+            for (uint64_t j = 0; j < cols; ++j){
+                dinputs(i,j) = outputs(i,j) * (dvalues(i,j) - dot);
+            }
+        }
+    }
 };
 } // end namespace neural_networks
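
For reference, the new backward() applies the standard softmax Jacobian shortcut: with per-row probabilities p = softmax(z) and upstream gradient g, dL/dz_j = p_j * (g_j - sum_k p_k * g_k), so the full Jacobian matrix is never materialized. A minimal usage sketch follows, assuming utils::Matrix<T> offers a (rows, cols) constructor alongside the rows()/cols()/operator()(i,j) accessors the diff already uses; the constructor signature is an assumption, not something this diff confirms.

#include <cstdint>
#include <cstdio>
#include "./modules/neural_networks/activation_functions/Activation_Softmax.h"

int main(){
    // Two samples, three classes of raw logits (assumed (rows, cols) constructor)
    utils::Matrix<double> logits(2, 3);
    logits(0,0) = 1.0; logits(0,1) = 2.0; logits(0,2) = 0.5;
    logits(1,0) = 0.1; logits(1,1) = 0.2; logits(1,2) = 0.3;

    neural_networks::Activation_Softmax<double> softmax;
    softmax.forward(logits);              // each row of softmax.outputs sums to 1

    // Upstream gradient of the same shape, here a placeholder of ones
    utils::Matrix<double> grad(2, 3);
    for (uint64_t i = 0; i < 2; ++i)
        for (uint64_t j = 0; j < 3; ++j)
            grad(i,j) = 1.0;

    softmax.backward(grad);               // softmax.dinputs holds dL/dlogits
    std::printf("%f\n", softmax.dinputs(0,0));
    return 0;
}

With an all-ones upstream gradient, dot becomes sum_k p_k = 1 and dinputs is identically zero, which is the expected sanity check: a gradient that is constant across classes cannot move a softmax.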


@@ -0,0 +1,68 @@
+#pragma once
+#include "./core/omp_config.h"
+#include "./utils/vector.h"
+#include "./utils/matrix.h"
+#include "./numerics/matmax.h"
+#include "./numerics/matsubtract.h"
+#include "./numerics/matexp.h"
+#include "./numerics/matdiv.h"
+#include "./modules/neural_networks/activation_functions/Activation_Softmax.h"
+#include "./modules/neural_networks/loss/Loss_CategoricalCrossentrophy.h"
+namespace neural_networks{
+template <typename Td, typename Ti>
+struct Activation_Softmax_Loss_CategoricalCrossentropy{
+    neural_networks::Activation_Softmax<Td> activation;
+    neural_networks::Loss_CategoricalCrossentrophy<Td, Ti> loss;
+    //utils::Matrix<T> exp_values;
+    //utils::Matrix<T> probabilities;
+    utils::Matrix<Td> outputs;
+    utils::Matrix<Td> dinputs;
+    Td forward(const utils::Matrix<Td>& inputs, const utils::Matrix<Ti>& y_true){
+        // Output layer's activation function
+        activation.forward(inputs);
+        // Set the output
+        outputs = activation.outputs;
+        // Calculate and return the loss on the softmax probabilities
+        Td data_loss = loss.calculate(outputs, y_true);
+        return data_loss;
+    }
+    void backward(const utils::Matrix<Td>& dvalues, const utils::Matrix<Ti>& y_true){
+        // Number of samples
+        const uint64_t samples = y_true.rows();
+        const uint64_t cols = dvalues.cols();
+        if ((dinputs.rows() != samples) || (dinputs.cols() != cols)){
+            dinputs.resize(samples, cols);
+        }
+        for (uint64_t i = 0; i < samples; ++i){
+            // If the labels are one-hot encoded, turn them into discrete values
+            uint64_t label = 0;
+            if (y_true.cols() == cols){
+                for (uint64_t j = 1; j < cols; ++j){
+                    if (y_true(i,j) > y_true(i,label)){ label = j; }
+                }
+            } else {
+                label = static_cast<uint64_t>(y_true(i,0));
+            }
+            // Combined softmax + cross-entropy gradient: (y_hat - y) / samples
+            for (uint64_t j = 0; j < cols; ++j){
+                dinputs(i,j) = dvalues(i,j) / static_cast<Td>(samples);
+            }
+            dinputs(i,label) -= Td{1} / static_cast<Td>(samples);
+        }
+    }
+};
+} // end namespace neural_networks
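
The combined object exists because chaining softmax into categorical cross-entropy collapses the gradient to (y_hat - y) / samples, which backward() above computes directly instead of multiplying through the softmax Jacobian. A usage sketch under the same utils::Matrix assumptions as before; the include path for the new file is hypothetical (the diff does not show where it lives), and sparse samples-by-1 integer labels are assumed, though one-hot labels take the other branch in backward().

#include <cstdint>
#include <cstdio>
// Hypothetical path, mirroring the Activation_Softmax include above
#include "./modules/neural_networks/activation_functions/Activation_Softmax_Loss_CategoricalCrossentropy.h"

int main(){
    utils::Matrix<double> logits(2, 3);
    logits(0,0) = 1.0; logits(0,1) = 2.0; logits(0,2) = 0.5;
    logits(1,0) = 0.1; logits(1,1) = 0.2; logits(1,2) = 0.3;

    // Sparse labels: one class index per sample
    utils::Matrix<uint64_t> y_true(2, 1);
    y_true(0,0) = 1; y_true(1,0) = 2;

    neural_networks::Activation_Softmax_Loss_CategoricalCrossentropy<double, uint64_t> act_loss;
    double loss = act_loss.forward(logits, y_true);   // mean cross-entropy over the batch
    act_loss.backward(act_loss.outputs, y_true);      // act_loss.dinputs = (y_hat - y) / samples
    std::printf("loss = %f\n", loss);
    return 0;
}

Passing act_loss.outputs back in as dvalues follows from the collapsed gradient: the predicted probabilities themselves are the starting point, and the true class is subtracted and the result averaged inside backward().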