LeNN/layer.h
2024-12-31 01:34:56 +01:00

69 lines
1.6 KiB
C++

#include "matrices.h"

#include <math.h>

#include <cassert>
#include <cmath>
#include <utility>
#define assertm(exp, msg) assert((void(msg), exp))
// A single fully-connected neural-network layer.
// Holds its own input, weight matrix, biases, and both the pre-activation
// (raw_output, "Z") and post-activation (activated_output, "A") results.
class Layer {
public:
Matrix input;            // Column of inputs fed via Feed() (input_size x 1).
Matrix weights;          // Weight matrix, randomized in [0, 1] by the constructor.
Matrix raw_output;       // Z = weighted input plus biases, set by Forward().
Matrix activated_output; // A = activation(Z), set by Forward().
Matrix biases;           // Bias column, randomized in [0, 1] by the constructor.
// Logistic function 1 / (1 + e^-x) and its derivative s(x) * (1 - s(x)).
static inline float Sigmoid(float);
static inline float SigmoidPrime(float);
// NOTE(review): declared but no definition is visible in this file — any use
// of this inline ctor without a definition will fail to link. Confirm it is
// defined elsewhere or remove the declaration.
inline Layer(int); // Number of neurons
inline void Forward(); // Forward Pass with sigmoid
inline void Forward(float (*activation)(float)); // Forward Pass with custom activation function
inline void Feed(Matrix);
// Constructors
// Input size, Size
Layer(int, int);
};
// Build a layer taking `input_size` inputs and holding `size` neurons.
// Weights and biases are randomized uniformly in [0, 1].
Layer::Layer(int input_size, int size){
this->input = Matrix(input_size, 1);
this->weights = Matrix(input_size, size);
this->weights.Randomize(0.0F, 1.0F);
// Z, A and B are the same size as A
// NOTE(review): raw_output/activated_output/biases are copied from `input`,
// i.e. sized (input_size x 1) — but the layer's output should presumably be
// (size x 1). Whether this is correct depends on Matrix::Multiply's
// row/column convention; verify against matrices.h.
this->raw_output = this->input;
this->activated_output = this->input;
this->biases = this->input;
this->biases.Randomize(0.0F, 1.0F);
}
// Set this layer's input column.
// `a` is a sink parameter (taken by value); move it into place instead of
// copying a second time.
void Layer::Feed(Matrix a){
    this->input = std::move(a);
}
// Logistic sigmoid: 1 / (1 + e^-x), mapping R -> (0, 1).
// Uses the std::exp(float) overload and float literals so the whole
// computation stays in float instead of promoting to double and
// narrowing on return.
float Layer::Sigmoid(float x){
    return 1.0F / (1.0F + std::exp(-x));
}
// Derivative of the logistic sigmoid: s(x) * (1 - s(x)),
// where s is Layer::Sigmoid. Evaluates the sigmoid once and reuses it.
float Layer::SigmoidPrime(float x){
    const float s = Layer::Sigmoid(x);
    return s * (1 - s);
}
// Forward pass with a caller-supplied activation function.
// Computes Z = input * W + B, then A = activation(Z) element-wise.
void Layer::Forward(float (*activation)(float)){
// Multiply inputs by weights
// W x I + B = Z
// NOTE(review): the comment says W x I but the code computes I.Multiply(W) —
// whether these agree depends on Matrix::Multiply's operand convention;
// verify against matrices.h.
this->raw_output = this->input.Multiply(&this->weights).Add(&this->biases);
// Now through activation function
// A = F(Z)
this->activated_output = this->raw_output.Function(activation);
}
// Default forward pass: delegates to the general overload with the
// logistic sigmoid as the activation function.
void Layer::Forward(){
    Forward(&Layer::Sigmoid);
}