Compare commits

..

8 Commits

Author SHA1 Message Date
LeLeLeLeto
9a1810775b Updated gitignore 2024-12-31 02:08:55 +01:00
LeLeLeLeto
17259076a0 Added network forward propagation and feeding 2024-12-31 02:08:44 +01:00
LeLeLeLeto
81515a50d2 Matrix header guard 2024-12-31 02:08:27 +01:00
LeLeLeLeto
db04387314 Fix wrong matrix sizes in layer constructor 2024-12-31 02:08:13 +01:00
LeLeLeLeto
627679252f Layer default constructor 2024-12-31 01:38:56 +01:00
LeLeLeLeto
87d39a705d Matrix default constructor 2024-12-31 01:35:05 +01:00
LeLeLeLeto
25fe960caf Layer feed function and constructor 2024-12-31 01:34:56 +01:00
LeLeLeLeto
02f0c8eac7 Attributes to public and removed learning rate in layer 2024-12-31 01:15:20 +01:00
4 changed files with 106 additions and 10 deletions

3
.gitignore vendored
View File

@ -9,3 +9,6 @@
layer.exe layer.exe
main.exe main.exe
network.exe
build/Debug/main.o
build/Debug/outDebug.exe

46
layer.h
View File

@ -1,3 +1,6 @@
#ifndef LAYER_H_
#define LAYER_H_
#include "matrices.h" #include "matrices.h"
#include <cassert> #include <cassert>
#include <math.h> #include <math.h>
@ -5,25 +8,50 @@
#define assertm(exp, msg) assert((void(msg), exp)) #define assertm(exp, msg) assert((void(msg), exp))
class Layer { class Layer {
private: public:
Matrix input; Matrix input;
Matrix weights; Matrix weights;
Matrix raw_output; Matrix raw_output;
Matrix activated_output; Matrix activated_output;
Matrix biases; Matrix biases;
float learning_rate = 0.1;
static inline float Sigmoid(float); static inline float Sigmoid(float);
static inline float SigmoidPrime(float); static inline float SigmoidPrime(float);
public:
inline Layer(int); // Number of neurons
inline void Forward(); // Forward Pass with sigmoid inline void Forward(); // Forward Pass with sigmoid
inline void Forward(float (*activation)(float)); // Forward Pass with custom activation function inline void Forward(float (*activation)(float)); // Forward Pass with custom activation function
inline void Feed(Matrix);
// Constructors
// Input size, Size
Layer(int, int);
Layer();
}; };
// Default constructor: leaves every Matrix member default-constructed so a
// Layer can live inside containers (std::vector<Layer>) and be assigned later.
// 'inline' is required: this definition lives in a header and would otherwise
// violate the ODR when layer.h is included from more than one translation unit.
inline Layer::Layer(){
}
// Constructs a layer of `size` neurons, each consuming `input_size` inputs.
// Weights and biases are randomized in [-1, 1].
// 'inline' is required: this definition lives in a header and would otherwise
// violate the ODR when layer.h is included from more than one translation unit.
inline Layer::Layer(int input_size, int size){
    this->input = Matrix(input_size, 1);
    // Every neuron has a weight for every input
    this->weights = Matrix(size, input_size);
    this->weights.Randomize(-1.0F, 1.0F);
    this->raw_output = Matrix(size, 1);
    // Same shape as Z; overwritten on every Forward() pass
    this->activated_output = this->raw_output;
    // One bias per neuron
    this->biases = Matrix(size, 1);
    this->biases.Randomize(-1.0F, 1.0F);
}
// Stores `sample` as this layer's input vector (the previous layer's
// activated output, or the raw network input for the first layer).
void Layer::Feed(Matrix sample){
    input = sample;
}
// Logistic sigmoid activation: squashes any real x into (0, 1).
float Layer::Sigmoid(float x){
    return 1 / (1 + exp(-x));
}
@ -34,9 +62,9 @@ float Layer::SigmoidPrime(float x){
} }
void Layer::Forward(float (*activation)(float)){ void Layer::Forward(float (*activation)(float)){
// Multiply inputs by weights // Multiply weight matrix by input matrix
// W x I + B = Z // W x I + B = Z
this->raw_output = this->input.Multiply(&this->weights).Add(&this->biases); this->raw_output = this->weights.Multiply(&this->input).Add(&this->biases);
// Now through activation function // Now through activation function
// A = F(Z) // A = F(Z)
@ -46,3 +74,5 @@ void Layer::Forward(float (*activation)(float)){
// Forward pass using the default activation (sigmoid).
void Layer::Forward(){
    Forward(&Layer::Sigmoid);
}
#endif

View File

@ -1,3 +1,6 @@
#ifndef MATRICES_H_
#define MATRICES_H_
#include <string> #include <string>
#include <vector> #include <vector>
#include <cassert> #include <cassert>
@ -34,6 +37,7 @@ class Matrix{
inline Matrix Transpose(); inline Matrix Transpose();
// Operators // Operators
inline Matrix();
inline Matrix operator=(const Matrix*); inline Matrix operator=(const Matrix*);
inline Matrix operator+(const Matrix*); inline Matrix operator+(const Matrix*);
inline Matrix operator-(const Matrix*); inline Matrix operator-(const Matrix*);
@ -48,6 +52,10 @@ class Matrix{
inline Matrix(const Matrix*); inline Matrix(const Matrix*);
}; };
// Default constructor: makes Matrix default-constructible so it can be a
// plain data member (e.g. in Layer) and element of containers; dimensions
// and storage are presumably set later via assignment — TODO confirm what
// state the members are left in.
Matrix::Matrix(){
}
// Pointer-based assignment: delegates to Swap(other) and returns its result
// by value. NOTE(review): unconventional signature — the canonical copy
// assignment takes `const Matrix&` and returns `Matrix&`; kept as-is because
// callers use the pointer form.
Matrix Matrix::operator=(const Matrix* other){
    return this->Swap(other);
}
@ -254,3 +262,5 @@ void Matrix::Randomize(float min, float max){
} }
} }
} }
#endif

53
network.h Normal file
View File

@ -0,0 +1,53 @@
#pragma once  // network.h had no include guard; layer.h/matrices.h use #ifndef guards
#include "layer.h"
#include "matrices.h"
#include <cstddef>
#include <vector>
// A feed-forward neural network: an input vector, a chain of hidden layers
// and a final output layer, wired together by Forward().
class Network {
public:
    Matrix input;  // Network input, (input_size x 1) column vector
    // NOTE(review): this member is never consulted by Forward(), which always
    // runs the layers' sigmoid default — confirm intended use.
    float (*activation)(float) = Layer::Sigmoid; // Activation function is sigmoid by default
    std::vector<Layer> hidden_layers;  // Hidden layers, in feed order
    Layer output_layer;                // Its activated_output is the network's result
    inline void Feed(Matrix);          // Store the input vector
    inline Matrix GetOutput();         // Activated output of the output layer
    inline void Forward();             // Full forward pass through every layer
    // Constructors
    // Input size, Array of hidden sizes, Output size
    Network(int, std::vector<int>, int);
};
// Builds the network: input_size inputs, one hidden layer per entry of
// hidden_sizes (now allowed to be empty — the output layer then connects
// straight to the input; the original indexed hidden_sizes[0] unconditionally,
// which was UB for an empty vector), and an output layer of output_size neurons.
// 'inline' is required: this definition lives in a header and would otherwise
// violate the ODR when network.h is included from more than one translation unit.
inline Network::Network(int input_size, std::vector<int> hidden_sizes, int output_size){
    this->input = Matrix(input_size, 1);
    // Each layer consumes the previous layer's neuron count as its input size.
    int previous_size = input_size;
    for(std::size_t i = 0; i < hidden_sizes.size(); i++){
        // For every hidden layer, create a layer of specified size
        this->hidden_layers.push_back(Layer(previous_size, hidden_sizes[i]));
        previous_size = hidden_sizes[i];
    }
    this->output_layer = Layer(previous_size, output_size);
}
// The network's prediction: the activated output (A) of the final layer,
// valid after a call to Forward().
Matrix Network::GetOutput(){
    return output_layer.activated_output;
}
// Loads one sample into the network; call before Forward().
void Network::Feed(Matrix sample){
    input = sample;
}
void Network::Forward(){
// Feeding first layer
this->hidden_layers[0].Feed(this->input);
this->hidden_layers[0].Forward();
for(int i = 1; i < this->hidden_layers.size(); i++){
// Feeding A(L-1) and forwarding
this->hidden_layers[i].Feed(this->hidden_layers[i - 1].activated_output);
this->hidden_layers[i].Forward();
}
this->output_layer.Feed(this->hidden_layers[this->hidden_layers.size() - 1].activated_output);
this->output_layer.Forward();
}