Compare commits
8 Commits
f4c5a3a92b
...
9a1810775b
Author | SHA1 | Date | |
---|---|---|---|
![]() |
9a1810775b | ||
![]() |
17259076a0 | ||
![]() |
81515a50d2 | ||
![]() |
db04387314 | ||
![]() |
627679252f | ||
![]() |
87d39a705d | ||
![]() |
25fe960caf | ||
![]() |
02f0c8eac7 |
3
.gitignore
vendored
3
.gitignore
vendored
@@ -9,3 +9,6 @@
|
||||
|
||||
layer.exe
|
||||
main.exe
|
||||
network.exe
|
||||
build/Debug/main.o
|
||||
build/Debug/outDebug.exe
|
||||
|
46
layer.h
46
layer.h
@@ -1,3 +1,6 @@
|
||||
#ifndef LAYER_H_
|
||||
#define LAYER_H_
|
||||
|
||||
#include "matrices.h"
|
||||
#include <cassert>
|
||||
#include <math.h>
|
||||
@@ -5,25 +8,50 @@
|
||||
#define assertm(exp, msg) assert((void(msg), exp))
|
||||
|
||||
class Layer {
|
||||
private:
|
||||
public:
|
||||
Matrix input;
|
||||
Matrix weights;
|
||||
Matrix raw_output;
|
||||
Matrix activated_output;
|
||||
Matrix biases;
|
||||
|
||||
float learning_rate = 0.1;
|
||||
|
||||
static inline float Sigmoid(float);
|
||||
static inline float SigmoidPrime(float);
|
||||
|
||||
public:
|
||||
inline Layer(int); // Number of neurons
|
||||
|
||||
inline void Forward(); // Forward Pass with sigmoid
|
||||
inline void Forward(float (*activation)(float)); // Forward Pass with custom activation function
|
||||
|
||||
inline void Feed(Matrix);
|
||||
|
||||
// Constructors
|
||||
// Input size, Size
|
||||
Layer(int, int);
|
||||
Layer();
|
||||
};
|
||||
|
||||
// Default constructor: every matrix member is left default-constructed.
Layer::Layer() {}
|
||||
|
||||
// Build a layer of `size` neurons fed by `input_size` inputs.
// Weights and biases start as uniform random values in [-1, 1].
Layer::Layer(int input_size, int size) {
    input = Matrix(input_size, 1);

    // Every neuron carries one weight per input value.
    weights = Matrix(size, input_size);
    weights.Randomize(-1.0F, 1.0F);

    // Pre- and post-activation outputs share the same (size x 1) shape.
    raw_output = Matrix(size, 1);
    activated_output = raw_output;

    // A single bias per neuron.
    biases = Matrix(size, 1);
    biases.Randomize(-1.0F, 1.0F);
}
|
||||
|
||||
// Store the input vector that the next Forward() call will consume.
void Layer::Feed(Matrix sample) {
    input = sample;
}
|
||||
|
||||
// Logistic sigmoid: 1 / (1 + e^-x), mapping any real input into (0, 1).
// Uses expf and float literals so the whole computation stays in float,
// instead of being silently promoted to double and narrowed back
// (the original `1 / (1 + exp(-x))` did exactly that).
float Layer::Sigmoid(float x){
    return 1.0F / (1.0F + expf(-x));
}
|
||||
@@ -34,9 +62,9 @@ float Layer::SigmoidPrime(float x){
|
||||
}
|
||||
|
||||
void Layer::Forward(float (*activation)(float)){
|
||||
// Multiply inputs by weights
|
||||
// Multiply weight matrix by input matrix
|
||||
// W x I + B = Z
|
||||
this->raw_output = this->input.Multiply(&this->weights).Add(&this->biases);
|
||||
this->raw_output = this->weights.Multiply(&this->input).Add(&this->biases);
|
||||
|
||||
// Now through activation function
|
||||
// A = F(Z)
|
||||
@@ -46,3 +74,5 @@ void Layer::Forward(float (*activation)(float)){
|
||||
// Convenience overload: run the forward pass with the default sigmoid.
void Layer::Forward(){
    // A static member function decays to float(*)(float); the explicit
    // address-of operator is unnecessary.
    Forward(Layer::Sigmoid);
}
|
||||
|
||||
#endif
|
10
matrices.h
10
matrices.h
@@ -1,3 +1,6 @@
|
||||
#ifndef MATRICES_H_
|
||||
#define MATRICES_H_
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <cassert>
|
||||
@@ -34,6 +37,7 @@ class Matrix{
|
||||
inline Matrix Transpose();
|
||||
|
||||
// Operators
|
||||
inline Matrix();
|
||||
inline Matrix operator=(const Matrix*);
|
||||
inline Matrix operator+(const Matrix*);
|
||||
inline Matrix operator-(const Matrix*);
|
||||
@@ -48,6 +52,10 @@ class Matrix{
|
||||
inline Matrix(const Matrix*);
|
||||
};
|
||||
|
||||
// Default constructor: an empty matrix with no dimensions set.
Matrix::Matrix() {}
|
||||
|
||||
// Pointer-based assignment: delegates entirely to Swap.
// NOTE(review): taking `const Matrix*` and returning a Matrix by value is
// unusual for operator= (idiomatic is `Matrix& operator=(const Matrix&)`),
// but this matches the in-class declaration; confirm Swap's semantics
// before changing the signature.
Matrix Matrix::operator=(const Matrix* rhs){
    return Swap(rhs);
}
|
||||
@@ -254,3 +262,5 @@ void Matrix::Randomize(float min, float max){
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
53
network.h
Normal file
53
network.h
Normal file
@@ -0,0 +1,53 @@
|
||||
#include "layer.h"
|
||||
#include "matrices.h"
|
||||
#include <vector>
|
||||
|
||||
class Network {
|
||||
public:
|
||||
Matrix input;
|
||||
float (*activation)(float) = Layer::Sigmoid; // Activation function is sigmoid by default
|
||||
|
||||
std::vector<Layer> hidden_layers;
|
||||
Layer output_layer;
|
||||
|
||||
inline void Feed(Matrix);
|
||||
inline Matrix GetOutput();
|
||||
|
||||
inline void Forward();
|
||||
|
||||
// Constructors
|
||||
// Input size, Array of hidden sizes, Output size
|
||||
Network(int, std::vector<int>, int);
|
||||
};
|
||||
|
||||
// Assemble the network: `input_size` inputs, one hidden layer per entry in
// `hidden_sizes`, and an output layer of `output_size` neurons. Each layer's
// input width is the previous layer's neuron count.
Network::Network(int input_size, std::vector<int> hidden_sizes, int output_size){
    this->input = Matrix(input_size, 1);

    if (hidden_sizes.empty()) {
        // No hidden layers requested: wire the input straight to the output
        // layer. (The original indexed hidden_sizes[0] unconditionally,
        // which is undefined behavior for an empty vector.)
        this->output_layer = Layer(input_size, output_size);
        return;
    }

    // The first hidden layer consumes the network input...
    this->hidden_layers.push_back(Layer(input_size, hidden_sizes[0]));
    // ...and every later hidden layer consumes the previous layer's output.
    for (std::size_t i = 1; i < hidden_sizes.size(); i++) {
        this->hidden_layers.push_back(Layer(hidden_sizes[i - 1], hidden_sizes[i]));
    }
    // The output layer reads from the last hidden layer.
    this->output_layer = Layer(hidden_sizes.back(), output_size);
}
|
||||
|
||||
// Activated output of the final (output) layer from the last Forward() call.
Matrix Network::GetOutput(){
    return output_layer.activated_output;
}
|
||||
|
||||
// Store the input vector that the next Forward() call will consume.
void Network::Feed(Matrix sample){
    input = sample;
}
|
||||
|
||||
// Run one forward pass: propagate `input` through every hidden layer in
// order, then through the output layer. Each layer uses its default
// (sigmoid) Forward(); the result lands in output_layer.activated_output.
void Network::Forward(){
    if (this->hidden_layers.empty()) {
        // No hidden layers: feed the input straight into the output layer.
        // (The original indexed hidden_layers[0] unconditionally, which is
        // undefined behavior when the vector is empty.)
        this->output_layer.Feed(this->input);
        this->output_layer.Forward();
        return;
    }

    // The first hidden layer eats the raw network input.
    this->hidden_layers[0].Feed(this->input);
    this->hidden_layers[0].Forward();

    // Every subsequent layer eats A(L-1), the previous activated output.
    for (std::size_t i = 1; i < this->hidden_layers.size(); i++) {
        this->hidden_layers[i].Feed(this->hidden_layers[i - 1].activated_output);
        this->hidden_layers[i].Forward();
    }

    // The output layer reads the last hidden activation.
    this->output_layer.Feed(this->hidden_layers.back().activated_output);
    this->output_layer.Forward();
}
|
Loading…
x
Reference in New Issue
Block a user