Compare commits


No commits in common. "9a1810775b30fd5f9baf6e935c9b351264e92395" and "f4c5a3a92b8b55d03a302f7970c6beb955f38021" have entirely different histories.

4 changed files with 10 additions and 106 deletions

3
.gitignore vendored

@@ -9,6 +9,3 @@
layer.exe
main.exe
network.exe
build/Debug/main.o
build/Debug/outDebug.exe

46
layer.h

@@ -1,6 +1,3 @@
#ifndef LAYER_H_
#define LAYER_H_
#include "matrices.h"
#include <cassert>
#include <math.h>
@@ -8,50 +5,25 @@
#define assertm(exp, msg) assert((void(msg), exp))
class Layer {
public:
private:
Matrix input;
Matrix weights;
Matrix raw_output;
Matrix activated_output;
Matrix biases;
float learning_rate = 0.1;
static inline float Sigmoid(float);
static inline float SigmoidPrime(float);
public:
inline Layer(int); // Number of neurons
inline void Forward(); // Forward Pass with sigmoid
inline void Forward(float (*activation)(float)); // Forward Pass with custom activation function
inline void Feed(Matrix);
// Constructors
// Input size, Size
Layer(int, int);
Layer();
};
Layer::Layer(){
}
Layer::Layer(int input_size, int size){
this->input = Matrix(input_size, 1);
// Every neuron has a weight for every input
this->weights = Matrix(size, input_size);
this->weights.Randomize(-1.0F, 1.0F);
this->raw_output = Matrix(size, 1);
this->activated_output = this->raw_output;
// One bias per neuron
this->biases = Matrix(size, 1);
this->biases.Randomize(-1.0F, 1.0F);
}
void Layer::Feed(Matrix a){
this->input = a;
}
float Layer::Sigmoid(float x){
return 1 / (1 + exp(-x));
}
@@ -62,9 +34,9 @@ float Layer::SigmoidPrime(float x){
}
void Layer::Forward(float (*activation)(float)){
// Multiply weight matrix by input matrix
// Multiply inputs by weights
// W x I + B = Z
this->raw_output = this->weights.Multiply(&this->input).Add(&this->biases);
this->raw_output = this->input.Multiply(&this->weights).Add(&this->biases);
// Now through activation function
// A = F(Z)
@@ -74,5 +46,3 @@ void Layer::Forward(float (*activation)(float)){
void Layer::Forward(){
this->Forward(&Layer::Sigmoid);
}
#endif
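A note on the Forward change in this file: per the constructor above, weights is size × input_size and input is input_size × 1, so the original W.Multiply(&I) form yields a size × 1 column that matches the biases, while with the operands reversed the inner dimensions only agree when size == 1. A minimal shape check follows, a sketch assuming only the Matrix(rows, cols) constructor and the pointer-taking Multiply/Add used in this header:

#include "matrices.h"

int main(){
    // Hypothetical sizes: a layer of 2 neurons fed by 3 inputs.
    Matrix input(3, 1);   // I: input_size x 1
    Matrix weights(2, 3); // W: size x input_size
    Matrix biases(2, 1);  // B: size x 1
    // W (2x3) * I (3x1) -> Z (2x1), which matches B for the Add;
    // I (3x1) * W (2x3) has no matching inner dimension.
    Matrix z = weights.Multiply(&input).Add(&biases);
    return 0;
}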

14
matrices.h

@@ -1,6 +1,3 @@
#ifndef MATRICES_H_
#define MATRICES_H_
#include <string>
#include <vector>
#include <cassert>
@@ -37,7 +34,6 @@ class Matrix{
inline Matrix Transpose();
// Operators
inline Matrix();
inline Matrix operator=(const Matrix*);
inline Matrix operator+(const Matrix*);
inline Matrix operator-(const Matrix*);
@@ -52,10 +48,6 @@ class Matrix{
inline Matrix(const Matrix*);
};
Matrix::Matrix(){
}
Matrix Matrix::operator=(const Matrix* other){
return this->Swap(other);
}
@@ -262,5 +254,3 @@ void Matrix::Randomize(float min, float max){
}
}
}
#endif
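The assignment operator in this file is unusual: it takes const Matrix* and returns a fresh Matrix by value through Swap, rather than the canonical Matrix& operator=(const Matrix&). For comparison, a standard copy-and-swap assignment looks like the sketch below; it is illustrative only, using a hypothetical MatrixSketch class, since this diff does not show the real class's storage:

#include <utility>
#include <vector>

class MatrixSketch {
    std::vector<float> data;
    int rows = 0, cols = 0;
public:
    friend void swap(MatrixSketch& a, MatrixSketch& b) noexcept {
        using std::swap;
        swap(a.data, b.data);
        swap(a.rows, b.rows);
        swap(a.cols, b.cols);
    }
    // Pass-by-value makes the copy; swapping with it makes assignment
    // strongly exception-safe, and the old contents die with `other`.
    MatrixSketch& operator=(MatrixSketch other) noexcept {
        swap(*this, other);
        return *this;
    }
};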

53
network.h

@@ -1,53 +0,0 @@
#include "layer.h"
#include "matrices.h"
#include <vector>
class Network {
public:
Matrix input;
float (*activation)(float) = Layer::Sigmoid; // Activation function is sigmoid by default
std::vector<Layer> hidden_layers;
Layer output_layer;
inline void Feed(Matrix);
inline Matrix GetOutput();
inline void Forward();
// Constructors
// Input size, Array of hidden sizes, Output size
Network(int, std::vector<int>, int);
};
Network::Network(int input_size, std::vector<int> hidden_sizes, int output_size){
this->input = Matrix(input_size, 1);
this->hidden_layers.push_back(Layer(input_size, hidden_sizes[0]));
for(int i = 1; i < hidden_sizes.size(); i++){
// For every hidden layer, create a layer of specified size
this->hidden_layers.push_back(Layer(hidden_sizes[i-1], hidden_sizes[i]));
}
this->output_layer = Layer(hidden_sizes[hidden_sizes.size() - 1], output_size);
}
Matrix Network::GetOutput(){
return this->output_layer.activated_output;
}
void Network::Feed(Matrix a){
this->input = a;
}
void Network::Forward(){
// Feeding first layer
this->hidden_layers[0].Feed(this->input);
this->hidden_layers[0].Forward();
for(int i = 1; i < this->hidden_layers.size(); i++){
// Feeding A(L-1) and forwarding
this->hidden_layers[i].Feed(this->hidden_layers[i - 1].activated_output);
this->hidden_layers[i].Forward();
}
this->output_layer.Feed(this->hidden_layers[this->hidden_layers.size() - 1].activated_output);
this->output_layer.Forward();
}
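For reference, the Network class removed here exposed Feed, Forward, and GetOutput; a hypothetical usage sketch, with made-up sizes and assuming the headers shown above:

#include "network.h"

int main(){
    // 2 inputs -> two hidden layers of 3 neurons -> 1 output (illustrative sizes).
    Network net(2, {3, 3}, 1);
    Matrix x(2, 1);
    x.Randomize(-1.0F, 1.0F);   // Randomize is declared in matrices.h above.
    net.Feed(x);
    net.Forward();              // forward through hidden layers, then the output layer
    Matrix y = net.GetOutput(); // activated output, 1 x 1 here
    return 0;
}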