Layer library with forward propagation
layer.h (48 lines, Normal file)
							@@ -0,0 +1,48 @@
#pragma once // Include guard: without it, a second include redefines class Layer

#include "matrices.h"
#include <cassert>
#include <cmath>

#define assertm(exp, msg) assert((void(msg), exp))

class Layer {
    private:
    Matrix input;
    Matrix weights;
    Matrix raw_output;       // Z, pre-activation output
    Matrix activated_output; // A = F(Z)
    Matrix biases;

    float learning_rate = 0.1f; // Step size, reserved for backpropagation

    static inline float Sigmoid(float);
    static inline float SigmoidPrime(float);

    public:
    inline Layer(int); // Number of neurons

    inline void Forward(); // Forward pass with sigmoid
    inline void Forward(float (*activation)(float)); // Forward pass with custom activation function
};

float Layer::Sigmoid(float x){
    return 1.0f / (1.0f + std::exp(-x));
}

float Layer::SigmoidPrime(float x){
    // d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x))
    float buffer = Layer::Sigmoid(x);
    return buffer * (1.0f - buffer);
}

void Layer::Forward(float (*activation)(float)){
    // Multiply inputs by weights, then add biases
    // Z = I x W + B
    this->raw_output = this->input.Multiply(&this->weights).Add(&this->biases);

    // Now through the activation function
    // A = F(Z)
    this->activated_output = this->raw_output.Function(activation);
}

void Layer::Forward(){
    this->Forward(&Layer::Sigmoid);
}
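
For context, a minimal sketch of how this layer might be driven once the Layer(int) constructor is defined. It only uses the public interface declared above; the Matrix type and its Multiply/Add/Function methods come from matrices.h as used in the diff. The Relu function and the neuron count 16 are illustrative assumptions, not part of this commit.

#include "layer.h"

// Hypothetical custom activation for the function-pointer overload (not in this commit)
static float Relu(float x){
    return x > 0.0f ? x : 0.0f;
}

int main(){
    // Assumes Layer(int) (declared above but defined elsewhere) sets up
    // input, weights and biases with compatible dimensions
    Layer hidden(16);

    hidden.Forward();      // Z = I x W + B, then A = sigmoid(Z)
    hidden.Forward(&Relu); // Same pass, custom activation via function pointer
    return 0;
}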