create layer struct

This commit is contained in:
vikshar 2025-01-11 14:36:46 -06:00
parent 07a40fdb01
commit 8c48894c42
2 changed files with 53 additions and 85 deletions

85
cnn.c
View File

@ -1,85 +0,0 @@
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
// Kinds of layers the network can contain.
typedef enum {
input,           // raw input volume (pass-through)
conv,            // convolutional layer
max_pool,        // max-pooling downsampling layer
fully_connected, // dense layer
output           // final output layer
} ltype;
// Supported activation functions.
// NOTE: the constant was formerly named `tanh`, which collides with the
// tanh(3) function declared by the included <math.h> — redeclaring the
// same file-scope identifier as a different kind of symbol is a compile
// error. Renamed to tanh_act; the other constants (including `relu`,
// the only one used elsewhere in this file) are unchanged.
typedef enum {
relu,
softmax,
sigmoid,
tanh_act
} activation;
// Hyperparameters for a convolutional layer.
typedef struct {
int filters;     // number of filters == number of output channels
int filter_h;    // filter (kernel) height
int filter_w;    // filter (kernel) width
int stride;      // step between filter applications (same for both axes)
int zeropadding; // rings of zero padding around the input (1 = one-pixel border, etc.)
} convparams;
// Hyperparameters for a max-pooling layer.
// NOTE(review): no stride field — presumably stride equals the window
// size (non-overlapping pooling); confirm against the forward pass.
typedef struct {
int pool_height; // pooling window height
int pool_width;  // pooling window width
} poolparams;
// A single network layer: input/output volume extents plus the
// per-kind parameters. Which union member is valid depends on `type`.
typedef struct {
ltype type;       // which kind of layer this is
activation atype; // activation applied to this layer's output
int input_height;
int input_width;
int input_channels;
int output_height;
int output_width;
int output_channels;
union {
convparams layerconv; // valid when type == conv
poolparams layerpool; // valid when type == max_pool
} params;
float* weights; // NULL until initialized (createlayer sets NULL)
float* biases;  // NULL until initialized (createlayer sets NULL)
} Layer;
// Allocate and initialize a Layer.
//
// height/width/channels describe the layer's INPUT volume. `params`
// points at a convparams (type == conv) or poolparams (type == max_pool)
// and is copied into the layer; it may be NULL for other layer kinds.
// Returns a heap-allocated Layer the caller must free(), or NULL if
// allocation fails.
//
// Fixes vs. previous revision: undefined identifiers (input_height,
// conv_params, stride_height, `convparams` used as a variable), writes
// to a non-existent `activation` field (the member is `atype`), wrong
// convparams member names, unchecked malloc, unhandled layer kinds,
// and a missing `return layer;`.
Layer* createlayer(ltype type, int height, int width, int channels, void* params) {
    Layer* layer = malloc(sizeof *layer);
    if (layer == NULL) {
        return NULL;
    }
    layer->type = type;
    layer->input_height = height;
    layer->input_width = width;
    layer->input_channels = channels;
    layer->weights = NULL;
    layer->biases = NULL;

    switch (type) {
    case input: {
        // Pass-through: output volume equals input volume.
        layer->output_height = height;
        layer->output_width = width;
        layer->output_channels = channels;
        layer->atype = relu;
        break;
    }
    case conv: {
        convparams* cparams = (convparams*)params;
        layer->params.layerconv = *cparams;
        layer->atype = relu;
        // Output size formula (https://cs231n.github.io/convolutional-networks/):
        //   out = (in + 2*pad - filter) / stride + 1
        layer->output_height = ((height + 2 * cparams->zeropadding - cparams->filter_h) / cparams->stride) + 1;
        layer->output_width  = ((width  + 2 * cparams->zeropadding - cparams->filter_w) / cparams->stride) + 1;
        layer->output_channels = cparams->filters;
        break;
    }
    case max_pool: {
        poolparams* pparams = (poolparams*)params;
        layer->params.layerpool = *pparams;
        layer->atype = relu;
        // poolparams carries no stride, so assume stride == window size
        // (non-overlapping pooling) — TODO confirm intended semantics.
        layer->output_height = height / pparams->pool_height;
        layer->output_width  = width / pparams->pool_width;
        layer->output_channels = channels;
        break;
    }
    case fully_connected:
    case output:
    default: {
        // No parameter structs exist for these kinds yet; keep the
        // volume unchanged so downstream code sees sane dimensions.
        layer->output_height = height;
        layer->output_width = width;
        layer->output_channels = channels;
        layer->atype = (type == output) ? softmax : relu;
        break;
    }
    }
    return layer;
}

53
cnn.h Normal file
View File

@ -0,0 +1,53 @@
// convolutional neural network c header library
// inspired by euske's nn1
// meant to be synthesized into RTL through Vitis HLS for an FPGA implementation
#include <stdlib.h>
#include <math.h>
// Kinds of layers this library models.
typedef enum {
input,          // raw input volume
conv,           // convolutional layer
max_pool,       // max-pooling downsampling layer
fully_connected // dense layer
} ltype;
// Position of a fully-connected layer within the dense part of the
// network (stored in fc_params.position).
typedef enum {
fc_input,
fc_hidden,
fc_output,
} fcpos;
// A single network layer. Which member of `params` is meaningful is
// determined by `type`; reading any other union member is unspecified.
// NOTE(review): this header has no include guard (#ifndef/#pragma once);
// including it twice in one translation unit will redeclare these types.
typedef struct Layer {
ltype type;
// spatial extent of layer- l,w,depth (color space)
int height;
int width;
int channels;
// layer params
union {
struct { // valid when type == conv
int num_filters;
int filter_height;
int filter_width;
int stride;
int zero_padding; // how many layers of zero padding
float*** filters; // (width x height) x filters
} conv_params;
struct { // valid when type == max_pool
int pool_height;
int pool_width;
int stride;
} pool_params;
struct { // valid when type == fully_connected
int input_neurons;
int output_neurons;
float** weights;
float* biases;
fcpos position;
} fc_params;
} params;
} Layer;