backprop/nn.h
James Griffin 6f23634e32 initial github commit
Implementation of backprop in C using Grand Central Dispatch and Blocks
2014-08-06 15:12:09 -03:00


#ifndef NNMODEL
#define NNMODEL
#define DEFAULT_LAYERS 2
#define DEFAULT_HIDDEN_NODES_PER_LAYER 20
#define DEFAULT_LEARNING_RATE 0.1
#define DEFAULT_MOMENTUM 0.9
#define DEFAULT_TEST_ERROR_MIN 0.3
#define DEFAULT_MAX_EPOCHS_SINCE_MIN 500
#define EPOCH_MAX_ERROR_INCREASE 20
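/*
Note: the message and data types referenced in the declarations below are not
defined in this header; they are assumed to be provided by the project's data
handling code and must be visible wherever this header is used.
*/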
/*
A struct containing a 2-dimensional array storing connection weights
*/
typedef struct matrix {
    double ** weight_matrix;
    int rows;
    int columns;
} matrix;
/*
A struct representing the network and parameters
*/
typedef struct nn_model {
    double learning_rate;
    double momentum;
    int layers;
    int * nodes_per_layer;
    int outputs;
    matrix ** layer_weights;
    matrix ** layer_input_vectors;
    matrix ** layer_output_vectors;
    matrix * output;
    matrix ** previous_weight_updates;
} nn_model;
/*
A function that will free all the memory allocated for a matrix struct
@param to_free The matrix to be freed
@return 0 if it is freed successfully
*/
int free_matrix(matrix * to_free);
/*
A function that will free all the memory allocated for a model struct
@param to_free The model to be freed
@return 0 if it is freed successfully
*/
int free_model(nn_model * to_free);
/*
A function that allocates memory for a matrix
@param r The number of rows
@param c The number of columns
@return A pointer to the newly allocated matrix
*/
matrix * create_matrix(int r, int c);
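/*
Illustrative sketch only (not part of the original API documentation): a 2x3
weight matrix allocated with create_matrix and released with free_matrix.

    matrix * w = create_matrix(2, 3);
    w->weight_matrix[0][1] = 0.5;
    free_matrix(w);
*/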
/*
A function that creates a new neural network model.
@param rate The network learning rate
@param layers The number of layers of nodes (inclusive of input layer)
@param layer_nodes An array of size layers, containing the number of nodes in each layer
@param outputs The number of nodes in the output layer
@return A new network with random weights
*/
struct nn_model * create_model(const double rate, const int layers, const int layer_nodes[], const int outputs);
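/*
Usage sketch (the layer sizes here are arbitrary examples). Reading the
parameters above, layers appears to count the input and hidden layers, with
the output layer sized separately:

    int layer_nodes[] = { 10, DEFAULT_HIDDEN_NODES_PER_LAYER };
    nn_model * m = create_model(DEFAULT_LEARNING_RATE, 2, layer_nodes, 2);
    free_model(m);
*/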
/*
A function that creates a copy of a model
@param model The model to be copied
@return A copy of the model that was passed.
*/
struct nn_model * copy_model(const struct nn_model * model);
/*
A function that writes a trained, well-performing model to a text file
@param m The model to be saved
@param file The file name for the saved model
@return 0 if it wrote correctly
*/
int save_model(const nn_model * m, const char * file);
/*
A function that reads a saved model in from a file
@param file The file name for the saved model
@return A trained nn_model with the weights from the saved file.
*/
struct nn_model * load_model(char * file);
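/*
Round-trip sketch (the file name is only an example):

    char path[] = "model.txt";
    if (save_model(m, path) == 0) {
        nn_model * restored = load_model(path);
        free_model(restored);
    }
*/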
/*
A function that takes two matrices and multiplies them together, returning a new matrix
@param a The first matrix
@param b The second matrix
@return The product of the matrix multiplication
*/
matrix * multiply_matricies(const matrix * a, const matrix * b);
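/*
Dimension note: under standard matrix multiplication, a must be r x k and b
must be k x c, giving a new r x c result; for example, a 1x10 input row times
a 10x20 weight matrix yields a 1x20 row for the next layer.
*/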
/*
Add the contents of matrix b to matrix a.
@param a A matrix to be modified
@param b A matrix of values to add to the first matrix
@return 0 if the first matrix was modified successfully
*/
int add_matricies(matrix * a, const matrix * b);
/*
A function that takes an input vector representing a layer of nodes and uses the Sigmoid
activation function to create the values that will be used for the next layer
@param input_vector The input vector to be activated for the next layer
@return A matrix containing a 1xn vector of sigmoid values of the input vector
*/
matrix * activation_function(const matrix * input_vector);
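/*
The standard logistic sigmoid is sigma(x) = 1 / (1 + exp(-x)), applied
element-wise, so every activated value lies in the open interval (0, 1).
*/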
/*
Classify a provided message using the currently learned model.
@param current The model to be used to classify
@param input The input vector to be classified by the model
@return 0 if the vector has been classified
*/
int classify_instance(nn_model * current, message * input, const int size);
/*
Free the memory used by layer value vectors.
This function is for use after classifying.
@param m The model whose input and output vectors should be freed
@return 0 if the model's vectors are reset for another run
*/
int reset_model_vectors(nn_model * m);
/*
Propagate error back through the network
@param update The model whose weights need to be updated
@param output The desired outputs given the current values
@return 0 if model weights are updated correctly
*/
int backprop_update(nn_model * update, matrix * output);
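/*
Given the learning_rate and momentum fields and the previous_weight_updates
matrices kept in the model, the usual gradient-descent-with-momentum rule has
the form

    delta_w(t) = learning_rate * gradient + momentum * delta_w(t - 1)

with delta_w(t) then added to the weights; the exact gradient computation
lives in the implementation file.
*/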
/*
Train the network with backpropagation on the training_data, validating
against the test_data
@param m The model to train
@param training_data The dataset of training instances
@param test_data The dataset of the test instances
@param error_rate The minimum test data error rate accepted
@param epoch_max The maximum number of times to continue training without finding a minimum
@return The trained model once an acceptable training threshold has been reached.
*/
nn_model * train_model(nn_model * m, data * training_data, data * test_data, double error_rate, int epoch_max);
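/*
Training sketch (training_set and test_set stand in for datasets loaded
elsewhere in the project):

    nn_model * trained = train_model(m, training_set, test_set,
                                     DEFAULT_TEST_ERROR_MIN,
                                     DEFAULT_MAX_EPOCHS_SINCE_MIN);
*/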
/*
A function that classifies all the instances in the dataset
@param m The model that is to be used for classification
@param set The dataset whose instances are to be classified
@return 0 if the set is successfully classified
*/
int classify_dataset(nn_model * m, data * set);
/*
A function that prints a confusion matrix for a dataset, showing how the
instances of each class were classified, along with some summary statistics
@param set The dataset whose confusion matrix is to be printed
*/
void print_confusion_matrix(data * set);
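/*
Evaluation sketch, continuing from the training sketch above: classify a
held-out set, then print its confusion matrix.

    classify_dataset(trained, test_set);
    print_confusion_matrix(test_set);
*/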
/*
A function that prints a matrix with optional column labels
@param m The matrix that is to be printed
@param labels The column labels to print, if any
*/
void print_matrix(matrix * m, char ** labels);
#endif