This commit is contained in:
Kristin 2025-11-25 10:48:24 +01:00
parent efa260ccbe
commit 9606b5a03e

View File

@ -1,35 +1,29 @@
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include "neuralNetwork.h" #include "neuralNetwork.h"
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#define BUFFER_SIZE 100 #define BUFFER_SIZE 100
#define FILE_HEADER_STRING "__info2_neural_network_file_format__" #define FILE_HEADER_STRING "__info2_neural_network_file_format__"
static void softmax(Matrix *matrix) static void softmax(Matrix *matrix) {
{ if (matrix->cols > 0) {
if(matrix->cols > 0)
{
double *colSums = (double *)calloc(matrix->cols, sizeof(double)); double *colSums = (double *)calloc(matrix->cols, sizeof(double));
if(colSums != NULL) if (colSums != NULL) {
{ for (int colIdx = 0; colIdx < matrix->cols; colIdx++) {
for(int colIdx = 0; colIdx < matrix->cols; colIdx++) for (int rowIdx = 0; rowIdx < matrix->rows; rowIdx++) {
{
for(int rowIdx = 0; rowIdx < matrix->rows; rowIdx++)
{
MatrixType expValue = exp(getMatrixAt(*matrix, rowIdx, colIdx)); MatrixType expValue = exp(getMatrixAt(*matrix, rowIdx, colIdx));
setMatrixAt(expValue, *matrix, rowIdx, colIdx); setMatrixAt(expValue, *matrix, rowIdx, colIdx);
colSums[colIdx] += expValue; colSums[colIdx] += expValue;
} }
} }
for(int colIdx = 0; colIdx < matrix->cols; colIdx++) for (int colIdx = 0; colIdx < matrix->cols; colIdx++) {
{ for (int rowIdx = 0; rowIdx < matrix->rows; rowIdx++) {
for(int rowIdx = 0; rowIdx < matrix->rows; rowIdx++) MatrixType normalizedValue =
{ getMatrixAt(*matrix, rowIdx, colIdx) / colSums[colIdx];
MatrixType normalizedValue = getMatrixAt(*matrix, rowIdx, colIdx) / colSums[colIdx];
setMatrixAt(normalizedValue, *matrix, rowIdx, colIdx); setMatrixAt(normalizedValue, *matrix, rowIdx, colIdx);
} }
} }
@ -38,16 +32,13 @@ static void softmax(Matrix *matrix)
} }
} }
/* Applies the ReLU activation to the matrix in place: any entry that is
 * not >= 0 (including NaN, which fails the comparison) is set to 0. */
static void relu(Matrix *matrix) {
  int totalEntries = matrix->rows * matrix->cols;
  for (int idx = 0; idx < totalEntries; idx++) {
    if (!(matrix->buffer[idx] >= 0)) {
      matrix->buffer[idx] = 0;
    }
  }
}
static int checkFileHeader(FILE *file) static int checkFileHeader(FILE *file) {
{
int isValid = 0; int isValid = 0;
int fileHeaderLen = strlen(FILE_HEADER_STRING); int fileHeaderLen = strlen(FILE_HEADER_STRING);
char buffer[BUFFER_SIZE] = {0}; char buffer[BUFFER_SIZE] = {0};
@ -61,8 +52,7 @@ static int checkFileHeader(FILE *file)
return isValid; return isValid;
} }
static unsigned int readDimension(FILE *file) static unsigned int readDimension(FILE *file) {
{
int dimension = 0; int dimension = 0;
if (fread(&dimension, sizeof(int), 1, file) != 1) if (fread(&dimension, sizeof(int), 1, file) != 1)
@ -71,21 +61,20 @@ static unsigned int readDimension(FILE *file)
return dimension; return dimension;
} }
static Matrix readMatrix(FILE *file, unsigned int rows, unsigned int cols) static Matrix readMatrix(FILE *file, unsigned int rows, unsigned int cols) {
{
Matrix matrix = createMatrix(rows, cols); Matrix matrix = createMatrix(rows, cols);
if(matrix.buffer != NULL) if (matrix.buffer != NULL) {
{ if (fread(matrix.buffer, sizeof(MatrixType), rows * cols, file) !=
if(fread(matrix.buffer, sizeof(MatrixType), rows*cols, file) != rows*cols) rows * cols)
clearMatrix(&matrix); clearMatrix(&matrix);
} }
return matrix; return matrix;
} }
static Layer readLayer(FILE *file, unsigned int inputDimension, unsigned int outputDimension) static Layer readLayer(FILE *file, unsigned int inputDimension,
{ unsigned int outputDimension) {
Layer layer; Layer layer;
layer.weights = readMatrix(file, outputDimension, inputDimension); layer.weights = readMatrix(file, outputDimension, inputDimension);
layer.biases = readMatrix(file, outputDimension, 1); layer.biases = readMatrix(file, outputDimension, 1);
@ -93,25 +82,22 @@ static Layer readLayer(FILE *file, unsigned int inputDimension, unsigned int out
return layer; return layer;
} }
static int isEmptyLayer(const Layer layer) static int isEmptyLayer(const Layer layer) {
{ return layer.biases.cols == 0 || layer.biases.rows == 0 ||
return layer.biases.cols == 0 || layer.biases.rows == 0 || layer.biases.buffer == NULL || layer.weights.rows == 0 || layer.weights.cols == 0 || layer.weights.buffer == NULL; layer.biases.buffer == NULL || layer.weights.rows == 0 ||
layer.weights.cols == 0 || layer.weights.buffer == NULL;
} }
static void clearLayer(Layer *layer) static void clearLayer(Layer *layer) {
{ if (layer != NULL) {
if(layer != NULL)
{
clearMatrix(&layer->weights); clearMatrix(&layer->weights);
clearMatrix(&layer->biases); clearMatrix(&layer->biases);
layer->activation = NULL; layer->activation = NULL;
} }
} }
static void assignActivations(NeuralNetwork model) static void assignActivations(NeuralNetwork model) {
{ for (int i = 0; i < (int)model.numberOfLayers - 1; i++) {
for(int i = 0; i < (int)model.numberOfLayers-1; i++)
{
model.layers[i].activation = relu; model.layers[i].activation = relu;
} }
@ -119,36 +105,31 @@ static void assignActivations(NeuralNetwork model)
model.layers[model.numberOfLayers - 1].activation = softmax; model.layers[model.numberOfLayers - 1].activation = softmax;
} }
NeuralNetwork loadModel(const char *path) NeuralNetwork loadModel(const char *path) {
{
NeuralNetwork model = {NULL, 0}; NeuralNetwork model = {NULL, 0};
FILE *file = fopen(path, "rb"); FILE *file = fopen(path, "rb");
if(file != NULL) if (file != NULL) {
{ if (checkFileHeader(file)) {
if(checkFileHeader(file))
{
unsigned int inputDimension = readDimension(file); unsigned int inputDimension = readDimension(file);
unsigned int outputDimension = readDimension(file); unsigned int outputDimension = readDimension(file);
while(inputDimension > 0 && outputDimension > 0) while (inputDimension > 0 && outputDimension > 0) {
{
Layer layer = readLayer(file, inputDimension, outputDimension); Layer layer = readLayer(file, inputDimension, outputDimension);
Layer *layerBuffer = NULL; Layer *layerBuffer = NULL;
if(isEmptyLayer(layer)) if (isEmptyLayer(layer)) {
{
clearLayer(&layer); clearLayer(&layer);
clearModel(&model); clearModel(&model);
break; break;
} }
layerBuffer = (Layer *)realloc(model.layers, (model.numberOfLayers + 1) * sizeof(Layer)); layerBuffer = (Layer *)realloc(
model.layers, (model.numberOfLayers + 1) * sizeof(Layer));
if (layerBuffer != NULL) if (layerBuffer != NULL)
model.layers = layerBuffer; model.layers = layerBuffer;
else else {
{
clearModel(&model); clearModel(&model);
break; break;
} }
@ -168,20 +149,16 @@ NeuralNetwork loadModel(const char *path)
return model; return model;
} }
static Matrix imageBatchToMatrixOfImageVectors(const GrayScaleImage images[], unsigned int count) static Matrix imageBatchToMatrixOfImageVectors(const GrayScaleImage images[],
{ unsigned int count) {
Matrix matrix = {NULL, 0, 0}; Matrix matrix = {0, 0, NULL};
if(count > 0 && images != NULL) if (count > 0 && images != NULL) {
{
matrix = createMatrix(images[0].height * images[0].width, count); matrix = createMatrix(images[0].height * images[0].width, count);
if(matrix.buffer != NULL) if (matrix.buffer != NULL) {
{ for (int i = 0; i < count; i++) {
for(int i = 0; i < count; i++) for (int j = 0; j < images[i].width * images[i].height; j++) {
{
for(int j = 0; j < images[i].width * images[i].height; j++)
{
setMatrixAt((MatrixType)images[i].buffer[j], matrix, j, i); setMatrixAt((MatrixType)images[i].buffer[j], matrix, j, i);
} }
} }
@ -191,14 +168,11 @@ static Matrix imageBatchToMatrixOfImageVectors(const GrayScaleImage images[], un
return matrix; return matrix;
} }
static Matrix forward(const NeuralNetwork model, Matrix inputBatch) static Matrix forward(const NeuralNetwork model, Matrix inputBatch) {
{
Matrix result = inputBatch; Matrix result = inputBatch;
if(result.buffer != NULL) if (result.buffer != NULL) {
{ for (int i = 0; i < model.numberOfLayers; i++) {
for(int i = 0; i < model.numberOfLayers; i++)
{
Matrix biasResult; Matrix biasResult;
Matrix weightResult; Matrix weightResult;
@ -216,23 +190,19 @@ static Matrix forward(const NeuralNetwork model, Matrix inputBatch)
return result; return result;
} }
unsigned char *argmax(const Matrix matrix) unsigned char *argmax(const Matrix matrix) {
{
unsigned char *maxIdx = NULL; unsigned char *maxIdx = NULL;
if(matrix.rows > 0 && matrix.cols > 0) if (matrix.rows > 0 && matrix.cols > 0) {
{
maxIdx = (unsigned char *)malloc(sizeof(unsigned char) * matrix.cols); maxIdx = (unsigned char *)malloc(sizeof(unsigned char) * matrix.cols);
if(maxIdx != NULL) if (maxIdx != NULL) {
{ for (int colIdx = 0; colIdx < matrix.cols; colIdx++) {
for(int colIdx = 0; colIdx < matrix.cols; colIdx++)
{
maxIdx[colIdx] = 0; maxIdx[colIdx] = 0;
for(int rowIdx = 1; rowIdx < matrix.rows; rowIdx++) for (int rowIdx = 1; rowIdx < matrix.rows; rowIdx++) {
{ if (getMatrixAt(matrix, rowIdx, colIdx) >
if(getMatrixAt(matrix, rowIdx, colIdx) > getMatrixAt(matrix, maxIdx[colIdx], colIdx)) getMatrixAt(matrix, maxIdx[colIdx], colIdx))
maxIdx[colIdx] = rowIdx; maxIdx[colIdx] = rowIdx;
} }
} }
@ -242,8 +212,8 @@ unsigned char *argmax(const Matrix matrix)
return maxIdx; return maxIdx;
} }
unsigned char *predict(const NeuralNetwork model, const GrayScaleImage images[], unsigned int numberOfImages) unsigned char *predict(const NeuralNetwork model, const GrayScaleImage images[],
{ unsigned int numberOfImages) {
Matrix inputBatch = imageBatchToMatrixOfImageVectors(images, numberOfImages); Matrix inputBatch = imageBatchToMatrixOfImageVectors(images, numberOfImages);
Matrix outputBatch = forward(model, inputBatch); Matrix outputBatch = forward(model, inputBatch);
@ -254,12 +224,9 @@ unsigned char *predict(const NeuralNetwork model, const GrayScaleImage images[],
return result; return result;
} }
void clearModel(NeuralNetwork *model) void clearModel(NeuralNetwork *model) {
{ if (model != NULL) {
if(model != NULL) for (int i = 0; i < model->numberOfLayers; i++) {
{
for(int i = 0; i < model->numberOfLayers; i++)
{
clearLayer(&model->layers[i]); clearLayer(&model->layers[i]);
} }
model->layers = NULL; model->layers = NULL;