implement prepareNeuralNetworkFile()

D2A62006 2025-11-27 11:54:47 +01:00
parent ce371e4228
commit 880514b55f
3 changed files with 36 additions and 3 deletions

View File

@@ -164,7 +164,7 @@ NeuralNetwork loadModel(const char *path)
        assignActivations(model);
    }
    printf("%d\n", model.numberOfLayers);
    return model;
}

neuralNetwork.sh Normal file (+1)
View File

@@ -0,0 +1 @@
+make clean && make && make neuralNetworkTests

View File

@@ -8,9 +8,41 @@
 static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
 {
-    // TODO
-}
+    FILE *file = fopen(path, "wb");
+    if(file != NULL){
+        const char *fileTag = "__info2_neural_network_file_format__";
+        // Write file header
+        fwrite(fileTag, sizeof(char), strlen(fileTag), file);
+        // Write the input dimension of the first layer
+        if(nn.numberOfLayers > 0){
+            fwrite(&nn.layers[0].weights.cols, sizeof(int), 1, file);
+        }
+        // Write dimensions and data for each layer
+        for(int i = 0; i < nn.numberOfLayers; i++){
+            // Write output dimension (rows of weights)
+            fwrite(&nn.layers[i].weights.rows, sizeof(int), 1, file);
+            // Write weight matrix data
+            int weightSize = nn.layers[i].weights.rows * nn.layers[i].weights.cols;
+            fwrite(nn.layers[i].weights.buffer, sizeof(MatrixType), weightSize, file);
+            // Write bias matrix data
+            int biasSize = nn.layers[i].biases.rows * nn.layers[i].biases.cols;
+            fwrite(nn.layers[i].biases.buffer, sizeof(MatrixType), biasSize, file);
+        }
+        // Write terminating 0 to signal end of layers
+        int zero = 0;
+        fwrite(&zero, sizeof(int), 1, file);
+        fclose(file);
+    }
+}
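
/*
 * Editor's sketch, not part of this commit: the writer above produces the layout
 *   header tag "__info2_neural_network_file_format__"
 *   int  input dimension of the first layer (only if the network has layers)
 *   per layer: int output dimension (rows), rows*cols MatrixType weights,
 *              then the bias buffer
 *   int  0 as terminator
 * The reader below walks that layout back in, purely to document the format.
 * It assumes the matrix type is named Matrix with int rows/cols and a
 * MatrixType *buffer (inferred from the writer), that biases hold one value
 * per output row, and that createMatrix() and addLayer() are hypothetical
 * stand-ins for whatever helpers the code base actually provides.
 */
static int readNeuralNetworkFileSketch(const char *path, NeuralNetwork *nn)
{
    FILE *file = fopen(path, "rb");
    if(file == NULL){
        return 0;
    }
    // Check the header tag written first
    const char *fileTag = "__info2_neural_network_file_format__";
    size_t tagLen = strlen(fileTag);
    char tag[64] = {0};
    if(fread(tag, sizeof(char), tagLen, file) != tagLen || strncmp(tag, fileTag, tagLen) != 0){
        fclose(file);
        return 0;
    }
    // Input dimension of the first layer (cols of its weight matrix)
    int cols = 0;
    if(fread(&cols, sizeof(int), 1, file) != 1){
        fclose(file);
        return 0;
    }
    // Each layer starts with its output dimension; the trailing 0 ends the list
    int rows = 0;
    while(fread(&rows, sizeof(int), 1, file) == 1 && rows != 0){
        Matrix weights = createMatrix(rows, cols); // hypothetical helper
        Matrix biases = createMatrix(rows, 1);     // assumes rows x 1 biases
        fread(weights.buffer, sizeof(MatrixType), (size_t)rows * (size_t)cols, file);
        fread(biases.buffer, sizeof(MatrixType), (size_t)rows, file);
        addLayer(nn, weights, biases);             // hypothetical helper
        cols = rows; // this layer's output feeds the next layer's input
    }
    fclose(file);
    return 1;
}
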
 void test_loadModelReturnsCorrectNumberOfLayers(void)
 {
     const char *path = "some__nn_test_file.info2";