completed prepareNeuralNetworkFile
parent c7c68a0ce0
commit 284a313751

@@ -5,12 +5,89 @@
#include "unity.h"
#include "neuralNetwork.h"

/*
################
Test file layout
################


HEADER (file tag string)

inputDim
outputDim

-- Layer 1 --
weights (outputDim * inputDim * MatrixType)
biases (outputDim * MatrixType)

outputDim

-- Layer 2 --
weights
biases

...
...
-- Layer n --
weights
biases

outputDim = 0 => end
*/
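/*
Hypothetical example (not part of this commit): a network with layer sizes
3 -> 4 -> 2 would be written, after the header tag, as

    3, 4, [12 weight values], [4 bias values], 2, [8 weight values], [2 bias values], 0

where 12 = 4 * 3 and 8 = 2 * 4 entries of MatrixType, and the trailing 0 marks the end.
*/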


static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
{
    FILE *file = fopen(path, "wb");
    if (file)
    {
        const char *fileTag = "__info2_neural_network_file_format__";
        fwrite(fileTag, 1, strlen(fileTag), file);

        // stops loadModel if no layers are present
        if (nn.numberOfLayers == 0)
        {
            int zero = 0;
            fwrite(&zero, sizeof(int), 1, file);
            fclose(file);
            return;
        }

        // write input and output dimension
        int inputDim = nn.layers[0].weights.cols;
        int outputDim = nn.layers[0].weights.rows;
        fwrite(&inputDim, sizeof(int), 1, file);
        fwrite(&outputDim, sizeof(int), 1, file);

        // write the first layer
        int weightCount = nn.layers[0].weights.rows * nn.layers[0].weights.cols;
        fwrite(nn.layers[0].weights.buffer, sizeof(MatrixType), weightCount, file);

        int biasesCount = nn.layers[0].biases.rows * nn.layers[0].biases.cols;
        fwrite(nn.layers[0].biases.buffer, sizeof(MatrixType), biasesCount, file);

        // for further layers, only the output dimension is written
        for (unsigned int i = 1; i < nn.numberOfLayers; i++)
        {
            outputDim = nn.layers[i].weights.rows;
            fwrite(&outputDim, sizeof(int), 1, file);

            weightCount = nn.layers[i].weights.rows * nn.layers[i].weights.cols;
            fwrite(nn.layers[i].weights.buffer, sizeof(MatrixType), weightCount, file);

            biasesCount = nn.layers[i].biases.rows * nn.layers[i].biases.cols;
            fwrite(nn.layers[i].biases.buffer, sizeof(MatrixType), biasesCount, file);
        }

        // loadModel reads 0 -> stop
        int fileEnd = 0;
        fwrite(&fileEnd, sizeof(int), 1, file);

        fclose(file);
    }
}
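
/*
 * Sketch only, under assumptions: the real loadModel from neuralNetwork.h is not
 * part of this commit, so the helper below (countLayersInFile) is a hypothetical
 * illustration of how the format written above can be walked back until the
 * terminating 0. It reuses the same file tag and MatrixType from neuralNetwork.h.
 */
static int countLayersInFile(const char *path)
{
    FILE *file = fopen(path, "rb");
    if (!file)
    {
        return -1;
    }

    // skip the header tag written by prepareNeuralNetworkFile
    const char *fileTag = "__info2_neural_network_file_format__";
    fseek(file, (long)strlen(fileTag), SEEK_SET);

    int inputDim = 0;
    if (fread(&inputDim, sizeof(int), 1, file) != 1 || inputDim == 0)
    {
        // an empty network is stored as a single 0 after the tag
        fclose(file);
        return 0;
    }

    int layerCount = 0;
    int outputDim = 0;
    while (fread(&outputDim, sizeof(int), 1, file) == 1 && outputDim != 0)
    {
        // skip weights (outputDim * inputDim) and biases (outputDim)
        long skip = (long)(outputDim * inputDim + outputDim) * (long)sizeof(MatrixType);
        fseek(file, skip, SEEK_CUR);

        inputDim = outputDim; // the next layer's input size is this layer's output size
        layerCount++;
    }

    fclose(file);
    return layerCount;
}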


void test_loadModelReturnsCorrectNumberOfLayers(void)
{
    const char *path = "some__nn_test_file.info2";
