Improvements to the unit tests

This commit is contained in:
AD005\z004z3ez 2025-11-24 15:08:02 +01:00
parent e86179f3f1
commit 15b4d5d016


@@ -9,39 +9,45 @@
 static void writeWeights(Layer layer, FILE *file)
 {
-    fwrite(&layer.weights.rows, sizeof(unsigned int), 1, file);
-    fwrite(&layer.weights.cols, sizeof(unsigned int), 1, file);
-    fwrite(layer.weights.buffer, sizeof(float), layer.weights.rows * layer.weights.cols, file);
+    //rows and cols no longer need to be written here, since loadModel() derives the dimensions itself
+    unsigned int n = (unsigned int)layer.weights.rows * layer.weights.cols;
+    fwrite(layer.weights.buffer, sizeof(MatrixType), n, file);
 }
 static void writeBiases(Layer layer, FILE *file)
 {
-    fwrite(&layer.biases.rows, sizeof(unsigned int), 1, file);
-    fwrite(&layer.biases.cols, sizeof(unsigned int), 1, file);
-    fwrite(layer.biases.buffer, sizeof(float), layer.biases.rows * layer.biases.cols, file);
+    unsigned int n = (unsigned int)layer.biases.rows * layer.biases.cols;
+    fwrite(layer.biases.buffer, sizeof(MatrixType), n, file);
 }
 static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
 {
     //create the file and open it for binary writing
     FILE *file = fopen(path, "wb");
-    if(!file)
+    if(file == NULL)
         return;
     //write the header
     const char *header = IDENT_TAG;
     fwrite(header, sizeof(char), strlen(header), file);
-    //for simplicity, create a single layer
-    fwrite(&nn.numberOfLayers, sizeof(unsigned int), 1, file);
+    //close the file if there is no layer
+    if (nn.numberOfLayers == 0 || nn.layers == NULL)
+    {
+        fclose(file);
+        return;
+    }
+    //first input dimension: columns of the first weight matrix
+    unsigned int inputDim = (unsigned int)nn.layers[0].weights.cols;
+    fwrite(&inputDim, sizeof(unsigned int), 1, file);
+    //for each layer: write its output dimension, weights and biases
     for (unsigned int i = 0; i < nn.numberOfLayers; i++)
     {
         Layer layer = nn.layers[i];
-        //initialise activationType (transforms the result of the matrix multiplication so it can be used meaningfully in a neural network)
-        unsigned int activationType = 1; //activation type id (e.g. 1 for ReLU)
-        fwrite(&activationType, sizeof(unsigned int), 1, file);
+        unsigned int outputDim = (unsigned int)layer.weights.rows;
+        fwrite(&outputDim, sizeof(unsigned int), 1, file);
         //write the weights
         writeWeights(layer, file);
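
For reference, the read side implied by the new format would look roughly like the sketch below: skip the IDENT_TAG header, read the input dimension once, then for each layer read its output dimension, rows*cols weight values, and one bias per output row. This is only an assumption derived from this diff, not the project's actual loadModel(); MatrixType, Matrix, and Layer are assumed to be defined roughly as shown, with MatrixType being float.

#include <stdio.h>
#include <stdlib.h>

typedef float MatrixType;                                               /* assumed element type */
typedef struct { unsigned int rows, cols; MatrixType *buffer; } Matrix; /* assumed layout       */
typedef struct { Matrix weights; Matrix biases; } Layer;                /* assumed layout       */

/* Reads one layer record: outputDim, then rows*cols weights, then outputDim biases.
 * inputDim is carried over from the file header (first layer) or the previous layer.
 * Returns 1 on success, 0 on end of file or error; cleanup on failure is left to the caller. */
static int readLayer(FILE *file, unsigned int inputDim, Layer *layer)
{
    unsigned int outputDim;
    if (fread(&outputDim, sizeof(unsigned int), 1, file) != 1)
        return 0;                                   /* end of file: no more layer records */
    layer->weights.rows = outputDim;
    layer->weights.cols = inputDim;
    layer->weights.buffer = malloc((size_t)outputDim * inputDim * sizeof(MatrixType));
    layer->biases.rows = outputDim;
    layer->biases.cols = 1;
    layer->biases.buffer = malloc((size_t)outputDim * sizeof(MatrixType));
    if (!layer->weights.buffer || !layer->biases.buffer)
        return 0;
    if (fread(layer->weights.buffer, sizeof(MatrixType),
              (size_t)outputDim * inputDim, file) != (size_t)outputDim * inputDim)
        return 0;
    if (fread(layer->biases.buffer, sizeof(MatrixType), outputDim, file) != outputDim)
        return 0;
    return 1;
}

Because the new format no longer stores numberOfLayers, a reader would call readLayer() in a loop until reading the next output dimension fails, feeding each layer's weights.rows back in as the next inputDim.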