adjusted

maxgrf 2025-11-24 12:09:28 +01:00
parent 54ca7acca2
commit 632bdeb9b9


@@ -5,10 +5,74 @@
#include "unity.h" #include "unity.h"
#include "neuralNetwork.h" #include "neuralNetwork.h"
static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn) static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
{ {
    /*
    typedef struct
    {
        Matrix weights;
        Matrix biases;
        ActivationFunctionType activation;
    } Layer;

    typedef struct
    {
        Layer *layers;
        unsigned int numberOfLayers;
    } NeuralNetwork;
    */
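    /*
     * On-disk layout produced below (a sketch of this test helper's own format,
     * not a documented project spec):
     *   tag     "__info2_neural_network_file_format__"   (strlen bytes, no '\0')
     *   int     input   -- columns of the first layer's weight matrix
     *   int     output  -- rows of the first layer's weight matrix
     *   per layer: weights (rows * cols MatrixType), biases (rows MatrixType),
     *              then the next layer's row count as int (omitted for the last layer)
     * Activation functions are not written to the file.
     */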
FILE *file = fopen(path, "wb");
if (!file)
return;
//---------------------------------------------------------------------------
const char *tag = "__info2_neural_network_file_format__";
fwrite(tag, 1, strlen(tag), file);
// Schreibe die Anzahl der Layer
if (nn.numberOfLayers == 0)
{
fclose(file);
return;
}
// Schreibe die Eingabe- und Ausgabegrößen des Netzwerks
int input = nn.layers[0].weights.cols;
int output = nn.layers[0].weights.rows;
fwrite(&input, sizeof(int), 1, file);
fwrite(&output, sizeof(int), 1, file);
    // Write the per-layer data: weights, biases, and the output size of the next layer
    for (unsigned int i = 0; i < nn.numberOfLayers; i++)
    {
        const Layer *layer = &nn.layers[i];
        int out = layer->weights.rows;
        int in = layer->weights.cols;
        fwrite(layer->weights.buffer, sizeof(MatrixType), (size_t)out * in, file);
        fwrite(layer->biases.buffer, sizeof(MatrixType), (size_t)out * 1, file);
        if (i + 1 < nn.numberOfLayers)
        {
            int nextOut = nn.layers[i + 1].weights.rows;
            fwrite(&nextOut, sizeof(int), 1, file);
        }
    }
    fclose(file);

    // Debug output
    printf("prepareNeuralNetworkFile: file '%s' created with %u layer(s)\n", path, nn.numberOfLayers);
    for (unsigned int i = 0; i < nn.numberOfLayers; i++)
    {
        Layer layer = nn.layers[i];
        printf("Layer %u: weights (%u x %u), biases (%u x %u)\n",
               i, layer.weights.rows, layer.weights.cols, layer.biases.rows, layer.biases.cols);
    }
    //---------------------------------------------------------------------------
}

void test_loadModelReturnsCorrectNumberOfLayers(void)

@@ -205,8 +269,8 @@ void test_predictReturnsCorrectLabels(void)
    Matrix biases1 = {.buffer = biasBuffer1, .rows = 2, .cols = 1};
    Matrix biases2 = {.buffer = biasBuffer2, .rows = 3, .cols = 1};
    Matrix biases3 = {.buffer = biasBuffer3, .rows = 5, .cols = 1};
    Layer layers[] = {{.weights = weights1, .biases = biases1, .activation = someActivation},
                      {.weights = weights2, .biases = biases2, .activation = someActivation},
                      {.weights = weights3, .biases = biases3, .activation = someActivation}};
    NeuralNetwork netUnderTest = {.layers = layers, .numberOfLayers = 3};
    unsigned char *predictedLabels = predict(netUnderTest, inputImages, 2);
@@ -216,11 +280,13 @@ void test_predictReturnsCorrectLabels(void)
    free(predictedLabels);
}
void setUp(void)
{
    // If necessary, preparation work can be done here
}

void tearDown(void)
{
    // Cleanup work can be done here after each test
}