added some alternatives, however none of them work

Tobias Grampp 2025-11-26 12:11:48 +01:00
parent 06782431a4
commit 4272d04f0d
2 changed files with 9 additions and 3 deletions

View File

@@ -134,11 +134,14 @@ Matrix multiply(const Matrix matrix1, const Matrix matrix2)
 }
 void writeMatrix(Matrix matrix, FILE *file)//Added for neuralNetworkTests
 {
+    //fprintf(file, "%d%d", matrix.rows, matrix.cols);
     for(int i = 0; i < matrix.rows; i++)
     {
         for(int j = 0; j < matrix.cols; j++)
         {
-            fprintf(file, (char*)(matrix.buffer + (j+i)*sizeof(MatrixType)));
+            putw(*(matrix.buffer + (j+i)*sizeof(MatrixType)), file);
+            //fprintf(file, "%f", *(matrix.buffer + (j+i)*sizeof(MatrixType)));
+            printf("%f", *(matrix.buffer + (j+i)*sizeof(MatrixType)));
         }
     }
 }
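
None of the attempted variants can work as written, for reasons visible in the diff itself: matrix.buffer is a typed pointer, so pointer arithmetic is already scaled by the element size and (j+i)*sizeof(MatrixType) both double-scales the offset and collapses the 2-D index (the row-major element is at i*matrix.cols + j); passing raw element bytes to fprintf as the format string is undefined behavior; and putw writes an int, which truncates MatrixType if it is a floating-point type, as the %f attempts suggest. Below is a minimal sketch of a text writer; the typedefs are assumptions modelled on the names in the diff, not the project's actual headers.

#include <stdio.h>

typedef double MatrixType;                                          /* assumption */
typedef struct { int rows; int cols; MatrixType *buffer; } Matrix;  /* assumption */

void writeMatrixText(Matrix matrix, FILE *file)  /* hypothetical helper, not the project's writeMatrix */
{
    /* Dimensions first, with separators, so a loader can parse them back. */
    fprintf(file, "%d %d\n", matrix.rows, matrix.cols);
    for (int i = 0; i < matrix.rows; i++)
    {
        for (int j = 0; j < matrix.cols; j++)
        {
            /* buffer is a MatrixType*, so indexing is already element-sized;
               row-major element (i, j) lives at i*cols + j. */
            fprintf(file, "%f ", matrix.buffer[i * matrix.cols + j]);
        }
        fputc('\n', file);
    }
}

If a binary format is preferred instead, the whole buffer can be written in one call with fwrite(matrix.buffer, sizeof(MatrixType), (size_t)matrix.rows * matrix.cols, file), provided loadModel reads it back with a matching fread.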

View File

@@ -11,9 +11,12 @@ static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
 {
     FILE *testDatei = fopen(path, "w");
     fprintf(testDatei, "__info2_neural_network_file_format__");
+    fprintf(testDatei, "%d%d", nn.layers->weights.rows, nn.layers->weights.cols);
+    // fprintf(testDatei, (char*) nn.numberOfLayers);
     for(int i = 0; i < nn.numberOfLayers; i++)
     {
-        fprintf(testDatei, "\n");
+        //fprintf(testDatei, "\n");
+        //putw((nn.layers + sizeof(Layer) * i), testDatei);
         writeMatrix(nn.layers[i].weights, testDatei);
         writeMatrix(nn.layers[i].biases, testDatei);
     }
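
The header written here has a parseability problem of its own: "%d%d" concatenates the two dimensions with no separator (12 and 3 becomes the same text as 1 and 23), and the commented-out fprintf(testDatei, (char*) nn.numberOfLayers) would interpret an integer as a pointer to a format string. A sketch with separators follows, building on the writeMatrixText sketch above; Layer and NeuralNetwork are assumptions modelled on the names in the diff.

typedef struct { Matrix weights; Matrix biases; } Layer;              /* assumption */
typedef struct { int numberOfLayers; Layer *layers; } NeuralNetwork;  /* assumption */

static void prepareNeuralNetworkFileSketch(const char *path, const NeuralNetwork nn)
{
    FILE *testDatei = fopen(path, "w");
    if (testDatei == NULL)
        return;
    fprintf(testDatei, "__info2_neural_network_file_format__\n");
    fprintf(testDatei, "%d\n", nn.numberOfLayers);  /* layer count as text, not a pointer cast */
    for (int i = 0; i < nn.numberOfLayers; i++)
    {
        /* nn.layers is a Layer*, so nn.layers[i] is already the i-th layer;
           nn.layers + sizeof(Layer)*i would overshoot by a factor of sizeof(Layer). */
        writeMatrixText(nn.layers[i].weights, testDatei);
        writeMatrixText(nn.layers[i].biases, testDatei);
    }
    fclose(testDatei);
}

With a layer count and per-matrix dimensions in the header, a loader can allocate before reading, which is what the test below checks.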
@@ -40,7 +43,7 @@ void test_loadModelReturnsCorrectNumberOfLayers(void)
     netUnderTest = loadModel(path);
     remove(path);
+    printf("\n%d\n%d\n", netUnderTest.numberOfLayers, expectedNet.numberOfLayers);
     TEST_ASSERT_EQUAL_INT(expectedNet.numberOfLayers, netUnderTest.numberOfLayers);
     clearModel(&netUnderTest);
 }
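
The added printf duplicates what the assertion already reports: the framework prints expected and actual values when TEST_ASSERT_EQUAL_INT fails. If extra context is wanted only on failure, the message variant keeps passing runs quiet (a sketch, assuming the Unity framework these assertion macros come from):

TEST_ASSERT_EQUAL_INT_MESSAGE(expectedNet.numberOfLayers,
                              netUnderTest.numberOfLayers,
                              "loadModel returned the wrong layer count");  /* hypothetical message */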