Compare commits


No commits in common. "4e238675c8c3bee558df95febd510496c016e2e0" and "34a471bda6c52a8050c079683e8fec376b3f9c38" have entirely different histories.

3 changed files with 4 additions and 72 deletions

View File

@@ -40,8 +40,8 @@ mnistVisualization.o: mnistVisualization.c
 matrixTests: matrix.o matrixTests.c
 	$(CC) $(CFLAGS) -I$(unityfolder) -o runMatrixTests matrixTests.c matrix.o $(BINARIES)/libunity.a
 
-neuralNetworkTests: neuralNetwork.o neuralNetworkTests.c matrix.o
-	$(CC) $(CFLAGS) -I$(unityfolder) -o runNeuralNetworkTests neuralNetworkTests.c neuralNetwork.o matrix.o $(BINARIES)/libunity.a
+neuralNetworkTests: neuralNetwork.o neuralNetworkTests.c
+	$(CC) $(CFLAGS) -I$(unityfolder) -o runNeuralNetworkTests neuralNetworkTests.c neuralNetwork.o $(BINARIES)/libunity.a
 
 imageInputTests: imageInput.o imageInputTests.c
 	$(CC) $(CFLAGS) -I$(unityfolder) -o runImageInputTests imageInputTests.c imageInput.o $(BINARIES)/libunity.a

View File

@@ -42,7 +42,7 @@ void setMatrixAt(MatrixType value, Matrix matrix, unsigned int rowIdx, unsigned int colIdx)
 {
     if (matrix.buffer != NULL)
     {
-        if (rowIdx < matrix.rows && colIdx < matrix.cols)
+        if (rowIdx < matrix.rows || colIdx < matrix.cols)
         {
             matrix.buffer[rowIdx * matrix.cols + colIdx] = value;
         }

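For context on the hunk above: one side checks the indices with &&, the other with ||, and only the && form guarantees the write stays inside the buffer, since || lets the write through as soon as a single index is in range. Below is a minimal standalone sketch of the && variant, not the repository's code: MatrixType is assumed to be double, and setAtChecked is a hypothetical helper standing in for setMatrixAt.

#include <stdio.h>
#include <stdlib.h>

/* Illustrative stand-ins for the repository's types (MatrixType assumed to be double). */
typedef double MatrixType;
typedef struct { unsigned int rows, cols; MatrixType *buffer; } Matrix;

/* Bounds-checked write: both index conditions must hold (&&). An || check would
 * let e.g. rowIdx == rows with a valid colIdx through and write past the buffer. */
static void setAtChecked(MatrixType value, Matrix m, unsigned int rowIdx, unsigned int colIdx)
{
    if (m.buffer != NULL && rowIdx < m.rows && colIdx < m.cols)
    {
        m.buffer[rowIdx * m.cols + colIdx] = value;
    }
}

int main(void)
{
    Matrix m = { 2, 3, calloc(2 * 3, sizeof(MatrixType)) };
    setAtChecked(1.0, m, 1, 2);  /* in range: written */
    setAtChecked(5.0, m, 2, 0);  /* rowIdx out of range: silently ignored */
    printf("%f\n", m.buffer[1 * m.cols + 2]);
    free(m.buffer);
    return 0;
}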
View File

@@ -5,80 +5,12 @@
 #include "unity.h"
 #include "neuralNetwork.h"
-/*
-################
-Test file layout
-################
-HEADER
-inputDim
-outputDim
--- Layer 1 --
-weights (outputDim * inputDim * MatrixType)
-biases (outputDim * MatrixType)
-outputDim
--- Layer 2 --
-weights
-biases
-...
-...
--- Layer n --
-weights
-biases
-outputDim = 0 => end
-*/
-static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
-{
-    FILE *file = fopen(path, "wb");
-    if (file)
-    {
-        const char *fileTag = "__info2_neural_network_file_format__";
-        fwrite(fileTag, 1, strlen(fileTag), file);
-        // stops loadModel if no layers are present
-        if (nn.numberOfLayers == 0)
-        {
-            int zero = 0;
-            fwrite(&zero, sizeof(int), 1, file);
-            fclose(file);
-            return;
-            // TODO
-        }
-        // write the input and output dimension
-        int inputDim = nn.layers[0].weights.cols;
-        fwrite(&inputDim, sizeof(int), 1, file);
-        // for the remaining layers, write only the output dimension
-        for (unsigned int i = 0; i < nn.numberOfLayers; i++)
-        {
-            int outputDim = nn.layers[i].weights.rows;
-            fwrite(&outputDim, sizeof(int), 1, file);
-            int weightCount = nn.layers[i].weights.rows * nn.layers[i].weights.cols;
-            fwrite(nn.layers[i].weights.buffer, sizeof(MatrixType), weightCount, file);
-            int biasesCount = nn.layers[i].biases.rows * nn.layers[i].biases.cols;
-            fwrite(nn.layers[i].biases.buffer, sizeof(MatrixType), biasesCount, file);
-        }
-        // loadModel reads a 0 -> stop
-        int fileEnd = 0;
-        fwrite(&fileEnd, sizeof(int), 1, file);
-    }
-    fclose(file);
-}
 void test_loadModelReturnsCorrectNumberOfLayers(void)
 {
     const char *path = "some__nn_test_file.info2";