Bienert 2025-11-20 14:37:56 +01:00
parent 17c546f4d5
commit bf8c2b0bb5
2 changed files with 93 additions and 7 deletions


@@ -60,15 +60,58 @@ Matrix add(const Matrix matrix1, const Matrix matrix2)
result.rows = result.cols = 0;
result.buffer = NULL;
/* basic validation */
if (matrix1.rows == 0 || matrix1.cols == 0 || matrix1.buffer == NULL) return result;
if (matrix2.rows == 0 || matrix2.cols == 0 || matrix2.buffer == NULL) return result;
/* Case 1: identical dimensions -> element-wise addition */
if (matrix1.rows == matrix2.rows && matrix1.cols == matrix2.cols)
{
result = createMatrix(matrix1.rows, matrix1.cols);
if (!result.buffer) return result;
size_t count = (size_t)matrix1.rows * matrix1.cols;
for (size_t i = 0; i < count; ++i)
result.buffer[i] = matrix1.buffer[i] + matrix2.buffer[i];
return result;
}
/* Case 2: matrix2 is a bias (rows x 1) -> broadcast across the columns of matrix1 */
if (matrix1.rows == matrix2.rows && matrix2.cols == 1)
{
result = createMatrix(matrix1.rows, matrix1.cols);
if (!result.buffer) return result;
for (unsigned int c = 0; c < matrix1.cols; ++c)
{
for (unsigned int r = 0; r < matrix1.rows; ++r)
{
size_t idx = (size_t)r * matrix1.cols + c;
result.buffer[idx] = matrix1.buffer[idx] + matrix2.buffer[r];
}
}
return result;
}
/* Case 3: matrix1 is a bias (rows x 1) -> broadcast across the columns of matrix2 */
if (matrix2.rows == matrix1.rows && matrix1.cols == 1)
{
result = createMatrix(matrix2.rows, matrix2.cols);
if (!result.buffer) return result;
for (unsigned int c = 0; c < matrix2.cols; ++c)
{
for (unsigned int r = 0; r < matrix2.rows; ++r)
{
size_t idx = (size_t)r * matrix2.cols + c;
result.buffer[idx] = matrix2.buffer[idx] + matrix1.buffer[r];
}
}
return result;
}
/* no compatible shape -> return the empty matrix */
return result;
}
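
The broadcast cases are easiest to see with a small worked example. The sketch below is illustrative and not part of the commit: it assumes createMatrix(rows, cols) returns a zero-initialised, row-major matrix as the indexing above implies, that MatrixType is the matrix element type, and the helper name exampleBiasBroadcast is hypothetical; cleanup is omitted because the project's deallocation routine is not shown in this diff.

void exampleBiasBroadcast(void)        /* hypothetical helper, for illustration only */
{
    Matrix a = createMatrix(2, 3);     /* 2x3 "activations" */
    Matrix bias = createMatrix(2, 1);  /* one bias value per row */
    if (!a.buffer || !bias.buffer) return;
    for (size_t i = 0; i < 6; ++i)
        a.buffer[i] = (MatrixType)(i + 1);   /* row-major: 1 2 3 / 4 5 6 */
    bias.buffer[0] = 10;
    bias.buffer[1] = 20;
    Matrix sum = add(a, bias);         /* hits Case 2: rows match, bias has one column */
    /* expected result, row by row: 11 12 13 / 24 25 26 */
    (void)sum;
}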


@@ -8,7 +8,50 @@
static void prepareNeuralNetworkFile(const char *path, const NeuralNetwork nn)
{
if (!path) return;
FILE *file = fopen(path, "wb");
if (!file) return;
/* file header */
const char *fileTag = "__info2_neural_network_file_format__";
fwrite(fileTag, sizeof(char), strlen(fileTag), file);
/* no layers -> write two zeros and stop */
if (nn.numberOfLayers == 0) {
int zero = 0;
fwrite(&zero, sizeof(int), 1, file);
fwrite(&zero, sizeof(int), 1, file);
fclose(file);
return;
}
/* first dimensions: input = cols of the first weight matrix, output = rows of the first weight matrix */
int inputDim = (int)nn.layers[0].weights.cols;
int outputDim = (int)nn.layers[0].weights.rows;
fwrite(&inputDim, sizeof(int), 1, file);
fwrite(&outputDim, sizeof(int), 1, file);
/* for each layer: weights (rows*cols values), biases (rows*1 values),
followed by the next output dimension (or 0 to terminate) */
for (unsigned int i = 0; i < nn.numberOfLayers; ++i) {
Layer layer = nn.layers[i];
size_t wcount = (size_t)layer.weights.rows * layer.weights.cols;
if (wcount > 0 && layer.weights.buffer != NULL) {
fwrite(layer.weights.buffer, sizeof(MatrixType), wcount, file);
}
size_t bcount = (size_t)layer.biases.rows * layer.biases.cols;
if (bcount > 0 && layer.biases.buffer != NULL) {
fwrite(layer.biases.buffer, sizeof(MatrixType), bcount, file);
}
int nextOutput = (i + 1 < nn.numberOfLayers) ? (int)nn.layers[i + 1].weights.rows : 0;
fwrite(&nextOutput, sizeof(int), 1, file);
}
fclose(file);
}
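/* Illustrative sketch, not part of the commit: reads back the layout that
 * prepareNeuralNetworkFile() writes above, purely to document the format.
 * Assumptions: the project's MatrixType typedef is in scope, <stdio.h>,
 * <stdlib.h> and <string.h> are included by this test file, each layer's
 * input dimension equals the previous layer's output dimension (which is
 * what the single "next output" int per layer implies), and the name
 * dumpNeuralNetworkFile is hypothetical. */
static void dumpNeuralNetworkFile(const char *path)
{
    FILE *file = fopen(path, "rb");
    if (!file) return;
    /* skip the fixed file tag written at the start of the file */
    const char *fileTag = "__info2_neural_network_file_format__";
    if (fseek(file, (long)strlen(fileTag), SEEK_SET) != 0) { fclose(file); return; }
    int inputDim = 0, outputDim = 0;
    if (fread(&inputDim, sizeof(int), 1, file) != 1 ||
        fread(&outputDim, sizeof(int), 1, file) != 1) { fclose(file); return; }
    while (inputDim > 0 && outputDim > 0) {
        size_t wcount = (size_t)outputDim * (size_t)inputDim;
        MatrixType *weights = malloc(wcount * sizeof(MatrixType));
        MatrixType *biases = malloc((size_t)outputDim * sizeof(MatrixType));
        if (!weights || !biases) { free(weights); free(biases); break; }
        fread(weights, sizeof(MatrixType), wcount, file);           /* rows x cols, row-major */
        fread(biases, sizeof(MatrixType), (size_t)outputDim, file); /* rows x 1 */
        printf("layer: %d inputs -> %d outputs\n", inputDim, outputDim);
        free(weights);
        free(biases);
        /* this layer's outputs feed the next layer's inputs */
        inputDim = outputDim;
        outputDim = 0;
        fread(&outputDim, sizeof(int), 1, file); /* 0 terminates the layer list */
    }
    fclose(file);
}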
void test_loadModelReturnsCorrectNumberOfLayers(void)