#include "matrix.h" #include // Matrix erstellen Matrix createMatrix(unsigned int rows, unsigned int cols) { Matrix m; if (rows == 0 || cols == 0) { m.rows = 0; m.cols = 0; m.buffer = NULL; return m; } m.rows = rows; m.cols = cols; m.buffer = (MatrixType*)calloc(rows * cols, sizeof(MatrixType)); if (!m.buffer) { m.rows = 0; m.cols = 0; } return m; } // Speicher freigeben void clearMatrix(Matrix *matrix) { if (!matrix || !matrix->buffer) return; free(matrix->buffer); matrix->buffer = NULL; matrix->rows = 0; matrix->cols = 0; } // Wert setzen void setMatrixAt(MatrixType value, Matrix matrix, unsigned int rowIdx, unsigned int colIdx) { if (rowIdx >= matrix.rows || colIdx >= matrix.cols) return; matrix.buffer[rowIdx * matrix.cols + colIdx] = value; } // Wert auslesen MatrixType getMatrixAt(const Matrix matrix, unsigned int rowIdx, unsigned int colIdx) { if (rowIdx >= matrix.rows || colIdx >= matrix.cols) return UNDEFINED_MATRIX_VALUE; return matrix.buffer[rowIdx * matrix.cols + colIdx]; } // Addition Matrix add(const Matrix matrix1, const Matrix matrix2) { Matrix result; if (matrix1.rows != matrix2.rows || matrix1.cols != matrix2.cols) { result.rows = 0; result.cols = 0; result.buffer = NULL; return result; } result = createMatrix(matrix1.rows, matrix1.cols); for (unsigned int i = 0; i < matrix1.rows * matrix1.cols; i++) result.buffer[i] = matrix1.buffer[i] + matrix2.buffer[i]; return result; } // Multiplikation Matrix multiply(const Matrix matrix1, const Matrix matrix2) { Matrix result; if (matrix1.cols != matrix2.rows) { result.rows = 0; result.cols = 0; result.buffer = NULL; return result; } result = createMatrix(matrix1.rows, matrix2.cols); for (unsigned int i = 0; i < matrix1.rows; i++) { for (unsigned int j = 0; j < matrix2.cols; j++) { MatrixType sum = 0; for (unsigned int k = 0; k < matrix1.cols; k++) sum += matrix1.buffer[i * matrix1.cols + k] * matrix2.buffer[k * matrix2.cols + j]; result.buffer[i * result.cols + j] = sum; } } return result; }