softmaxDeriv function
Computes the derivative of the softmax activation applied row-wise to a matrix.
- matrix: The matrix to find the derivative of.
- Returns: A new matrix containing the derivatives.
Implementation
/// Computes the element-wise derivative of the softmax activation for
/// each row of [matrix].
///
/// Softmax is applied independently to every row, using the standard
/// max-subtraction trick for numerical stability. The returned matrix
/// has the same shape as the input and holds `s * (1 - s)` for every
/// softmax output `s` — i.e. the diagonal of the per-row Jacobian.
///
/// NOTE(review): the previous implementation tried to emit Jacobian
/// off-diagonal terms into a same-shaped matrix by comparing the row
/// index to the column index, and read `getAt(j, j)` — out of range
/// whenever `matrix.col > matrix.row`, and mixing values from
/// unrelated rows otherwise. A full Jacobian per row is (col x col)
/// and cannot fit the input's shape, so the element-wise derivative
/// is returned instead.
///
/// - [matrix]: the pre-activation values, one sample per row.
/// - Returns a new Matrix of the same shape containing the derivatives.
Matrix softmaxDeriv(Matrix matrix) {
  Matrix deriv = Matrix(matrix.row, matrix.col);
  for (int i = 0; i < matrix.row; i++) {
    List<double> row = matrix[i] as List<double>;
    // Subtract the row maximum before exponentiating to avoid overflow.
    double maxV = row.reduce((a, b) => a > b ? a : b);
    List<double> expRow = row.map((x) => exp(x - maxV)).toList();
    double sumExp = expRow.reduce((a, b) => a + b);
    for (int j = 0; j < matrix.col; j++) {
      double s = expRow[j] / sumExp;
      // d(softmax_j)/d(x_j) = s_j * (1 - s_j).
      deriv.setAt(i, j, value: s * (1 - s));
    }
  }
  return deriv;
}