forward method
Applies a 2D convolution to a multi-channel input, producing one output feature map per output channel.
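The spatial size of each output map follows from the padded input size, the kernel size, and the stride. Assuming pad(padding) adds padding rows and columns on every side, each output dimension is (inputSize + 2 * padding - kernelSize) ~/ stride + 1; for example, a 5x5 input with kernelSize 3, stride 1, and padding 1 yields a 5x5 output map, since (5 + 2 - 3) ~/ 1 + 1 == 5.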
Implementation
List<Matrix2d> forward(List<Matrix2d> input) {
  if (input.length != inChannels) {
    throw ArgumentError(
        "Expected $inChannels input channels, got ${input.length}");
  }

  // Apply padding to each input channel if needed
  List<Matrix2d> paddedInputs =
      input.map((mat) => padding > 0 ? mat.pad(padding) : mat).toList();

  int outputRows = ((paddedInputs[0].rows() - kernelSize) ~/ stride) + 1;
  int outputCols = ((paddedInputs[0].cols() - kernelSize) ~/ stride) + 1;

  // Output feature maps for each output channel
  List<Matrix2d> outputFeatureMaps =
      List.generate(outChannels, (_) => Matrix2d(outputRows, outputCols));

  for (int outIdx = 0; outIdx < outChannels; outIdx++) {
    Matrix2d output = outputFeatureMaps[outIdx];
    Value bias = biases.values[outIdx];
    for (int i = 0; i < outputRows; i++) {
      for (int j = 0; j < outputCols; j++) {
        Value sum = Value(0);
        // Sum over all input channels
        for (int inIdx = 0; inIdx < inChannels; inIdx++) {
          Matrix2d inputChannel = paddedInputs[inIdx];
          Matrix2d kernel = kernels[outIdx][inIdx];
          for (int ki = 0; ki < kernelSize; ki++) {
            for (int kj = 0; kj < kernelSize; kj++) {
              int row = i * stride + ki;
              int col = j * stride + kj;
              sum += inputChannel.at(row, col) * kernel.at(ki, kj);
            }
          }
        }
        // Add bias and store result
        sum += bias;
        output.data!.values[i * outputCols + j] = sum;
      }
    }
  }
  return outputFeatureMaps;
}
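A minimal usage sketch, assuming a Conv2d layer class whose constructor takes the inChannels, outChannels, kernelSize, stride, and padding fields used above (the actual constructor is not shown on this page), together with the Matrix2d(rows, cols) constructor from the implementation:

// Hypothetical constructor call; parameter names and order are assumptions.
final conv = Conv2d(
    inChannels: 3, outChannels: 8, kernelSize: 3, stride: 1, padding: 1);

// Three 32x32 input channels (for example, an RGB image).
final input = List<Matrix2d>.generate(3, (_) => Matrix2d(32, 32));

// With kernelSize 3, stride 1, and padding 1, each map stays 32x32:
// (32 + 2 - 3) ~/ 1 + 1 == 32.
final featureMaps = conv.forward(input);
print(featureMaps.length); // 8 output feature maps

Passing a list whose length differs from inChannels throws an ArgumentError, as shown in the guard at the top of the method.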