forward method
Implements the core logic of the layer's transformation.
Subclasses must implement this method to define how they process an input tensor and return an output tensor.
Implementation
/// Global average pooling over the time dimension.
///
/// Collapses a `(rows, features)` input [Matrix] into a single [Vector] of
/// length `features` by averaging each feature column over all rows, and
/// registers a backward [Node] that spreads the incoming gradient evenly
/// across every timestep.
@override
Tensor<Vector> forward(Tensor<dynamic> input) {
  // NOTE(review): assumes `input` wraps a non-empty Matrix — an empty
  // sequence would throw on `inputMatrix[0]` below. TODO confirm callers
  // never pass an empty sequence.
  Matrix inputMatrix = (input as Tensor<Matrix>).value;
  // Derive the row count from the data instead of trusting the
  // `sequenceLength` field to match it; this keeps the average correct
  // and the backward loop in bounds even if they ever disagree.
  int numRows = inputMatrix.length;
  int numFeatures = inputMatrix[0].length;

  // Column-wise accumulation, then a single divide per feature.
  Vector sums = List<double>.filled(numFeatures, 0.0);
  for (Vector row in inputMatrix) {
    for (int i = 0; i < numFeatures; i++) {
      sums[i] += row[i];
    }
  }
  Vector outValue =
      List<double>.generate(numFeatures, (i) => sums[i] / numRows);

  Tensor<Vector> out = Tensor<Vector>(outValue);
  out.creator = Node([input], () {
    // Averaging is linear, so each timestep receives an equal 1/numRows
    // share of the upstream gradient for its feature column.
    double distributedGrad = 1.0 / numRows;
    for (int r = 0; r < numRows; r++) {
      for (int c = 0; c < numFeatures; c++) {
        input.grad[r][c] += out.grad[c] * distributedGrad;
      }
    }
  }, opName: 'global_avg_pool_1d');
  return out;
}