`relu` function — element-wise rectified linear unit (ReLU).

Tensor<Vector> relu(
  1. Tensor<Vector> v — input tensor whose elements are clamped at zero
)

Implementation

/// Element-wise ReLU: `out[i] = max(v.value[i], 0.0)`.
///
/// Returns a new [Tensor] holding the clamped values and wires up a
/// backward [Node] so that, during backpropagation, gradient flows
/// only through positions where the input was strictly positive.
Tensor<Vector> relu(Tensor<Vector> v) {
  final int n = v.value.length;
  // Forward pass: zero out every non-positive entry.
  final Vector activated = [
    for (int i = 0; i < n; i++) v.value[i] > 0 ? v.value[i] : 0.0,
  ];
  final out = Tensor<Vector>(activated);
  // Backward pass: d(relu)/dx is 1 where x > 0, otherwise 0, so the
  // incoming gradient is multiplied by that 0/1 mask before accumulating.
  out.creator = Node(
    [v],
    () {
      for (int i = 0; i < v.value.length; i++) {
        final double mask = v.value[i] > 0 ? 1.0 : 0.0;
        v.grad[i] += out.grad[i] * mask;
      }
    },
    opName: 'relu_vector',
    cost: n,
  );
  return out;
}