forward method
Implementation
/// Runs a forward pass through a fixed 2-input, 2-hidden-neuron,
/// 1-output feed-forward network.
///
/// Hidden layer: two ReLU-activated neurons using `weights[0..3]` and
/// `biases[0..1]`. Output layer: a single linear neuron using
/// `weights[4..5]` and `biases[2]`.
///
/// [inputs] must contain exactly two nodes; throws [ArgumentError]
/// otherwise (instead of an opaque [RangeError] from indexing).
///
/// Returns the output [Node] of the network.
Node forward(List<Node> inputs) {
  if (inputs.length != 2) {
    throw ArgumentError.value(
        inputs.length, 'inputs', 'expected exactly 2 input nodes');
  }
  // Hidden layer (2 neurons, ReLU activation).
  Node h1 =
      (inputs[0] * weights[0] + inputs[1] * weights[1] + biases[0]).relu();
  Node h2 =
      (inputs[0] * weights[2] + inputs[1] * weights[3] + biases[1]).relu();
  // Output layer (linear activation): weighted sum of hidden activations.
  return h1 * weights[4] + h2 * weights[5] + biases[2];
}