The softmax function
Applies the softmax operation to a vector, mapping its entries to a probability distribution (non-negative values summing to 1).
Implementation:
/// Computes the softmax of the vector wrapped by [v] and returns a new
/// tensor whose entries are non-negative and sum to 1.
///
/// Numerical stability: the maximum entry is subtracted before
/// exponentiation, since softmax(x) == softmax(x - c) for any constant c;
/// this prevents overflow in `exp` for large inputs.
///
/// Backward pass: instead of materializing the full n*n Jacobian
/// (J[i][j] = y_i * (delta_ij - y_j)), it uses the equivalent closed form
///   dL/dx_j = y_j * (g_j - sum_i g_i * y_i)
/// where y = softmax(x) and g is the upstream gradient. This accumulates
/// the same values into `v.grad` in O(n) rather than O(n^2).
///
/// An empty input vector produces an empty output (no division occurs).
Tensor<Vector> softmax(Tensor<Vector> v) {
  // Shift by the max for numerical stability; -infinity handles the
  // empty-vector case gracefully (the loops below simply do no work).
  final double maxVal = v.value.fold(-double.infinity, max);

  // Exponentiate the shifted values, then normalize by their sum.
  final Vector exps = [for (final val in v.value) exp(val - maxVal)];
  final double sumExps = exps.fold(0.0, (a, b) => a + b);
  final Vector outValue = [for (final e in exps) e / sumExps];

  final Tensor<Vector> out = Tensor<Vector>(outValue);
  out.creator = Node([v], () {
    // dot = sum_i g_i * y_i, shared by every component of the gradient.
    double dot = 0.0;
    for (int i = 0; i < out.value.length; i++) {
      dot += out.grad[i] * out.value[i];
    }
    // dL/dx_j = y_j * (g_j - dot): same result as the explicit Jacobian
    // contraction, in linear time.
    for (int j = 0; j < out.value.length; j++) {
      v.grad[j] += out.value[j] * (out.grad[j] - dot);
    }
    // cost reflects the O(n) backward pass (was n^2 for the Jacobian form).
  }, opName: 'softmax', cost: v.value.length);
  return out;
}