createModelFromArff method
This function creates a model from the given ARFF file and class name: it trains a multilayer perceptron with backpropagation, treating the attribute whose name matches params.className as the class (output) attribute.
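As a rough usage sketch, the method might be called along the following lines. The parseArffFile helper and the ARFFModelCreationParameter constructor arguments shown here are assumptions made for illustration; only the arff and className fields are actually read by the implementation below.

// Hypothetical usage; parseArffFile and the named constructor parameters
// are assumed for illustration and may not match the real API.
final arff = parseArffFile('iris.arff');
final Model model = createModelFromArff(
  ARFFModelCreationParameter(arff: arff, className: 'class'),
);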
Implementation
Model createModelFromArff(ARFFModelCreationParameter params) {
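// Build the multilayer perceptron: create the layers and wire them together with edges.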
List<Layer> mlp = connectLayers(layers: createLayers());
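// Work on a copy of the ARFF rows so params.arff.data itself is left untouched.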
List<List<ARFFData>> data =
params.arff.data.map((list) => List<ARFFData>.from(list)).toList();
int dataLineIndex = 0;
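// Locate the class attribute and strip its column from every row; the remaining columns feed the input layer.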
int classIndex = params.arff.attributesList
.indexWhere((attr) => attr.name == params.className);
for (List<ARFFData> dt in data) {
dt.removeAt(classIndex);
}
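// Per-column min/max values, used below to scale numeric inputs into the range [0, 1].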
Map<String, List<double>> normalizationValues =
_normalizationReferenceValues(arffData: data);
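// Copy of the attribute list with the class attribute removed, i.e. the input attributes only.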
List<ARFFAttributes> attrs = [];
attrs.addAll(params.arff.attributesList);
attrs.removeAt(classIndex);
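// One pass over the whole data set per epoch; the rows are re-copied here because each pass consumes them (data.removeAt(0) below).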
for (int i = 0; i < epoch; i++) {
data = params.arff.data.map((list) => List<ARFFData>.from(list)).toList();
dataLineIndex = 0;
classIndex = params.arff.attributesList
.indexWhere((attr) => attr.name == params.className);
for (List<ARFFData> dt in data) {
dt.removeAt(classIndex);
}
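// Train on one row at a time until this epoch's copy of the data is exhausted.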
do {
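// One-hot encode the nominal input attributes of the current row: each value that is present gets 1.0.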
Map<String, double> inputNomAttribValue = {};
for (ARFFAttributes attr in attrs) {
if (attr.type == 'nominal') {
for (ARFFData arffDt in data.first) {
if (arffDt.name == attr.name) {
inputNomAttribValue[arffDt.value] = 1.0;
}
}
}
}
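// The same one-hot treatment for the class attribute when it is nominal.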
Map<String, double> outputNomAttribValue = {};
if (params.arff.attributesList[classIndex].type == 'nominal') {
if (params.arff.data[dataLineIndex][classIndex].name ==
params.arff.attributesList[classIndex].name) {
outputNomAttribValue[
params.arff.data[dataLineIndex][classIndex].value] = 1.0;
}
}
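// LOAD NEURON VALUES: nominal inputs come from the one-hot map, numeric inputs
// are min-max normalized, and output neurons receive this row's target value;
// parse or lookup failures fall back to 0.0.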
for (Layer layer in mlp) {
if (layer.layerType == LayerType.input) {
for (Neuron neuron in layer.neurons) {
try {
if (inputNomAttribValue.containsKey(neuron.name)) {
neuron.value = inputNomAttribValue[neuron.name];
} else if (neuron.name ==
data.first
.firstWhere((dt) => dt.name == neuron.name)
.name) {
int index =
data.first.indexWhere((dt) => dt.name == neuron.name);
neuron.value = (double.parse(data.first
.firstWhere((dt) => dt.name == neuron.name)
.value) -
normalizationValues['minValues']![index]) /
(normalizationValues['maxValues']![index] -
normalizationValues['minValues']![index]);
}
} catch (exception) {
neuron.value = 0.0;
}
}
} else if (layer.layerType == LayerType.output) {
for (Neuron neuron in layer.neurons) {
try {
if (outputNomAttribValue.containsKey(neuron.name)) {
neuron.value = outputNomAttribValue[neuron.name];
} else if (neuron.name ==
params.arff.data[dataLineIndex][classIndex].name) {
neuron.value = double.parse(
params.arff.data[dataLineIndex][classIndex].value);
} else {
neuron.value = 0.0;
}
} catch (except) {
neuron.value = 0.0;
}
}
}
}
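// FORWARD PASS: each hidden and output neuron takes the weighted sum of its
// incoming edges and applies the activation function; the previous value is
// kept in oldValue (for output neurons that is the target loaded above).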
for (Layer layer in mlp) {
if (layer.layerType == LayerType.hidden ||
layer.layerType == LayerType.output) {
for (Neuron neuron in layer.neurons) {
double net = 0.0;
for (Edge edge in neuron.inputEdges!) {
net += edge.weight! * edge.inputNeuron.value!;
}
neuron.oldValue = neuron.value;
neuron.value = activationFunction(net);
}
}
}
// ERROR CALCULATION: output delta = value * (1 - value) * (target - value);
// the target was saved in oldValue before the forward pass overwrote value.
Layer currentLayer = mlp.last;
for (Layer layer in mlp) {
if (layer.layerType == LayerType.output) {
currentLayer = layer;
for (Neuron neuron in layer.neurons) {
neuron.error = neuron.value! *
(1 - neuron.value!) *
(neuron.oldValue! - neuron.value!);
}
}
}
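// Backpropagate from the output layer towards the input layer: each neuron's
// error is value * (1 - value) times the weighted errors of the neurons it feeds.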
do {
for (Neuron neuron in currentLayer.neurons) {
for (Edge inEdge in neuron.inputEdges!) {
double error = 0;
for (Edge outEdge in inEdge.inputNeuron.outputEdges!) {
error += outEdge.weight! * outEdge.outputNeuron.error!;
}
inEdge.inputNeuron.error = inEdge.inputNeuron.value! *
(1 - inEdge.inputNeuron.value!) *
error;
}
}
currentLayer = currentLayer.previousLayer!;
} while (currentLayer.layerType != LayerType.input);
// WEIGHT UPDATES: walk forward from the input layer, adjusting each edge by
// learningRate * (error of its output neuron) * (value of its input neuron).
do {
for (Neuron neuron in currentLayer.neurons) {
for (Edge edge in neuron.outputEdges!) {
edge.weight = edge.weight! +
learningRate *
edge.outputNeuron.error! *
edge.inputNeuron.value!;
}
}
currentLayer = currentLayer.nextLayer!;
} while (currentLayer.layerType != LayerType.output);
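// Advance to the next training row.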
dataLineIndex++;
data.removeAt(0);
} while (data.isNotEmpty);
}
return Model(layers: mlp);
}