separateTokens function
List<Token> separateTokens(
  String input, {
  ComputeContext context = const DefaultComputeContext(),
})
Does a lexical analysis by splitting the input into tokens.
Implementation
/// Does a lexical analysis by splitting [input] into a list of tokens.
///
/// All U+0020 spaces are stripped from [input] before lexing, so the
/// position recorded on each emitted token refers to the space-free string,
/// not the caller's original input.
/// NOTE(review): only ' ' is removed — tabs/newlines pass through; confirm
/// that is intended.
///
/// Algorithm (maximal munch): at every position, each [TokenType] registered
/// on [context] is grown one character at a time until it stops validating;
/// the longest surviving candidate that also passes the final `validate`
/// check against the previously emitted token wins and is turned into a
/// token via `createToken`.
///
/// Throws a [ComputationError] with step [ComputationStep.lexing] when
/// [context] has no registered token types, or when no type matches at the
/// current position.
List<Token> separateTokens(String input,
    {ComputeContext context = const DefaultComputeContext()}) {
  // Index of the first character not yet covered by an emitted token.
  int lastTokenSeparation = 0;
  // Scan cursor; reset to lastTokenSeparation before probing each type.
  int pos = 0;
  input = input.replaceAll(' ', '');
  List<Token> tokens = [];
  while (pos < input.length) {
    // Longest match length each registered type achieves at this position.
    // LinkedHashMap keeps registration order, so earlier-registered types
    // come first among equal lengths after the sort below.
    LinkedHashMap<TokenType, int> lengthPerToken = LinkedHashMap();
    for (TokenType type in context.registeredTokens) {
      pos = lastTokenSeparation;
      while (true) {
        // Candidate text, grown by one character per iteration.
        final buildingToken = input.substring(lastTokenSeparation, pos + 1);
        final doesValidate = type.validateType(context,
            tokens.lastOrNull?.type ?? NoTokenType(), buildingToken, null);
        // Single-character types — or any type that has reached the end of
        // the input — decide immediately: record the full candidate length
        // on success, or one character less on failure (a resulting 0 is
        // filtered out below).
        if (!type.isMultiChar || pos == input.length - 1) {
          if (doesValidate) {
            lengthPerToken[type] = buildingToken.length;
          } else {
            lengthPerToken[type] = buildingToken.length - 1;
          }
          break;
        }
        // A multi-char type stopped validating: the previous (one shorter)
        // candidate was its longest valid run.
        if (!doesValidate) {
          lengthPerToken[type] = buildingToken.length - 1;
          break;
        }
        pos++;
      }
    }
    // Candidates ordered longest-first; zero-length matches are dropped.
    final longest = lengthPerToken.entries.toList()
      ..removeWhere((element) => element.value == 0)
      ..sort((a, b) => b.value.compareTo(a.value));
    // Every registered type records an entry above, so an empty map means
    // the context has no registered token types at all.
    if (lengthPerToken.isEmpty) {
      throw ComputationError(ComputationStep.lexing,
          message: 'No tokens registered in context?');
    }
    // Walk the candidates longest-first until one also passes the final
    // validate() check against the previously emitted token.
    while (true) {
      if (longest.isEmpty) {
        throw ComputationError(ComputationStep.lexing,
            message: 'No matching token found',
            globalPosition: lastTokenSeparation);
      }
      final currentLongest = longest.first;
      final buildingToken = input.substring(
          lastTokenSeparation, lastTokenSeparation + currentLongest.value);
      if (currentLongest.key.validate(
          context, tokens.lastOrNull ?? NoToken(), buildingToken, null)) {
        tokens.add(currentLongest.key
            .createToken(context, buildingToken, lastTokenSeparation));
        // Advance both markers past the token just emitted.
        lastTokenSeparation = lastTokenSeparation + currentLongest.value;
        pos = lastTokenSeparation;
        break;
      } else {
        // Longest candidate rejected; retry with the next-longest one.
        _log.finest(
            'Candidate deleted ${currentLongest.key.runtimeType.toString()}');
        longest.remove(currentLongest);
      }
    }
  }
  return tokens;
}