ensProcess method
ENSProcessResult ensProcess(
  String input, {
  bool doNormalize = false,
  bool doBeautify = false,
  bool doTokenize = false,
  bool doNormalizations = false,
  bool doCure = false,
})
Used to compute ens_normalize, ens_beautify, ens_tokenize, ens_normalizations and ens_cure in one go.

Returns an ENSProcessResult with the following fields:
normalized: the normalized name, or null if the input cannot be normalized or doNormalize is false
beautified: the beautified name, or null if the input cannot be normalized or doBeautify is false
tokens: a list of Token objects, or null if doTokenize is false
cured: the cured name, or null if the input cannot be cured or doCure is false
cures: a list of fixed CurableSequence objects, or null if the input cannot be cured or doCure is false
error: a DisallowedSequence or CurableSequence, or null if the input is valid
normalizations: a list of NormalizableSequence objects, or null if doNormalizations is false
e.g.
ENSNormalize ensn = await ENSNormalize.getInstance();
ensn.ensProcess(
"Nàme🧙♂️1⃣.eth",
doNormalize: true,
doBeautify: true,
doTokenize: true,
doNormalizations: true,
doCure: true,
);
// Instance of 'ENSProcessResult'
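For illustration, a minimal sketch of reading the returned fields, continuing from the ensn instance above (res is just an example variable name; the fields are as listed earlier):
ENSProcessResult res = ensn.ensProcess(
  "Nàme🧙♂️1⃣.eth",
  doNormalize: true,
  doTokenize: true,
);
if (res.error == null) {
  print(res.normalized); // normalized name (doNormalize was true)
  print(res.tokens);     // list of Token objects (doTokenize was true)
  print(res.beautified); // null here, since doBeautify was not requested
} else {
  print(res.error);      // DisallowedSequence or CurableSequence
}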
Implementation
ENSProcessResult ensProcess(
String input, {
bool doNormalize = false,
bool doBeautify = false,
bool doTokenize = false,
bool doNormalizations = false,
bool doCure = false,
}) {
_requireInitialized();
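  // Fast path: inputs matching simpleNameRegex need no further processing;
  // the input is returned as-is, tokenized by splitting on the label separator.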
if (simpleNameRegex.hasMatch(input)) {
List<Token>? tokens;
if (doTokenize) {
tokens = [];
var currentCps = <int>[];
for (var c in input.runes) {
if (c == CP_STOP) {
tokens.add(TokenValid(cps: currentCps));
tokens.add(TokenStop());
currentCps = [];
} else {
currentCps.add(c);
}
}
tokens.add(TokenValid(cps: currentCps));
}
return ENSProcessResult(
normalized: doNormalize ? input : null,
beautified: doBeautify ? input : null,
tokens: tokens,
cured: doCure ? input : null,
cures: doCure ? [] : null,
error: null,
normalizations: doNormalizations ? [] : null,
);
}
var tokens = <Token>[];
DisallowedSequence? error;
var inputCur = 0;
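  // Scan the input, consuming whole emoji sequences first via the emoji regex
  // and classifying the remaining code units one at a time.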
var emojiIter = normalizationData.emojiRegex.allMatches(input).iterator;
var nextEmojiMatch = emojiIter.moveNext() ? emojiIter.current : null;
while (inputCur < input.length) {
if (nextEmojiMatch != null && nextEmojiMatch.start == inputCur) {
var emoji = nextEmojiMatch.group(0)!;
inputCur = nextEmojiMatch.end;
nextEmojiMatch = emojiIter.moveNext() ? emojiIter.current : null;
var emojiNoFe0f = _filterFe0f(emoji);
var emojiFe0f = normalizationData.emojiFe0fLookup[emojiNoFe0f]!;
tokens.add(
TokenEmoji(
emoji: _strToCodePoints(emojiFe0f).toList(),
input: _strToCodePoints(emoji).toList(),
cps: _strToCodePoints(emojiNoFe0f).toList(),
),
);
continue;
}
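    // Not an emoji at this position: classify the next code unit
    // (stop, valid, ignored, mapped, or disallowed).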
var c = input[inputCur];
var cp = c.codeUnitAt(0);
inputCur += 1;
if (cp == CP_STOP) {
tokens.add(TokenStop());
continue;
}
if (normalizationData.valid.contains(cp)) {
tokens.add(
TokenValid(
cps: [cp],
),
);
continue;
}
if (normalizationData.ignored.contains(cp)) {
tokens.add(
TokenIgnored(
cp: cp,
),
);
continue;
}
var mapping = normalizationData.mapped[cp];
if (mapping != null) {
tokens.add(
TokenMapped(
cp: cp,
cps: mapping,
),
);
continue;
}
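    // Anything else is disallowed; only the first such error is kept.
    // ZWJ/ZWNJ are reported as invisible rather than disallowed.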
error ??= CurableSequence(
type: c == '\u200d' || c == '\u200c'
? CurableSequenceType.invisible
: CurableSequenceType.disallowed,
index: inputCur - 1,
sequence: c,
suggested: '',
);
tokens.add(
TokenDisallowed(
cp: cp,
),
);
}
tokens = normalizeTokens(tokens);
var normalizations = doNormalizations ? _findNormalizations(tokens) : null;
List<bool> labelIsGreek = [];
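  // Whole-name checks run on a string in which each emoji token is replaced by
  // a single FE0F placeholder; labelIsGreek is filled in for beautification below.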
if (error == null) {
var emojisAsFe0f = tokens2str(tokens, (tok) => '\uFE0F');
error = _postCheck(emojisAsFe0f, labelIsGreek, input);
if (error is CurableSequence) {
_offsetErrStart(error, tokens);
}
}
String? normalized;
String? beautified;
if (error == null) {
normalized = doNormalize ? tokens2str(tokens, null) : null;
beautified = doBeautify ? tokens2beautified(tokens, labelIsGreek) : null;
}
tokens = doTokenize ? tokens : [];
String? cured;
List<CurableSequence>? cures;
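  // Curing is attempted on the raw input; if it fails with a
  // DisallowedSequence, cured and cures stay null.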
if (doCure) {
try {
var result = _cure(input);
cured = result.item1;
cures = result.item2;
} on DisallowedSequence {
// pass
}
}
return ENSProcessResult(
normalized: normalized,
beautified: beautified,
tokens: tokens,
cured: cured,
cures: cures,
error: error,
normalizations: normalizations,
);
}