ensProcess method

ENSProcessResult ensProcess(
  String input, {
  bool doNormalize = false,
  bool doBeautify = false,
  bool doTokenize = false,
  bool doNormalizations = false,
  bool doCure = false,
})

Used to compute

  • ens_normalize
  • ens_beautify
  • ens_tokenize
  • ens_normalizations
  • ens_cure

in one go.

Returns ENSProcessResult with the following fields:

  • normalized: normalized name, or null if the input cannot be normalized or doNormalize is false
  • beautified: beautified name, or null if the input cannot be normalized or doBeautify is false
  • tokens: list of Token objects, or null if doTokenize is false
  • cured: cured name, or null if the input cannot be cured or doCure is false
  • cures: list of fixed CurableSequence objects, or null if the input cannot be cured or doCure is false
  • error: DisallowedSequence or CurableSequence, or null if the input is valid
  • normalizations: list of NormalizableSequence objects, or null if doNormalizations is false

e.g.
ENSNormalize ensn = await ENSNormalize.getInstance();
ensn.ensProcess(
  "Nàme🧙‍♂️1⃣.eth",
  doNormalize: true,
  doBeautify: true,
  doTokenize: true,
  doNormalizations: true,
  doCure: true,
);
// Instance of 'ENSProcessResult'
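
A minimal sketch of consuming the result, relying only on the field names documented above (the printed values depend on the input):

ENSNormalize ensn = await ENSNormalize.getInstance();
ENSProcessResult result = ensn.ensProcess(
  "Nàme🧙‍♂️1⃣.eth",
  doNormalize: true,
  doBeautify: true,
  doCure: true,
);
if (result.error == null) {
  // Valid input: normalized and beautified are non-null because the
  // corresponding flags were set to true.
  print(result.normalized);
  print(result.beautified);
} else {
  // Normalization failed; cured holds a fixed name if the input is curable,
  // otherwise it stays null.
  print(result.error);
  print(result.cured);
}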

Implementation

ENSProcessResult ensProcess(
  String input, {
  bool doNormalize = false,
  bool doBeautify = false,
  bool doTokenize = false,
  bool doNormalizations = false,
  bool doCure = false,
}) {
  _requireInitialized();
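  // Fast path: input matching simpleNameRegex needs no further processing and
  // is returned as-is (with trivial tokens built only if requested).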
  if (simpleNameRegex.hasMatch(input)) {
    List<Token>? tokens;
    if (doTokenize) {
      tokens = [];
      var currentCps = <int>[];
      for (var c in input.runes) {
        if (c == CP_STOP) {
          tokens.add(TokenValid(cps: currentCps));
          tokens.add(TokenStop());
          currentCps = [];
        } else {
          currentCps.add(c);
        }
      }
      tokens.add(TokenValid(cps: currentCps));
    }
    return ENSProcessResult(
      normalized: doNormalize ? input : null,
      beautified: doBeautify ? input : null,
      tokens: tokens,
      cured: doCure ? input : null,
      cures: doCure ? [] : null,
      error: null,
      normalizations: doNormalizations ? [] : null,
    );
  }
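  // Full pass: walk the input, consuming whole emoji sequences first and then
  // classifying the remaining characters as stop, valid, ignored, mapped, or
  // disallowed tokens.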
  var tokens = <Token>[];
  DisallowedSequence? error;
  var inputCur = 0;
  var emojiIter = normalizationData.emojiRegex.allMatches(input).iterator;
  var nextEmojiMatch = emojiIter.moveNext() ? emojiIter.current : null;

  while (inputCur < input.length) {
    if (nextEmojiMatch != null && nextEmojiMatch.start == inputCur) {
      var emoji = nextEmojiMatch.group(0)!;
      inputCur = nextEmojiMatch.end;
      nextEmojiMatch = emojiIter.moveNext() ? emojiIter.current : null;
      var emojiNoFe0f = _filterFe0f(emoji);
      var emojiFe0f = normalizationData.emojiFe0fLookup[emojiNoFe0f]!;
      tokens.add(
        TokenEmoji(
          emoji: _strToCodePoints(emojiFe0f).toList(),
          input: _strToCodePoints(emoji).toList(),
          cps: _strToCodePoints(emojiNoFe0f).toList(),
        ),
      );
      continue;
    }
    var c = input[inputCur];
    var cp = c.codeUnitAt(0);
    inputCur += 1;
    if (cp == CP_STOP) {
      tokens.add(TokenStop());
      continue;
    }
    if (normalizationData.valid.contains(cp)) {
      tokens.add(
        TokenValid(
          cps: [cp],
        ),
      );
      continue;
    }
    if (normalizationData.ignored.contains(cp)) {
      tokens.add(
        TokenIgnored(
          cp: cp,
        ),
      );
      continue;
    }
    var mapping = normalizationData.mapped[cp];
    if (mapping != null) {
      tokens.add(
        TokenMapped(
          cp: cp,
          cps: mapping,
        ),
      );
      continue;
    }
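    // Only the first problem is kept as the reported error; ZWJ/ZWNJ are
    // flagged as invisible, any other character as disallowed.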
    error ??= CurableSequence(
      type: c == '\u200d' || c == '\u200c'
          ? CurableSequenceType.invisible
          : CurableSequenceType.disallowed,
      index: inputCur - 1,
      sequence: c,
      suggested: '',
    );

    tokens.add(
      TokenDisallowed(
        cp: cp,
      ),
    );
  }

  tokens = normalizeTokens(tokens);
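  // Collect candidate normalizations if requested, then run whole-name
  // post-checks when no per-character error has been found.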
  var normalizations = doNormalizations ? _findNormalizations(tokens) : null;
  List<bool> labelIsGreek = [];
  if (error == null) {
    var emojisAsFe0f = tokens2str(tokens, (tok) => '\uFE0F');
    error = _postCheck(emojisAsFe0f, labelIsGreek, input);
    if (error is CurableSequence) {
      _offsetErrStart(error, tokens);
    }
  }
  String? normalized;
  String? beautified;
  if (error == null) {
    normalized = doNormalize ? tokens2str(tokens, null) : null;
    beautified = doBeautify ? tokens2beautified(tokens, labelIsGreek) : null;
  }
  tokens = doTokenize ? tokens : [];

  String? cured;
  List<CurableSequence>? cures;
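  // Curing is attempted on the raw input independently of the result above;
  // a DisallowedSequence thrown here means the input cannot be cured at all.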
  if (doCure) {
    try {
      var result = _cure(input);
      cured = result.item1;
      cures = result.item2;
    } on DisallowedSequence {
      // pass
    }
  }

  return ENSProcessResult(
    normalized: normalized,
    beautified: beautified,
    tokens: tokens,
    cured: cured,
    cures: cures,
    error: error,
    normalizations: normalizations,
  );
}