run method

```dart
@override
Future<void> run()
```

This application uses platform speech-to-text to listen to audio from the host mic, convert it to text, and send the text to the Frame. An image generation request is also sent, and the resulting content is shown on the Frame. The lifetime of this run() is therefore tens of seconds or so, due to image generation time and image transfer time. A corresponding main loop runs on the Frame (frame_app.lua).

Implementation

/// Main application flow: listens for speech on the host mic, streams
/// partial and final transcripts to the Frame, then generates an image
/// from the final transcript and transfers it to the Frame as a sprite
/// block.
///
/// End-to-end this can take tens of seconds (image generation plus
/// transfer); the Frame side runs a corresponding main loop
/// (frame_app.lua). Always returns the app to
/// [ApplicationState.ready] once a final result has been handled,
/// whether image fetch/processing succeeded or failed.
@override
Future<void> run() async {
  currentState = ApplicationState.running;
  if (mounted) setState(() {});

  // listen for STT
  await _speechToText.listen(
    listenOptions: SpeechListenOptions(
        cancelOnError: true, onDevice: true, listenMode: ListenMode.search),
    onResult: (SpeechRecognitionResult result) async {
      if (currentState == ApplicationState.ready) {
        // user has cancelled already, don't process result
        return;
      }

      if (result.finalResult) {
        // on a final result we generate the image
        _finalResult = result.recognizedWords;
        _partialResult = '';
        _log.fine('Final result: $_finalResult');
        _stopListening();
        // send final query text to Frame line 1 (before we confirm the title)
        // (skip the send if it's identical to what's already displayed)
        if (_finalResult != _prevText) {
          await frame!.sendMessage(TxPlainText(
              msgCode: 0x0a,
              text: TextUtils.wrapText(_finalResult, 300, 4).join('\n')));
          _prevText = _finalResult;
        }

        // first, download the image based on the prompt
        String? error;
        Uint8List? bytes;
        (bytes, error) = await fetchImage(_finalResult);

        if (bytes != null) {
          try {
            _imageBytes = bytes;

            // Update the UI based on the original image
            // (guarded: onResult may fire after this widget is disposed)
            if (mounted) {
              setState(() {
                _image = Image.memory(_imageBytes!,
                    gaplessPlayback: true, fit: BoxFit.cover);
              });
            }

            // yield here a moment in order to show the first image first
            await Future.delayed(const Duration(milliseconds: 10));

            // creating the sprite this way will quantize colors and possibly scale the image
            var sprite = TxSprite.fromImageBytes(
                msgCode: 0x0d, imageBytes: _imageBytes!);

            // Update the UI with the modified (quantized/scaled) image
            if (mounted) {
              setState(() {
                _image = Image.memory(img.encodePng(sprite.toImage()),
                    gaplessPlayback: true, fit: BoxFit.cover);
              });
            }

            // create the image sprite block header and its sprite lines
            // based on the sprite
            TxImageSpriteBlock isb = TxImageSpriteBlock(
                msgCode: 0x0d,
                image: sprite,
                spriteLineHeight: 20,
                progressiveRender: true);

            // and send the block header then the sprite lines to Frame
            await frame!.sendMessage(isb);

            for (var sprite in isb.spriteLines) {
              await frame!.sendMessage(sprite);
            }
          } catch (e) {
            _log.severe('Error processing image: $e');
          }
        } else {
          _log.fine('Error fetching image for "$_finalResult": "$error"');
        }

        // final result is done — listening has already been stopped, so
        // restore the ready state on all paths (success or failure);
        // otherwise the app would be stuck in `running` with no way out
        currentState = ApplicationState.ready;
        if (mounted) setState(() {});
      } else {
        // partial result - just display in-progress text
        _partialResult = result.recognizedWords;
        if (mounted) setState(() {});

        _log.fine('Partial result: $_partialResult, ${result.alternates}');
        // avoid re-sending identical text to the Frame
        if (_partialResult != _prevText) {
          // send partial result to Frame line 1
          await frame!.sendMessage(TxPlainText(
              msgCode: 0x0a,
              text: TextUtils.wrapText(_partialResult, 300, 4).join('\n')));
          _prevText = _partialResult;
        }
      }
    },
  );
}