startCapture method

Future<MediaStream?> startCapture({
  int frameRate = 30,
})

Start capturing the combined video + annotation stream.

Returns the processed MediaStream that can be sent via WebRTC.

Implementation

/// Starts capturing the combined video + annotation stream.
///
/// Draws an initial combined frame, creates a capture stream from the main
/// canvas at [frameRate] frames per second, and schedules a periodic redraw
/// so the annotations stay composited over the video.
///
/// Returns the processed [MediaStream] that can be sent via WebRTC, the
/// existing stream if capture is already running, or `null` when the canvas
/// or source video is not ready or stream creation fails.
///
/// Throws a [RangeError] if [frameRate] is not positive.
Future<MediaStream?> startCapture({int frameRate = 30}) async {
  // A non-positive frame rate is a programmer error: (1000 / 0) is Infinity
  // and Infinity.round() throws. Fail loudly instead of silently returning
  // null from the catch block below.
  if (frameRate <= 0) {
    throw RangeError.range(frameRate, 1, null, 'frameRate');
  }

  // Both the compositing canvas and the source video must be initialized.
  if (_mainCanvas == null || _screenVideo == null) {
    return null;
  }

  // Idempotent: a second call while already capturing returns the
  // current stream rather than creating a new one.
  if (_isCapturing) {
    return _processedStream;
  }

  try {
    _isCapturing = true;

    // Draw an initial frame so the first captured frame is not blank.
    _drawCombined();

    // Create MediaStream from the main canvas
    // (React equivalent: mediaCanvas!.captureStream(30)).
    final jsStream = _mainCanvas!.captureStream(frameRate);

    // Wrap the JS stream in a dart_webrtc MediaStreamWeb.
    _processedStream = webrtc.MediaStreamWeb(jsStream, 'annotation-capture');

    // Touch the track list to verify the stream exposes video tracks.
    final _ = jsStream.getVideoTracks().toDart;

    // Redraw the combined frame periodically at roughly [frameRate] fps
    // (React equivalent: setInterval(() => { drawCombined(); }, 30)).
    final frameDuration = Duration(milliseconds: (1000 / frameRate).round());
    _drawTimer = Timer.periodic(frameDuration, (_) {
      if (!_isCapturing) return;
      _drawCombined();
    });

    return _processedStream;
  } catch (_) {
    // Roll back ALL partial state — not just the flag — so a later call
    // can retry cleanly instead of observing a half-initialized stream.
    _drawTimer?.cancel();
    _drawTimer = null;
    _processedStream = null;
    _isCapturing = false;
    return null;
  }
}