createProcessedStream method
Creates a MediaStream from processed frames.
This creates a WebRTC MediaStream that receives frames from the virtual background processor.
React Equivalent
processedStream = mediaCanvas.captureStream(frameRate);
Implementation
/// Creates a WebRTC [MediaStream] whose video track is fed by the native
/// virtual-background processor.
///
/// Web equivalent: `processedStream = mediaCanvas.captureStream(frameRate);`
///
/// [width], [height], and [fps] configure the native frame source the first
/// time it is initialized. Returns a fallback (dummy) stream when the
/// platform is unsupported, the native plugin is missing, or any native call
/// fails; otherwise returns the stream wrapping the virtual track.
Future<MediaStream?> createProcessedStream({
  required int width,
  required int height,
  required int fps,
}) async {
  // Unsupported platforms get a dummy stream instead of a native one.
  if (!isSupported) return _createFallbackStream();

  try {
    // Lazily initialize the native source on first use; bail out to the
    // fallback if initialization reports failure.
    if (_virtualTrackId == null &&
        !await initialize(width: width, height: height, fps: fps)) {
      return _createFallbackStream();
    }

    // Ask the native side to wrap the virtual track in a stream.
    final response = await _channel.invokeMethod<Map<dynamic, dynamic>>(
      'createStream',
      {'trackId': _virtualTrackId},
    );

    final streamId = response?['streamId'];
    if (streamId != null) {
      // Resolve the native stream id to a Dart-side MediaStream object.
      _virtualStream = await _getStreamFromNative(streamId as String);
      debugPrint(
          'VirtualBackgroundChannel: Created stream ${_virtualStream?.id}');
      return _virtualStream;
    }
  } on PlatformException catch (e) {
    debugPrint(
        'VirtualBackgroundChannel: Failed to create stream: ${e.message}');
  } on MissingPluginException {
    debugPrint('VirtualBackgroundChannel: Native plugin not available');
  }
  // Any failure path (null result, missing streamId, exception) ends here.
  return _createFallbackStream();
}