miniav 0.1.0
miniav: ^0.1.0
A cross-platform audio and video capture library that delivers a standard buffer format with a GPU path.
example/miniav_example.dart
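// Run from the package root with: dart run example/miniav_example.dart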
import 'dart:async';

import 'package:miniav/miniav.dart';
void main() async {
  print('🎥 MiniAV Example - Opening all stream types');
// Set log level for debugging
MiniAV.setLogLevel(MiniAVLogLevel.info);
// Storage for contexts to clean up later
final contexts = <String, dynamic>{};
try {
// 1. Camera Stream
await setupCameraStream(contexts);
// 2. Screen Capture Stream
// await setupScreenStream(contexts);
// 3. Audio Input Stream
// await setupAudioInputStream(contexts);
// 4. Loopback Audio Stream
// await setupLoopbackStream(contexts);
    print('\n✅ All streams started successfully!');
    print('📊 Capturing for 10 seconds...\n');
// Let it run for 10 seconds
await Future.delayed(Duration(seconds: 10));
} catch (e, stackTrace) {
    print('❌ Error: $e');
print('Stack trace: $stackTrace');
} finally {
// Clean up all contexts
await cleanupAllStreams(contexts);
    print('\n🧹 All streams cleaned up');
MiniAV.dispose();
}
}
Future<void> setupCameraStream(Map<String, dynamic> contexts) async {
  print('📹 Setting up camera stream...');
try {
// Enumerate camera devices
final cameras = await MiniCamera.enumerateDevices();
if (cameras.isEmpty) {
      print('⚠️ No cameras found');
return;
}
    print('📱 Found ${cameras.length} camera(s):');
for (int i = 0; i < cameras.length; i++) {
final camera = cameras[i];
print(
' $i: ${camera.name} (${camera.deviceId}) ${camera.isDefault ? '[DEFAULT]' : ''}',
);
}
// Use first camera
    final selectedCamera = cameras.first;
final format = await MiniCamera.getDefaultFormat(selectedCamera.deviceId);
    print('🎯 Using camera: ${selectedCamera.name}');
print(
      '📐 Format: ${format.width}x${format.height} @ ${format.frameRateNumerator}/${format.frameRateDenominator} FPS',
);
// Create and configure context
final context = await MiniCamera.createContext();
await context.configure(selectedCamera.deviceId, format);
contexts['camera'] = context;
// Start capture
int frameCount = 0;
await context.startCapture((buffer, userData) {
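      // The callback delivers one captured frame per invocation; userData is an
      // opaque pass-through value (unused here; exact semantics assumed).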
frameCount++;
print(
        '📹 Camera frame #$frameCount - ${buffer.dataSizeBytes} bytes - ${buffer.timestampUs}µs',
);
MiniAV.releaseBuffer(buffer); // Release buffer after processing
});
    print('✅ Camera stream started');
} catch (e) {
    print('❌ Camera setup failed: $e');
}
}
Future<void> setupScreenStream(Map<String, dynamic> contexts) async {
  print('\n🖥️ Setting up screen capture stream...');
try {
// Enumerate displays
final displays = await MiniScreen.enumerateDisplays();
if (displays.isEmpty) {
      print('⚠️ No displays found');
return;
}
    print('🖥️ Found ${displays.length} display(s):');
for (int i = 0; i < displays.length; i++) {
final display = displays[i];
print(
' $i: ${display.name} (${display.deviceId}) ${display.isDefault ? '[DEFAULT]' : ''}',
);
}
// Use first display
final selectedDisplay = displays.first;
final formats = await MiniScreen.getDefaultFormats(
selectedDisplay.deviceId,
);
var videoFormat = formats.$1; // Video format
final audioFormat = formats.$2; // Audio format (may be null)
    print('🎯 Using display: ${selectedDisplay.name}');
print(
      '📐 Format: ${videoFormat.width}x${videoFormat.height} @ ${videoFormat.frameRateNumerator}/${videoFormat.frameRateDenominator} FPS',
);
if (audioFormat != null) {
print(
        '🔊 Audio: ${audioFormat.channels}ch ${audioFormat.sampleRate}Hz ${audioFormat.format}',
);
}
// Create and configure context
final context = await MiniScreen.createContext();
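    // Copy the default format but request the GPU buffer path from the package
    // description; falling back to CPU buffers when no GPU surface is
    // available is assumed behavior.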
MiniAVVideoInfo newFormat = MiniAVVideoInfo(
width: videoFormat.width,
height: videoFormat.height,
frameRateNumerator: videoFormat.frameRateNumerator,
frameRateDenominator: videoFormat.frameRateDenominator,
pixelFormat: videoFormat.pixelFormat,
outputPreference: MiniAVOutputPreference.gpu,
);
await context.configureDisplay(
selectedDisplay.deviceId,
newFormat,
captureAudio: audioFormat != null,
);
contexts['screen'] = context;
// Start capture
int screenFrameCount = 0;
int audioFrameCount = 0;
await context.startCapture((buffer, userData) {
if (buffer.type == MiniAVBufferType.video) {
screenFrameCount++;
print(
          '🖥️ Screen frame #$screenFrameCount - ${buffer.dataSizeBytes} bytes - ${buffer.timestampUs}µs',
);
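        // planes[0] is the first image plane; planar pixel formats (e.g. NV12)
        // are assumed to fill additional entries of `planes`.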
        final rawData = (buffer.data as MiniAVVideoBuffer).planes[0]!;
        print(
          ' Raw data: ${rawData.length} bytes, first 10 bytes: ${rawData.take(10).join(', ')}',
);
MiniAV.releaseBuffer(buffer); // Release video buffer after processing
      } else if (buffer.type == MiniAVBufferType.audio) {
        audioFrameCount++;
        print(
          '🔊 Screen audio frame #$audioFrameCount - ${buffer.dataSizeBytes} bytes - ${buffer.timestampUs}µs',
        );
        MiniAV.releaseBuffer(buffer); // Release audio buffer after processing
      }
});
    print('✅ Screen capture started');
} catch (e) {
    print('❌ Screen capture setup failed: $e');
}
}
Future<void> setupAudioInputStream(Map<String, dynamic> contexts) async {
  print('\n🎤 Setting up audio input stream...');
try {
// Enumerate audio input devices
final audioDevices = await MiniAudioInput.enumerateDevices();
if (audioDevices.isEmpty) {
      print('⚠️ No audio input devices found');
return;
}
    print('🎤 Found ${audioDevices.length} audio input device(s):');
for (int i = 0; i < audioDevices.length; i++) {
final device = audioDevices[i];
print(
' $i: ${device.name} (${device.deviceId}) ${device.isDefault ? '[DEFAULT]' : ''}',
);
}
// Use first audio device
final selectedDevice = audioDevices.first;
final format = await MiniAudioInput.getDefaultFormat(
selectedDevice.deviceId,
);
    print('🎯 Using audio device: ${selectedDevice.name}');
print(
      '📐 Format: ${format.channels}ch ${format.sampleRate}Hz ${format.format} (${format.numFrames} frames)',
);
// Create and configure context
final context = await MiniAudioInput.createContext();
await context.configure(selectedDevice.deviceId, format);
contexts['audioInput'] = context;
// Start capture
int bufferCount = 0;
await context.startCapture((buffer, userData) {
bufferCount++;
print(
        '🎤 Audio buffer #$bufferCount - ${buffer.dataSizeBytes} bytes - ${buffer.timestampUs}µs',
);
MiniAV.releaseBuffer(buffer); // Release buffer after processing
});
    print('✅ Audio input stream started');
} catch (e) {
    print('❌ Audio input setup failed: $e');
}
}
Future<void> setupLoopbackStream(Map<String, dynamic> contexts) async {
  print('\n🔁 Setting up loopback audio stream...');
try {
// Enumerate loopback devices
final loopbackDevices = await MiniLoopback.enumerateDevices();
if (loopbackDevices.isEmpty) {
print(
        '⚠️ No loopback devices found (may not be supported on this platform)',
);
return;
}
    print('🔁 Found ${loopbackDevices.length} loopback device(s):');
for (int i = 0; i < loopbackDevices.length; i++) {
final device = loopbackDevices[i];
print(
' $i: ${device.name} (${device.deviceId}) ${device.isDefault ? '[DEFAULT]' : ''}',
);
}
// Use first loopback device
    final selectedDevice = loopbackDevices.first;
final format = await MiniLoopback.getDefaultFormat(selectedDevice.deviceId);
    print('🎯 Using loopback device: ${selectedDevice.name}');
print(
      '📐 Format: ${format.channels}ch ${format.sampleRate}Hz ${format.format} (${format.numFrames} frames)',
);
// Create and configure context
final context = await MiniLoopback.createContext();
await context.configure(selectedDevice.deviceId, format);
contexts['loopback'] = context;
// Start capture
int bufferCount = 0;
await context.startCapture((buffer, userData) {
bufferCount++;
      print(
        '🔁 Loopback buffer #$bufferCount - ${buffer.dataSizeBytes} bytes - ${buffer.timestampUs}µs',
      );
      MiniAV.releaseBuffer(buffer); // Release buffer after processing
});
    print('✅ Loopback stream started');
} catch (e) {
    print('❌ Loopback setup failed: $e');
if (e.toString().contains('UnsupportedError')) {
      print('ℹ️ Loopback capture is not supported on this platform');
}
}
}
Future<void> cleanupAllStreams(Map<String, dynamic> contexts) async {
  print('\n🛑 Stopping all streams...');
final cleanupTasks = <Future>[];
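  // Tear down each context in its own task so one slow stream doesn't block
  // the others; all tasks are awaited together at the end.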
for (final entry in contexts.entries) {
final name = entry.key;
final context = entry.value;
    print('🛑 Stopping $name...');
cleanupTasks.add(
Future(() async {
try {
          // All four context types expose the same stopCapture()/destroy() pair
          if (context is MiniCameraContext ||
              context is MiniScreenContext ||
              context is MiniAudioInputContext ||
              context is MiniLoopbackContext) {
            await context.stopCapture();
            await context.destroy();
          }
          print('✅ $name stopped');
} catch (e) {
          print('❌ Error stopping $name: $e');
}
}),
);
}
// Wait for all cleanup tasks to complete
await Future.wait(cleanupTasks);
}