/// `getAudioDuration` — static helper used when adding audio to a video.
///
/// Downloads the audio to the cache first when [path] is a network URL,
/// then probes the file with FFmpeg to determine its duration.
/// Returns the duration of the audio file at [path].
///
/// If [path] is a network URL (per `isNetworkImage()`), the file is first
/// downloaded into the cache directory and the local copy is probed;
/// otherwise [path] is probed directly. The duration is parsed from
/// FFmpeg's `-i` output. Completes with [Duration.zero] when the probe
/// produces no output or no parsable `Duration:` line, so callers never
/// await forever.
static Future<Duration> getAudioDuration(String path) async {
  XFile fileLocal = XFile(path);
  if (path.isNetworkImage()) {
    final HttpClient httpClient = HttpClient();
    try {
      final File audioPath = File(
          '${await getCacheDirectory()}/${DateTime.now().millisecondsSinceEpoch}.mp3');
      final HttpClientRequest request =
          await httpClient.getUrl(Uri.parse(path));
      final HttpClientResponse response = await request.close();
      if (response.statusCode == HttpStatus.ok) {
        final Uint8List bytes =
            await consolidateHttpClientResponseBytes(response);
        await audioPath.writeAsBytes(bytes);
        fileLocal = XFile(audioPath.path);
      }
      // On a non-200 response we fall through and probe the remote URL
      // directly; FFmpeg can read HTTP inputs itself.
    } catch (ex) {
      // Best-effort download: on failure, keep probing the original path.
      // TODO(review): replace print with a proper logger.
      print(ex);
    } finally {
      httpClient.close();
    }
  }
  final Completer<Duration> completer = Completer<Duration>();
  unawaited(
    FFmpegKit.executeAsync(
      // Quote the path so files whose names contain spaces are passed to
      // FFmpeg as a single argument.
      '-i "${fileLocal.path}" -hide_banner',
      (FFmpegSession session) async {
        final String? output = await session.getOutput();
        Duration result = Duration.zero;
        if (output != null) {
          final RegExpMatch? match =
              RegExp(r'Duration: (\d{2}):(\d{2}):(\d{2})\.(\d{2})')
                  .firstMatch(output);
          if (match != null) {
            result = Duration(
              hours: int.parse(match.group(1)!),
              minutes: int.parse(match.group(2)!),
              seconds: int.parse(match.group(3)!),
              // FFmpeg reports centiseconds; convert to milliseconds.
              milliseconds: int.parse(match.group(4)!) * 10,
            );
          }
        }
        // Always complete — the original hung forever when the session
        // produced no output. Guard against double completion in case the
        // callback ever fires twice.
        if (!completer.isCompleted) {
          completer.complete(result);
        }
      },
    ),
  );
  return completer.future;
}