azure_stt 0.0.9 (azure_stt: ^0.0.9)
A Flutter plugin for using the Azure Speech-to-Text (STT) service.
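To try the example below, add the package to your app's pubspec.yaml (using the version published above):

dependencies:
  azure_stt: ^0.0.9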
example/lib/main.dart
import 'dart:async';
import 'dart:typed_data';

import 'package:azure_stt/azure_continuous_recognition_callback.dart';
import 'package:azure_stt/azure_conversation_transcriber_callback.dart';
import 'package:azure_stt/azure_stt.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:fluttertoast/fluttertoast.dart';
void main() {
  runApp(const MyApp());
}

class MyApp extends StatefulWidget {
  const MyApp({super.key});

  @override
  State<MyApp> createState() => _MyAppState();
}
class _MyAppState extends State<MyApp>
    implements
        AzureContinuousRecognitionCallback,
        AzureConversationTranscriberCallback {
  String _platformVersion = 'Unknown';
  String _result = "";
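  // Placeholder credentials: replace these with your own Azure Speech
  // key/region and Azure Translator key/region before running the example.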
  String subscriptionKey = "subscriptionKey";
  String region = "region";
  String translatorKey = "translatorKey";
  String translatorRegion = "translatorRegion";
  bool recording = false;
  final _azureSttPlugin = AzureStt();
  List<String> texts = List.empty(growable: true);
  @override
  void initState() {
    super.initState();
    initPlatformState();
  }

  // Platform messages are asynchronous, so we initialize in an async method.
  Future<void> initPlatformState() async {
    String platformVersion;
    // Platform messages may fail, so we use a try/catch PlatformException.
    // We also handle the message potentially returning null.
    try {
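      // Configure the Azure Translator credentials and register this State as
      // the handler for recognition and conversation-transcriber events.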
      _azureSttPlugin.initAzureTranslatorConfig(translatorKey, translatorRegion);
      _azureSttPlugin.setAzureContinuousRecognitionCallback(this);
      _azureSttPlugin.setAzureConversationTranscriberCallback(this);
      platformVersion = await _azureSttPlugin.getPlatformVersion() ??
          'Unknown platform version';
    } on PlatformException {
      platformVersion = 'Failed to get platform version.';
    }
    // If the widget was removed from the tree while the asynchronous platform
    // message was in flight, we want to discard the reply rather than calling
    // setState to update our non-existent appearance.
    if (!mounted) return;

    setState(() {
      _platformVersion = platformVersion;
    });
  }
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text('Plugin example app'),
        ),
        body: Column(crossAxisAlignment: CrossAxisAlignment.start, children: [
          Text('Running on: $_platformVersion\n'),
          TextButton(
              onPressed: () async {
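                // Point the plugin at the Azure Speech resource, then start a
                // zh-CN conversation with auto-translation from "en" to "zh-CN"
                // over a 16 kHz, 16-bit, mono audio stream.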
                _azureSttPlugin.initAzureConfig(subscriptionKey, region);
                var result = await _azureSttPlugin.startConversation(
                    "zh-CN", false,
                    samplerRate: 16000,
                    bitRate: 16,
                    channel: 1,
                    autoTranslate: true,
                    fromLang: "en",
                    toLang: "zh-CN");
                setState(() {
                  _result = result ?? "";
                });
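                // Push the bundled sample WAV to the recognizer in 1024-byte
                // chunks via writeAudioBuffer.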
                var buffer = await rootBundle.load("assets/katiesteve.wav");
                var total = buffer.lengthInBytes;
                var l = 1024;
                var offset = 0;
                while (offset < total) {
                  var r = (l + offset) <= total ? l : (total - offset);
                  Uint8List list = buffer.buffer
                      .asUint8List(buffer.offsetInBytes + offset, r);
                  offset += r;
                  await _azureSttPlugin.writeAudioBuffer(list);
                }
              },
              child: const Text("Start")),
          TextButton(
              onPressed: () async {
                setState(() {
                  _result = "";
                  texts.clear();
                  recording = false;
                });
                await _azureSttPlugin.stopConversation();
              },
              child: const Text("Stop")),
          TextButton(
              onPressed: () async {
                // Translate a Chinese sample phrase to English.
                var result = await _azureSttPlugin.translate("你好呀", "", "en");
                setState(() {
                  _result = result ?? "";
                });
              },
              child: const Text("Test translation")),
          Text('Recording: $recording\n'),
          ...texts.map((e) => Text('$e\n')),
          Text('Current result: $_result\n'),
        ]),
      ),
    );
  }
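  // Recognition was canceled (for example, an invalid key or a network error):
  // stop the conversation and surface the error details in a toast.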
  @override
  void onCanceledEvent(msg) {
    _azureSttPlugin.stopConversation();
    Fluttertoast.showToast(
        msg: "ErrorCode:${msg["errorCode"]},"
            "ErrorDetails:${msg["errorDetails"]},"
            "Reason:${msg["reason"]}",
        toastLength: Toast.LENGTH_LONG,
        gravity: ToastGravity.CENTER,
        timeInSecForIosWeb: 1,
        backgroundColor: Colors.red,
        textColor: Colors.white,
        fontSize: 16.0);
  }
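  // Continuous-recognition results: onRecognizingEvent delivers intermediate
  // hypotheses while audio is still arriving, onRecognizedEvent the final text.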
  @override
  void onRecognizedEvent(msg) {
    print(msg);
  }

  @override
  void onRecognizingEvent(msg) {
    setState(() {
      _result = msg;
    });
  }
  @override
  void onSessionStartedEvent(msg) {
    print(msg);
    setState(() {
      recording = true;
    });
  }

  @override
  void onSessionStoppedEvent(msg) {
    print(msg);
    setState(() {
      recording = false;
    });
  }
  @override
  void onSpeechEndDetectedEvent(msg) {
    print("Current conversation ended");
  }

  @override
  void onSpeechStartDetectedEvent(msg) {
    print("Current conversation started");
  }
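  // Conversation-transcriber results: each event carries the speaker id, the
  // source text, the translated text, and timing information for the segment.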
  @override
  void onTranscribedEvent(String speakerId, String srcText, String transText,
      int transType, bool transSuccess, int beginTime, int endTime,
      int transTime) {
    setState(() {
      texts.add("$speakerId~$beginTime-$endTime:$transText");
    });
  }

  @override
  void onTranscribingEvent(String speakerId, String srcText, String transText,
      int transType, bool transSuccess, int beginTime, int endTime,
      int transTime) {
    setState(() {
      _result = "$speakerId~$beginTime-$endTime:$transText";
    });
  }
}