# speech_recognition 0.2.0+1
A Flutter plugin to use speech recognition on iOS and Android.

To use it, add `speech_recognition: ^0.2.0+1` to the dependencies in your `pubspec.yaml`.
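At its core the API is a small set of calls: create a `SpeechRecognition` instance, register callbacks, `activate()` it, and then `listen()`. The sketch below shows only that call sequence, stripped of any UI, using the same plugin methods as the full example that follows (the `setUpRecognizer` function name is just for illustration):

```dart
import 'package:speech_recognition/speech_recognition.dart';

void setUpRecognizer() {
  final speech = new SpeechRecognition();

  // Receive transcriptions as the user speaks.
  speech.setRecognitionResultHandler((String text) => print('heard: $text'));

  // activate() reports whether speech recognition is available on this device;
  // only start listening if it is.
  speech.activate().then((available) {
    if (available) {
      speech.listen(locale: 'en_US');
    }
  });
}
```

The complete example, wired into a small Flutter app: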
```dart
import 'package:flutter/material.dart';
import 'package:speech_recognition/speech_recognition.dart';

void main() {
  runApp(new MyApp());
}
class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => new _MyAppState();
}

class _MyAppState extends State<MyApp> {
  SpeechRecognition _speech;

  // Whether recognition is available on the device, and whether we are
  // currently listening.
  bool _speechRecognitionAvailable = false;
  bool _isListening = false;

  String transcription = '';
  String _currentLocale = 'en_US';

  @override
  void initState() {
    super.initState();
    activateSpeechRecognizer();
  }
  // Platform channel calls are asynchronous, so the recognizer is set up here
  // and availability is reported back through a Future.
  void activateSpeechRecognizer() {
    print('_MyAppState.activateSpeechRecognizer... ');
    _speech = new SpeechRecognition();
    _speech.setAvailabilityHandler(onSpeechAvailability);
    _speech.setCurrentLocaleHandler(onCurrentLocale);
    _speech.setRecognitionStartedHandler(onRecognitionStarted);
    _speech.setRecognitionResultHandler(onRecognitionResult);
    _speech.setRecognitionCompleteHandler(onRecognitionComplete);
    _speech
        .activate()
        .then((res) => setState(() => _speechRecognitionAvailable = res));
  }
  @override
  Widget build(BuildContext context) {
    return new MaterialApp(
      home: new Scaffold(
        appBar: new AppBar(
          title: new Text('SpeechRecognition'),
        ),
        body: new Padding(
          padding: new EdgeInsets.all(8.0),
          child: new Center(
            child: new Column(
              mainAxisSize: MainAxisSize.min,
              crossAxisAlignment: CrossAxisAlignment.stretch,
              children: [
                new Expanded(
                  child: new Container(
                    padding: const EdgeInsets.all(8.0),
                    color: Colors.grey.shade200,
                    child: new Text(transcription),
                  ),
                ),
                _buildButton(
                  onPressed: _speechRecognitionAvailable && !_isListening
                      ? () => start()
                      : null,
                  label: _isListening
                      ? 'Listening...'
                      : 'Listen ($_currentLocale)',
                ),
                _buildButton(
                  onPressed: _isListening ? () => cancel() : null,
                  label: 'Cancel',
                ),
                _buildButton(
                  onPressed: _isListening ? () => stop() : null,
                  label: 'Stop',
                ),
              ],
            ),
          ),
        ),
      ),
    );
  }
  Widget _buildButton({String label, VoidCallback onPressed}) => new Padding(
        padding: new EdgeInsets.all(12.0),
        child: new RaisedButton(
          color: Colors.cyan.shade600,
          onPressed: onPressed,
          child: new Text(
            label,
            style: const TextStyle(color: Colors.white),
          ),
        ),
      );
  // Actions triggered by the buttons.
  void start() => _speech
      .listen(locale: _currentLocale)
      .then((result) => print('_MyAppState.start => result $result'));

  void cancel() =>
      _speech.cancel().then((result) => setState(() => _isListening = result));

  void stop() =>
      _speech.stop().then((result) => setState(() => _isListening = result));

  // Callbacks registered in activateSpeechRecognizer(), invoked by the plugin.
  void onSpeechAvailability(bool result) =>
      setState(() => _speechRecognitionAvailable = result);

  void onCurrentLocale(String locale) =>
      setState(() => _currentLocale = locale);

  void onRecognitionStarted() => setState(() => _isListening = true);

  void onRecognitionResult(String text) => setState(() => transcription = text);

  void onRecognitionComplete() => setState(() => _isListening = false);
}
```
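In this example the Listen button is enabled only once `activate()` has reported that recognition is available (and while not already listening), the Cancel and Stop buttons only while listening, and the latest transcription is shown in the grey container above the buttons. As with any plugin that records audio, you will likely also need the usual platform permissions (microphone and speech-recognition usage descriptions on iOS, record-audio permission on Android); check the package's platform setup notes for the exact entries.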