/// Entry widget for the chat screen.
///
/// NOTE(review): this declaration is duplicated later in the paste; keep
/// only one copy when cleaning the file up.
class ChatPage extends StatefulWidget {
  const ChatPage({super.key});

  // Return the typed State<ChatPage> rather than raw `State` so the
  // framework and tooling know which widget this state belongs to.
  @override
  State<ChatPage> createState() => _ChatPageState();
}
// NOTE(review): truncated duplicate of the _ChatPageState class defined
// later in this paste — the body is cut off mid-class by the question text
// and never closes. Delete this fragment when cleaning the file up.
class _ChatPageState extends State {
// Speech recognizer instance from package:speech_to_text.
final _speechToText = SpeechToText();
// Wrapper for OpenAI chat requests (initialized in initState elsewhere).
late final OpenAIService _gptService;
// Whether SpeechToText.initialize() succeeded on this device.
bool _speechEnabled = false;
// Most recent words recognized by the speech engine.
String _lastWords = '';
// Controller backing the message TextField.
late final TextEditingController _controlMessage;
Hi, I am trying to use speech_to_text, but whenever I touch the microphone icon the "listening started" sound plays and, after about one second, the "listening stopped" sound plays.
Because of this I can't use it.
import 'dart:async'; import 'dart:developer';
import 'package:com_voice_to_gpt_application/pages/chat_screen/chat_message.dart'; import 'package:com_voice_to_gpt_application/services/open_ai_service.dart'; import 'package:flutter/material.dart'; import 'package:speech_to_text/speech_recognition_result.dart'; import 'package:speech_to_text/speech_to_text.dart'; import 'package:velocity_x/velocity_x.dart';
/// Chat screen: shows the recognized speech, the message list, a text
/// composer, and a microphone button to start/stop dictation.
class ChatPage extends StatefulWidget {
  const ChatPage({super.key});

  // Return the typed State<ChatPage> rather than raw `State` so the
  // framework and tooling know which widget this state belongs to.
  @override
  State<ChatPage> createState() => _ChatPageState();
}
class _ChatPageState extends State<ChatPage> {
  /// Speech recognizer from package:speech_to_text.
  final _speechToText = SpeechToText();

  /// Service used to send recognized text to the OpenAI API.
  late final OpenAIService _gptService;

  /// Whether SpeechToText.initialize() succeeded on this device.
  bool _speechEnabled = false;

  /// Most recent words recognized by the speech engine.
  String _lastWords = '';

  /// Controller backing the message TextField.
  late final TextEditingController _controlMessage;

  /// Messages shown in the list, newest first (inserted at index 0).
  // NOTE(review): assumes ChatMessage is a Widget — it is rendered directly
  // by the ListView.builder below; confirm against chat_message.dart.
  final List<ChatMessage> _messages = [];

  @override
  void initState() {
    super.initState();
    _initSpeech();
    _gptService = OpenAIService();
    //_gptService.chatGptPrompt("Merhaba çalışıyor musun?");
    _controlMessage = TextEditingController();
  }

  @override
  void dispose() {
    // BUG FIX: the original called setState() here, which throws
    // "setState() called after dispose()". Just release the resources.
    _controlMessage.dispose();
    _speechToText.stop();
    super.dispose();
  }

  /// Initializes the speech engine once; listen() only works after this
  /// completes successfully.
  Future<void> _initSpeech() async {
    _speechEnabled = await _speechToText.initialize(
      // Log engine errors and status transitions — essential for
      // diagnosing the "starts, then stops after one second" symptom
      // (typically an error_speech_timeout / no-match from the platform).
      onError: (error) => log('speech error: $error'),
      onStatus: (status) => log('speech status: $status'),
    );
    if (mounted) setState(() {});
  }

  /// Starts a listening session.
  Future<void> _startListening() async {
    // BUG FIX: without explicit durations the platform recognizer may end
    // the session almost immediately (the reported start-sound/stop-sound
    // behavior). Keep the session open for up to 30 s and tolerate 5 s of
    // silence before auto-stopping.
    await _speechToText.listen(
      onResult: _onSpeechResult,
      listenFor: const Duration(seconds: 30),
      pauseFor: const Duration(seconds: 5),
    );
    if (mounted) setState(() {});
  }

  /// Stops the current listening session.
  Future<void> _stopListening() async {
    await _speechToText.stop();
    if (mounted) setState(() {});
  }

  /// Engine callback delivering partial and final recognition results.
  void _onSpeechResult(SpeechRecognitionResult result) {
    setState(() => _lastWords = result.recognizedWords);
  }

  /// Text field plus send button row at the bottom of the screen.
  Widget _buildTextComposer() {
    return Row(
      children: [
        Expanded(
          child: TextField(
            onSubmitted: (value) => _sendMessage(),
            controller: _controlMessage,
            decoration:
                const InputDecoration.collapsed(hintText: "Mesaj Gönder"),
          ),
        ),
        IconButton(onPressed: _sendMessage, icon: const Icon(Icons.send)),
      ],
    ).px16();
  }

  /// Inserts the composed message at the top of the list and clears the
  /// input field. (Not async: nothing here is awaited.)
  void _sendMessage() {
    final text = _controlMessage.text;
    // Robustness: ignore empty/whitespace-only submissions.
    if (text.trim().isEmpty) return;
    final message = ChatMessage(text: text, sender: "User");
    setState(() => _messages.insert(0, message));
    _controlMessage.clear();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Chat')),
      body: SafeArea(
        child: Column(
          children: [
            Container(
              padding: const EdgeInsets.all(16),
              child: Text(
                // While listening, show the recognized words. Otherwise
                // tell the user how to start, or report that speech
                // recognition is unavailable on this device.
                _speechToText.isListening
                    ? _lastWords
                    : _speechEnabled
                        ? 'Tap the microphone to start listening...'
                        : 'Speech not available',
              ),
            ),
            Flexible(
              child: ListView.builder(
                itemCount: _messages.length,
                itemBuilder: (context, index) => _messages[index],
              ),
            ),
            Container(
              decoration: BoxDecoration(color: context.cardColor),
              child: _buildTextComposer(),
            ),
            FloatingActionButton(
              onPressed: () async {
                if (await _speechToText.hasPermission &&
                    _speechToText.isNotListening) {
                  _startListening();
                } else if (_speechToText.isListening) {
                  //final spech = await OpenAIService().chatGptPrompt(_lastWords);
                  log(_lastWords);
                  _stopListening();
                } else {
                  _initSpeech();
                }
              },
              // BUG FIX: the icon was keyed on _speechEnabled (init
              // success), so it showed mic_off whenever speech was merely
              // available. Show mic_off only while actually listening.
              child: Icon(
                _speechToText.isListening ? Icons.mic_off : Icons.mic,
              ),
            ),
          ],
        ),
      ),
    );
  }
}