rxlabz / speech_recognition

A Flutter plugin to use speech recognition on iOS & Android (Swift/Java)
https://pub.dartlang.org/packages/speech_recognition

Build Error: can't find speech_recognition-Swift.h #39

Open crashinc99 opened 5 years ago

crashinc99 commented 5 years ago

Failed to build iOS app
Error output from Xcode build:
↳ BUILD FAILED
Xcode's output:
↳ === BUILD TARGET speech_recognition OF PROJECT Pods WITH CONFIGURATION Debug ===
/Users/abcdefg/Development/flutter/.pub-cache/hosted/pub.dartlang.org/speech_recognition-0.3.0+1/ios/Classes/SpeechRecognitionPlugin.m:2:9: fatal error: 'speech_recognition/speech_recognition-Swift.h' file not found

#import <speech_recognition/speech_recognition-Swift.h>
        ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1 error generated.

Could not build the application for the simulator. Error launching application on iPhone Xʀ. Exited (sigterm)

Is this an issue with the Flutter plugin or with my Swift/Xcode setup? I tried to find speech_recognition-Swift.h from the root directory, and the find command returned:

./Users/abcdefg/Library/Developer/Xcode/DerivedData/Runner-ezxihojhlxwsmcdyavncncoswdha/Build/Intermediates.noindex/Pods.build/Debug-iphonesimulator/speech_recognition.build/Objects-normal/x86_64/speech_recognition-Swift.h
./Users/abcdefg/Library/Developer/Xcode/DerivedData/Runner-ezxihojhlxwsmcdyavncncoswdha/Build/Intermediates.noindex/Pods.build/Debug-iphonesimulator/speech_recognition.build/DerivedSources/speech_recognition-Swift.h
./Users/abcdefg/Library/Developer/Xcode/DerivedData/Runner-ezxihojhlxwsmcdyavncncoswdha/Build/Intermediates.noindex/Pods.build/Debug-iphoneos/speech_recognition.build/Objects-normal/arm64/speech_recognition-Swift.h
./Users/abcdefg/Library/Developer/Xcode/DerivedData/Runner-ezxihojhlxwsmcdyavncncoswdha/Build/Intermediates.noindex/Pods.build/Debug-iphoneos/speech_recognition.build/DerivedSources/speech_recognition-Swift.h

I believe I added the correct keys to the Info.plist file. I'd assume that even if I got these wrong, it wouldn't affect the build. Is that a false assumption?
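
For reference, the keys I mean are the standard usage-description pair this plugin needs (the strings here are placeholders, not my actual values):

<key>NSSpeechRecognitionUsageDescription</key>
<string>This app uses speech recognition.</string>
<key>NSMicrophoneUsageDescription</key>
<string>This app uses the microphone to capture speech.</string>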

Thanks for any help anyone can provide!

vintage commented 5 years ago

@crashinc99 Can you show the contents of your ios/Podfile?
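
A missing <plugin>-Swift.h header from a Swift-based plugin usually means the pods are not being built as frameworks. For comparison, the relevant part of a working Podfile of that era looks roughly like this (a sketch with an assumed platform version, not necessarily your generated file):

platform :ios, '9.0'

target 'Runner' do
  use_frameworks!
  # ... the pod setup generated by flutter create stays as-is ...
end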

crashinc99 commented 5 years ago

I fixed my issue. I had a bunch of different minor problems and changed a few things to get everything working, but I don't remember which specific change fixed this one. I was hoping to document the fix for anyone else who runs into it. Sorry.

crashinc99 commented 5 years ago

...and thanks for responding!

JsDoingYJ commented 5 years ago

I have the same problem. Can you tell me how to fix this issue, please?

Grekain1 commented 5 years ago

@JsDoingYJ

Starting from a fresh project, these are the exact steps I took to finally make it work.
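
First, make sure the plugin is declared in pubspec.yaml (the version below is inferred from the path in the error log above; use whatever version you are on):

dependencies:
  speech_recognition: ^0.3.0

Next, this is the speech_recognition.dart (the plugin's method-channel class) as I used it: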

import 'dart:async';

import 'dart:ui';
import 'package:flutter/services.dart';

typedef void AvailabilityHandler(bool result);
typedef void StringResultHandler(String text);

/// the channel to control the speech recognition
class SpeechRecognition {
  static const MethodChannel _channel =
      const MethodChannel('speech_recognition');

  static final SpeechRecognition _speech = new SpeechRecognition._internal();

  factory SpeechRecognition() => _speech;

  SpeechRecognition._internal() {
    _channel.setMethodCallHandler(_platformCallHandler);
  }

  AvailabilityHandler availabilityHandler;

  StringResultHandler currentLocaleHandler;
  StringResultHandler recognitionResultHandler;

  VoidCallback recognitionStartedHandler;

  StringResultHandler recognitionCompleteHandler;

  VoidCallback errorHandler;

  /// ask for speech recognizer permission
  Future activate() => _channel.invokeMethod("speech.activate");

  /// start listening
  Future listen({String locale}) =>
      _channel.invokeMethod("speech.listen", locale);

  /// cancel speech
  Future cancel() => _channel.invokeMethod("speech.cancel");

  /// stop listening
  Future stop() => _channel.invokeMethod("speech.stop");

  Future _platformCallHandler(MethodCall call) async {
    print("_platformCallHandler call ${call.method} ${call.arguments}");
    switch (call.method) {
      case "speech.onSpeechAvailability":
        availabilityHandler(call.arguments);
        break;
      case "speech.onCurrentLocale":
        currentLocaleHandler(call.arguments);
        break;
      case "speech.onSpeech":
        recognitionResultHandler(call.arguments);
        break;
      case "speech.onRecognitionStarted":
        recognitionStartedHandler();
        break;
      case "speech.onRecognitionComplete":
        recognitionCompleteHandler(call.arguments);
        break;
      case "speech.onError":
        errorHandler();
        break;
      default:
        print('Unknown method ${call.method}');
    }
  }

  // define a method to handle availability / permission result
  void setAvailabilityHandler(AvailabilityHandler handler) =>
      availabilityHandler = handler;

  // define a method to handle recognition result
  void setRecognitionResultHandler(StringResultHandler handler) =>
      recognitionResultHandler = handler;

  // define a method to handle native call
  void setRecognitionStartedHandler(VoidCallback handler) =>
      recognitionStartedHandler = handler;

  // define a method to handle native call
  void setRecognitionCompleteHandler(StringResultHandler handler) =>
      recognitionCompleteHandler = handler;

  void setCurrentLocaleHandler(StringResultHandler handler) =>
      currentLocaleHandler = handler;

  void setErrorHandler(VoidCallback handler) => errorHandler = handler;
}

Finally, this is the main.dart I used. It is the basic example given in this repo with a few slight changes:

import 'package:flutter/material.dart';
import 'package:speech_recognition/speech_recognition.dart';

void main() {
  runApp(new MyApp());
}

const languages = const [
  const Language('Francais', 'fr_FR'),
  const Language('English', 'en_US'),
  const Language('Pусский', 'ru_RU'),
  const Language('Italiano', 'it_IT'),
  const Language('Español', 'es_ES'),
];

class Language {
  final String name;
  final String code;

  const Language(this.name, this.code);
}

class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => new _MyAppState();
}

class _MyAppState extends State<MyApp> {
  SpeechRecognition _speech;

  bool _speechRecognitionAvailable = false;
  bool _isListening = false;

  String transcription = '';

  //String _currentLocale = 'en_US';
  Language selectedLang = languages.first;

  @override
  initState() {
    super.initState();
    activateSpeechRecognizer();
  }

  // Platform messages are asynchronous, so we initialize in an async method.
  void activateSpeechRecognizer() {
    print('_MyAppState.activateSpeechRecognizer... ');
    _speech = new SpeechRecognition();
    _speech.setAvailabilityHandler(onSpeechAvailability);
    _speech.setCurrentLocaleHandler(onCurrentLocale);
    _speech.setRecognitionStartedHandler(onRecognitionStarted);
    _speech.setRecognitionResultHandler(onRecognitionResult);
    // the plugin passes the final transcription; we only need the completion signal,
    // but the callback must actually be invoked, not just referenced
    _speech.setRecognitionCompleteHandler((_) => onRecognitionComplete());
    _speech.setErrorHandler(errorHandler);
    _speech
        .activate()
        .then((res) => setState(() => _speechRecognitionAvailable = res));
  }

  @override
  Widget build(BuildContext context) {
    return new MaterialApp(
      home: new Scaffold(
        appBar: new AppBar(
          title: new Text('SpeechRecognition'),
          actions: [
            new PopupMenuButton<Language>(
              onSelected: _selectLangHandler,
              itemBuilder: (BuildContext context) => _buildLanguagesWidgets,
            )
          ],
        ),
        body: new Padding(
            padding: new EdgeInsets.all(8.0),
            child: new Center(
              child: new Column(
                mainAxisSize: MainAxisSize.min,
                crossAxisAlignment: CrossAxisAlignment.stretch,
                children: [
                  new Expanded(
                      child: new Container(
                          padding: const EdgeInsets.all(8.0),
                          color: Colors.grey.shade200,
                          child: new Text(transcription))),
                  _buildButton(
                    onPressed: _speechRecognitionAvailable && !_isListening
                        ? () => start()
                        : null,
                    label: _isListening
                        ? 'Listening...'
                        : 'Listen (${selectedLang.code})',
                  ),
                  _buildButton(
                    onPressed: _isListening ? () => cancel() : null,
                    label: 'Cancel',
                  ),
                  _buildButton(
                    onPressed: _isListening ? () => stop() : null,
                    label: 'Stop',
                  ),
                ],
              ),
            )),
      ),
    );
  }

  List<CheckedPopupMenuItem<Language>> get _buildLanguagesWidgets => languages
      .map((l) => new CheckedPopupMenuItem<Language>(
            value: l,
            checked: selectedLang == l,
            child: new Text(l.name),
          ))
      .toList();

  void _selectLangHandler(Language lang) {
    setState(() => selectedLang = lang);
  }

  Widget _buildButton({String label, VoidCallback onPressed}) => new Padding(
      padding: new EdgeInsets.all(12.0),
      child: new RaisedButton(
        color: Colors.cyan.shade600,
        onPressed: onPressed,
        child: new Text(
          label,
          style: const TextStyle(color: Colors.white),
        ),
      ));

  void start() => _speech
      .listen(locale: selectedLang.code)
      .then((result) => print('_MyAppState.start => result $result'));

  void cancel() =>
      _speech.cancel().then((result) => setState(() => _isListening = result));

  void stop() => _speech.stop().then((result) {
        setState(() => _isListening = result);
      });

  void onSpeechAvailability(bool result) =>
      setState(() => _speechRecognitionAvailable = result);

  void onCurrentLocale(String locale) {
    print('_MyAppState.onCurrentLocale... $locale');
    setState(
        () => selectedLang = languages.firstWhere((l) => l.code == locale));
  }

  void onRecognitionStarted() => setState(() => _isListening = true);

  void onRecognitionResult(String text) => setState(() => transcription = text);

  void onRecognitionComplete() => setState(() => _isListening = false);

  void errorHandler() => activateSpeechRecognizer();
}

Let me know if it works for you! I made no changes to the Build Phases in Xcode, as that only broke the app further, so make sure those are all at their default settings; that's why I used a fresh project to reproduce this.
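
If it still fails after these steps, the stale copies of speech_recognition-Swift.h under DerivedData in the find output above suggest a full clean rebuild is worth trying (standard Flutter and CocoaPods commands):

rm -rf ios/Pods ios/Podfile.lock
flutter clean
flutter run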