tensorflow / tflite-support

TFLite Support is a toolkit that helps users develop ML and deploy TFLite models onto mobile / IoT devices.
Apache License 2.0

error: An error occurred while running audio classification: TFLAudioRecord hasn't started receiving samples from the audio input source. Please wait for the input. #928

Open viratQversity opened 1 year ago

viratQversity commented 1 year ago

I am getting the following error in Swift: error: An error occurred while running audio classification: TFLAudioRecord hasn't started receiving samples from the audio input source. Please wait for the input.

The error is delivered in func audioClassificationHelper(_ helper: AudioClassificationHelper, didFail error: Error). It happens when I am playing a video using AVPlayer and running TFLite simultaneously.

I am using code from the TensorFlow Lite Swift example (audio classification): https://github.com/tensorflow/examples/tree/master/lite/examples/audio_classification/ios
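
For context, TFLAudioRecord can only deliver samples once the app has microphone permission and an active recording session. Below is a minimal sketch of the kind of permission check that would run before starting the classifier; it uses the standard AVFoundation API, but the helper function name is illustrative, not from the example app:

    import AVFoundation

    // Illustrative helper (hypothetical name): confirm microphone access
    // before starting TFLAudioRecord-based classification.
    func requestMicrophonePermission(completion: @escaping (Bool) -> Void) {
        switch AVAudioSession.sharedInstance().recordPermission {
        case .granted:
            completion(true)
        case .denied:
            completion(false)
        case .undetermined:
            // Prompts the user; requires NSMicrophoneUsageDescription in Info.plist.
            AVAudioSession.sharedInstance().requestRecordPermission { granted in
                DispatchQueue.main.async { completion(granted) }
            }
        @unknown default:
            completion(false)
        }
    }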

//
//  wakeWordHelper.swift
//  virdioMobileApp
//
//  Created by Virat Naithani on 7/18/23.
//  Copyright © 2023 Virat Naithani. All rights reserved.
//

import Foundation

protocol wakeWordHelperDelegate: AnyObject {
    func wakeWordDetected()
}

class wakeWordHelper: NSObject {
//    private var modelType: ModelType = .speechCommandModel
    private var overLap = 0.9
    private var maxResults = 1
    private var threshold: Float = 0.9
    private var threadCount = 2

    private var audioClassificationHelper: AudioClassificationHelper?

    weak var delegate: wakeWordHelperDelegate?

    private func startClassifier() {
        // Stop the existing classifier if one is running.
        self.stopDetectingWakeWord()

        // Create a new classifier instance.
        audioClassificationHelper = AudioClassificationHelper(
            modelType: "model",
            threadCount: threadCount,
            scoreThreshold: threshold,
            maxResults: maxResults)

        // Start the new classification routine.
        audioClassificationHelper?.delegate = self
        audioClassificationHelper?.startClassifier(overlap: overLap)
    }

    func stopDetectingWakeWord() {
        audioClassificationHelper?.stopClassifier()
    }

    func startDetectingWakeWord() {
        self.startClassifier()
    }
}

extension wakeWordHelper: AudioClassificationHelperDelegate {
  func audioClassificationHelper(_ helper: AudioClassificationHelper, didSucceed result: Result) {
    for category in result.categories {
      print("result: \(category.label ?? "nil"), score: \(category.score)")

      // Fire the delegate only on a confident wake-word match.
      // (Optional chaining avoids the crash a force-unwrapped label would cause.)
      if category.label?.hasSuffix("Virdio") == true, category.score > 0.95 {
        self.delegate?.wakeWordDetected()
      }
    }
    DispatchQueue.main.async {
//      self.inferenceView.setInferenceTime(result.inferenceTime)
    }
  }

  func audioClassificationHelper(_ helper: AudioClassificationHelper, didFail error: Error) {
    let errorMessage =
      "An error occurred while running audio classification: \(error.localizedDescription)"
      print("error: \(errorMessage)")
//    let alert = UIAlertController(
//      title: "Error", message: errorMessage, preferredStyle: UIAlertController.Style.alert)
//    alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil))
//    present(alert, animated: true, completion: nil)
  }
}
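
For reference, this is roughly how the helper is driven from the app side; WakeWordViewController is an illustrative caller, not part of my code:

    import UIKit

    // Illustrative caller only (hypothetical class): shows how wakeWordHelper
    // is wired up and how its delegate callback is consumed.
    class WakeWordViewController: UIViewController, wakeWordHelperDelegate {
      private let wakeWord = wakeWordHelper()

      override func viewDidLoad() {
        super.viewDidLoad()
        wakeWord.delegate = self
        wakeWord.startDetectingWakeWord()
      }

      func wakeWordDetected() {
        // Pause detection while the app reacts to the wake word.
        wakeWord.stopDetectingWakeWord()
        print("wake word detected")
      }
    }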

Video Player

convenience init(frame: CGRect, urlString: String, id: Int?) {
    self.init(frame: frame)
    if let id = id {
        onDemandID = id
    }
    do {
//        try AVAudioSession.sharedInstance().setCategory(.playAndRecord)
//        try AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
//        try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation)
        // Current attempt: .playAndRecord with .mixWithOthers so playback and
        // recording share the audio session.
        try AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .mixWithOthers)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print(error.localizedDescription)
    }

    print(urlString)
    // Bail out instead of force-unwrapping if the URL cannot be built.
    guard let finalURLString = urlString.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed),
          let url = URL(string: finalURLString) else { return }
    let asset = AVAsset(url: url)
    let playerItem = AVPlayerItem(asset: asset)
    statusObserver = playerItem.addObserver(self, forKeyPath: #keyPath(AVPlayerItem.status), options: [.old, .new], context: nil)
    playerItem.preferredForwardBufferDuration = TimeInterval(2)
    player = AVPlayer(playerItem: playerItem)
    self.setUpVideoPlayer(item: nil)
}

func setUpVideoPlayer(item: AVPlayerItem?) {
    if player == nil {
        if let item = item {
            player = AVPlayer(playerItem: item)
        } else {
            player = AVPlayer()
        }
        player.automaticallyWaitsToMinimizeStalling = false
    }
    playerLayer = AVPlayerLayer(player: player)
    self.adjustVideoLayerFrame()
    // Removed mirroring: Mirror AVPlayer
//    playerLayer.transform = CATransform3DMakeScale(-1, 1, 1);

    playerLayer.videoGravity = .resizeAspectFill
    playerLayer.needsDisplayOnBoundsChange = true
    self.layer.addSublayer(playerLayer)
    NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
    NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd(note:)), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem)
    player.play()
}
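
One way to narrow this down is to log audio session interruptions and route changes around the moment the player starts. This is a diagnostic sketch using the standard AVAudioSession notifications; the function name is mine, not from either code path above:

    import AVFoundation

    // Diagnostic sketch (hypothetical function name): log audio session
    // interruptions and route changes to see whether starting AVPlayer
    // silences the recording input that TFLAudioRecord depends on.
    func observeAudioSessionEvents() {
        let session = AVAudioSession.sharedInstance()
        NotificationCenter.default.addObserver(
            forName: AVAudioSession.interruptionNotification,
            object: session,
            queue: .main
        ) { note in
            if let raw = note.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt,
               let type = AVAudioSession.InterruptionType(rawValue: raw) {
                print("audio session interruption \(type == .began ? "began" : "ended")")
            }
        }
        NotificationCenter.default.addObserver(
            forName: AVAudioSession.routeChangeNotification,
            object: session,
            queue: .main
        ) { _ in
            print("audio route changed: \(session.currentRoute)")
        }
    }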
lu-wang-g commented 1 year ago

@khanhlvg could you please help with this question?

khanhlvg commented 1 year ago

@priankakariatyml can you take a look?