twilio / video-quickstart-ios

Twilio Video Quickstart for iOS
https://www.twilio.com/docs/api/video
MIT License

How to create an audio device to handle audio input when the connection drops #665

Open cyopaulo opened 11 months ago

cyopaulo commented 11 months ago

Description

I'm currently saving a local file from the data provided by AudioSink and VideoRenderer. When the connection drops, I lose the audio, so I'm trying to create an AudioDevice class to handle the capture myself. Can you provide an example of how to do that?
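
Roughly, my current capture path looks like this (simplified sketch; the AVAssetWriter plumbing and helper names are my own placeholders):

import AVFoundation
import CoreMedia
import TwilioVideo

// An AudioSink attached to the local audio track hands me CMSampleBuffers,
// which I append to a local file. These callbacks stop when the connection
// drops, which is the problem described below.
class RecordingAudioSink: NSObject, AudioSink {
    private let writerInput: AVAssetWriterInput

    init(writerInput: AVAssetWriterInput) {
        self.writerInput = writerInput
        super.init()
    }

    func renderSample(_ audioSample: CMSampleBuffer) {
        if writerInput.isReadyForMoreMediaData, !writerInput.append(audioSample) {
            print("Failed to append audio sample")
        }
    }
}

// Attaching the sink to the local track:
// localAudioTrack?.addSink(RecordingAudioSink(writerInput: audioWriterInput))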

Steps to Reproduce

Drop the internet connection and the audio is lost.

Code

Here is the code that I'm trying to use to capture the audio myself.

import AVFoundation
import CoreMedia
import TwilioVideo

class LocalRecordingAudioDevice: NSObject {
    private var engineAudioDeviceContext: AudioDeviceContext?
    let audioSession = AVAudioSession.sharedInstance()
    let captureSession = AVCaptureSession()

    override init() {
        super.init()
        setupAudioSession()
    }

    private func setupAudioSession() {
        do {
            try audioSession.setCategory(.playAndRecord, mode: .default)
            try audioSession.setActive(true)
            // Only adjust input gain where the hardware allows it.
            if audioSession.isInputGainSettable {
                try audioSession.setInputGain(1)
            }
        } catch {
            print("Error setting up audio session: \(error)")
        }
    }

    private func setupCaptureSession() {
        guard let microphone = AVCaptureDevice.default(for: .audio) else {
            print("No microphone available")
            return
        }

        do {
            let input = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
            }

            let output = AVCaptureAudioDataOutput()
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }

            let queue = DispatchQueue(label: "com.localrecord.audioQueue")
            output.setSampleBufferDelegate(self, queue: queue)

            captureSession.startRunning()
        } catch {
            print("Error setting up capture session: \(error)")
        }
    }
}

extension LocalRecordingAudioDevice: AVCaptureAudioDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let audioDeviceContext = engineAudioDeviceContext,
              let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else { return }

        var lengthAtOffset = 0
        var totalLength = 0
        var dataPointer: UnsafeMutablePointer<Int8>?

        let status = CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: &lengthAtOffset, totalLengthOut: &totalLength, dataPointerOut: &dataPointer)

        if status == kCMBlockBufferNoErr, let data = dataPointer {
            // Forward the captured bytes to the SDK, using the real byte
            // count instead of a hardcoded size.
            let sizeInBytes = MemoryLayout<Int8>.stride * totalLength
            AudioDeviceWriteCaptureData(context: audioDeviceContext, data: data, sizeInBytes: sizeInBytes)
        }
    }
}

// AudioDevice conformance: capture is enabled, rendering is disabled.
extension LocalRecordingAudioDevice: AudioDevice {
    func renderFormat() -> AudioFormat? {
        return AudioFormat(channels: 1, sampleRate: 44100, framesPerBuffer: 16)
    }

    func initializeRenderer() -> Bool {
        return false
    }

    func startRendering(context: AudioDeviceContext) -> Bool {
        return false
    }

    func stopRendering() -> Bool {
        return false
    }

    func captureFormat() -> AudioFormat? {
        return AudioFormat(channels: 1, sampleRate: 44100, framesPerBuffer: 16)
    }

    func initializeCapturer() -> Bool {
        return true
    }

    func startCapturing(context: AudioDeviceContext) -> Bool {
        engineAudioDeviceContext = context
        setupCaptureSession()
        return true
    }

    func stopCapturing() -> Bool {
        captureSession.stopRunning()
        engineAudioDeviceContext = nil
        return true
    }
}
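
For context, this is roughly how I plan to install the device before creating any tracks (sketch, following the AudioDeviceExample pattern in this repo; the track name is arbitrary):

// The custom device has to be assigned before any audio tracks or Rooms are
// created, otherwise the SDK keeps using the default audio device.
let recordingDevice = LocalRecordingAudioDevice()
TwilioVideoSDK.audioDevice = recordingDevice

// Only after the device is set do I create the local audio track.
let localAudioTrack = LocalAudioTrack(options: nil, enabled: true, name: "Microphone")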

Expected Behavior

When I lose the connection, I need to keep capturing the audio.

Actual Behavior

When I lose the connection, my video ends up muted (no audio is captured).

Reproduces How Often

All the time

Versions

Video iOS SDK: 5.7.0
Xcode: 15.0.1
iOS Version: 17.0.1
iOS Device: iPhone 13 Pro