llfbandit / record

Audio recorder from microphone to a given file path. No external dependencies: MediaRecorder is used on Android and AVAudioRecorder on iOS.
https://pub.dev/packages/record

No audio file generated by Record.stop() on iOS #170

Closed mregnauld closed 1 year ago

mregnauld commented 1 year ago

Package version 4.4.4

Describe the bug When the audio recorder has finished recording, I get a path back from stop(). But when I try to access the file at that path, the file doesn't exist. This only occurs on iOS; there is no problem on Android. (That being said, it did work before, surprisingly.)

To Reproduce Steps to reproduce the behavior:

  1. In _buildRecordStopControl() below, there is a button that allows starting and stopping the recording.
  2. Tap it once to start, and a second time to stop.
  3. When _stop() is called, the file at the path returned by final path = await _audioRecorder.stop(); doesn't exist

Expected behavior I should be able to access the generated audio file.


Additional context Here is the full code:

import 'dart:async';

import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:record/record.dart';

class AudioRecorder extends StatefulWidget
{
  final void Function(String path) onStop;

  const AudioRecorder({Key? key, required this.onStop}) : super(key: key);

  @override
  State<AudioRecorder> createState() => _AudioRecorderState();
}

class _AudioRecorderState extends State<AudioRecorder>
{
  int _recordDuration = 0;
  Timer? _timer;
  final _audioRecorder = Record();
  StreamSubscription<RecordState>? _recordSub;
  RecordState _recordState = RecordState.stop;
  StreamSubscription<Amplitude>? _amplitudeSub;
  Amplitude? _amplitude;

  @override
  void initState()
  {
    _recordSub = _audioRecorder.onStateChanged().listen((recordState) {
      setState(() => _recordState = recordState);
    });

    _amplitudeSub = _audioRecorder
        .onAmplitudeChanged(const Duration(milliseconds: 300))
        .listen((amp) => setState(() => _amplitude = amp));

    super.initState();
  }

  @override
  void dispose()
  {
    _timer?.cancel();
    _recordSub?.cancel();
    _amplitudeSub?.cancel();
    _audioRecorder.dispose();
    super.dispose();
  }

  Future<void> _start() async
  {
    try
    {
      if (await _audioRecorder.hasPermission())
      {
        // We don't do anything with this except print it
        final isSupported = await _audioRecorder.isEncoderSupported(
          AudioEncoder.aacLc,
        );
        if (kDebugMode) {
          print('${AudioEncoder.aacLc.name} supported: $isSupported');
        }

        await _audioRecorder.start(
          bitRate: 128000,
          samplingRate: 44100,
          numChannels: 1,
        );
        _recordDuration = 0;

        _startTimer();
      }
    }
    catch (e)
    {
      if (kDebugMode) print(e);
    }
  }

  Future<void> _stop() async
  {
    _timer?.cancel();
    _recordDuration = 0;

    final path = await _audioRecorder.stop();

    if (path != null) {
      widget.onStop(path);
    }
  }

  Future<void> _pause() async
  {
    _timer?.cancel();
    await _audioRecorder.pause();
  }

  Future<void> _resume() async
  {
    _startTimer();
    await _audioRecorder.resume();
  }

  Widget _buildRecordStopControl()
  {
    late Icon icon;
    late Color color;

    if (_recordState != RecordState.stop)
    {
      icon = const Icon(Icons.stop, color: Colors.red, size: 30);
      color = Colors.red.withOpacity(0.1);
    }
    else
    {
      final theme = Theme.of(context);
      icon = Icon(Icons.mic, color: theme.primaryColor, size: 30);
      color = theme.primaryColor.withOpacity(0.1);
    }

    return ClipOval(
      child: Material(
        color: color,
        child: InkWell(
          child: SizedBox(width: 56, height: 56, child: icon),
          onTap: () {
            (_recordState != RecordState.stop) ? _stop() : _start();
          },
        ),
      ),
    );
  }

  Widget _buildPauseResumeControl()
  {
    if (_recordState == RecordState.stop)
    {
      return const SizedBox.shrink();
    }

    late Icon icon;
    late Color color;

    if (_recordState == RecordState.record)
    {
      icon = const Icon(Icons.pause, color: Colors.red, size: 30);
      color = Colors.red.withOpacity(0.1);
    }
    else
    {
      final theme = Theme.of(context);
      icon = const Icon(Icons.play_arrow, color: Colors.red, size: 30);
      color = theme.primaryColor.withOpacity(0.1);
    }

    return ClipOval(
      child: Material(
        color: color,
        child: InkWell(
          child: SizedBox(width: 56, height: 56, child: icon),
          onTap: () {
            (_recordState == RecordState.pause) ? _resume() : _pause();
          },
        ),
      ),
    );
  }

  Widget _buildText()
  {
    if (_recordState != RecordState.stop) return _buildTimer();
    return const Text("Waiting to record");
  }

  Widget _buildTimer()
  {
    final String minutes = _formatNumber(_recordDuration ~/ 60);
    final String seconds = _formatNumber(_recordDuration % 60);

    return Text(
      '$minutes : $seconds',
      style: const TextStyle(color: Colors.red),
    );
  }

  String _formatNumber(int number)
  {
    String numberStr = number.toString();
    if (number < 10) {
      numberStr = '0$numberStr';
    }

    return numberStr;
  }

  void _startTimer()
  {
    _timer?.cancel();

    _timer = Timer.periodic(const Duration(seconds: 1), (Timer t) {
      setState(() => _recordDuration++);
    });
  }

  @override
  Widget build(BuildContext context)
  {
    return MaterialApp(
      home: Scaffold(
        body: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Row(
              mainAxisAlignment: MainAxisAlignment.center,
              children: <Widget>[
                _buildRecordStopControl(),
                const SizedBox(width: 20),
                _buildPauseResumeControl(),
                const SizedBox(width: 20),
                _buildText(),
              ],
            ),
            if (_amplitude != null) ...[
              const SizedBox(height: 40),
              Text('Current: ${_amplitude?.current ?? 0.0}'),
              Text('Max: ${_amplitude?.max ?? 0.0}'),
            ],
          ],
        ),
      ),
    );
  }

}
mregnauld commented 1 year ago

After further investigation, it appears that if I do File(path).existsSync() (in the _stop() function, with the path returned by _audioRecorder.stop()), it always returns false (even though I can use the path with audioplayers).

So I tried the following:

final directory = await getApplicationDocumentsDirectory();
final audioFile = File("${directory.path}/audio/test.m4a");
await _audioRecorder.start(
  path: audioFile.path,
  encoder: AudioEncoder.aacLc,
  bitRate: 128000,
  samplingRate: 44100,
  numChannels: 2,
);

but even then, File(path).existsSync() still returns false, and audioplayers doesn't work anymore.
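
One thing worth ruling out (an assumption, not something confirmed in this thread): the audio/ subfolder may not exist yet when start() is called, and the platform recorders generally create only the file, not missing intermediate directories. A minimal sketch that creates the folder first, reusing the snippet above (dart:io's Directory plus path_provider):

final directory = await getApplicationDocumentsDirectory();
final audioDir = Directory("${directory.path}/audio");
if (!await audioDir.exists()) {
  // Create the folder up front; the recorder is only expected to create the file itself.
  await audioDir.create(recursive: true);
}

await _audioRecorder.start(
  path: "${audioDir.path}/test.m4a",
  encoder: AudioEncoder.aacLc,
  bitRate: 128000,
  samplingRate: 44100,
  numChannels: 2,
);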

So how can I access the generated audio file so I can copy and save it elsewhere?

Any help would be highly appreciated.

llfbandit commented 1 year ago

Use File.fromUri(Uri.parse(path)). This issue should be fixed in v5.
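
Presumably the string returned on iOS is a file:// URI rather than a plain filesystem path, which would explain why File(path) can't see the file. A minimal sketch of that workaround applied to the _stop() handler above (uses dart:io's File; the other names are the ones from the widget code earlier in this issue):

Future<void> _stop() async
{
  _timer?.cancel();
  _recordDuration = 0;

  final path = await _audioRecorder.stop();

  if (path != null) {
    // Resolve the returned string through Uri so a file:// prefix on iOS
    // still maps to a usable File (llfbandit's suggestion).
    final file = File.fromUri(Uri.parse(path));
    if (kDebugMode) print('exists: ${file.existsSync()}');
    widget.onStop(file.path); // hand a plain filesystem path to the caller
  }
}

From there, the file can be copied elsewhere with file.copy(newPath) if needed.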

mregnauld commented 1 year ago

Awesome! Thank you! Is there documentation available for the current v5 beta?

mregnauld commented 1 year ago

Also, FYI, I noticed that the following sampling rate only works on Android, not on iOS:

await _audioRecorder.start(
  bitRate: 128000,
  samplingRate: 22050, // doesn't work on iOS
  numChannels: 2,
);

And if I use 44100 for the sampling rate, it works, but I always get an artefact at the beginning of the sound, like a drum kick.
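
A possible per-platform stopgap (just a sketch, not something suggested in this thread): pick the sampling rate at runtime, since 22050 only seems to fail on iOS while 44100 works there:

// Sketch: fall back to 44100 on iOS, where 22050 reportedly fails.
final samplingRate = Platform.isIOS ? 44100 : 22050; // Platform from dart:io

await _audioRecorder.start(
  bitRate: 128000,
  samplingRate: samplingRate,
  numChannels: 2,
);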

llfbandit commented 1 year ago

This should also be fixed in the upcoming v5.

mregnauld commented 1 year ago

Great! Can't wait! Thank you!

MrCsabaToth commented 1 month ago

Is the 22kHz sampling rate 22000 or 22050?