chipweinberger / dart_melty_soundfont

A port of Melty Synth by Nobuaki Tanaka (C#) to Dart

Is there any way to record the rendered waves to a wav file? #23

Open. Snoobdog10x opened this issue 3 months ago.

Snoobdog10x commented 3 months ago

test_record.webm

This is the sound, but there is too much noise in it.

Snoobdog10x commented 3 months ago

Here is my render code:


```dart
import 'dart:async';
import 'dart:isolate';

import 'package:dart_melty_soundfont/dart_melty_soundfont.dart';

import 'sound_render.dart';

class RenderRequest {
  final int midi;
  final bool isOff;

  RenderRequest({
    required this.midi,
    required this.isOff,
  });

  factory RenderRequest.fromJson(Map<String, dynamic> json) {
    return RenderRequest(
      midi: json['midi'] as int,
      isOff: json['isOff'] as bool,
    );
  }

  Map<String, dynamic> toJson() => <String, dynamic>{
        "midi": midi,
        "isOff": isOff,
      };
}

class IsolateSoundRender extends SoundRender {
  final StreamController<ArrayInt16> _renderStream =
      StreamController.broadcast();
  Isolate? _isolate;
  SendPort? _renderRequestSendPort;
  StreamSubscription? _isolateSubscription;

  IsolateSoundRender.fromBytes(super.byteData) : super.fromBytes();

  void requestRender() {
    _renderRequestSendPort?.send("please_render");
  }

  @override
  void renderMidi(
    int midi, {
    bool isOff = false,
  }) {
    _renderRequestSendPort?.send(
      RenderRequest(midi: midi, isOff: isOff).toJson(),
    );
  }

  Future<void> startIsolate() async {
    var receivePort = ReceivePort();

    // The spawned isolate does the actual rendering and reports back
    // through receivePort.
    _isolate = await Isolate.spawn(
      (sendPort) => _handleRenderRequest(synth, sendPort),
      receivePort.sendPort,
    );

    _isolateSubscription = receivePort.listen((message) {
      // The first message is the isolate's own SendPort for render requests.
      if (message is SendPort) {
        _renderRequestSendPort = message;
        return;
      }

      // Every later message is a rendered block of audio.
      if (message is ArrayInt16) {
        _renderStream.add(message);
        return;
      }
    });
  }

  StreamSubscription subscribeSoundBuffer(
      void Function(ArrayInt16 buffer) soundHandler) {
    return _renderStream.stream.listen(soundHandler);
  }

  void dispose() {
    _isolate?.kill();
    _isolateSubscription?.cancel();
    _renderStream.close();
  }

  static void _handleRenderRequest(Synthesizer synthesizer, SendPort sendPort) {
    var updatePort = ReceivePort();
    // Hand this isolate's SendPort back to the caller so it can post requests.
    sendPort.send(updatePort.sendPort);
    updatePort.listen((args) {
      // A map is a note-on/off request; any other message (e.g. "please_render")
      // just triggers another rendered block below.
      if (args is Map<String, dynamic>) {
        var renderRequest = RenderRequest.fromJson(args);
        if (renderRequest.isOff) {
          synthesizer.noteOff(channel: 1, key: renderRequest.midi);
        } else {
          synthesizer.noteOn(
              channel: 1, key: renderRequest.midi, velocity: 127);
        }
      }

      var buffer = ArrayInt16.zeros(numShorts: SoundRender.renderNoteLength);
      synthesizer.renderMonoInt16(buffer);
      sendPort.send(buffer);
    });
  }
}
```
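
For reference, here is a minimal sketch of how this IsolateSoundRender class might be driven. It only uses the members shown above; the soundfont asset path, the file name isolate_sound_render.dart, the 50 ms render timer, and the audio-output callback are illustrative assumptions, not part of the library.

```dart
import 'dart:async';

import 'package:flutter/services.dart' show rootBundle;

import 'isolate_sound_render.dart'; // the file shown above (name assumed)

Future<void> playMiddleC() async {
  // Load a soundfont and spin up the render isolate (asset path is an example).
  final byteData = await rootBundle.load('assets/piano.sf2');
  final render = IsolateSoundRender.fromBytes(byteData);
  await render.startIsolate();

  // Forward every rendered block to whatever plays or records the audio.
  final sub = render.subscribeSoundBuffer((buffer) {
    // e.g. feed buffer.bytes to an audio player, or into a WaveExporter
  });

  // Keep asking the isolate for new blocks while notes are sounding.
  final timer = Timer.periodic(const Duration(milliseconds: 50), (_) {
    render.requestRender();
  });

  render.renderMidi(60); // note on (middle C)
  await Future.delayed(const Duration(seconds: 1));
  render.renderMidi(60, isOff: true); // note off

  timer.cancel();
  await sub.cancel();
  render.dispose();
}
```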
Snoobdog10x commented 3 months ago

I listen to all the rendered buffers from the stream and combine them with this code:

```dart
import 'dart:async';
import 'dart:io';

import 'package:dart_melty_soundfont/dart_melty_soundfont.dart';
import 'package:flutter/foundation.dart';
import 'package:path/path.dart';
import 'package:wav/wav.dart';

import '../../../data/models/song.dart';
import '../sound_util/flutter_sound_helper.dart';
import 'sound_render.dart';

late final Directory documentDir;

String buildWavPath(String id) {
  return join(documentDir.path, "$id.wav");
}

void deleteCacheWave(String id) {
  var filePath = buildWavPath(id);
  var file = File(filePath);
  if (!file.existsSync()) {
    return;
  }

  file.deleteSync();
}

/// Mixes two signed 16-bit samples into one without hard clipping:
/// shift both into unsigned range, blend, then shift back.
int mixTwoSample(int a, int b) {
  int m;
  a += 32768;
  b += 32768;

  if ((a < 32768) || (b < 32768)) {
    m = a * b ~/ 32768;
  } else {
    m = 2 * (a + b) - (a * b) ~/ 32768 - 65536;
  }

  if (m == 65536) m = 65535;
  m -= 32768;
  return m;
}
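
// Illustrative arithmetic (added for clarity, not part of the original code):
// mixing two half-scale samples a = b = 16384 shifts both to 49152, so the
// second branch gives m = 2*(49152+49152) - 49152*49152 ~/ 32768 - 65536
// = 196608 - 73728 - 65536 = 57344, i.e. 24576 after shifting back.
// A plain sum would have clipped at 32767; this blend soft-limits instead.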

class WaveExporter {
  final Map<int, List<ArrayInt16>> exportBuffer = {};

  void clear() {
    exportBuffer.clear();
  }

  void addBuffer(int positionMillis, ArrayInt16 buffer) {
    exportBuffer[positionMillis] ??= [];
    exportBuffer[positionMillis]?.add(buffer);
  }

  Future<String?> combineBuffer(Song song) async {
    var totalMillis = song.durationMillis;
    if (exportBuffer.keys.isNotEmpty) {
      totalMillis = exportBuffer.keys.last + 1000;
    }
    String recordId = song.id;
    String? filePath = buildWavPath(recordId);
    var newFile = await compute((args) async {
      try {
        var sampleRate = SoundRender.sampleRate;
        var totalSampleRate = sampleRate * (totalMillis / 1000);
        var sampleRatePerMillis = (sampleRate / 1000).round();
        var buffer = ArrayInt16.zeros(numShorts: totalSampleRate.round());
        exportBuffer.forEach(
          (position, buffers) {
            for (var threadBuffer in buffers) {
              var offset = (position * sampleRatePerMillis).round();
              var dataLength = threadBuffer.bytes.lengthInBytes ~/ 2;
              for (var i = 0; i < dataLength; i++) {
                var bufferIndex = offset + i;
                int a = buffer[bufferIndex];
                int b = threadBuffer[i];
                buffer[bufferIndex] = mixTwoSample(a, b);
              }
            }
          },
        );
        var waveBuffer = await pcmToWaveBuffer(
          inputBuffer: buffer.bytes.buffer.asUint8List(),
          sampleRate: sampleRate,
        );
        var wav = Wav.read(waveBuffer);
        await wav.writeFile(filePath);
        return filePath;
      } catch (e) {
        return null;
      }
    }, "");
    clear();
    return newFile;
  }
}
```
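
As an aside on the original question (getting the rendered audio into a .wav file): package:wav, which is already imported above, can take 16-bit PCM directly via its Wav constructor, without the pcmToWaveBuffer / Wav.read round trip. A minimal sketch, assuming ArrayInt16.bytes is the ByteData the code above treats it as; the helper name and the /32768 normalization are mine, not from the package:

```dart
import 'dart:typed_data';

import 'package:dart_melty_soundfont/dart_melty_soundfont.dart';
import 'package:wav/wav.dart';

/// Hypothetical helper: write one mono ArrayInt16 buffer straight to a WAV file.
Future<void> writeMonoInt16Wav(
    ArrayInt16 buffer, int sampleRate, String filePath) async {
  final bytes = buffer.bytes;
  final pcm =
      bytes.buffer.asInt16List(bytes.offsetInBytes, bytes.lengthInBytes ~/ 2);

  // package:wav expects one Float64List per channel, normalized to [-1.0, 1.0].
  final samples = Float64List(pcm.length);
  for (var i = 0; i < pcm.length; i++) {
    samples[i] = pcm[i] / 32768.0;
  }

  final wav = Wav([samples], sampleRate); // format defaults to 16-bit PCM
  await wav.writeFile(filePath);
}
```

This skips one intermediate copy, but it does not change the mixing itself, so it would not remove the noise on its own.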
chipweinberger commented 1 month ago

looks reasonable to me