klren0312 / daliy_knowledge

知识积累,正确使用方式是 watch
21 stars 4 forks source link

canvas音频可视化 #590

Open klren0312 opened 2 years ago

klren0312 commented 2 years ago

动画

<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Document</title>
</head>
<body>
  <audio id="audio" src="./audio/audio.wav" style="display: none;"  controls></audio>
  <canvas id="canvas" width="600" height="300"></canvas>
  <button onclick="play()">test</button>
  <script>
    const audio = document.getElementById('audio')  // hidden <audio> element that feeds the Web Audio graph
    const canvas = document.getElementById('canvas')  // visualizer render target (600x300)
    const ctx = canvas.getContext('2d')  // 2D context used by the bar-graph drawing loop
    // A media element may only ever be attached to ONE MediaElementSourceNode,
    // so the audio graph must be built exactly once across clicks.
    let visualizerStarted = false

    /**
     * Start playback and render a live frequency bar graph on the canvas.
     * Safe to call repeatedly: the Web Audio graph and the rAF loop are
     * created only on the first invocation (the original rebuilt them on
     * every click, which throws on the second call).
     */
    function play () {
      audio.play()
      if (visualizerStarted) return
      visualizerStarted = true
      // Vendor-prefixed fallbacks for older browsers. Note: do NOT shadow
      // the constructor name with the instance, as the original did.
      const AudioContextCtor = window.AudioContext || window.webkitAudioContext || window.mozAudioContext
      const audioCtx = new AudioContextCtor()
      const analyser = audioCtx.createAnalyser()
      analyser.smoothingTimeConstant = 0.85
      analyser.fftSize = 32  // frequencyBinCount = 16 bars
      const source = audioCtx.createMediaElementSource(audio)
      source.connect(analyser)
      analyser.connect(audioCtx.destination)  // keep playback audible
      const bufferLength = analyser.frequencyBinCount
      const dataArray = new Uint8Array(bufferLength)
      getData()
      function getData () {
        requestAnimationFrame(getData)
        analyser.getByteFrequencyData(dataArray)
        ctx.clearRect(0, 0, canvas.width, canvas.height)
        // bufferLength is the number of bars in the graph
        for (let i = 0, x = 0; i < bufferLength; i++) {
          // map this bin's magnitude (0-255) to a bar height
          // (barHeight/barWidth were implicit globals in the original)
          const barHeight = dataArray[i];
          const barWidth = 2;
          // map index/height to a colour gradient
          const r = barHeight + 25 * (i / bufferLength);
          const g = 250 * (i / bufferLength);
          const b = 50;
          ctx.fillStyle = "rgb(" + r + "," + g + "," + b + ")";
          ctx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
          x += barWidth + 1;
        }
      }
    }
  </script>
</body>
</html>
klren0312 commented 2 years ago
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Document</title>
</head>

<body>
  <canvas id="canvas" width="600" height="300"></canvas>

  <script>
    const canvas = document.getElementById('canvas')  // visualizer render target (600x300)
    const ctx = canvas.getContext('2d')  // 2D context shared with AudioRecorder's drawing loop
    /**
     * Audio recorder backed by getUserMedia + MediaRecorder.
     * Holds the capture-device promise, the live stream, the Web Audio
     * nodes, the recorder state machine and the buffered data chunks.
     * @constructor
     */
    function AudioRecorder() {
      Object.assign(this, {
        mDevice: null,              // microphone (Promise from getUserMedia)
        mMediaStream: null,         // live audio stream from the microphone
        mAudioContext: null,        // Web Audio context, built lazily
        mAudioFromMicrophone: null, // MediaStreamSource node for the stream
        mMediaRecorder: null,       // MediaRecorder instance, built lazily
        mStatus: "stop",            // state machine: "stop" | "record"
        mChunks: [],                // recorded chunks, joined into a Blob on stop
        onStopCallBack: null        // user callback, receives an object URL
      });
    }

    AudioRecorder.prototype = {

      /**
       * Acquire the microphone and, on success, start recording.
       * Installs a Promise-based getUserMedia shim for legacy browsers
       * that only expose the prefixed callback API.
       * @method getAudioRecorderDevice
       * @for AudioRecorder
       */
      getAudioRecorderDevice: function () {
        // Audio only — we never request a video track.
        var constraints = {
          audio: true
        };
        // Very old browsers may not expose mediaDevices at all.
        if (navigator.mediaDevices === undefined) {
          navigator.mediaDevices = {};
        }
        // Some browsers partially support mediaDevices; only add
        // getUserMedia when it is missing so a native one is never clobbered.
        if (navigator.mediaDevices.getUserMedia === undefined) {
          navigator.mediaDevices.getUserMedia = function (constraints) {
            // Fall back to the prefixed, callback-style implementations.
            var getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

            // Nothing available: reject so callers still see a Promise.
            if (!getUserMedia) {
              return Promise.reject(new Error(
                'getUserMedia is not implemented in this browser'));
            }

            // BUG FIX: the shim must RETURN the wrapped Promise. The
            // original assigned it to `this.mDevice`, but `this` here is
            // navigator.mediaDevices, so the recorder never received it
            // and the shim returned undefined.
            return new Promise(function (resolve, reject) {
              getUserMedia.call(navigator, constraints, resolve, reject);
            });
          }
        }
        // BUG FIX: request the device unconditionally. The original only
        // did this in the else branch, so on the shimmed path this.mDevice
        // stayed null and recording never started.
        this.mDevice = navigator.mediaDevices.getUserMedia(constraints);

        if (this.mDevice != null) {
          this.mDevice.then((mediaStream) => {
            this.openDeviceSuccess.call(this, mediaStream)
          }, this.openDeviceFailure);
        }
      },

      /**
       * Register the callback fired when recording stops.
       * @param {function(string)} onStop - receives an object URL for the
       *   recorded audio Blob
       */
      addOnStopCallback: function (onStop) {
        this.onStopCallBack = onStop;
      },

      // getUserMedia success handler: keep the stream and begin recording.
      openDeviceSuccess: function (mediaStream) {
        console.log(mediaStream)
        this.mMediaStream = mediaStream;
        this.startRecord();
      },

      // getUserMedia failure handler: map the rejection's DOMException
      // name to a user-facing message.
      openDeviceFailure: (reason) => {
        let errorMessage;
        switch (reason.name) {
          // user denied the permission prompt
          case 'NotAllowedError':
          case 'PermissionDeniedError':
            errorMessage = '用户已禁止网页调用录音设备';
            break;
            // no recording device attached
          case 'NotFoundError':
          case 'DevicesNotFoundError':
            errorMessage = '录音设备未找到';
            break;
            // other errors
          case 'NotSupportedError':
            errorMessage = '不支持录音功能';
            break;
          default:
            errorMessage = '录音调用错误';
            // BUG FIX: `error` was undefined here (ReferenceError);
            // log the actual rejection reason instead.
            window.console.log(reason);
        }
        alert(errorMessage);
      },

      /**
       * Start recording. On first use builds the Web Audio graph:
       * stream source → MediaStream destination → MediaRecorder, plus an
       * analyser branch that drives the canvas visualizer.
       * @method startRecord
       * @for AudioRecorder
       * @return {Boolean} true if recording started, false if already recording
       */
      startRecord: function () {
        let retValue = false;
        if (this.mStatus == "stop") {
          this.mChunks = [];
          if (this.mMediaRecorder == null) {
            const AudioContext = window.AudioContext || window.webkitAudioContext;
            this.mAudioContext = new AudioContext();

            // source node fed by the microphone stream
            this.mAudioFromMicrophone = this.mAudioContext.createMediaStreamSource(this.mMediaStream);
            // destination node whose stream is handed to MediaRecorder
            const destination = this.mAudioContext.createMediaStreamDestination();
            this.mMediaRecorder = new MediaRecorder(destination.stream);
            this.mAudioFromMicrophone.connect(destination);
            this.mMediaRecorder.ondataavailable = (audioData) => {
              this.onProcessData.call(this, audioData)
            };
            this.mMediaRecorder.onstop = (event) => {
              this.onStop.call(this, event)
            };
            // Analyser branch for the visualizer. It is intentionally NOT
            // connected to the context destination, so the microphone is
            // not echoed through the speakers.
            this.analyser = this.mAudioContext.createAnalyser();
            this.analyser.smoothingTimeConstant = 0.8;
            this.analyser.fftSize = 32;  // frequencyBinCount = 16 bars
            this.mAudioFromMicrophone.connect(this.analyser);
            const bufferLength = this.analyser.frequencyBinCount;
            const dataArray = new Uint8Array(bufferLength);
            const getData = () => {
              requestAnimationFrame(getData)
              this.analyser.getByteFrequencyData(dataArray)
              ctx.clearRect(0, 0, 600, 300)
              console.log(dataArray.reduce((a, b) => a + b, 0) / dataArray.length)
              // one bar per frequency bin
              for (let i = 0, x = 0; i < bufferLength; i++) {
                // map this bin's magnitude (0-255) to a bar height
                // (BUG FIX: barHeight/barWidth were implicit globals)
                const barHeight = dataArray[i];
                const barWidth = 20;
                // map index/height to a colour gradient
                const r = barHeight + 25 * (i / bufferLength);
                const g = 250 * (i / bufferLength);
                const b = 50;
                ctx.fillStyle = "rgb(" + r + "," + g + "," + b + ")";
                ctx.fillRect(x, 300 - barHeight, barWidth, barHeight);
                x += barWidth + 1;
              }
            }

            getData();
          }

          this.mStatus = "record";
          retValue = true;

        }
        return retValue;
      },

      // MediaRecorder dataavailable handler: buffer each delivered chunk.
      onProcessData: function (audioData) {
        this.mChunks.push(audioData.data);
      },

      // MediaRecorder stop handler: assemble the buffered chunks into a
      // Blob and hand its object URL to the user callback, if any.
      onStop: function (event) {
        var blob = new Blob(this.mChunks, {
          'type': 'audio/mpeg'
        });
        var mp3URL = URL.createObjectURL(blob);
        if (this.onStopCallBack != null) {
          this.onStopCallBack(mp3URL);
        }

      },

      /**
       * Stop recording: flush pending data, stop the MediaRecorder and
       * return the state machine to "stop".
       * @method stopRecord
       * @for AudioRecorder
       */
      stopRecord: function () {
        if (this.mStatus == "record") {
          this.mMediaRecorder.requestData();
          this.mMediaRecorder.stop();
          this.mStatus = "stop";
        }
      }
    }
  </script>
  <script>
    // Entry point: request microphone access as soon as the page loads;
    // recording starts automatically once permission is granted.
    const audioRecorder = new AudioRecorder()
    audioRecorder.getAudioRecorderDevice()
  </script>
</body>

</html>