samhirtarif / react-audio-visualize

An audio visualizer for React. Provides separate components to visualize both live audio and audio blobs.
https://www.npmjs.com/package/react-audio-visualize
MIT License
87 stars 17 forks source link

"AudioVisualizer" component doesn't work. #16

Closed xNevrroXx closed 1 year ago

xNevrroXx commented 1 year ago

Problem: Hello, I use a custom audio recorder component (the code is given below) together with the react-audio-visualize package. While recording the user's voice everything is OK — the "LiveAudioVisualizer" component works well. But when I try to render a finished audio recording, the "AudioVisualizer" does not visualize anything except the background color. In other words, the canvas with styles renders normally, but the sound waves do not.

Project setup: "react": "^18.2.0", "react-audio-visualize": "^1.1.3";

What I tried: I tried to create a simple project and visualize a song in "mp3" format. And it didn't work.

Code samples: Custom useAudioRecorder hook:

import {useRef, useState} from "react";

// Convenience alias so consumers can type the hook's return value
// without re-declaring its shape.
export type IUseAudioRecorderReturnType = ReturnType<typeof useAudioRecorder>;

// Container/codec for the recording.
// NOTE(review): "audio/webm" is not supported by Safari — confirm target browsers.
const mimeType = "audio/webm" as const;

/**
 * React hook wrapping the MediaRecorder API.
 *
 * Flow: getMicrophonePermission() acquires a MediaStream, startRecording()
 * creates a MediaRecorder on it, stopRecording() finalizes the recording
 * and exposes it as a single Blob via `audio`.
 *
 * Returns { mediaRecorder, permission, isRecording, audio,
 *           getMicrophonePermission, startRecording, stopRecording }.
 */
const useAudioRecorder = () => {
    const mediaRecorder = useRef<MediaRecorder | null>(null);
    // Chunks are accumulated in a ref, not state: the previous approach
    // stored a locally-mutated array via setState and then read that state
    // inside the onstop closure — it only worked by in-place mutation of a
    // state value, which React does not guarantee to observe. A ref is the
    // idiomatic container for mutable data that must not trigger re-renders.
    const audioChunksRef = useRef<Blob[]>([]);
    const [permission, setPermission] = useState<boolean>(false);
    const [isRecording, setIsRecording] = useState<boolean>(false);
    const [stream, setStream] = useState<MediaStream | null>(null);
    const [audio, setAudio] = useState<Blob | null>(null);

    /**
     * Prompts the user for microphone access and stores the resulting
     * stream. Alerts when the browser lacks MediaRecorder support or the
     * user denies permission.
     */
    const getMicrophonePermission = async () => {
        if (!("MediaRecorder" in window)) {
            alert("The MediaRecorder API is not supported in your browser.");
            return;
        }
        try {
            const streamData = await navigator.mediaDevices.getUserMedia({
                audio: true,
                video: false,
            });
            setPermission(true);
            setStream(streamData);
        } catch (err) {
            if (err instanceof Error) {
                alert(err.message);
                return;
            }
            console.warn(err);
        }
    };

    /** Starts recording on the previously acquired stream (no-op without one). */
    const startRecording = () => {
        if (!stream) {
            return;
        }

        setIsRecording(true);
        audioChunksRef.current = [];
        // Create a fresh recorder per recording session; MediaRecorder
        // instances cannot be restarted after stop().
        const media = new MediaRecorder(stream, { mimeType });
        mediaRecorder.current = media;
        media.ondataavailable = (event) => {
            // Defensive: skip empty chunks some browsers emit on stop.
            if (typeof event.data === "undefined") return;
            if (event.data.size === 0) return;
            audioChunksRef.current.push(event.data);
        };
        media.start();
    };

    /**
     * Stops the active recorder. Once the final dataavailable/stop events
     * have fired, assembles all chunks into one Blob exposed as `audio`.
     * NOTE(review): the MediaStream tracks are intentionally left running so
     * another recording can start without re-prompting — confirm the mic
     * indicator staying on is acceptable UX.
     */
    const stopRecording = () => {
        if (!mediaRecorder.current) {
            return;
        }
        setIsRecording(false);
        mediaRecorder.current.onstop = () => {
            const audioBlob = new Blob(audioChunksRef.current, { type: mimeType });
            setAudio(audioBlob);
            audioChunksRef.current = [];
            // Drop the finished recorder; a new one is created per session.
            mediaRecorder.current = null;
        };
        mediaRecorder.current.stop();
    };

    return {
        mediaRecorder,
        permission,
        isRecording,
        audio,
        getMicrophonePermission,
        startRecording,
        stopRecording
    };
};

export {useAudioRecorder};

The App component with the main logic(voice recording and rendering a finished audio recording):

import {AudioVisualizer, LiveAudioVisualizer} from "react-audio-visualize";
import {useAudioRecorder} from "./component/useAudioRecorder.hook.ts";

/**
 * Recording UI: permission button → start/stop controls, with a live
 * visualizer while recording and a waveform of the finished Blob after.
 */
function App() {
    const {
        mediaRecorder,
        permission,
        isRecording,
        audio,
        getMicrophonePermission,
        startRecording,
        stopRecording
    } = useAudioRecorder();

    return (
        <div>
            { !permission && <button onClick={getMicrophonePermission}>Get permission</button> }
            { permission && !isRecording && <button onClick={startRecording}>Start</button> }
            { isRecording && <button onClick={stopRecording}>Stop</button> }

            {/* width/height are numeric canvas pixel dimensions, not CSS
                strings: passing "300px"/"610px" is why only the background
                color rendered. Also guard against a null recorder ref. */}
            {
                isRecording && mediaRecorder.current &&
                <LiveAudioVisualizer
                    mediaRecorder={mediaRecorder.current}
                    width={300}
                    height={75}
                />
            }

            {
                audio &&
                <AudioVisualizer
                    blob={audio}
                    width={610}
                    height={50}
                    barWidth={1}
                    gap={2}
                    barColor={"#f76565"}
                    backgroundColor={"black"}
                />
            }

        </div>
    )
}

export default App;

I would really appreciate your help

xNevrroXx commented 1 year ago

So, it seems like my IDE glitched. Anyway, thanks to anyone who looked into this problem.