bytedeco / sample-projects

Sample project files for JavaCPP, JavaCPP Presets, and JavaCV

FFmpegFrameRecorder doesn't generate a video file on Android 10 or higher #64

Open anthonydmg opened 2 years ago

anthonydmg commented 2 years ago

Hello, I have been trying the JavaCV-android-example project, but found that it doesn't work on Android 10 or higher: the video file (.mp4) is never created when the recording completes.

I'm fairly new to Android development, but I think the problem is that FFmpegFrameRecorder is handed a File instance for the .mp4 output. On newer versions of Android (10 or higher), creating files that way no longer works, and the MediaStore API should be used instead.

private void initRecorder() {
        ffmpeg_link = new File(getBaseContext().getExternalFilesDir(null), "stream.mp4");
        Log.w(LOG_TAG, "init recorder");

        if(RECORD_LENGTH > 0) {
            imagesIndex = 0;
            images = new Frame[RECORD_LENGTH * frameRate];
            timestamps = new long[images.length];
            for(int i = 0; i < images.length; i++) {
                images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
                timestamps[i] = -1;
            }
        } else if(yuvImage == null) {
            yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            Log.i(LOG_TAG, "create yuvImage");
        }

        Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        recorder.setFormat("mp4");
        recorder.setSampleRate(sampleAudioRateInHz);
        // Set in the surface changed method
        recorder.setFrameRate(frameRate);

        Log.i(LOG_TAG, "recorder initialize success");

        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
        runAudioThread = true;
        Log.i(LOG_TAG, "recorder initialize success");
    }

So far I haven't been able to figure out how to fix this. Does anyone know a solution?

saudet commented 2 years ago

If you can't use the File API in your application, you can use an OutputStream with FFmpegFrameRecorder instead.
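
For reference, a minimal sketch of that overload; the file name, dimensions, and class name below are placeholders, not taken from this issue:

    import java.io.FileOutputStream;
    import java.io.OutputStream;
    import org.bytedeco.javacv.FFmpegFrameRecorder;

    public class StreamRecorderSketch {
        public static void main(String[] args) throws Exception {
            // FFmpegFrameRecorder has constructors that accept an OutputStream
            // instead of a File or a path string.
            OutputStream out = new FileOutputStream("placeholder.mkv"); // placeholder sink
            FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(out, 640, 480, 1); // width, height, audio channels
            recorder.setFormat("matroska"); // with a stream, the format must be set explicitly
            recorder.start();
            // ... call recorder.record(frame) for each captured Frame ...
            recorder.stop();
            recorder.release();
            out.close();
        }
    }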

anthonydmg commented 2 years ago

Sure, I have adapted the code to use an OutputStream with FFmpegFrameRecorder like this.

    private void initRecorder() {
        Long ts = System.currentTimeMillis() / 1000;
        String videoFileName = "stream_"+ ts + ".mp4";
        ContentResolver resolver = getBaseContext().getContentResolver();

        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q){
            try {
                valuesVideos = new ContentValues();
                valuesVideos.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies");
                valuesVideos.put(MediaStore.Video.Media.TITLE, videoFileName);
                valuesVideos.put(MediaStore.Video.Media.DISPLAY_NAME, videoFileName);
                valuesVideos.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
                valuesVideos.put(
                        MediaStore.Video.Media.DATE_ADDED,
                        System.currentTimeMillis() / 1000 // DATE_ADDED is in seconds, not milliseconds
                );
                Uri collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY);
                valuesVideos.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis());
                valuesVideos.put(MediaStore.Video.Media.IS_PENDING, 1);
                uriSavedVideo = resolver.insert(collection, valuesVideos);
                pfd = resolver.openFileDescriptor(uriSavedVideo, "w");
                // Keep the stream creation inside the try block: pfd stays null
                // if openFileDescriptor() throws, and the line below would then
                // crash with a NullPointerException.
                outVideo = new FileOutputStream(pfd.getFileDescriptor());
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
            recorder = new FFmpegFrameRecorder(outVideo, imageWidth, imageHeight, 1);
        } else {
            ffmpeg_link = new File(getBaseContext().getExternalFilesDir(null), "stream.mp4");
            Log.w(LOG_TAG, "init recorder");
            recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
            Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
        }

        if(RECORD_LENGTH > 0) {
            imagesIndex = 0;
            images = new Frame[RECORD_LENGTH * frameRate];
            timestamps = new long[images.length];
            for(int i = 0; i < images.length; i++) {
                images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
                timestamps[i] = -1;
            }
        } else if(yuvImage == null) {
            yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            Log.i(LOG_TAG, "create yuvImage");
        }

        recorder.setFormat("mp4");
        recorder.setSampleRate(sampleAudioRateInHz);
        // Set in the surface changed method
        recorder.setFrameRate(frameRate);

        Log.i(LOG_TAG, "recorder initialize success");

        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
        runAudioThread = true;
        Log.i(LOG_TAG, "recorder initialize success");
    }

Now when I start the recording I get the following error:

avformat_write_header error() error -22: Could not write header to 'java.io.FileOutputStream@80a7537' (For more details, make sure FFmpegLogCallback.set() has been called.)

Am I missing something? What could be happening?
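
As the error text suggests, enabling the FFmpeg log output usually reveals the underlying cause of a code like -22 (EINVAL). A minimal sketch; the chosen log level is an assumption:

    import org.bytedeco.ffmpeg.global.avutil;
    import org.bytedeco.javacv.FFmpegLogCallback;

    // Call once, early (e.g. before creating the recorder), to route FFmpeg's
    // native log messages to the Java logger so they show up in logcat.
    FFmpegLogCallback.set();
    avutil.av_log_set_level(avutil.AV_LOG_DEBUG); // verbosity level is an assumption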

saudet commented 2 years ago

The MP4 format doesn't support streams well (its muxer needs a seekable output to write the header). Try to use another format.
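
If MP4 output were a hard requirement, a workaround sometimes used with FFmpeg (not verified in this thread) is fragmented MP4, which the muxer can write without seeking back into the output:

    // Sketch: fragmented MP4 avoids the seek-back the regular MP4 muxer needs
    // for its header/trailer. The movflags values are standard FFmpeg muxer
    // options; whether they fit this recorder setup is an assumption.
    recorder.setFormat("mp4");
    recorder.setOption("movflags", "frag_keyframe+empty_moov");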

anthonydmg commented 2 years ago

Thanks, I solved it by switching to the MKV format, but I still have a problem. When I open the generated video, the total duration shown is greater than what actually plays: if I record a 6-second video, the player reports a 10-second duration but playback stops at 6 seconds.

 private void initRecorder() {

        ContentResolver resolver = getBaseContext().getContentResolver();

        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q){
            Long ts = System.currentTimeMillis() / 1000;
            String videoFileName = "stream_"+ ts + ".mkv";
            try {
                valuesVideos = new ContentValues();
                valuesVideos.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies");
                valuesVideos.put(MediaStore.Video.Media.TITLE, videoFileName);
                valuesVideos.put(MediaStore.Video.Media.DISPLAY_NAME, videoFileName);
                valuesVideos.put(MediaStore.Video.Media.MIME_TYPE, "video/x-matroska"); // standard MIME type for MKV
                valuesVideos.put(
                        MediaStore.Video.Media.DATE_ADDED,
                        System.currentTimeMillis() / 1000 // DATE_ADDED is in seconds, not milliseconds
                );
                Uri collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY);
                valuesVideos.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis());
                valuesVideos.put(MediaStore.Video.Media.IS_PENDING, 1);
                uriSavedVideo = resolver.insert(collection, valuesVideos);
                pfd = resolver.openFileDescriptor(uriSavedVideo, "w");
                // As above, create the stream inside the try block so a failed
                // openFileDescriptor() cannot lead to a null pfd being used.
                outVideo = new FileOutputStream(pfd.getFileDescriptor());
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
            recorder = new FFmpegFrameRecorder(outVideo, imageWidth, imageHeight, 1);
            recorder.setFormat("matroska");
        } else {
            Long ts = System.currentTimeMillis() / 1000;
            String videoFileName = "stream_"+ ts + ".mp4";
            ffmpeg_link = new File(getBaseContext().getExternalFilesDir(null), videoFileName);
            Log.w(LOG_TAG, "init recorder");
            recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
            Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
            recorder.setFormat("mp4");
        }

        if(RECORD_LENGTH > 0) {
            imagesIndex = 0;
            images = new Frame[RECORD_LENGTH * frameRate];
            timestamps = new long[images.length];
            for(int i = 0; i < images.length; i++) {
                images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
                timestamps[i] = -1;
            }
        } else if(yuvImage == null) {
            yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            Log.i(LOG_TAG, "create yuvImage");
        }

        recorder.setSampleRate(sampleAudioRateInHz);
        // Set in the surface changed method
        recorder.setFrameRate(frameRate);

        Log.i(LOG_TAG, "recorder initialize success");

        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
        runAudioThread = true;
        Log.i(LOG_TAG, "recorder initialize success");
    }

I'm not sure if I'm missing something to configure.
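
One detail worth double-checking: the entry is inserted with IS_PENDING set to 1, so it also needs to be marked as finished once recording stops, otherwise other apps won't see the file. A minimal sketch, assuming resolver and uriSavedVideo are kept as fields and that some stopRecording() hook exists:

    // Sketch: finalize the pending MediaStore entry after recorder.stop().
    // The surrounding method and the fields it uses are assumptions.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && uriSavedVideo != null) {
        ContentValues done = new ContentValues();
        done.put(MediaStore.Video.Media.IS_PENDING, 0);
        resolver.update(uriSavedVideo, done, null, null);
    }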

saudet commented 2 years ago

It's possible that's a limitation of your video player. When the muxer writes to a non-seekable stream, it can't seek back to patch the exact duration into the file's header, so players that trust that metadata may report the wrong length. Do you get the same result with some other known good player like VLC?

anthonydmg commented 2 years ago

No, with VLC the duration is correct, so I guess it's the video format that my usual player doesn't handle well. Thanks for the help.

saudet commented 2 years ago

I see. I'm sure it's possible to hack something with FFmpeg by modifying FFmpegFrameRecorder itself to make it output what you need, but someone's going to need to spend some time experimenting with that...