Open zhushizhan opened 2 weeks ago
Please refer to this sample code: https://github.com/bytedeco/javacv/blob/master/samples/JavaFxPlayVideoAndAudio.java
Please refer to this sample code: https://github.com/bytedeco/javacv/blob/master/samples/JavaFxPlayVideoAndAudio.java
Perhaps I was not being precise: after I synthesize the images into a video, the result looks like this.
If you're expecting FFmpeg to create image files, you might need to call recorder.close() before that happens.
public static boolean createMp4(String mp4SavePath, Map<Integer, File> imgMap, int width, int height) { // System.loadLibrary(Core.NATIVE_LIBRARY_NAME); try (FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(mp4SavePath, width, height); Java2DFrameConverter converter = new Java2DFrameConverter()) { recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4); recorder.setFrameRate(25); recorder.setFrameNumber(250); recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); recorder.setAudioBitrate(900000000); recorder.setAudioQuality(0); recorder.setVideoOption("crf", "0"); // recorder.setVideoOption("r", "25"); recorder.setVideoOption("s", width + "x" + height); recorder.setVideoOption("b", "16000k"); recorder.setVideoOption("preset", "slow"); recorder.setVideoOption("tune", "zerolatency"); recorder.setVideoOption("qscale", "0.01"); recorder.setFormat("mp4"); recorder.start(); for (int i = 0; i < imgMap.size(); i++) { BufferedImage read = ImageIO.read(imgMap.get(i)); recorder.record(converter.getFrame(read)); } recorder.stop(); recorder.release(); return true; } catch (Exception e) { System.out.println("error:" + e.getMessage()); return false; }