Closed ghost closed 7 years ago
可以的,你可以直接使用Muxer来进行保存,也可以自己把视频流打包。使用Muxer的话参照之前的方式就行,我顺手帮你加了一下以前的逻辑,你可以参考一下,代码结构自己重构一下吧。把ScreenRecorder.java替换即可:
package net.yrom.screenrecorder;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.projection.MediaProjection;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import net.yrom.screenrecorder.core.Packager;
import net.yrom.screenrecorder.rtmp.RESFlvData;
import net.yrom.screenrecorder.rtmp.RESFlvDataCollecter;
import net.yrom.screenrecorder.tools.LogTools;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
import static net.yrom.screenrecorder.rtmp.RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
/**
* @author Yrom
* Modified by raomengyang 2017-03-12
*/
public class ScreenRecorder extends Thread {
    private static final String TAG = "ScreenRecorder";

    // ---- encoder parameters ----
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 30; // 30 fps
    private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
    private static final int TIMEOUT_US = 10000; // dequeueOutputBuffer timeout, microseconds
    // The encoder emits Annex-B NAL units: each is prefixed with a 4-byte start
    // code (0x00000001) that the FLV packaging path must strip.
    private static final int NAL_START_CODE_LENGTH = 4;

    private int mWidth;
    private int mHeight;
    private int mBitRate;
    private int mDpi;
    private String mDstPath; // local MP4 output path on external storage
    private MediaProjection mMediaProjection;

    private MediaCodec mEncoder;
    private Surface mSurface; // encoder input surface, fed by the virtual display
    private long startTime = 0; // first frame's PTS in ms; FLV timestamps are rebased on it
    private MediaMuxer mMuxer;
    // FIX: this flag was declared but never set or consulted in the original;
    // it now guards writeSampleData() and the stop() call in release().
    private boolean mMuxerStarted = false;
    private int mVideoTrackIndex = -1;
    private AtomicBoolean mQuit = new AtomicBoolean(false);
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private VirtualDisplay mVirtualDisplay;
    private RESFlvDataCollecter mDataCollecter; // sink for FLV-packaged video (RTMP stream)

    /**
     * Captures the screen via a {@link MediaProjection}, H.264-encodes it, and
     * simultaneously (a) streams FLV-packaged NAL units to the supplied collecter
     * and (b) records an MP4 file to external storage.
     *
     * @param dataCollecter receives FLV video packets for the RTMP stream
     * @param width         capture/encode width in pixels
     * @param height        capture/encode height in pixels
     * @param bitrate       target video bitrate in bits per second
     * @param dpi           density of the virtual display
     * @param mp            an active, user-granted MediaProjection
     */
    public ScreenRecorder(RESFlvDataCollecter dataCollecter, int width, int height, int bitrate, int dpi, MediaProjection mp) {
        super(TAG);
        mWidth = width;
        mHeight = height;
        mBitRate = bitrate;
        mDpi = dpi;
        mMediaProjection = mp;
        startTime = 0;
        mDataCollecter = dataCollecter;
        File file = new File(Environment.getExternalStorageDirectory(),
                "record-" + width + "x" + height + "-" + System.currentTimeMillis() + ".mp4");
        mDstPath = file.getAbsolutePath();
    }

    /**
     * Asks the recording loop to stop; the thread then releases all resources
     * (encoder, virtual display, projection, muxer) before exiting.
     */
    public final void quit() {
        mQuit.set(true);
    }

    @Override
    public void run() {
        try {
            try {
                prepareEncoder();
                mMuxer = new MediaMuxer(mDstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            mVirtualDisplay = mMediaProjection.createVirtualDisplay(TAG + "-display",
                    mWidth, mHeight, mDpi, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                    mSurface, null, null);
            Log.d(TAG, "created virtual display: " + mVirtualDisplay);
            recordVirtualDisplay();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            release();
        }
    }

    /**
     * Configures and starts the H.264 surface-input encoder.
     * Must run before the virtual display is created: the display renders into
     * {@link #mSurface}, which only exists after configure().
     */
    private void prepareEncoder() throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        Log.d(TAG, "created video format: " + format);
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mSurface = mEncoder.createInputSurface();
        Log.d(TAG, "created input surface: " + mSurface);
        mEncoder.start();
    }

    /**
     * Drain loop: pulls encoded buffers until {@link #quit()} is called.
     * INFO_OUTPUT_FORMAT_CHANGED arrives exactly once before the first frame;
     * it both starts the MP4 muxer and emits the AVC configuration record.
     */
    private void recordVirtualDisplay() {
        while (!mQuit.get()) {
            int eobIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
            switch (eobIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    // no output available within TIMEOUT_US; just poll again
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
                            mEncoder.getOutputFormat().toString());
                    sendAVCDecoderConfigurationRecord(0, mEncoder.getOutputFormat());
                    break;
                default:
                    LogTools.d("VideoSenderThread,MediaCode,eobIndex=" + eobIndex);
                    if (startTime == 0) {
                        startTime = mBufferInfo.presentationTimeUs / 1000;
                    }
                    /*
                     * SPS/PPS were already sent in INFO_OUTPUT_FORMAT_CHANGED, so
                     * codec-config buffers are skipped here.
                     * FIX: flags is a bit field. The original equality test
                     * (flags != BUFFER_FLAG_CODEC_CONFIG) failed to skip a config
                     * buffer whenever any other flag bit was also set; the bitwise
                     * mask below is the correct check.
                     */
                    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
                            && mBufferInfo.size != 0) {
                        ByteBuffer encodedData = mEncoder.getOutputBuffers()[eobIndex];
                        // Muxer path: writeSampleData() reads mBufferInfo.offset/size,
                        // so hand it an untouched duplicate of the buffer.
                        // FIX: only write once the muxer has actually been started
                        // and a track index has been assigned.
                        if (mMuxerStarted && mVideoTrackIndex >= 0) {
                            mMuxer.writeSampleData(mVideoTrackIndex, encodedData.duplicate(), mBufferInfo);
                        }
                        // FLV/RTMP path: wants the bare NAL unit, so position past
                        // the 4-byte Annex-B start code.
                        encodedData.position(mBufferInfo.offset + NAL_START_CODE_LENGTH);
                        encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                        sendRealData((mBufferInfo.presentationTimeUs / 1000) - startTime, encodedData);
                    }
                    mEncoder.releaseOutputBuffer(eobIndex, false);
                    break;
            }
        }
    }

    /**
     * Tears everything down in dependency order. Each stop is guarded so one
     * failure cannot abort the remaining cleanup.
     */
    private void release() {
        if (mEncoder != null) {
            try {
                // stop() throws IllegalStateException if the codec is already
                // in an error/released state; release() below is still required.
                mEncoder.stop();
            } catch (IllegalStateException e) {
                Log.w(TAG, "encoder stop failed", e);
            }
            mEncoder.release();
            mEncoder = null;
        }
        if (mVirtualDisplay != null) {
            mVirtualDisplay.release();
            mVirtualDisplay = null;
        }
        if (mMediaProjection != null) {
            mMediaProjection.stop();
        }
        if (mMuxer != null) {
            try {
                // FIX: MediaMuxer.stop() throws IllegalStateException when
                // start() was never reached (e.g. encoder failed before the
                // first INFO_OUTPUT_FORMAT_CHANGED) — only stop if started.
                if (mMuxerStarted) {
                    mMuxer.stop();
                }
            } catch (IllegalStateException e) {
                Log.w(TAG, "muxer stop failed", e);
            }
            mMuxer.release();
            mMuxer = null;
            mMuxerStarted = false;
        }
    }

    /** @return true while the recording loop is still (or about to be) running. */
    public final boolean getStatus() {
        return !mQuit.get();
    }

    /**
     * Starts the MP4 muxer with the encoder's negotiated output format and sends
     * the AVCDecoderConfigurationRecord (SPS/PPS) as the stream's first FLV tag.
     *
     * @param tms    timestamp in ms for the config tag (0 = stream start)
     * @param format the encoder's output format from INFO_OUTPUT_FORMAT_CHANGED
     */
    private void sendAVCDecoderConfigurationRecord(long tms, MediaFormat format) {
        // Guard against a repeated format change: the muxer may only start once.
        if (!mMuxerStarted) {
            mVideoTrackIndex = mMuxer.addTrack(format);
            Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
            mMuxer.start();
            mMuxerStarted = true;
        }
        byte[] AVCDecoderConfigurationRecord = Packager.H264Packager.generateAVCDecoderConfigurationRecord(format);
        int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                AVCDecoderConfigurationRecord.length;
        byte[] finalBuff = new byte[packetLen];
        Packager.FLVPackager.fillFlvVideoTag(finalBuff,
                0,
                true,   // isAVCSequenceHeader
                true,   // isKeyFrame
                AVCDecoderConfigurationRecord.length);
        System.arraycopy(AVCDecoderConfigurationRecord, 0,
                finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH, AVCDecoderConfigurationRecord.length);
        RESFlvData resFlvData = new RESFlvData();
        resFlvData.droppable = false; // config record must never be dropped
        resFlvData.byteBuffer = finalBuff;
        resFlvData.size = finalBuff.length;
        resFlvData.dts = (int) tms;
        resFlvData.flvTagType = FLV_RTMP_PACKET_TYPE_VIDEO;
        resFlvData.videoFrameType = RESFlvData.NALU_TYPE_IDR;
        mDataCollecter.collect(resFlvData, FLV_RTMP_PACKET_TYPE_VIDEO);
    }

    /**
     * Wraps one encoded NAL unit (start code already stripped) in an FLV video
     * tag and hands it to the collecter.
     *
     * @param tms      frame timestamp in ms, relative to the first frame
     * @param realData buffer positioned at the NAL unit payload
     */
    private void sendRealData(long tms, ByteBuffer realData) {
        int realDataLength = realData.remaining();
        int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                Packager.FLVPackager.NALU_HEADER_LENGTH +
                realDataLength;
        byte[] finalBuff = new byte[packetLen];
        realData.get(finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                        Packager.FLVPackager.NALU_HEADER_LENGTH,
                realDataLength);
        // Low 5 bits of the first NAL byte = nal_unit_type; 5 is an IDR slice.
        int frameType = finalBuff[Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                Packager.FLVPackager.NALU_HEADER_LENGTH] & 0x1F;
        Packager.FLVPackager.fillFlvVideoTag(finalBuff,
                0,
                false,          // not a sequence header
                frameType == 5, // key frame iff IDR
                realDataLength);
        RESFlvData resFlvData = new RESFlvData();
        resFlvData.droppable = true; // media frames may be dropped under congestion
        resFlvData.byteBuffer = finalBuff;
        resFlvData.size = finalBuff.length;
        resFlvData.dts = (int) tms;
        resFlvData.flvTagType = FLV_RTMP_PACKET_TYPE_VIDEO;
        resFlvData.videoFrameType = frameType;
        mDataCollecter.collect(resFlvData, FLV_RTMP_PACKET_TYPE_VIDEO);
    }
}
非常感谢
@eterrao 还有个问题想请教一下,我在搭建rtmp服务器的过程中,启动时总是报
nginx: [emerg] unknown directive "rtmp" in C:\Users\linktones\Desktop\RTMP-Server/conf/nginx.conf:9
的错误,下面是我的nginx.conf配置文件的内容:
worker_processes 1;
error_log logs/error.log debug;
events {
worker_connections 1024;
}
rtmp {
server {
listen 1935;
application hls {
live on;
hls on;
hls_path nginx-rtmp-module/tmp/app/;
hls_fragment 5s;
recorder rec {
record all manual;
record_suffix _rec.flv;
record_path nginx-rtmp-module/tmp/rec/;
record_unique on;
}
}
application vod2{
play nginx-rtmp-module/tmp/rec/;
}
}
}
http {
server {
listen 18080;
location /stat {
rtmp_stat all;
rtmp_stat_stylesheet stat.xsl;
}
location /stat.xsl {
root nginx-rtmp-module/;
}
location /control {
rtmp_control all;
}
location /hls/ {
#server hls fragments
types{
application/vnd.apple.mpegurl m3u8;
video/mp2t ts;
}
alias nginx-rtmp-module/tmp/app/;
expires -1;
}
location /vod/{
alias nginx-rtmp-module/tmp/rec/;
}
location / {
root nginx-rtmp-module/test/www/;
}
}
}
请看下是什么原因,谢谢
@yuqizi 你的windows版的 nginx?
@java-Xs 是的,具体您可以参考http://blog.csdn.net/zxw_tiantan/article/details/53906253,不过后来我这个需求就挂了,没往后研究。
请问在直播过程中,是否能将直播的内容同时保存成MP4文件,存储到手机上