Open OneDayNoMore opened 5 years ago
l want to set pixelFormat to UVCCamera.PIXEL_FORMAT_NV21
and use MediaCodec to transfer H264:
public AvcEncoder(int width, int height, int framerate, int bitrate, LocalSocket mSender, Context context) { this.context = context; try { out = mSender.getOutputStream(); } catch (IOException e) { e.printStackTrace(); } m_width = 1280; m_height = 720; m_framerate = 30; Log.i("mQuality", "2:" + framerate); MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", m_width, m_height); mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 256000); mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 31); mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); try { mediaCodec = MediaCodec.createEncoderByType("video/avc"); } catch (IOException e) { e.printStackTrace(); }
// //配置编码器参数 // mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); //启动编码器 mediaCodec.start(); //创建保存编码后数据的文件 createfile();
context.registerReceiver(BirateBroadcastReciver, new IntentFilter("com.ffcs.z.talklibrary.action.birate")); } private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264"; private BufferedOutputStream outputStream; private void createfile() { File file = new File(path); if (file.exists()) { file.delete(); } try { outputStream = new BufferedOutputStream(new FileOutputStream(file)); } catch (Exception e) { e.printStackTrace(); } } private void StopEncoder() { try { mediaCodec.stop(); mediaCodec.release(); mediaCodec = null; } catch (Exception e) { e.printStackTrace(); } } public boolean isRuning = false; public void StopThread() { context.unregisterReceiver(BirateBroadcastReciver); isRuning = false; try { StopEncoder(); outputStream.flush(); outputStream.close(); } catch (IOException e) { e.printStackTrace(); } } public void StartEncoderThread() { Thread EncoderThread = new Thread(new Runnable() { @SuppressLint("WrongConstant") @Override public void run() { isRuning = true; byte[] input; long pts; long generateIndex = 0; while (isRuning) { //访问MainActivity用来缓冲待解码数据的队列 if (USBStream.YUVQueue.size() > 0) { //从缓冲队列中取出一帧 input = VideoStream.YUVQueue.poll(); byte[] yuv420sp = new byte[m_width * m_height * 3 / 2];
// //把待编码的视频帧转换为YUV420格式 NV21ToNV12(input, yuv420sp, m_width, m_height); input = yuv420sp;
if (input != null && mediaCodec != null) { try { //编码器输入缓冲区 ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers(); //编码器输出缓冲区 ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers(); int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1); if (inputBufferIndex >= 0) { pts = computePresentationTime(generateIndex); ByteBuffer inputBuffer = inputBuffers[inputBufferIndex]; inputBuffer.clear(); //把转换后的YUV420格式的视频帧放到编码器输入缓冲区中 inputBuffer.put(input); mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0); generateIndex += 1; } MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); int outputBufferIndex = -1; if (mediaCodec != null) outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC); while (outputBufferIndex >= 0) { //Log.i("AvcEncoder", "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+""); ByteBuffer outputBuffer = outputBuffers[outputBufferIndex]; byte[] outData = new byte[bufferInfo.size]; outputBuffer.get(outData);
// txtUtils.saveTxt("outData:" + outData.length);
if (bufferInfo.flags == BUFFER_FLAG_CODEC_CONFIG) { configbyte = new byte[bufferInfo.size]; configbyte = outData; } //else if (bufferInfo.flags == BUFFER_FLAG_KEY_FRAME) { else { int spsPos = 0; int ppsPos = 0; if (bufferInfo.flags == BUFFER_FLAG_KEY_FRAME) { int i;
// txtUtils.saveTxt("KeyFram");
for (i = 0; i < configbyte.length - 4; i++) { if (configbyte[i] == 0x00 && configbyte[i + 1] == 0x00 && configbyte[i + 2] == 0x00 && configbyte[i + 3] == 0x01) { int type = configbyte[i + 4] & 0x1f; if (type == 7) { spsPos = i + 4; continue; } else if (type == 8) { ppsPos = i + 4; byte[] sps = new byte[ppsPos - spsPos - 4]; System.arraycopy(configbyte, spsPos, sps, 0, ppsPos - spsPos - 4); byte[] pps = new byte[configbyte.length - ppsPos]; System.arraycopy(configbyte, ppsPos, pps, 0, configbyte.length - ppsPos);
// txtUtils.saveTxt("PPS:" + ByteUtils.bytesToHexString(pps) + "==SPS:" + ByteUtils.bytesToHexString(sps));
H264Packetizer.setStreamParameters(pps, sps); break; } } } if (i >= configbyte.length - 4) { continue; } } /* byte[] keyframe = new byte[bufferInfo.size + configbyte.length - keyPos]; System.arraycopy(configbyte, keyPos, keyframe, 0, configbyte.length - keyPos);*/ byte[] keyframe = new byte[outData.length]; // System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length); //把编码后的视频帧从编码器输出缓冲区中拷贝出来 System.arraycopy(outData, 0, keyframe, 0, outData.length);
// txtUtils.saveTxt("keyframe:" + outData.length);
// outputStream.write(keyframe, 0, keyframe.length); int length = 0; int pos = 0; byte[] outByte; if (isFirst == true) { pos = 12; length = keyframe.length + pos; outByte = new byte[length]; isFirst = false; outByte[0] = 'm'; outByte[1] = 'd'; outByte[2] = 'a'; outByte[3] = 't'; } else { length = keyframe.length; outByte = new byte[length]; }
outByte[pos + 3] = (byte) ((outData.length - 4) & 0xff); outByte[pos + 2] = (byte) ((outData.length - 4) >> 8 & 0xff); outByte[pos + 1] = (byte) ((outData.length - 4) >> 16 & 0xff); outByte[pos + 0] = (byte) ((outData.length - 4) >> 24 & 0xff); System.arraycopy(keyframe, 4, outByte, pos + 4, keyframe.length - 4);
// LogUtils.i("H264:"+ ByteUtils.bytesToHexString(outByte)); if (out != null) {
// txtUtils.saveTxt("start:" + ByteUtils.bytesToHexString(outByte)); out.write(outByte, 0, outByte.length); } input = null; // if (os!=null) { // os.println(ByteUtils.bytesToHexString(Arrays.copyOfRange(keyframe, 0, keyframe.length))); // os.flush(); // } }
if (mediaCodec != null) { mediaCodec.releaseOutputBuffer(outputBufferIndex, false); outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC); } } } catch (Throwable t) { } } } else { try { Thread.sleep(10); } catch (InterruptedException e) { e.printStackTrace(); } } } } }); EncoderThread.start(); } public void setBitrate(int bitrate) { Bundle bitrateBundle = new Bundle(); bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate); mediaCodec.setParameters(bitrateBundle); } private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) { if (nv21 == null || nv12 == null) return; int framesize = width * height; int i = 0, j = 0; System.arraycopy(nv21, 0, nv12, 0, framesize); for (i = 0; i < framesize; i++) { nv12[i] = nv21[i]; } for (j = 0; j < framesize / 2; j += 2) { nv12[framesize + j - 1] = nv21[j + framesize]; } for (j = 0; j < framesize / 2; j += 2) { nv12[framesize + j] = nv21[j + framesize - 1]; } }
l want to set pixelFormat to UVCCamera.PIXEL_FORMAT_NV21
and use MediaCodec to transfer H264:
public AvcEncoder(int width, int height, int framerate, int bitrate, LocalSocket mSender, Context context) { this.context = context; try { out = mSender.getOutputStream(); } catch (IOException e) { e.printStackTrace(); } m_width = 1280; m_height = 720; m_framerate = 30; Log.i("mQuality", "2:" + framerate); MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", m_width, m_height); mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
// //配置编码器参数 // mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); //启动编码器 mediaCodec.start(); //创建保存编码后数据的文件 createfile();
// //把待编码的视频帧转换为YUV420格式 NV21ToNV12(input, yuv420sp, m_width, m_height); input = yuv420sp;
// txtUtils.saveTxt("outData:" + outData.length);
// txtUtils.saveTxt("KeyFram");
// txtUtils.saveTxt("PPS:" + ByteUtils.bytesToHexString(pps) + "==SPS:" + ByteUtils.bytesToHexString(sps));
// txtUtils.saveTxt("keyframe:" + outData.length);
// outputStream.write(keyframe, 0, keyframe.length); int length = 0; int pos = 0; byte[] outByte; if (isFirst == true) { pos = 12; length = keyframe.length + pos; outByte = new byte[length]; isFirst = false; outByte[0] = 'm'; outByte[1] = 'd'; outByte[2] = 'a'; outByte[3] = 't'; } else { length = keyframe.length; outByte = new byte[length]; }
// LogUtils.i("H264:"+ ByteUtils.bytesToHexString(outByte)); if (out != null) {
// txtUtils.saveTxt("start:" + ByteUtils.bytesToHexString(outByte)); out.write(outByte, 0, outByte.length); } input = null; // if (os!=null) { // os.println(ByteUtils.bytesToHexString(Arrays.copyOfRange(keyframe, 0, keyframe.length))); // os.flush(); // } }
This is error,Help me see what this is.