Closed yesong closed 9 years ago
Could you provide the code? Thank you
package com.tutuim.mobile;
import static org.bytedeco.javacpp.opencv_core.IPL_DEPTH_8U;
import java.io.File;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.FrameRecorder;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Dialog;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap.Config;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.PowerManager;
import android.provider.MediaStore;
import android.provider.MediaStore.Video;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.assist.ImageScaleType;
import com.nostra13.universalimageloader.core.display.RoundedBitmapDisplayer;
import com.tutuim.mobile.PhotoWallActivity.MakeVideoRunnable;
import com.tutuim.mobile.constant.Constant;
import com.tutuim.mobile.model.SavedFrames;
import com.tutuim.mobile.utils.RecoderUtil;
import com.tutuim.mobile.utils.RecorderParameters;
/**
* @author yesong
*
*/
public class FFmpegRecorderActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "FFmpegRecorderActivity";
private final static String LOG_TAG = CLASS_LABEL;
// Handler message codes for the recording state machine (see initHandler()).
public static final int INITIATE_RECORD = 3;
public static final int MINIMUM_TIME = 4;
public static final int RECORDER_STATE_SUCCESS = 5;
public static final int RECORD_OK = 6;
// Output video is square: RECODER_WIDTH x RECODER_WIDTH (see initVideoRecorder()).
public static final int RECODER_WIDTH = 480;
private PowerManager.WakeLock mWakeLock;
// Output file path in three forms: string, File and content Uri.
private String strVideoPath = null;
private File fileVideoPath = null;
private Uri uriVideoPath = null;
// rec: frames are currently being fed to the recorder (false while paused).
private boolean rec = false;
// recording: a take has been started and not yet saved/discarded.
boolean recording = false;
boolean isRecordingStarted = false;
boolean isFlashOn = false;
TextView txtTimer, txtRecordingSize;
ImageView cancelBtn,nextBtn,switchCameraIcon = null;
TextView flashIcon;
private volatile FFmpegFrameRecorder videoRecorder;
private boolean isPreviewOn = false;
private int currentResolution = Constant.RESOLUTION_MEDIUM_VALUE;
//private Camera mCamera;
// Camera preview size and physical screen size (defaults overwritten at runtime).
private int previewWidth = 480, screenWidth= 480;
private int previewHeight = 480, screenHeight = 800;
private int sampleRate = 44100;
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
private Camera cameraDevice;
private CameraView cameraView;
Parameters cameraParameters = null;
// Staging buffer for one rotated preview frame handed to the recorder.
private IplImage yuvIplImage = null;
int defaultCameraId = -1, defaultScreenResolution = -1 , cameraSelection = 0;
private Dialog dialog = null;
RelativeLayout topLayout = null;
// Timing bookkeeping for pause/resume, all in milliseconds.
long firstTime = 0;
long startPauseTime = 0;
long totalPauseTime = 0;
long pausedTime = 0;
long stopPauseTime = 0;
long totalTime = 0;
private int frameRate = 15;
// Maximum and minimum take length, in milliseconds.
private int recordingTime = 8000;
private int recordingMinimumTime = 2000;
boolean recordFinish = false;
private Dialog creatingProgress;
// Audio clock published by AudioRecordRunnable; used to timestamp video frames.
private volatile long mAudioTimestamp = 0L;
// Zero-length arrays used purely as lock objects.
private final int[] mVideoRecordLock = new int[0];
private final int[] mAudioRecordLock = new int[0];
private long mLastAudioTimestamp = 0L;
private volatile long mAudioTimeRecorded;
// Duration of one video frame in microseconds (set in initVideoRecorder()).
private long frameTime = 0L;
private SavedFrames lastSavedframe = new SavedFrames(null,0L);
private long mVideoTimestamp = 0L;
private boolean isRecordingSaved = false;
private boolean isFinalizing = false;
private ProgressBar progressView;
private boolean isBufferChange;
private boolean hasPressRecoderMenu;
private String imagePath = null;
private RecorderState currentRecorderState = RecorderState.PRESS;
// First captured frame of the current take (stored; not read elsewhere in this file).
private byte[] firstData = null;
private boolean isStopAudio = false;
// UI controls.
private ImageView mOkVideoImageView;
private ImageView mDeleteVideoImageView;
private ImageView mSwitchCameraImageView;
private ImageView mChoosePic2VideoImageView;
private ImageView mRecoderMenuImageView;
private ImageView mStopRecoderImageView;
private boolean mProgressBarHasStart = false;
private boolean mhastopPreview = false;
private Handler mHandler;
private boolean isFirstStart = true;
private boolean hasInitCamera = false;
private boolean hasFlushFeature = false;
// Path of the newest image in the device gallery (thumbnail for the picker button).
private String mFirstImgPath;//
private ImageLoader imageLoader;
// Rounded-corner display options used for the gallery thumbnail.
DisplayImageOptions options = new DisplayImageOptions.Builder()
.imageScaleType(ImageScaleType.EXACTLY)
.bitmapConfig(Config.RGB_565).considerExifParams(true)
.cacheInMemory(false)
.displayer(new RoundedBitmapDisplayer(10)).build();
private void initHandler(){
    // Handler driving the record state machine; all messages arrive on the UI thread.
    mHandler = new Handler(){
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            switch (msg.what) {
            case INITIATE_RECORD:
                if(!recording)
                    // First press: start a brand-new take.
                    initiateRecording(true);
                else{
                    // Resuming after a pause: add the paused interval (minus one
                    // frame duration) to the accumulated paused time.
                    stopPauseTime = System.currentTimeMillis();
                    // BUG FIX: the original `((long) (1.0/(double)frameRate)*1000)`
                    // cast 1/frameRate to long (always 0) before multiplying, so no
                    // frame duration was ever subtracted.
                    totalPauseTime = stopPauseTime - startPauseTime - (long) (1000.0 / frameRate);
                    pausedTime += totalPauseTime;
                }
                rec = true;
                progressBarStart();
                break;
            case MINIMUM_TIME:
                // Pause: stop feeding frames and remember when the pause began.
                rec = false;
                startPauseTime = System.currentTimeMillis();
                if(totalTime >= recordingMinimumTime){
                    currentRecorderState = RecorderState.SUCCESS;
                }
                break;
            case RECORDER_STATE_SUCCESS:
                // Minimum length reached: enable the "save" affordance.
                currentRecorderState = RecorderState.SUCCESS;
                mOkVideoImageView.setImageResource(R.drawable.save_video_img);
                break;
            case RECORD_OK:
                // Maximum length reached: finalize automatically.
                saveRecording();
                break;
            default:
                break;
            }
        }
    };
}
protected void progressBarStart() {
    // Kick off the periodic (100 ms) progress-bar updater exactly once.
    if (mProgressBarHasStart) {
        return;
    }
    mProgressBarHasStart = true;
    final Runnable updater = new Runnable() {
        @Override
        public void run() {
            if (recording) {
                // Progress is the fraction of the maximum recording time elapsed.
                int percent = (int) (totalTime * 100 / recordingTime);
                progressView.setProgress(percent);
            }
            // Re-arm unconditionally so updates resume after a pause.
            mHandler.postDelayed(this, 100);
        }
    };
    mHandler.postDelayed(updater, 100);
}
// Load the native helper library that backs checkNeonFromJNI().
static {
System.loadLibrary("checkneon");
}
// Implemented in libcheckneon; presumably reports CPU NEON support — TODO confirm
// against the native source (the return value is not used in this file).
public native static int checkNeonFromJNI();
// True once the recorder and audio thread have been initialised (set in initCameraLayout()).
private boolean initSuccess = false;
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_recorder);
    // Keep the screen bright while the recorder UI is in the foreground.
    PowerManager powerManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
    mWakeLock = powerManager.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
    mWakeLock.acquire();
    imageLoader = ImageLoader.getInstance();
    // Cache the real screen dimensions; the preview view is sized from these later.
    DisplayMetrics metrics = new DisplayMetrics();
    getWindowManager().getDefaultDisplay().getMetrics(metrics);
    screenWidth = metrics.widthPixels;
    screenHeight = metrics.heightPixels;
    initHandler();
    initLayout();
}
@Override
protected void onResume() {
    super.onResume();
    // Rebuild the camera preview if onPause() tore it down.
    if (mhastopPreview) {
        initCameraLayout();
    }
    // Re-acquire the wake lock that onPause() released.
    if (mWakeLock == null) {
        PowerManager powerManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
        mWakeLock = powerManager.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
        mWakeLock.acquire();
    }
}
@Override
protected void onPause() {
super.onPause();
// Tear down the live preview and release the camera so other apps can use it.
if (cameraView != null) {
cameraView.stopPreview();
if(cameraDevice != null){
cameraDevice.setPreviewCallback(null);
cameraDevice.release();
}
cameraDevice = null;
//mCamera = null;
cameraView = null;
}
// Remove the preview surface from the layout; onResume() rebuilds it.
if(topLayout != null && topLayout.getChildCount() > 0){
topLayout.removeAllViews();
}
// Drop the wake lock while in the background.
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
// Tells onResume() that the preview must be re-initialised.
mhastopPreview = true;
}
@Override
protected void onDestroy() {
super.onDestroy();
// Stop recording, shut down the audio thread, and free the FFmpeg recorder.
recording = false;
runAudioThread = false;
releaseResources();
// Release the camera (mirrors onPause(), in case onPause() was skipped).
if (cameraView != null) {
cameraView.stopPreview();
if(cameraDevice != null){
cameraDevice.setPreviewCallback(null);
cameraDevice.release();
}
cameraDevice = null;
}
firstData = null;
//mCamera = null;
cameraDevice = null;
cameraView = null;
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
// Delete the output file on teardown.
// NOTE(review): this also runs after a successful save (returnToCaller() does
// not finish() this activity) — confirm the preview activity does not still
// need the file when this activity is destroyed.
if(strVideoPath != null){
File file = new File(strVideoPath);
if(file.exists()){
file.delete();
}
}
}
private void initLayout()
{
// Wires up every control from activity_recorder and sets initial visibility,
// then starts camera initialisation and loads the gallery thumbnail.
progressView = (ProgressBar) findViewById(R.id.recorder_progress);
flashIcon = (TextView)findViewById(R.id.recoder_switch_flush_iv);
switchCameraIcon = (ImageView)findViewById(R.id.recoder_switch_carmera_iv);
mOkVideoImageView = (ImageView)findViewById(R.id.ok_video_iv);
mDeleteVideoImageView =(ImageView)findViewById(R.id.delete_video_iv);
mSwitchCameraImageView =(ImageView)findViewById(R.id.switch_camera_iv);
mChoosePic2VideoImageView=(ImageView)findViewById(R.id.choose_pic_2_video_iv);
mRecoderMenuImageView = (ImageView) findViewById(R.id.recoder_menu_iv);
mStopRecoderImageView = (ImageView) findViewById(R.id.stop_recoder_menu_iv);
mRecoderMenuImageView.setOnClickListener(this);
mChoosePic2VideoImageView.setOnClickListener(this);
mSwitchCameraImageView.setOnClickListener(this);
mDeleteVideoImageView.setOnClickListener(this);
mOkVideoImageView.setOnClickListener(this);
mStopRecoderImageView.setOnClickListener(this);
switchCameraIcon.setOnClickListener(FFmpegRecorderActivity.this);
// Save/delete buttons only appear once a take has been paused (see stopAction()).
mOkVideoImageView.setVisibility(View.GONE);
mDeleteVideoImageView.setVisibility(View.GONE);
this.findViewById(R.id.recoder_close_iv).setOnClickListener(this);
topLayout = (RelativeLayout) findViewById(R.id.recorder_surface_parent);
flashIcon.setOnClickListener(this);
// Only offer camera switching / flash when the hardware features exist.
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) {
switchCameraIcon.setVisibility(View.VISIBLE);
}
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
hasFlushFeature = true;
}
initCameraLayout();
skipVideoProgress();
getLatestImagePath();
showLastImage();
}
private void showLastImage() {
    // Show the newest gallery image (if one was found) on the picker button.
    if (mFirstImgPath == null) {
        return;
    }
    imageLoader.displayImage("file://" + mFirstImgPath, mChoosePic2VideoImageView, options);
}
private void getLatestImagePath() {
    // Queries MediaStore for jpg/jpeg/png images and stores the path of the most
    // recently modified one in mFirstImgPath (used by showLastImage()).
    Uri mImageUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
    String key_MIME_TYPE = MediaStore.Images.Media.MIME_TYPE;
    String key_DATA = MediaStore.Images.Media.DATA;
    ContentResolver mContentResolver = getContentResolver();
    Cursor cursor = mContentResolver.query(mImageUri,
            new String[] { key_DATA }, key_MIME_TYPE + "=? or "
                    + key_MIME_TYPE + "=? or " + key_MIME_TYPE + "=?",
            new String[] { "image/jpg", "image/jpeg", "image/png" },
            MediaStore.Images.Media.DATE_MODIFIED);
    if (cursor != null) {
        try {
            // Results are sorted ascending by DATE_MODIFIED, so the last row is newest.
            if (cursor.moveToLast()) {
                // CLEANUP: the original wrapped this single read in a pointless
                // while(true){...break;} loop.
                mFirstImgPath = cursor.getString(0);
            }
        } finally {
            // BUG FIX: close the cursor even if getString() throws.
            cursor.close();
        }
    }
}
private void initCameraLayout() {
// Opens the camera off the UI thread, then attaches the preview view and
// configures the flash UI once the camera is ready.
new AsyncTask<String, Integer, Boolean>(){
protected void onPreExecute() {
// Drop the previous preview view before re-initialising (e.g. camera switch).
if(topLayout != null && topLayout.getChildCount() > 0 && !isFirstStart){
topLayout.removeAllViews();
}
isFirstStart = false;
};
@Override
protected Boolean doInBackground(String... params) {
boolean result = setCamera();
// First run only: build the FFmpeg recorder and start the audio thread.
if(!initSuccess){
initVideoRecorder();
startRecording();
initSuccess = true;
}
return result;
}
@Override
protected void onPostExecute(Boolean result) {
if(!result || cameraDevice == null){
finish();
return;
}
cameraView = new CameraView(FFmpegRecorderActivity.this, cameraDevice);
handleSurfaceChanged();
// Size the preview view to full screen width, preserving the camera aspect
// ratio; width/height are inverted because the preview is rotated 90 degrees.
RelativeLayout.LayoutParams layoutParam1 = new RelativeLayout.LayoutParams(screenWidth,
(int) (screenWidth*(previewWidth/(previewHeight*1f))));
layoutParam1.addRule(RelativeLayout.ALIGN_PARENT_TOP, RelativeLayout.TRUE);
topLayout.addView(cameraView, layoutParam1);
progressView.setVisibility(View.VISIBLE);
// The front camera has no torch, so hide the flash toggle for it.
if(hasFlushFeature){
if(cameraSelection == CameraInfo.CAMERA_FACING_FRONT){
flashIcon.setVisibility(View.GONE);
} else {
flashIcon.setVisibility(View.VISIBLE);
flashIcon.setText(R.string.camera_open_tip);
}
}
hasInitCamera = true;
}
}.execute();
}
@SuppressLint("NewApi")
private boolean setCamera()
{
    // Opens the camera matching cameraSelection (front/back), falling back to the
    // default camera when no id was resolved. Returns false on any failure.
    try
    {
        if(Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO)
        {
            // Pick the last camera whose facing matches the requested selection.
            int numberOfCameras = Camera.getNumberOfCameras();
            CameraInfo cameraInfo = new CameraInfo();
            for (int i = 0; i < numberOfCameras; i++) {
                Camera.getCameraInfo(i, cameraInfo);
                if (cameraInfo.facing == cameraSelection) {
                    defaultCameraId = i;
                }
            }
        }
        stopPreview();
        // Release any previously held camera before opening a new one.
        if(cameraDevice != null){
            cameraDevice.release();
            cameraDevice = null;
        }
        if(defaultCameraId >= 0)
            cameraDevice = Camera.open(defaultCameraId);
        else
            cameraDevice = Camera.open();
        Log.i("yesongsong", "Camera.open.....................");
    }
    catch(Exception e)
    {
        // BUG FIX: the failure was silently swallowed; log it so open errors
        // (camera in use, missing permission) are diagnosable.
        Log.e(LOG_TAG, "setCamera failed", e);
        return false;
    }
    return true;
}
private void initVideoRecorder() {
// Builds a fresh FFmpegFrameRecorder writing a square (480x480) video, plus the
// audio-capture runnable/thread that will feed it.
strVideoPath = RecoderUtil.createFinalPath(this);
RecorderParameters recorderParameters = RecoderUtil.getRecorderParameter(currentResolution);
sampleRate = recorderParameters.getAudioSamplingRate();
frameRate = recorderParameters.getVideoFrameRate();
// Duration of one video frame in microseconds.
frameTime = (1000000L / frameRate);
fileVideoPath = new File(strVideoPath);
Log.i("yesongsong", "strVideoPath = "+ strVideoPath);
// Square output: width == height == RECODER_WIDTH, 1 audio channel.
videoRecorder = new FFmpegFrameRecorder(strVideoPath, RECODER_WIDTH, RECODER_WIDTH, 1);
videoRecorder.setFormat(recorderParameters.getVideoOutputFormat());
videoRecorder.setSampleRate(recorderParameters.getAudioSamplingRate());
videoRecorder.setFrameRate(recorderParameters.getVideoFrameRate());
videoRecorder.setVideoCodec(recorderParameters.getVideoCodec());
videoRecorder.setVideoQuality(recorderParameters.getVideoQuality());
// NOTE(review): audio quality is set from getVideoQuality() — confirm intentional.
videoRecorder.setAudioQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioCodec(recorderParameters.getAudioCodec());
videoRecorder.setVideoBitrate(recorderParameters.getVideoBitrate());
videoRecorder.setAudioBitrate(recorderParameters.getAudioBitrate());
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
}
public void startRecording() {
// Starts the FFmpeg recorder and the audio capture thread together.
try {
runAudioThread = true;
isStopAudio = false;
videoRecorder.start();
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
// Captures microphone PCM on a dedicated thread and feeds it to videoRecorder,
// while publishing an audio-clock timestamp (mAudioTimestamp) that the video
// path (onPreviewFrame) uses for A/V sync.
class AudioRecordRunnable implements Runnable {
int bufferSize;
short[] audioData;
int bufferReadResult;
private final AudioRecord audioRecord;
public volatile boolean isInitialized;
// Total number of samples recorded so far; drives the audio timestamp.
private int mCount =0;
private AudioRecordRunnable()
{
bufferSize = AudioRecord.getMinBufferSize(sampleRate,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
audioData = new short[bufferSize];
}
// Writes one buffer of samples into the recorder under the audio lock.
private void record(ShortBuffer shortBuffer)
{
try
{
synchronized (mAudioRecordLock)
{
if (videoRecorder != null && shortBuffer != null)
{
this.mCount += shortBuffer.limit();
//videoRecorder.record(0,new Buffer[] {shortBuffer});
videoRecorder.record(new Buffer[] {shortBuffer});
}
return;
}
}
// NOTE(review): recorder errors are silently dropped here; consider logging.
catch (FrameRecorder.Exception localException){}
}
// Converts the sample count into a timestamp and records when it was taken,
// so the video side can extrapolate between audio updates.
private void updateTimestamp()
{
if (videoRecorder != null)
{
int i = RecoderUtil.getTimeStampInNsFromSampleCounted(this.mCount);
if (mAudioTimestamp != i)
{
mAudioTimestamp = i;
mAudioTimeRecorded = System.nanoTime();
}
}
}
public void run()
{
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
this.isInitialized = false;
if(audioRecord != null)
{
// Poll (100 ms steps) until the AudioRecord leaves STATE_UNINITIALIZED (0).
while (this.audioRecord.getState() == 0)
{
try
{
Thread.sleep(100L);
}
catch (InterruptedException localInterruptedException)
{
}
}
this.isInitialized = true;
this.audioRecord.startRecording();
// Keep reading while recording is active — or while video is still ahead of
// audio — until the maximum recording time is reached.
while (((runAudioThread) || (mVideoTimestamp > mAudioTimestamp)) && (mAudioTimestamp < (1000 * recordingTime)))
{
if(isStopAudio){
break;
}
updateTimestamp();
bufferReadResult = this.audioRecord.read(audioData, 0, audioData.length);
// Only forward audio while frames are being recorded (or video is ahead).
if ((bufferReadResult > 0) && ((recording && rec) || (mVideoTimestamp > mAudioTimestamp))){
record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
}
}
this.audioRecord.stop();
this.audioRecord.release();
}
}
}
// True until the first preview frame of the current take has been captured.
private boolean isFirstFrame = true;
// Surface that displays the camera preview and receives raw preview frames.
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
public CameraView(Context context, Camera camera) {
super(context);
//mCamera = camera;
// NOTE(review): the `camera` argument is ignored; the outer cameraDevice is used.
cameraParameters = cameraDevice.getParameters();
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
// Required on pre-3.0 devices for camera preview surfaces (no-op on later APIs).
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
cameraDevice.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
// Bind the camera output to the newly created surface.
stopPreview();
if(cameraDevice != null){
cameraDevice.setPreviewDisplay(holder);
}
} catch (IOException exception) {
// Binding failed: give the camera back to the system.
cameraDevice.release();
cameraDevice = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// Surface geometry changed: re-apply camera parameters and restart the preview.
if (isPreviewOn && cameraDevice != null){
cameraDevice.stopPreview();
}
handleSurfaceChanged();
startPreview();
// Trigger a one-shot autofocus once the preview is running.
if(cameraDevice!= null){
cameraDevice.autoFocus(null);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    // Detach this view from the surface and stop receiving preview frames.
    try {
        // BUG FIX: the original called mHolder.addCallback(null), which registers
        // a null callback instead of unregistering this view; removeCallback(this)
        // is the intended call.
        mHolder.removeCallback(this);
        cameraDevice.setPreviewCallback(null);
        //cameraDevice.stopPreview();
        //cameraDevice.release();
        //cameraDevice = null;
    } catch (RuntimeException e) {
        // The camera may already have been released; nothing left to clean up.
    }
}
public void startPreview() {
    // Start the camera preview unless it is already running.
    if (isPreviewOn || cameraDevice == null) {
        return;
    }
    isPreviewOn = true;
    cameraDevice.startPreview();
}
public void stopPreview() {
    // Stop the preview only when one is actually running.
    if (!isPreviewOn || cameraDevice == null) {
        return;
    }
    isPreviewOn = false;
    cameraDevice.stopPreview();
}
private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight)
{
    // Rotates a YUV420SP (NV21) frame 90 degrees clockwise.
    byte[] rotated = new byte[imageWidth * imageHeight * 3 / 2];
    // Y plane: walk each source column bottom-to-top, emitting output rows.
    int out = 0;
    for (int col = 0; col < imageWidth; col++) {
        for (int row = imageHeight - 1; row >= 0; row--) {
            rotated[out++] = data[row * imageWidth + col];
        }
    }
    // Interleaved chroma plane: filled from the end of the buffer backwards,
    // one two-byte chroma pair at a time.
    out = imageWidth * imageHeight * 3 / 2 - 1;
    int chromaBase = imageWidth * imageHeight;
    for (int col = imageWidth - 1; col > 0; col -= 2) {
        for (int row = 0; row < imageHeight / 2; row++) {
            rotated[out--] = data[chromaBase + row * imageWidth + col];
            rotated[out--] = data[chromaBase + row * imageWidth + (col - 1)];
        }
    }
    return rotated;
}
private byte[] rotateYUV420Degree180(byte[] data, int imageWidth, int imageHeight)
{
    // Rotates an NV21 frame 180 degrees: the Y plane is reversed byte-by-byte,
    // the interleaved chroma plane pair-by-pair (keeping each pair's byte order).
    byte [] yuv = new byte[imageWidth*imageHeight*3/2];
    int count = 0;
    // Y plane, reversed.
    for (int i = imageWidth * imageHeight - 1; i >= 0; i--) {
        yuv[count] = data[i];
        count++;
    }
    // Chroma plane, reversed in two-byte steps.
    // CLEANUP: removed a dead `i = imageWidth*imageHeight*3/2-1;` assignment that
    // was immediately overwritten by the for-loop initialiser.
    for (int i = imageWidth * imageHeight * 3 / 2 - 1; i >= imageWidth
            * imageHeight; i -= 2) {
        yuv[count++] = data[i - 1];
        yuv[count++] = data[i];
    }
    return yuv;
}
private byte[] rotateYUV420Degree270(byte[] data, int imageWidth, int imageHeight)
{
// Rotates an NV21 frame 270 degrees, implemented as a 90-degree-style pass
// followed by a 180-degree rotation.
// NOTE(review): the 180-degree pass is called with the original width/height
// even though the first pass swaps the axes — confirm the output orientation
// is correct for the front camera.
byte [] yuv = new byte[imageWidth*imageHeight*3/2];
int nWidth = 0, nHeight = 0;
int wh = 0;
int uvHeight = 0;
// Vestige of a cached-dimension check; with the locals initialised to 0 this
// branch is effectively always taken.
if(imageWidth != nWidth || imageHeight != nHeight)
{
nWidth = imageWidth;
nHeight = imageHeight;
wh = imageWidth * imageHeight;
uvHeight = imageHeight >> 1;//uvHeight = height / 2
}
// Rotate the Y plane (column-major read of the source).
int k = 0;
for(int i = 0; i < imageWidth; i++) {
int nPos = 0;
for(int j = 0; j < imageHeight; j++) {
yuv[k] = data[nPos + i];
k++;
nPos += imageWidth;
}
}
// Rotate the interleaved chroma plane, keeping each two-byte pair together.
for(int i = 0; i < imageWidth; i+=2){
int nPos = wh;
for(int j = 0; j < uvHeight; j++) {
yuv[k] = data[nPos + i];
yuv[k + 1] = data[nPos + i + 1];
k += 2;
nPos += imageWidth;
}
}
return rotateYUV420Degree180(yuv,imageWidth,imageHeight);
}
// Center-crops an NV21 frame vertically from imageH to newImageH rows, copying
// the Y plane first and then the interleaved chroma plane.
public byte[] cropYUV420(byte[] data,int imageW,int imageH,int newImageH){
int cropH;
int i,j,count,tmp;
byte[] yuv = new byte[imageW*newImageH*3/2];
// Rows to skip at the top (and bottom) of the Y plane.
cropH = (imageH - newImageH)/2;
count = 0;
for(j=cropH;j<cropH+newImageH;j++){
for(i=0;i<imageW;i++){
yuv[count++] = data[j*imageW+i];
}
}
//Cr Cb
// The chroma plane starts at row imageH of the byte array; skip cropH/2 rows.
tmp = imageH+cropH/2;
for(j=tmp;j<tmp + newImageH/2;j++){
for(i=0;i<imageW;i++){
yuv[count++] = data[j*imageW+i];
}
}
return yuv;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // Timestamps this frame against the audio clock (for A/V sync), records the
    // PREVIOUS frame while holding the video lock, then rotates and stashes the
    // current frame as lastSavedframe for the next callback.
    long frameTimeStamp = 0L;
    if(mAudioTimestamp == 0L && firstTime > 0L)
        // No audio yet: derive the timestamp from wall-clock time.
        frameTimeStamp = 1000L * (System.currentTimeMillis() -firstTime);
    else if (mLastAudioTimestamp == mAudioTimestamp)
        // Audio clock has not advanced: step by one frame duration.
        frameTimeStamp = mAudioTimestamp + frameTime;
    else
    {
        // Extrapolate from the moment the audio timestamp was last updated.
        long l2 = (System.nanoTime() - mAudioTimeRecorded) / 1000L;
        frameTimeStamp = l2 + mAudioTimestamp;
        mLastAudioTimestamp = mAudioTimestamp;
    }
    synchronized (mVideoRecordLock) {
        if (recording && rec && lastSavedframe != null && lastSavedframe.getFrameBytesData() != null && yuvIplImage != null)
        {
            if(isFirstFrame){
                isFirstFrame = false;
                firstData = data;
            }
            // BUG FIX: the original `((long) (1.0/(double)frameRate)*1000)` cast
            // 1/frameRate to long (always 0); subtract one real frame duration.
            totalTime = System.currentTimeMillis() - firstTime - pausedTime - (long) (1000.0 / frameRate);
            if(totalTime >= recordingTime){
                // Maximum length reached: ask the UI thread to finalise.
                mHandler.sendEmptyMessage(RECORD_OK);
            }
            if(totalTime >= recordingMinimumTime){
                mHandler.sendEmptyMessage(RECORDER_STATE_SUCCESS);
            }
            // Keep the video clock monotonic and never behind the saved frame.
            mVideoTimestamp += frameTime;
            if(lastSavedframe.getTimeStamp() > mVideoTimestamp)
                mVideoTimestamp = lastSavedframe.getTimeStamp();
            try {
                if(videoRecorder != null){
                    yuvIplImage.getByteBuffer().put(lastSavedframe.getFrameBytesData());
                    videoRecorder.setTimestamp(lastSavedframe.getTimeStamp());
                    videoRecorder.record(yuvIplImage);
                }
            } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                Log.i("recorder", "录制错误"+e.getMessage());
                e.printStackTrace();
            }
        }
        // PERF FIX: the original always computed the 90-degree rotation and then
        // discarded it for the front camera; rotate once, by the needed amount.
        byte[] tempData;
        if(cameraSelection == 1)
            tempData = rotateYUV420Degree270(data, previewWidth, previewHeight);
        else
            tempData = rotateYUV420Degree90(data, previewWidth, previewHeight);
        lastSavedframe = new SavedFrames(tempData,frameTimeStamp);
    }
}
}
public void stopPreview() {
    // Activity-level preview stop, used before re-opening the camera in setCamera().
    if (!isPreviewOn || cameraDevice == null) {
        return;
    }
    isPreviewOn = false;
    cameraDevice.stopPreview();
}
private void handleSurfaceChanged()
{
// Chooses a preview size, configures focus mode and display orientation, and
// allocates the YUV staging image used when feeding frames to the recorder.
if(cameraDevice == null){
//showToast(this, "无法连接到相机");
finish();
return;
}
// Prefer 640x480; otherwise fall back to the middle of the sorted size list.
List<Camera.Size> resolutionList = RecoderUtil.getResolutionList(cameraDevice);
if(resolutionList != null && resolutionList.size() > 0){
Collections.sort(resolutionList, new RecoderUtil.ResolutionComparator());
Camera.Size previewSize = null;
if(defaultScreenResolution == -1){
boolean hasSize = false;
for(int i = 0;i<resolutionList.size();i++){
Size size = resolutionList.get(i);
if(size != null && size.width==640 && size.height==480){
previewSize = size;
hasSize = true;
break;
}
}
if(!hasSize){
int mediumResolution = resolutionList.size()/2;
if(mediumResolution >= resolutionList.size())
mediumResolution = resolutionList.size() - 1;
previewSize = resolutionList.get(mediumResolution);
}
}else{
// A specific resolution index was requested; clamp it to the list.
if(defaultScreenResolution >= resolutionList.size())
defaultScreenResolution = resolutionList.size() - 1;
previewSize = resolutionList.get(defaultScreenResolution);
}
if(previewSize != null ){
previewWidth = previewSize.width;
previewHeight = previewSize.height;
cameraParameters.setPreviewSize(previewWidth, previewHeight);
if(videoRecorder != null)
{
videoRecorder.setImageWidth(previewWidth);
videoRecorder.setImageHeight(previewHeight);
}
}
}
cameraParameters.setPreviewFrameRate(frameRate);
// Staging image; width/height are swapped because frames are rotated 90/270
// degrees before being recorded.
// NOTE(review): created with 2 channels at 8-bit depth — confirm this matches
// the pixel format videoRecorder.record() expects for these frames.
yuvIplImage = IplImage.create(previewHeight, previewWidth,IPL_DEPTH_8U, 2);
if(Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO)
{
cameraDevice.setDisplayOrientation(RecoderUtil.determineDisplayOrientation(FFmpegRecorderActivity.this, defaultCameraId));
// Pick the best available focus mode, with device-specific exceptions that
// behave better with continuous-picture focus.
List<String> focusModes = cameraParameters.getSupportedFocusModes();
if(focusModes != null){
if (((Build.MODEL.startsWith("GT-I950"))
|| (Build.MODEL.endsWith("SCH-I959"))
|| (Build.MODEL.endsWith("MEIZU MX3")))&&focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)){
cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
else if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)){
cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
else{
cameraParameters.setFocusMode(focusModes.get(0));
// cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
}
}
}
else{
// Pre-Gingerbread APIs: assume the standard 90-degree portrait offset.
cameraDevice.setDisplayOrientation(90);
}
cameraDevice.setParameters(cameraParameters);
}
@Override
public void onClick(View v) {
// Central click dispatcher for every recorder control.
// Ignore all input until the camera has finished initialising.
if (!hasInitCamera) {
return;
}
if (v.getId() == R.id.recoder_switch_flush_iv) {
// Toggle the torch on/off (only when the device has a flash).
if (!hasFlushFeature) {
return;
}
if (isFlashOn) {
isFlashOn = false;
cameraParameters.setFlashMode(Parameters.FLASH_MODE_OFF);
flashIcon.setText(R.string.camera_open_tip);
} else {
isFlashOn = true;
cameraParameters.setFlashMode(Parameters.FLASH_MODE_TORCH);
flashIcon.setText(R.string.camera_close_tip);
}
cameraDevice.setParameters(cameraParameters);
} else if (v.getId() == R.id.recoder_switch_carmera_iv) {
// Flip between front and back camera and rebuild the preview.
if (hasInitCamera) {
hasInitCamera = false;
cameraSelection = ((cameraSelection == CameraInfo.CAMERA_FACING_BACK) ? CameraInfo.CAMERA_FACING_FRONT
: CameraInfo.CAMERA_FACING_BACK);
initCameraLayout();
// The front camera has no torch, so hide the flash toggle for it.
if (hasFlushFeature) {
if (cameraSelection == CameraInfo.CAMERA_FACING_FRONT){
flashIcon.setVisibility(View.GONE);
}
else{
flashIcon.setVisibility(View.VISIBLE);
}
}
}
} else if (v.getId() == R.id.recoder_close_iv) {
backAction();
} else if (v.getId() == R.id.recoder_menu_iv) {
// Record button: start (or resume) recording after a 300 ms delay and swap
// the button row into its "recording" configuration.
hasPressRecoderMenu = true;
if (!recordFinish) {
if (totalTime < recordingTime) {
mHandler.removeMessages(INITIATE_RECORD);
mHandler.removeMessages(MINIMUM_TIME);
mHandler.sendEmptyMessageDelayed(INITIATE_RECORD, 300);
v.setVisibility(View.GONE);
mStopRecoderImageView.setVisibility(View.VISIBLE);
mChoosePic2VideoImageView.setVisibility(View.GONE);
mSwitchCameraImageView.setVisibility(View.GONE);
}
}
} else if (v.getId() == R.id.stop_recoder_menu_iv) {
// Pause the current take.
stopAction();
} else if (v.getId() == R.id.choose_pic_2_video_iv) {// compose a video from chosen photos
Intent intent = new Intent(this, ChooImg2VideoActivity.class);
startActivity(intent);
overridePendingTransition(R.anim.main_translatex100to0,
R.anim.main_translatex0tof100);
// finish();
} else if (v.getId() == R.id.switch_camera_iv) {
// Intent intent = new Intent(this, TakePhotoActivity.class);
// startActivity(intent);
// overridePendingTransition(R.anim.main_translatex100to0, R.anim.main_translatex0tof100);
backAction();
} else if (v.getId() == R.id.delete_video_iv) {
// Discard the current take: stop audio/recorder, delete the file, reset UI.
// stopAction();
deleteAction();
isStopAudio = true;
runAudioThread = false;
recording = false;
releaseResources();
if (fileVideoPath != null && fileVideoPath.exists()) {
fileVideoPath.delete();
}
delay2InitStatus();
} else if (v.getId() == R.id.ok_video_iv) {
// Save, but only once the minimum recording length has been reached.
if (totalTime >= recordingMinimumTime) {
// stopAction();
saveRecording();
}
}
}
private void deleteAction() {
    // Cancel any pending record-start/pause messages; if frames were actively
    // being recorded, post MINIMUM_TIME now to flip into the paused state.
    mHandler.removeMessages(INITIATE_RECORD);
    mHandler.removeMessages(MINIMUM_TIME);
    if (rec) {
        mHandler.sendEmptyMessage(MINIMUM_TIME);
    }
    // Restore the idle button row: record/switch/picker visible, the rest hidden.
    mStopRecoderImageView.setVisibility(View.GONE);
    mRecoderMenuImageView.setVisibility(View.VISIBLE);
    mDeleteVideoImageView.setVisibility(View.GONE);
    mOkVideoImageView.setVisibility(View.GONE);
    mSwitchCameraImageView.setVisibility(View.VISIBLE);
    mChoosePic2VideoImageView.setVisibility(View.VISIBLE);
}
private void backAction() {
    // Close the recorder with the slide-out transition.
    finish();
    overridePendingTransition(R.anim.main_translatexf100to0, R.anim.main_translatex0to100);
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    // Intercept BACK so we can play the custom exit transition.
    if (keyCode != KeyEvent.KEYCODE_BACK) {
        return super.onKeyDown(keyCode, event);
    }
    backAction();
    return true;
}
private void delay2InitStatus() {
    // Give in-flight callbacks 100 ms to settle before resetting recorder state.
    Runnable reset = new Runnable() {
        @Override
        public void run() {
            resetStatus();
        }
    };
    mHandler.postDelayed(reset, 100);
}
private void resetStatus() {
// Returns every counter, flag and view to its pre-recording state and prepares
// a brand-new recorder for the next take.
mSwitchCameraImageView.setVisibility(View.VISIBLE);
mChoosePic2VideoImageView.setVisibility(View.VISIBLE);
mDeleteVideoImageView.setVisibility(View.GONE);
mOkVideoImageView.setVisibility(View.GONE);
recordFinish = false;
totalTime = 0;
progressView.setProgress(0);
hasPressRecoderMenu = false;
// Reset all pause/resume time bookkeeping.
firstTime = 0;
startPauseTime = 0;
totalPauseTime = 0;
pausedTime = 0;
stopPauseTime = 0;
currentRecorderState = RecorderState.PRESS;
lastSavedframe = new SavedFrames(null,0L);
firstData = null;
// Reset the A/V clocks.
mAudioTimestamp = 0L;
mVideoTimestamp = 0L;
mLastAudioTimestamp =0L;
mOkVideoImageView.setImageResource(R.drawable.ok_video_click);
// Fresh recorder + audio thread for the next recording session.
initVideoRecorder();
startRecording();
isRecordingSaved = false;
isFinalizing = false;
isFirstFrame = true;
skipVideoProgress();
}
private void stopAction() {
    // Pause the take: cancel pending handler messages and, if frames were
    // actively being recorded, post MINIMUM_TIME to enter the paused state.
    mHandler.removeMessages(INITIATE_RECORD);
    mHandler.removeMessages(MINIMUM_TIME);
    if (rec) {
        mHandler.sendEmptyMessage(MINIMUM_TIME);
    }
    // Paused layout: record button back, delete/save now available.
    mStopRecoderImageView.setVisibility(View.GONE);
    mRecoderMenuImageView.setVisibility(View.VISIBLE);
    mDeleteVideoImageView.setVisibility(View.VISIBLE);
    mOkVideoImageView.setVisibility(View.VISIBLE);
}
public void videoTheEnd()
{
// Finalises the recorder and hands the finished file to the preview screen.
releaseResources();
returnToCaller();
}
@Override
public void onBackPressed() {
super.onBackPressed();
// Leaving via BACK abandons the take: delete the partial output file.
if(fileVideoPath != null && fileVideoPath.exists()){
fileVideoPath.delete();
}
}
private void returnToCaller()
{
    // Hands the finished recording off to the preview activity, passing the file
    // path, the recorded duration and the "recorded video" source id.
    File file = new File(strVideoPath);
    if(file.exists()){
        // CLEANUP: removed a pointless `file = null;` — the reference was never
        // used again after the exists() check.
        Intent intent = new Intent(FFmpegRecorderActivity.this, FFmpegPreviewActivity.class);
        intent.putExtra("videoPath", strVideoPath);
        intent.putExtra("videoTime", (int)totalTime);
        intent.putExtra("id", FFmpegPreviewActivity.RECODER_VIDEO);
        startActivity(intent);
        overridePendingTransition(R.anim.main_translatex100to0,R.anim.main_translatex0tof100);
    }
}
private void saveRecording()
{
// Stops capture, then — slightly later, so in-flight frames can drain —
// finalises the file and hands it to the preview screen, before resetting
// the activity for a new take.
stopAction();
rec = false;
recording = false;
runAudioThread = false;
isStopAudio = true;
// 50 ms later: flush/close the recorder and launch the preview activity.
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
//registerVideo();
videoTheEnd();
}
}, 50);
// 200 ms later: reset all state so another recording can start.
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
resetStatus();
}
}, 200);
}
private void releaseResources(){
// Stops and frees the FFmpeg recorder; safe to call repeatedly (null check).
try {
if(videoRecorder != null)
{
videoRecorder.stop();
videoRecorder.release();
videoRecorder = null;
}
} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
e.printStackTrace();
}
}
private void initiateRecording(boolean isActionDown)
{
// Marks the start of a brand-new take and resets the pause bookkeeping.
// NOTE(review): the isActionDown parameter is unused.
isRecordingStarted = true;
firstTime = System.currentTimeMillis();
recording = true;
totalPauseTime = 0;
pausedTime = 0;
}
public void skipVideoProgress() {
    // Shows the progress bar and, while the user has not yet pressed record,
    // runs the idle "blinking" runnable.
    progressView.setVisibility(View.VISIBLE);
    if (hasPressRecoderMenu) {
        return;
    }
    progressView.post(videoProgressRunnable);
}
// While idle (before the user presses record), toggles the progress bar between
// 0 and 1 every second; stops re-arming itself once recording has started.
final Runnable videoProgressRunnable = new Runnable() {
@Override
public void run() {
if (!hasPressRecoderMenu) {
progressView.postDelayed(this, 1000);
progressView.setProgress(isBufferChange ? 0 : 1);
isBufferChange = !isBufferChange;
}
}
};
// UI state of the recorder: PRESS (idle/ready), LOOSEN, CHANGE, SUCCESS
// (minimum length reached; the take may be saved).
// CLEANUP: dropped the redundant `static` modifier (nested enums are implicitly
// static) and made the backing int field final.
public enum RecorderState {
    PRESS(1), LOOSEN(2), CHANGE(3), SUCCESS(4);

    /** Maps an int code back to a state, defaulting to PRESS for unknown values. */
    static RecorderState mapIntToValue(final int stateInt) {
        for (RecorderState value : RecorderState.values()) {
            if (stateInt == value.getIntValue()) {
                return value;
            }
        }
        return PRESS;
    }

    private final int mIntValue;

    RecorderState(int intValue) {
        mIntValue = intValue;
    }

    int getIntValue() {
        return mIntValue;
    }
}
}
If I understand correctly, you are looking for a way to set the aspect ratio independently from the resolution? Is this correct?
@saudet You are right. I am looking for a way to record square video from the raw frame data of the Android Camera. As you may know, MediaRecorder does not support setting an arbitrary video size unless the hardware supports it. So it is very difficult to record high-definition, square video from the Android camera the way Vine does.
Ok, so we could add some new property like getAspectRatio()/setAspectRatio(double)
. Let me know if you have some time to implement that. Thanks!
Let me know if you have any problems with the new aspectRatio
property! Thanks
This property is not available in the 0.11 repo
Is there any source code providing an example of how to record a square video in Android using FFmpeg?
The new aspect ratio functionality is now available in version 1.0. Enjoy!
I have removed recordingTime variable in FFmpegRecorderActivity to record as long as I want.
Now I can only see the frames recorded within the first 35 minutes; frames recorded after the 35-minute mark are missing. I think FFmpegFrameRecorder can't record for longer than 35 minutes — is that right?
I can see these logs in 35 mins later below:
09-02 00:09:17.940 30446-30446/com.opvw.videorecordapp W/System.err﹕ com.googlecode.javacv.FrameRecorder$Exception: avcodec_encode_video2() error -1: Could not encode video packet.
09-02 00:09:17.940 30446-30446/com.opvw.videorecordapp W/System.err﹕ at com.javacv.recorder.FFmpegFrameRecorder.record(FFmpegFrameRecorder.java:814)
09-02 00:09:17.940 30446-30446/com.opvw.videorecordapp W/System.err﹕ at com.javacv.recorder.FFmpegFrameRecorder.record(FFmpegFrameRecorder.java:741)
09-02 00:09:17.940 30446-30446/com.opvw.videorecordapp W/System.err﹕ at com.opvw.videorecordapp.VideoRecordActivity$CameraView.onPreviewFrame(VideoRecordActivity.java:775)
There's no such limit, no. It's probably a problem with the timestamps. So could you first make sure they make sense with the FPS you are using? Thanks
thanks for your reply. I will check soon and tell you.
Best regards.
I don't know the correct FPS settings for recording longer than 35 minutes. Below I show the code that I think may be related to the FPS — please help me. Note that the audio does keep recording past 35 minutes. Thanks.
// Selected capture resolution preset (project constant) and the target preview frame rate in fps.
private int currentResolution = CONSTANTS.RESOLUTION_HIGH_VALUE; private int frameRate = 30;
/**
 * Creates and configures the FFmpeg recorder used for the video track.
 * Output path, format, codecs and rates come from the project's
 * RecorderParameters preset for the currently selected resolution.
 */
private void initVideoRecorder() {
    // Fresh unique temp path for this recording session.
    strVideoPath = Util.createTempPath(tempFolderPath);
    RecorderParameters params = Util.getRecorderParameter(currentResolution);
    fileVideoPath = new File(strVideoPath);

    // One audio channel; frame size follows the camera preview size.
    videoRecorder = new FFmpegFrameRecorder(strVideoPath, previewWidth, previewHeight, 1);
    videoRecorder.setFormat(params.getVideoOutputFormat());
    videoRecorder.setSampleRate(params.getAudioSamplingRate());
    videoRecorder.setFrameRate(params.getVideoFrameRate());
    videoRecorder.setVideoCodec(params.getVideoCodec());
    videoRecorder.setVideoQuality(params.getVideoQuality());
    // NOTE(review): audio quality is taken from getVideoQuality() — confirm intended.
    videoRecorder.setAudioQuality(params.getVideoQuality());
    videoRecorder.setAudioCodec(params.getAudioCodec());
    // NOTE(review): bitrates are hard-coded here, unlike initAudioRecorder()
    // which reads them from RecorderParameters — verify this is deliberate.
    videoRecorder.setVideoBitrate(1000000);
    videoRecorder.setAudioBitrate(64000);
}
/**
 * Creates the FFmpeg recorder used for the audio track and the background
 * thread that will feed it samples. Mirrors initVideoRecorder(), but here
 * both bitrates come from the RecorderParameters preset.
 */
private void initAudioRecorder() {
    // Fresh unique temp path for this recording session.
    strAudioPath = Util.createTempPath(tempFolderPath);
    RecorderParameters params = Util.getRecorderParameter(currentResolution);

    sampleRate = params.getAudioSamplingRate();
    frameRate = params.getVideoFrameRate();
    frameTime = (1000000L / frameRate); // microseconds per video frame

    fileAudioPath = new File(strAudioPath);

    // One audio channel; same frame geometry as the video recorder.
    audioRecorder = new FFmpegFrameRecorder(strAudioPath, previewWidth, previewHeight, 1);
    audioRecorder.setFormat(params.getVideoOutputFormat());
    audioRecorder.setSampleRate(params.getAudioSamplingRate());
    audioRecorder.setFrameRate(params.getVideoFrameRate());
    audioRecorder.setVideoCodec(params.getVideoCodec());
    audioRecorder.setVideoQuality(params.getVideoQuality());
    // NOTE(review): audio quality is taken from getVideoQuality() — confirm intended.
    audioRecorder.setAudioQuality(params.getVideoQuality());
    audioRecorder.setAudioCodec(params.getAudioCodec());
    audioRecorder.setVideoBitrate(params.getVideoBitrate());
    audioRecorder.setAudioBitrate(params.getAudioBitrate());

    // Thread is created but not started here; it is started when recording begins.
    audioRecordRunnable = new AudioRecordRunnable();
    audioThread = new Thread(audioRecordRunnable);
}
// Applies camera parameters when the preview surface changes: picks a preview
// size, pushes it to the recorder, requests an FPS range, allocates the image
// buffer used for frame conversion, and sets the display orientation.
private void handleSurfaceChanged()
{
// NOTE(review): the next statement was truncated when this code was pasted —
// the generic type argument and the rest of the line (presumably something
// like "List<Camera.Size> sizes = cameraParameters.getSupportedPreviewSizes();"
// followed by the size-selection logic that assigns previewSize) were stripped
// by markup, which is also why the braces below do not balance. Recover the
// original statement before compiling.
List
if(previewSize != null )
{
previewWidth = previewSize.width;
previewHeight = previewSize.height;
cameraParameters.setPreviewSize(previewWidth, previewHeight);
// Keep the recorder's frame size in sync with the chosen preview size.
if(videoRecorder != null)
{
videoRecorder.setImageWidth(previewWidth);
videoRecorder.setImageHeight(previewHeight);
}
}
}
// Camera API expects fps values scaled by 1000: request 1..frameRate fps.
cameraParameters.setPreviewFpsRange(1000, frameRate*1000);
// 4-channel, 8-bit-per-channel image buffer sized to the preview frame.
yuvIplImage = opencv_core.IplImage.create(previewWidth, previewHeight, IPL_DEPTH_8U, 4);
if(Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO)
{
mCamera.setDisplayOrientation(Util.determineDisplayOrientation(VideoRecordActivity.this, defaultCameraId));
// Prefer continuous video autofocus when the hardware supports it.
List<String> focusModes = cameraParameters.getSupportedFocusModes();
if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
}
else
// Older devices: fall back to the common 90-degree portrait rotation.
mCamera.setDisplayOrientation(90);
mCamera.setParameters(cameraParameters);
}
Sorry, I have just found the cause — it was the timestamps. Thanks.
With the same code, using com.googlecode.javacv.FFmpegFrameRecorder I can record 480x480 video without deformation, but using org.bytedeco.javacv.FFmpegFrameRecorder the recorded video is deformed — the width is stretched.