Open enamor753 opened 8 years ago
I have the same question, could anyone help? Thanks!
Hi qingziang-jia, enamor753
Maybe you can see the link https://github.com/Truiton/ScreenCapture/blob/master/app/src/main/java/com/truiton/screencapture/MainActivity.java
this is the sample of MediaProjection + MediaRecorder.
and you can replace it to https://github.com/fyhertz/libstreaming/blob/master/src/net/majorkernelpanic/streaming/video/H264Stream.java line 198, line 199
line 198 => mMediaRecorder.setCamera(mCamera); line 199 => mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
You need to modify MediaRecorder using mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE); and feed the surface to mMediaProjection.createVirtualDisplay("MainActivity", DISPLAY_WIDTH, DISPLAY_HEIGHT, mScreenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mMediaRecorder.getSurface(), null /* Callbacks */, null) just like in the sample, lines 145~151 of https://github.com/Truiton/ScreenCapture/blob/master/app/src/main/java/com/truiton/screencapture/MainActivity.java
Hi hank5000,
Thanks for providing the code sample. Let me try it and see if it works.
Hi. Have you had any success combining MediaProjection with libstreaming?
I have the same question, could anyone help? Thanks!
You can use this class instead of H264Stream:
package net.majorkernelpanic.streaming.video;
import android.annotation.SuppressLint; import android.content.Context; import android.content.SharedPreferences; import android.graphics.Rect; import android.hardware.Camera; import android.hardware.display.DisplayManager; import android.hardware.display.VirtualDisplay; import android.media.MediaCodec; import android.media.MediaCodecInfo; import android.media.MediaFormat; import android.media.MediaRecorder; import android.media.projection.MediaProjection; import android.os.Environment; import android.os.IBinder; import android.os.ParcelFileDescriptor; import android.util.Base64; import android.util.DisplayMetrics; import android.util.Log; import android.util.SparseIntArray; import android.view.Surface; import android.view.SurfaceHolder; import android.view.WindowManager;
import net.majorkernelpanic.streaming.MediaStream; import net.majorkernelpanic.streaming.authen.AuthenticationActivity; import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException; import net.majorkernelpanic.streaming.exceptions.StorageUnavailableException; import net.majorkernelpanic.streaming.gl.SurfaceView; import net.majorkernelpanic.streaming.hw.EncoderDebugger; import net.majorkernelpanic.streaming.hw.NV21Convertor; import net.majorkernelpanic.streaming.mp4.MP4Config; import net.majorkernelpanic.streaming.rtp.H264Packetizer; import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream; import net.majorkernelpanic.streaming.surface.SurfaceControl; import net.majorkernelpanic.streaming.utils.Logger;
import java.io.File; import java.io.FileDescriptor; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit;
import static android.content.Context.WINDOW_SERVICE;
/**
 * A {@code MediaStream} implementation that streams the device screen
 * (captured via {@link MediaProjection}) as H.264, instead of the camera
 * stream produced by the stock {@code H264Stream}.
 *
 * Created by hungtq7admin on 28/03/2019.
 */
public class H264ScreenStream extends MediaStream {

    protected final static String TAG = "H264ScreenStream" + Logger.TAG;

    /** Activity that supplies the MediaProjection token and window metrics. */
    private AuthenticationActivity mAuthenticationActivity;
    private MediaProjection mMediaProjection;
    private VirtualDisplay mVirtualDisplay;
    private MediaCodec.BufferInfo mVideoBufferInfo;
    private IBinder mDisplay;
    private Surface mSurface;
    private int mScreenDensity;

    /** Stops the stream and releases the projection when the system revokes it. */
    private MediaProjection.Callback mMediaProjectionCallback = new MediaProjection.Callback() {
        @SuppressLint("LongLogTag")
        @Override
        public void onStop() {
            stop();
            Log.v(TAG, "Recording Stopped");
            mMediaProjection.unregisterCallback(this);
            mMediaProjection = null;
        }
    };

    private Context mContext;
    protected VideoQuality mRequestedQuality = VideoQuality.DEFAULT_VIDEO_QUALITY.clone();
    protected VideoQuality mQuality = mRequestedQuality.clone();
    protected SharedPreferences mSettings = null;
    /** Released by the MediaRecorder info callback during the capability test. */
    private Semaphore mLock = new Semaphore(0);
    private MP4Config mConfig;
    private DisplayMetrics mMetrics;

    protected int mMaxFps = 0;

    // Maps the display rotation to the orientation hint given to MediaRecorder.
    // NOTE(review): values copied from the Truiton ScreenCapture sample — confirm
    // they match the target device's natural orientation.
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }
public H264ScreenStream(AuthenticationActivity authenticationActivity) { mAuthenticationActivity = authenticationActivity; mContext = mAuthenticationActivity.getApplicationContext(); mMode = MODE_MEDIARECORDER_API; mPacketizer = new H264Packetizer(); createProjector(); }
/**
public void setPreferences(SharedPreferences prefs) { mSettings = prefs; }
/**
/**
@SuppressLint({"NewApi", "LongLogTag"}) private MP4Config testMediaCodecAPI() throws RuntimeException, IOException { Log.d(TAG, "testMediaCodecAPI: "); try { if (mQuality.resX>=640) { // Using the MediaCodec API with the buffer method for high resolutions is too slow mMode = MODE_MEDIARECORDER_API; } EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY); return new MP4Config(debugger.getB64SPS(), debugger.getB64PPS()); } catch (Exception e) { // Fallback on the old streaming method using the MediaRecorder API Log.e(TAG,"Resolution not supported with the MediaCodec API, we fallback on the old streamign method."); mMode = MODE_MEDIARECORDER_API; return testMediaRecorderAPI(); } }
/**
/**
Video encoding is done by a MediaRecorder. */ @SuppressLint("LongLogTag") protected void encodeWithMediaRecorder() throws IOException { Log.d(TAG,"Video encoded using the MediaRecorder API sPipeApi=" + sPipeApi); // We need a local socket to forward data output by the camera to the packetizer createSockets();
try { mMediaRecorder = new MediaRecorder(); mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE); mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264); mMediaRecorder.setVideoSize(mQuality.resX, mQuality.resY); mMediaRecorder.setVideoFrameRate(mQuality.framerate);
// The bandwidth actually consumed is often above what was requested
mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate * 0.8));
int rotation = mAuthenticationActivity.getWindowManager().getDefaultDisplay().getRotation();
int orientation = ORIENTATIONS.get(rotation + 90);
mMediaRecorder.setOrientationHint(orientation);
// We write the output of the camera in a local socket instead of a file !
// This one little trick makes streaming feasible quiet simply: data from the camera
// can then be manipulated at the other end of the socket
FileDescriptor fd = null;
if (sPipeApi == PIPE_API_PFD) {
fd = mParcelWrite.getFileDescriptor();
} else {
fd = mSender.getFileDescriptor();
}
Log.d(TAG,"Set input= " + fd.valid());
mMediaRecorder.setOutputFile(fd);
mMediaRecorder.prepare();
// destroyVirtualDisplay(); Log.d(TAG, "encodeWithMediaRecorder: mQuality: " + mQuality + " mScreenDensity " + mScreenDensity
" mMediaRecorder " + mMediaRecorder.toString()); mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenSharingDemo", mQuality.resX, mQuality.resY, mScreenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mMediaRecorder.getSurface(), null /Callbacks/, null /Handler/);
mMediaRecorder.start();
Log.d(TAG,"Set input= " + fd.valid() + " toString= " + fd.toString());
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, e.toString());
throw new ConfNotSupportedException(e.getMessage());
}
InputStream is = null;
if (sPipeApi == PIPE_API_PFD) {
is = new ParcelFileDescriptor.AutoCloseInputStream(mParcelRead);
} else {
is = mReceiver.getInputStream();
}
// This will skip the MPEG4 header if this step fails we can't stream anything :(
try {
byte buffer[] = new byte[4];
// Skip all atoms preceding mdat atom
while (!Thread.interrupted()) {
while (is.read() != 'm');
is.read(buffer,0,3);
if (buffer[0] == 'd' && buffer[1] == 'a' && buffer[2] == 't') break;
}
} catch (IOException e) {
Log.e(TAG,"Couldn't skip mp4 header :/");
stop();
throw e;
}
// The packetizer encapsulates the bit stream in an RTP stream and send it over the network
mPacketizer.setInputStream(is);
mPacketizer.start();
mStreaming = true;
Log.i(TAG, "started");
}
/**
Video encoding is done by a MediaCodec. */ @SuppressLint("LongLogTag") protected void encodeWithMediaCodec() throws RuntimeException, IOException { this.mVideoBufferInfo = new MediaCodec.BufferInfo(); mMediaCodec = MediaCodec.createEncoderByType("video/avc"); MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY); mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate); mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate); mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); mSurface = mMediaCodec.createInputSurface(); mMediaCodec.start();
Log.d(TAG,"MediaCodec started video content w= " + mQuality.resX + " h= " + mQuality.resY + " bitrate= "
mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec)); mPacketizer.start(); mStreaming = true; }
/**
}
@SuppressLint("LongLogTag") private MP4Config testMediaRecorderAPI() throws RuntimeException, IOException { String key = PREF_PREFIX+"h264-mr-"+mRequestedQuality.framerate+","+mRequestedQuality.resX+","+mRequestedQuality.resY; Log.d(TAG, "testMediaRecorderAPI: " + key);
if (mSettings != null && mSettings.contains(key) ) {
String[] s = mSettings.getString(key, "").split(",");
return new MP4Config(s[0],s[1],s[2]);
}
if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
throw new StorageUnavailableException("No external storage or external storage not ready !");
}
final String TESTFILE = Environment.getExternalStorageDirectory().getPath()+"/droid-test.mp4";
Log.i(TAG,"Testing H264 support... Test file saved at: "+TESTFILE);
try {
File file = new File(TESTFILE);
file.createNewFile();
} catch (IOException e) {
Log.e(TAG, e.toString());
throw new StorageUnavailableException(e.getMessage());
}
Log.d(TAG, "Test file has been create.");
try {
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setVideoSize(mRequestedQuality.resX, mRequestedQuality.resY);
mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate * 0.8));
mMediaRecorder.setOutputFile(TESTFILE);
mMediaRecorder.setMaxDuration(3000);
// We wait a little and stop recording
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
public void onInfo(MediaRecorder mr, int what, int extra) {
Log.d(TAG,"MediaRecorder callback called !");
if (what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
Log.d(TAG,"MediaRecorder: MAX_DURATION_REACHED");
} else if (what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
Log.d(TAG,"MediaRecorder: MAX_FILESIZE_REACHED");
} else if (what==MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
Log.d(TAG,"MediaRecorder: INFO_UNKNOWN");
} else {
Log.d(TAG,"WTF ?");
}
mLock.release();
}
});
// Start recording
mMediaRecorder.prepare();
Log.i(TAG,"H264 Test start...");
// destroyVirtualDisplay(); Log.d(TAG, "encodeWithMediaRecorder: mQuality: " + mQuality + " mScreenDensity " + mScreenDensity
" mMediaRecorder " + mMediaRecorder.toString()); mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenSharingDemo", mQuality.resX,mQuality.resY, mScreenDensity, 0, mMediaRecorder.getSurface(), null /Callbacks/, null /Handler/); mMediaRecorder.start(); Log.i(TAG,"H264 Test started..."); if (mLock.tryAcquire(6, TimeUnit.SECONDS)) { Log.d(TAG,"MediaRecorder callback was called :)"); Thread.sleep(400); } else { Log.d(TAG,"MediaRecorder callback was not called after 6 seconds... :("); } } catch (IOException e) { throw new ConfNotSupportedException(e.getMessage()); } catch (RuntimeException e) { throw new ConfNotSupportedException(e.getMessage()); } catch (InterruptedException e) { e.printStackTrace(); } finally { try { mMediaRecorder.stop(); } catch (Exception e) {} mMediaRecorder.release(); mMediaRecorder = null; }
// Retrieve SPS & PPS & ProfileId with MP4Config
MP4Config config = new MP4Config(TESTFILE);
// Delete dummy video
//File file = new File(TESTFILE);
//if (!file.delete()) Log.e(TAG,"Temp file could not be erased");
Log.i(TAG,"H264 Test succeeded...");
Log.i(TAG, "SPS= " + config.getB64SPS() + " PPS= " + config.getB64PPS() + " getPlevel= " + config.getProfileLevel());
// Save test result
if (mSettings != null) {
SharedPreferences.Editor editor = mSettings.edit();
editor.putString(key, config.getProfileLevel()+","+config.getB64SPS()+","+config.getB64PPS());
editor.commit();
}
return config;
}
@SuppressLint("LongLogTag") private void createProjector() { mMediaProjection = mAuthenticationActivity.getMediaProjection(); mMetrics = new DisplayMetrics(); WindowManager windowManager = (WindowManager) mContext.getSystemService(WINDOW_SERVICE); if (windowManager == null) { Log.e(TAG, "recover activity to get context windowManager = null"); return; } if (mMediaProjection == null) { Log.e(TAG, "recover activity to get context mediaProjector = null"); return; } //mMediaProjection.registerCallback(mMediaProjectionCallback, null); windowManager.getDefaultDisplay().getMetrics(mMetrics); mScreenDensity = mMetrics.densityDpi;
Log.i(TAG, "createProjector x= " + mMetrics.widthPixels + " y= " + mMetrics.heightPixels);
}
@SuppressLint("LongLogTag") private void destroyVirtualDisplay() { if (mVirtualDisplay != null) { Log.i(TAG, "virtual release"); mVirtualDisplay.release(); mVirtualDisplay = null; } }
private static void setDisplaySurface(IBinder display, Surface surface, Rect deviceRect, Rect displayRect) { SurfaceControl.openTransaction(); try { SurfaceControl.setDisplaySurface(display, surface); SurfaceControl.setDisplayProjection(display, 0, deviceRect, displayRect); SurfaceControl.setDisplayLayerStack(display, 0); } finally { SurfaceControl.closeTransaction(); } }
private static IBinder createDisplay() { return SurfaceControl.createDisplay("scrcpy", true); }
private static void destroyDisplay(IBinder display) { SurfaceControl.destroyDisplay(display); } }
I am thinking about using mediaprojection and libstreaming to cast my android screen to other device. Is that possible? I don't know how to replace camera input with screen capture.... Need help, please