Closed Salabda closed 1 year ago
You can call setYuvDataCallback in DJICodecManager. In each callback, save the data into a Bitmap yourself. You can refer to the DJI decoder demo.
Is that anywhere in the demo? Sorry, maybe I haven't been able to find it.
Saving to a JPEG or to a Bitmap is a similar process.
Thanks for that, but I don't really know how or where to set up the setYuvDataCallback method. My application is pretty simple right now — would you mind checking it? Where should I add the YUV methods?
/**
 * Activity that displays the DJI primary and FPV (secondary) video feeds
 * using {@code VideoFeedView}. Registers live-video data listeners and a
 * physical-source listener on creation and removes them on destruction.
 *
 * NOTE(review): assumes the DJI MSDK v4 product connection has already been
 * established before this Activity starts — TODO confirm against the caller.
 */
public class VideoFeederClass extends AppCompatActivity implements View.OnClickListener {

    // ------------------------- Video Feeder Local Variables --------------------------
    private SetCallback setBandwidthCallback;
    private FetchMediaTask.Callback fetchMediaFileTaskCallback;
    private dji.sdk.camera.VideoFeeder.PhysicalSourceListener sourceListener;
    private VideoFeedView primaryVideoFeed;
    private VideoFeedView fpvVideoFeed;
    private Camera camera;
    // Listeners returned by registerLiveVideo(); kept as fields so the exact
    // same instances can be removed later (the original code created fresh
    // listeners in the close path and removed those, leaking the real ones).
    private dji.sdk.camera.VideoFeeder.VideoDataListener primaryVideoDataListener;
    private dji.sdk.camera.VideoFeeder.VideoDataListener secondaryVideoDataListener;
    // ------------------------- Video Feeder Local Variables ends ---------------------

    protected TextureView mVideoSurface = null;
    protected ImageView mImageSurface;
    private Bitmap photoBitmap;
    private Button captureBtn, shootPhotoModeBtn, recordVideoModeBtn;
    private ToggleButton recordBtn;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video_feeder);
        init(); // Initialize UI, callbacks, video listeners.
    }

    @Override
    protected void onDestroy() {
        // Unregister the feed listeners so this Activity is not leaked.
        setVideoFeederListeners(false);
        super.onDestroy();
    }

    // ----------------------- Video Feeder setup --------------------------

    /** One-stop initialization, called from {@link #onCreate}. */
    private void init() {
        initui();
        initCallbacks();
        setUpListeners();
    }

    /** Binds the views declared in activity_video_feeder.xml. */
    private void initui() {
        primaryVideoFeed = (VideoFeedView) findViewById(R.id.primary_video_feed);
        fpvVideoFeed = (VideoFeedView) findViewById(R.id.fpv_video_feed);
        captureBtn = findViewById(R.id.btn_capture);
        recordBtn = findViewById(R.id.btn_record);
    }

    /**
     * Creates the bandwidth-key callback. On success both feed views reset
     * their key frame so the stream recovers cleanly after a source change.
     */
    private void initCallbacks() {
        setBandwidthCallback = new SetCallback() {
            @Override
            public void onSuccess() {
                showToast("Set key value successfully");
                if (fpvVideoFeed != null) {
                    fpvVideoFeed.changeSourceResetKeyFrame();
                }
                if (primaryVideoFeed != null) {
                    primaryVideoFeed.changeSourceResetKeyFrame();
                }
            }

            @Override
            public void onFailure(@NonNull DJIError error) {
                showToast("Failed to set: " + error.getDescription());
            }
        };
    }

    /**
     * Creates the physical-source listener (toasts whenever the primary or
     * secondary feed switches source) and opens the video feeder listeners.
     */
    private void setUpListeners() {
        sourceListener = new dji.sdk.camera.VideoFeeder.PhysicalSourceListener() {
            @Override
            public void onChange(dji.sdk.camera.VideoFeeder.VideoFeed videoFeed, PhysicalSource newPhysicalSource) {
                if (videoFeed == dji.sdk.camera.VideoFeeder.getInstance().getPrimaryVideoFeed()) {
                    // Unified on ToastUtils: the original mixed a bare
                    // setResultToToast(...) call here with ToastUtils below.
                    ToastUtils.setResultToToast("Primary Source: " + newPhysicalSource.toString());
                }
                if (videoFeed == dji.sdk.camera.VideoFeeder.getInstance().getSecondaryVideoFeed()) {
                    ToastUtils.setResultToToast("Secondary Source: " + newPhysicalSource.toString());
                }
            }
        };
        setVideoFeederListeners(true);
    }

    /**
     * Registers ({@code isOpen == true}) or unregisters ({@code isOpen == false})
     * the live-video data listeners and the physical-source listener.
     *
     * @param isOpen true to attach the listeners, false to detach the ones
     *               previously attached by this method
     */
    private void setVideoFeederListeners(boolean isOpen) {
        if (dji.sdk.camera.VideoFeeder.getInstance() == null) return;
        final BaseProduct product = DJISDKManager.getInstance().getProduct();
        if (product == null) return;
        if (isOpen) {
            // registerLiveVideo() both creates and registers the listener;
            // keep the returned instances so the close path can remove them.
            primaryVideoDataListener = primaryVideoFeed.registerLiveVideo(
                    dji.sdk.camera.VideoFeeder.getInstance().getPrimaryVideoFeed(), true);
            secondaryVideoDataListener = fpvVideoFeed.registerLiveVideo(
                    dji.sdk.camera.VideoFeeder.getInstance().getSecondaryVideoFeed(), false);
            dji.sdk.camera.VideoFeeder.getInstance().addPhysicalSourceListener(sourceListener);
            showToast("videofeeder");
        } else {
            dji.sdk.camera.VideoFeeder.getInstance().removePhysicalSourceListener(sourceListener);
            if (primaryVideoDataListener != null) {
                dji.sdk.camera.VideoFeeder.getInstance()
                        .getPrimaryVideoFeed()
                        .removeVideoDataListener(primaryVideoDataListener);
                primaryVideoDataListener = null;
            }
            if (Helper.isMultiStreamPlatform() && secondaryVideoDataListener != null) {
                dji.sdk.camera.VideoFeeder.getInstance()
                        .getSecondaryVideoFeed()
                        .removeVideoDataListener(secondaryVideoDataListener);
                secondaryVideoDataListener = null;
                showToast("que sopa");
            }
        }
    }

    @Override
    public void onClick(View v) {
        // No click handling yet.
    }
}
Now, if I try to add mCodecManager.enabledYuvData(true); the app crashes. Is it possible to have a video surface and collect the YUV data at the same time in any MSDK v4.x?
When the YUV data is enabled, the video surface will stop rendering. In other words, you will see a freeze screen.
Noticed. I managed to get the data from a textureview listener and then pass it to a Bitmap. So it's working now. Thanks for the help.
Is there any way of getting a Bitmap from the DJI decoder in version 4.16.2, using the MSDK and an M300 RTK?