Closed b4017816 closed 3 years ago
How about the following?
eglManager = new EglManager(null);
processor =
new FrameProcessor(
this,
eglManager.getNativeContext(),
BINARY_GRAPH_NAME,
INPUT_VIDEO_STREAM_NAME,
OUTPUT_VIDEO_STREAM_NAME);
processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
float focalLength = cameraHelper.getFocalLengthPixels();
if (focalLength != Float.MIN_VALUE) {
Packet focalLengthSidePacket = processor.getPacketCreator().createFloat32(focalLength);
Map<String, Packet> inputSidePackets = new HashMap<>();
inputSidePackets.put(FOCAL_LENGTH_STREAM_NAME, focalLengthSidePacket);
processor.setInputSidePackets(inputSidePackets);
}
Hi, thanks for the reply, with that suggestion I get a Null Pointer Exception, Attempt to invoke virtual method "float com.google.mediapipe.components.CameraXPreviewHelper.getFocalLengthPixels()" on a null object reference, (error log listed below)
In package com.google.mediapipe.components focalLengthPixels is defined and returned so I'm not sure what is causing the NPE
private float focalLengthPixels = 1.4E-45F;
public float getFocalLengthPixels() {
return this.focalLengthPixels;
}
I'm guessing I'm not initializing something correctly but not sure where, I really appreciate the help, thanks
My MainActivity
package com.example.iristracking;
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.util.Log;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Size;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.content.pm.ApplicationInfo; //not used
import android.content.pm.PackageManager; //not used
import android.content.pm.PackageManager.NameNotFoundException; //not used
import android.graphics.SurfaceTexture; //not used
import android.os.Bundle; //not used
import com.google.mediapipe.formats.proto.LandmarkProto.NormalizedLandmark; //not used
import com.google.mediapipe.formats.proto.LandmarkProto.NormalizedLandmarkList; //not used
import com.google.mediapipe.components.CameraHelper;
import com.google.mediapipe.components.CameraXPreviewHelper;
import com.google.mediapipe.components.ExternalTextureConverter;
import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.components.PermissionHelper;
import com.google.mediapipe.framework.AndroidAssetUtil;
import com.google.mediapipe.framework.AndroidPacketCreator; //not used
import com.google.mediapipe.framework.PacketGetter; //not used
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.glutil.EglManager;
/** Main activity of MediaPipe example apps. */
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private boolean haveAddedSidePackets = false;
private static final String FOCAL_LENGTH_STREAM_NAME = "focal_length_pixel";
private static final String OUTPUT_LANDMARKS_STREAM_NAME = "face_landmarks_with_iris";
private static final String BINARY_GRAPH_NAME = "iris_tracking_gpu.binarypb";
private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
// Flips the camera-preview frames vertically before sending them into FrameProcessor to be
// processed in a MediaPipe graph, and flips the processed frames back when they are displayed.
// This is needed because OpenGL represents images assuming the image origin is at the bottom-left
// corner, whereas MediaPipe in general assumes the image origin is at top-left.
private static final boolean FLIP_FRAMES_VERTICALLY = true;
static {
// Load all native libraries needed by the app.
System.loadLibrary("mediapipe_jni");
System.loadLibrary("opencv_java3");
}
// {@link SurfaceTexture} where the camera-preview frames can be accessed.
private SurfaceTexture previewFrameTexture;
// {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph.
private SurfaceView previewDisplayView;
// Creates and manages an {@link EGLContext}.
private EglManager eglManager;
// Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed
// frames onto a {@link Surface}.
private FrameProcessor processor;
// Converts the GL_TEXTURE_EXTERNAL_OES texture from Android camera into a regular texture to be
// consumed by {@link FrameProcessor} and the underlying MediaPipe graph.
private ExternalTextureConverter converter;
// ApplicationInfo for retrieving metadata defined in the manifest.
private ApplicationInfo applicationInfo;
// Handles camera access via the {@link CameraX} Jetpack support library.
private CameraXPreviewHelper cameraHelper;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
previewDisplayView = new SurfaceView(this);
setupPreviewDisplayView();
// Initialize asset manager so that MediaPipe native libraries can access the app assets, e.g.,
// binary graphs.
AndroidAssetUtil.initializeNativeAssetManager(this);
eglManager = new EglManager(null);
processor =
new FrameProcessor(
this,
eglManager.getNativeContext(),
BINARY_GRAPH_NAME,
INPUT_VIDEO_STREAM_NAME,
OUTPUT_VIDEO_STREAM_NAME);
processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
PermissionHelper.checkAndRequestCameraPermissions(this);
if (!haveAddedSidePackets) {
float focalLength = cameraHelper.getFocalLengthPixels();
if (focalLength != Float.MIN_VALUE) {
Packet focalLengthSidePacket = processor.getPacketCreator().createFloat32(focalLength);
Map<String, Packet> inputSidePackets = new HashMap<>();
inputSidePackets.put(FOCAL_LENGTH_STREAM_NAME, focalLengthSidePacket);
processor.setInputSidePackets(inputSidePackets);
}
haveAddedSidePackets = true;
}
}
@Override
protected void onResume() {
super.onResume();
converter = new ExternalTextureConverter(eglManager.getContext());
converter.setFlipY(FLIP_FRAMES_VERTICALLY);
converter.setConsumer(processor);
if (PermissionHelper.cameraPermissionsGranted(this)) { startCamera(); }
}
@Override
protected void onPause() {
super.onPause();
converter.close();
previewDisplayView.setVisibility(View.GONE);
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
private void setupPreviewDisplayView() {
previewDisplayView.setVisibility(View.GONE);
ViewGroup viewGroup = findViewById(R.id.preview_display_layout);
viewGroup.addView(previewDisplayView);
previewDisplayView
.getHolder()
.addCallback(
new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
processor.getVideoSurfaceOutput().setSurface(holder.getSurface());
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// (Re-)Compute the ideal size of the camera-preview display (the area that the
// camera-preview frames get rendered onto, potentially with scaling and rotation)
// based on the size of the SurfaceView that contains the display.
Size viewSize = new Size(width, height);
Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize);
// Connect the converter to the camera-preview frames as its input (via
// previewFrameTexture), and configure the output width and height as the computed
// display size.
converter.setSurfaceTextureAndAttachToGLContext(
previewFrameTexture, displaySize.getWidth(), displaySize.getHeight());
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
processor.getVideoSurfaceOutput().setSurface(null);
}
});
}
private void startCamera() {
cameraHelper = new CameraXPreviewHelper();
cameraHelper.setOnCameraStartedListener(
surfaceTexture -> {
previewFrameTexture = surfaceTexture;
// Make the display view visible to start showing the preview. This triggers the
// SurfaceHolder.Callback added to (the holder of) previewDisplayView.
previewDisplayView.setVisibility(View.VISIBLE);
});
cameraHelper.startCamera(this, CAMERA_FACING, /*surfaceTexture=*/ null);
}
}
Full Runtime Error:
java.lang.RuntimeException: Unable to start activity ComponentInfo{com.example.iristracking/com.example.iristracking.MainActivity}: java.lang.NullPointerException: Attempt to invoke virtual method 'float com.google.mediapipe.components.CameraXPreviewHelper.getFocalLengthPixels()' on a null object reference
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:3313)
at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3462)
at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:83)
at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:135)
at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:95)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:2049)
at android.os.Handler.dispatchMessage(Handler.java:107)
at android.os.Looper.loop(Looper.java:224)
at android.app.ActivityThread.main(ActivityThread.java:7565)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:539)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:950)
Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'float com.google.mediapipe.components.CameraXPreviewHelper.getFocalLengthPixels()' on a null object reference
at com.example.iristracking.MainActivity.onCreate(MainActivity.java:117)
at android.app.Activity.performCreate(Activity.java:7916)
at android.app.Activity.performCreate(Activity.java:7903)
at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1307)
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:3288)
... 11 more
I tried playing with where the cameraHelper instance is created but that gives me that NPE if " cameraHelper = new CameraXPreviewHelper();" is in startCamera which makes sense, but if I try moving it into onCreate I get "side packets not provided" again, I'm fairly new to android dev and working with ML stuff so I apologize if these are basic issues, Thanks for the assistance it's much appreciated
Hi @b4017816, Can you share your graphs and calculators, So that we can verify with the Iris Solution engineers. Thanks!
Hi there, thanks for your help again, I haven't modified the example Iris Calculators or graphs yet, my plan is to try that after I can build the initial Iris Example, I've tried to adapt what I can from the examples and docs but I'm stuck now.
Calculators Used to build the Iris .aar: mediapipe/graphs/iris_tracking :iris_tracking_gpu_deps
Iris .aar Location: https://github.com/b4017816/IrisTracking/blob/2f48a14411219dd2b1193200cfa69c5a2f7018d5/app/libs/mp_iris_tracking_aar.aar
Binary Graph Location: https://github.com/b4017816/IrisTracking/blob/2f48a14411219dd2b1193200cfa69c5a2f7018d5/app/src/main/assets/iris_tracking_gpu.binarypb
Android Studio Project Repo:https://github.com/b4017816/IrisTracking
Building onto a Xiaomi POCO X3 NFC (MIUI 12.0.8 + Android 10)
Hope this helps, Thanks
Hi @b4017816 Did you try the code mentioned in this https://github.com/google/mediapipe/issues/1847#issuecomment-815028836. Thanks!
Hi yeah I tried adding that code but it gives me a NPE because a camerahelper object hasn't been initialised at that point yet, and trying to initialise the object there gives the error of side packet required, my previous comments have more details, thanks
Hi thanks for your help so far but I still haven't been able to make any progress, any ideas of what I can try next? I really appreciate the assistance, thanks
Hi @b4017816, Above added right assignees who can guide you well. Thanks!
If https://github.com/google/mediapipe/issues/1847#issuecomment-815028836 didn't work for you, then maybe the camera hasn't been initialized at that point in time.
Are you not able to mimic what is done for focal length in the Iris example, and place in onCameraStarted in your own example ?
Alternatively,
a quick workaround is to just place cameraHelper.getFocalLengthPixels();
anywhere after starting the camera , print out the value, then hard-code that value for the focalLength
mentioned in https://github.com/google/mediapipe/issues/1847#issuecomment-815028836
On a phone, the focal length is constant per each device, so just getting this value once for your phone is enough. i understand that doesn't generalize to anywhere your app runs, but this may at least allow you to test stuff on your device, and fix this issue properly later. The proper fix is calling getFocalLength sometime after the camera starts but before the graph starts
Hi @b4017816, Did you get a chance to go through the above comment. Thanks!
Assuming you are resolved, we are closing this issue now. Feel free to reopen if the above issue still exists.
Followed the above but now getting this error with the app crash on start
2021-06-09 23:43:26.405 31582-31582/? E/h.iristracktes: Unknown bits set in runtime_flags: 0x800000 2021-06-09 23:43:26.606 31582-31582/com.example.iristracktest E/Config:Grabber:OnePlusFontConfig: Index 0 requested, with a size of 0 2021-06-09 23:43:26.616 31582-31582/com.example.iristracktest E/h.iristracktes: [frame_perf] perfboost open tb_ctl file failed, isApp[1], errno=No such file or directory 2021-06-09 23:43:26.616 31582-31582/com.example.iristracktest E/libprocessgroup: set_timerslack_ns write failed: Operation not permitted 2021-06-09 23:43:26.715 31582-31582/com.example.iristracktest E/h.iristracktes: Invalid ID 0x00000000. 2021-06-09 23:43:26.745 31582-31582/com.example.iristracktest E/libEGL: validate_display:87 error 3008 (EGL_BAD_DISPLAY) 2021-06-09 23:43:26.781 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.782 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.782 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.782 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.791 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.791 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.792 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.805 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.900 31582-9075/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:26.902 31582-9075/com.example.iristracktest E/libc: Access denied finding property "persist.vendor.camera.privapp.list" 2021-06-09 23:43:26.906 31582-9079/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:27.118 31582-9073/com.example.iristracktest E/libc: Access denied finding property "vendor.camera.aux.packagelist" 2021-06-09 23:43:27.168 31582-9070/com.example.iristracktest A/libc: Fatal signal 11 (SIGSEGV), code 1 (SEGV_MAPERR), fault addr 0x0 in tid 9070 (mediapipe_gl_ru), pid 31582 (h.iristracktest)
Please help!!
Hello there, I'm working at my iris project, and I don't know how they get the focal length; and any one have access the the land marks and save them to json file?
Hi, I'm trying to integrate Iris into an android app using an .aar, I was able to set up and build the app for face detection using the .aar tutorial, but I'm trying when trying to modify the example to use an Iris .aar I'm running into issues. The app builds and deploys, starts to run then crashes giving exception "focal_length_pixel" is required but not provided, which is expected but I don't know how to add it to the main activity correctly.
Does the .aar I built contain the calculators for the subgraphs as well as the main graphs or do I need to add something else to my AAR Build File? AAR Build File: load("//mediapipe/java/com/google/mediapipe:mediapipe_aar.bzl", "mediapipe_aar") mediapipe_aar( name = "mp_iris_tracking_aar", calculators = ["//mediapipe/graphs/iris_tracking :iris_tracking_gpu_deps"], )
Android Studio Assets: iris_tracking_gpu.binarypb face_landmark.tflite iris_landmark.tflite face_detection_front.tflite
I've tried to add code for the focal length to onCreate based on the Iris example but I don't know how to modify this to work with an Iris .aar, Is there any further docs I can read to point me in the right direction? I'm trying to create an accessibility app that can utilise Iris for infererring gaze direction for use in controlling a text to speech keyboard, I Would really appreciate any help or further reading materials, Thanks :)
I need to integrate this snippet (I think) into the modified code for the facedetection example but not sure how.
if (!haveAddedSidePackets) { float focalLength = cameraHelper.getFocalLengthPixels(); if (focalLength != Float.MIN_VALUE) { Packet focalLengthSidePacket = processor.getPacketCreator().createFloat32(focalLength); Map<String, Packet> inputSidePackets = new HashMap<>(); inputSidePackets.put(FOCAL_LENGTH_STREAM_NAME, focalLengthSidePacket); processor.setInputSidePackets(inputSidePackets); } haveAddedSidePackets = true;
Modified Face Tracking AAR example: package com.example.iristracking;
// Copyright 2019 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License.
import android.graphics.SurfaceTexture; import android.os.Bundle; import android.util.Log; import java.util.HashMap; import java.util.Map; import androidx.appcompat.app.AppCompatActivity; import android.util.Size; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.View; import android.view.ViewGroup; import com.google.mediapipe.components.CameraHelper; import com.google.mediapipe.components.CameraXPreviewHelper; import com.google.mediapipe.components.ExternalTextureConverter; import com.google.mediapipe.components.FrameProcessor; import com.google.mediapipe.components.PermissionHelper; import com.google.mediapipe.framework.AndroidAssetUtil; import com.google.mediapipe.framework.Packet; import com.google.mediapipe.glutil.EglManager;
/* Main activity of MediaPipe example apps. / public class MainActivity extends AppCompatActivity { private static final String TAG = "MainActivity"; private boolean haveAddedSidePackets = false;
} `