homuler / MediaPipeUnityPlugin

Unity plugin to run MediaPipe

FaceMesh Android platform error #717

Closed wizVR-zhangjun closed 2 years ago

wizVR-zhangjun commented 2 years ago

Description

error: 2022-08-23 21:01:32.812 10184-12112/com.study.MediaPipeUnityPlugin E/Unity: MediaPipeException: MediaPipe Aborted, refer glog files for more details
  at Mediapipe.MpReturnCodeExtension.Assert (Mediapipe.MpReturnCode code) [0x0006d] in D:\Unity\SvnMediaPipe\MediapipeUnity\Packages\com.github.homuler.mediapipe\Runtime\Scripts\PInvoke\MpReturnCode.cs:51
  at Mediapipe.CalculatorGraph..ctor (System.String textFormatConfig) [0x00009] in D:\Unity\SvnMediaPipe\MediapipeUnity\Packages\com.github.homuler.mediapipe\Runtime\Scripts\Framework\CalculatorGraph.cs:27
  at Mediapipe.Unity.Hqft.FaceMesh+<Start>d__12.MoveNext () [0x00247] in D:\Unity\SvnMediaPipe\MediapipeUnity\Assets\Face\Scripts\FaceMesh.cs:72
  at UnityEngine.SetupCoroutine.InvokeMoveNext (System.Collections.IEnumerator enumerator, System.IntPtr returnValueAddress) [0x00020] in /Users/bokken/buildslave/unity/build/Runtime/Export/Scripting/Coroutines.cs:17

Here's my script:

public class FaceMesh : MonoBehaviour
{
    [SerializeField] private TextAsset _configAsset;
    [SerializeField] private RawImage _screen;
    [SerializeField] private int _width;
    [SerializeField] private int _height;
    [SerializeField] private int _fps;
    [SerializeField] private MultiFaceLandmarkListAnnotationController _multiFaceLandmarksAnnotationController;

    private CalculatorGraph _graph;
    private ResourceManager _resourceManager;

    private WebCamTexture _webCamTexture;
    private Texture2D _inputTexture;
    private Color32[] _inputPixelData;

    struct ImageSource 
    {
        public RotationAngle rotation;
        public bool isHorizontallyFlipped;

        public ImageSource(RotationAngle _angle,bool _isHorizontallyFlipped) 
        {
            rotation = _angle;
            isHorizontallyFlipped = _isHorizontallyFlipped;
        }
    }

    private IEnumerator Start()
    {
        if (WebCamTexture.devices.Length == 0)
        {
            throw new System.Exception("Web Camera devices are not found");
        }
        var webCamDevice = WebCamTexture.devices[0];
        _webCamTexture = new WebCamTexture(webCamDevice.name, _width, _height, _fps);
        _webCamTexture.Play();

        yield return new WaitUntil(() => _webCamTexture.width > 16);

        _screen.rectTransform.sizeDelta = new Vector2(_width, _height);

        _inputTexture = new Texture2D(_width, _height, TextureFormat.RGBA32, false);
        _inputPixelData = new Color32[_width * _height];

        _screen.texture = _webCamTexture;

        //_resourceManager = new LocalResourceManager();
        //yield return _resourceManager.PrepareAssetAsync("face_detection_short_range.bytes");
        //yield return _resourceManager.PrepareAssetAsync("face_landmark_with_attention.bytes");

        var resourceManager = new StreamingAssetsResourceManager();
        yield return resourceManager.PrepareAssetAsync("face_detection_short_range.bytes");
        yield return resourceManager.PrepareAssetAsync("face_landmark_with_attention.bytes");

        var stopwatch = new Stopwatch();

        var sidePacket = new SidePacket();

        sidePacket.Emplace("num_faces", new IntPacket(1));
        sidePacket.Emplace("with_attention", new BoolPacket(true));
        ImageSource imageSource = new ImageSource( RotationAngle.Rotation0,true);
        this.SetImageTransformationOptions(sidePacket, imageSource);

        _graph = new CalculatorGraph(_configAsset.text);

        var multiFaceLandmarksStream = new OutputStream<NormalizedLandmarkListVectorPacket, List<NormalizedLandmarkList>>(_graph, "multi_face_landmarks");//output_video  //multi_face_landmarks
        multiFaceLandmarksStream.StartPolling().AssertOk();
        _graph.StartRun(sidePacket).AssertOk();
        stopwatch.Start();

        // var screenRect = _screen.GetComponent<RectTransform>().rect;

        while (true)
        {
            _inputTexture.SetPixels32(_webCamTexture.GetPixels32(_inputPixelData));
            var imageFrame = new ImageFrame(ImageFormat.Format.SRGBA, _width, _height, _width * 4, _inputTexture.GetRawTextureData<byte>());
            var currentTimestamp = stopwatch.ElapsedTicks / (System.TimeSpan.TicksPerMillisecond / 1000);
            _graph.AddPacketToInputStream("input_video", new ImageFramePacket(imageFrame, new Timestamp(currentTimestamp))).AssertOk();

            yield return new WaitForEndOfFrame();

            if (multiFaceLandmarksStream.TryGetNext(out var multiFaceLandmarks))
            {
                _multiFaceLandmarksAnnotationController.DrawNow(multiFaceLandmarks);
            }
            else
            {
                _multiFaceLandmarksAnnotationController.DrawNow(null);
            }
        }
    }

    private void SetImageTransformationOptions(SidePacket sidePacket, ImageSource imageSource, bool expectedToBeMirrored = false)
    {
        // NOTE: The origin is left-bottom corner in Unity, and right-top corner in MediaPipe.
        RotationAngle rotation = imageSource.rotation.Reverse();
        var inputRotation = rotation;
        var isInverted = CoordinateSystem.ImageCoordinate.IsInverted(rotation);
        var shouldBeMirrored = imageSource.isHorizontallyFlipped ^ expectedToBeMirrored;
        var inputHorizontallyFlipped = isInverted ^ shouldBeMirrored;
        var inputVerticallyFlipped = !isInverted;

        if ((inputHorizontallyFlipped && inputVerticallyFlipped) || rotation == RotationAngle.Rotation180)
        {
            inputRotation = inputRotation.Add(RotationAngle.Rotation180);
            inputHorizontallyFlipped = !inputHorizontallyFlipped;
            inputVerticallyFlipped = !inputVerticallyFlipped;
        }

        Debug.Log($"input_rotation = {inputRotation}, input_horizontally_flipped = {inputHorizontallyFlipped}, input_vertically_flipped = {inputVerticallyFlipped}");

        sidePacket.Emplace("input_rotation", new IntPacket((int)inputRotation));
        sidePacket.Emplace("input_horizontally_flipped", new BoolPacket(inputHorizontallyFlipped));
        sidePacket.Emplace("input_vertically_flipped", new BoolPacket(inputVerticallyFlipped));
    }

    private void OnDestroy()
    {
        if (_webCamTexture != null)
        {
            _webCamTexture.Stop();
        }

        if (_graph != null)
        {
            try
            {
                _graph.CloseInputStream("input_video").AssertOk();
                _graph.WaitUntilDone().AssertOk();
            }
            finally
            {

                _graph.Dispose();
            }
        }
    }

}

Here is the configuration file: face_mesh_cpu.txt. Running on PC there is no problem, but when the project is built for Android, the error above occurs.

homuler commented 2 years ago

On Android, only GPU calculators are exported, so you cannot use FaceLandmarkFrontCpu (use FaceLandmarkFrontGpu instead).
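For reference, a minimal sketch of what that switch could look like in the script above, assuming a GPU variant of the graph config is assigned to a hypothetical _gpuConfigAsset field and assuming the plugin's GpuResources.Create() / CalculatorGraph.SetGpuResources() wrappers behave as in its tutorial code; none of these names are confirmed by this thread:

    // Sketch only: build the graph per platform. The _gpuConfigAsset field is
    // hypothetical (a face mesh graph built around FaceLandmarkFrontGpu);
    // GpuResources.Create() and SetGpuResources() are assumed to behave as in
    // the plugin's tutorial code.
    [SerializeField] private TextAsset _gpuConfigAsset;

    private CalculatorGraph BuildGraph()
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        // Only GPU calculators ship in the Android build, so a CPU graph
        // (FaceLandmarkFrontCpu) aborts in the CalculatorGraph constructor.
        var graph = new CalculatorGraph(_gpuConfigAsset.text);
        var gpuResources = GpuResources.Create().Value();
        graph.SetGpuResources(gpuResources).AssertOk();
        return graph;
#else
        // The CPU graph keeps working in the Editor and in PC builds.
        return new CalculatorGraph(_configAsset.text);
#endif
    }

In Start(), `_graph = new CalculatorGraph(_configAsset.text);` would then become `_graph = BuildGraph();`, with the GPU graph's model assets prepared the same way as the CPU ones.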

P.S. Please use the support form and share the complete log, not excerpts.
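If it helps, the complete device-side Unity log (including this exception) can usually be captured with `adb logcat -s Unity`, assuming adb is available on the development machine.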

NeerajMehta commented 2 years ago

@wizVR-zhangjun Hi did it work for you?

wizVR-zhangjun commented 2 years ago

> @wizVR-zhangjun Hi did it work for you?

@NeerajMehta This one seems to run synchronously, so it causes a brief pause (stutter).

NeerajMehta commented 2 years ago

For me, this is not working

wizVR-zhangjun commented 2 years ago

> For me, this is not working

@NeerajMehta https://github.com/google/mediapipe/issues/3646