Closed IceFloe closed 1 year ago
Hi @IceFloe, could you please provide the steps to reproduce the above error? Thanks!
Sure: Use folder mediapipe/examples/ios/facemeshgpu
load(
"@build_bazel_rules_apple//apple:ios.bzl",
"ios_application",
"ios_framework"
)
load(
"//mediapipe/examples/ios:bundle_id.bzl",
"BUNDLE_ID_PREFIX",
"example_provisioning",
)
licenses(["notice"])
# Minimum iOS version applied to the ios_framework target below.
MIN_IOS_VERSION = "10.0"
# Device families the generated framework supports.
IOS_FAMILIES = [
"iphone",
"ipad",
]
# Public headers exposed by the ObjcppLib library and bundled into the framework.
FRAMEWORK_HEADERS = [
"ObjcppLib.h",
"FaceLandmark.h",
]
# ObjC++ library wrapping the MediaPipe face-mesh graph.
#
# FIX: the original rule used a select() that set the extra deps to [] for
# "//mediapipe:ios_i386" and "//mediapipe:ios_x86_64". That left the
# calculator graph and "//mediapipe/framework/formats:landmark_cc_proto" out
# of simulator builds, producing "landmark.pb.h not available for x86_64"
# errors when building with --ios_multi_cpus=arm64,x86_64. The deps are now
# linked unconditionally so simulator builds work too.
objc_library(
    name = "ObjcppLib",
    srcs = [
        "ObjcppLib.mm",
        "FaceLandmark.mm",
    ],
    hdrs = FRAMEWORK_HEADERS,
    copts = ["-std=c++17"],
    data = [
        "//mediapipe/graphs/face_mesh:face_mesh_mobile_gpu.binarypb",
        "//mediapipe/modules/face_detection:face_detection_front.tflite",
        "//mediapipe/modules/face_landmark:face_landmark.tflite",
    ],
    deps = [
        "//mediapipe/objc:mediapipe_framework_ios",
        "//mediapipe/objc:mediapipe_input_sources_ios",
        "//mediapipe/objc:mediapipe_layer_renderer",
        "//mediapipe/graphs/face_mesh:mobile_calculators",
        "//mediapipe/framework/formats:landmark_cc_proto",
    ],
)
# Dynamic iOS framework bundling ObjcppLib plus the OpenCV framework.
# NOTE(review): BUNDLE_ID_PREFIX is loaded at the top of this file but unused;
# the bundle_id below is hard-coded — confirm which one is intended.
ios_framework(
name = "FacialSearch",
hdrs = FRAMEWORK_HEADERS,
bundle_id = "com.mindigital.test.FaceMeshFramework",
bundle_name = "FacialSearch",
families = IOS_FAMILIES,
infoplists = ["Info.plist"],
minimum_os_version = MIN_IOS_VERSION,
visibility = ["//visibility:public"],
deps = [
":ObjcppLib",
"@ios_opencv//:OpencvFramework",
],
)
#import <CoreVideo/CoreVideo.h>
#import <Foundation/Foundation.h>
#import "FaceLandmark.h"
@protocol FacialSearchDelegate
@interface FacialSearch : NSObject
ObjcppLib.mm
#import "ObjcppLib.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#include "mediapipe/framework/formats/landmark.pb.h"
// Name of the .binarypb graph config resource loaded in +loadGraphFromResource:.
static NSString* const kGraphName = @"face_mesh_mobile_gpu";
// Graph input stream that receives camera pixel buffers (see -processVideoFrame:).
static const char* kInputStream = "input_video";
// Input side packet that limits how many faces the graph processes.
static const char* kNumFacesInputSidePacket = "num_faces";
// Output stream delivering the per-face landmark lists (raw packets).
static const char* kLandmarksOutputStream = "multi_face_landmarks";
// Max number of faces to detect/process.
static const int kNumFaces = 1;
@interface FacialSearch () <MPPGraphDelegate>

// The MediaPipe graph in use: created in -init, started in -startGraph,
// torn down in -dealloc.
@property(nonatomic) MPPGraph* mediapipeGraph;

@end

@implementation FacialSearch {}

#pragma mark - Cleanup methods

- (void)dealloc {
  self.mediapipeGraph.delegate = nil;
  [self.mediapipeGraph cancel];
  // Ignore errors since we're cleaning up.
  [self.mediapipeGraph closeAllInputStreamsWithError:nil];
  [self.mediapipeGraph waitUntilDoneWithError:nil];
}

#pragma mark - MediaPipe graph methods

/// Loads the serialized graph config named `resource` (a .binarypb in this
/// class's bundle) and wraps it in an MPPGraph configured with the num_faces
/// side packet and the landmarks output stream.
/// @param resource Base name of the .binarypb resource; may be nil or empty.
/// @return A configured (not yet started) MPPGraph, or nil on any failure.
+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
  NSError* configLoadError = nil;
  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
  if (!resource || resource.length == 0) {
    return nil;
  }
  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
  if (!graphURL) {
    // FIX: -dataWithContentsOfURL: raises on a nil URL, so fail gracefully
    // when the resource is missing from the bundle instead of crashing.
    NSLog(@"MediaPipe graph config %@.binarypb not found in bundle", resource);
    return nil;
  }
  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
  if (!data) {
    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
    return nil;
  }
  // Parse the config into a mediapipe::CalculatorGraphConfig proto object.
  mediapipe::CalculatorGraphConfig config;
  config.ParseFromArray(data.bytes, (int)data.length);
  // Create the MediaPipe graph from the parsed config.
  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
  // Limit the number of faces the graph detects via an input side packet.
  [newGraph setSidePacket:(mediapipe::MakePacket<int>(kNumFaces))
                    named:kNumFacesInputSidePacket];
  [newGraph addFrameOutputStream:kLandmarksOutputStream
                outputPacketType:MPPPacketTypeRaw];
  return newGraph;
}

- (instancetype)init {
  self = [super init];
  if (self) {
    self.mediapipeGraph = [[self class] loadGraphFromResource:kGraphName];
    self.mediapipeGraph.delegate = self;
    NSLog(@"inited graph %@", kGraphName);
  }
  return self;
}

/// Starts the underlying MediaPipe graph.
- (void)startGraph {
  NSError* error;
  if (![self.mediapipeGraph startWithError:&error]) {
    NSLog(@"Failed to start graph: %@", error);
    // FIX: return here — the original fell through and logged
    // "Started graph" even when startWithError: failed.
    return;
  }
  NSLog(@"Started graph %@", kGraphName);
}

#pragma mark - MPPGraphDelegate methods

// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
              fromStream:(const std::string&)streamName {
  NSLog(@"recv pixelBuffer from %@", @(streamName.c_str()));
}

// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
// Converts each NormalizedLandmarkList in the packet to FaceLandmark objects
// and forwards them to the delegate.
- (void)mediapipeGraph:(MPPGraph*)graph
       didOutputPacket:(const ::mediapipe::Packet&)packet
            fromStream:(const std::string&)streamName {
  if (streamName == kLandmarksOutputStream) {
    if (packet.IsEmpty()) {
      NSLog(@"[TS:%lld] No face landmarks", packet.Timestamp().Value());
      return;
    }
    const auto& multi_face_landmarks =
        packet.Get<std::vector<::mediapipe::NormalizedLandmarkList>>();
    // FIX: cast size() to unsigned long to match %lu on all architectures.
    NSLog(@"[TS:%lld] Number of face instances with landmarks: %lu", packet.Timestamp().Value(),
          (unsigned long)multi_face_landmarks.size());
    for (int face_index = 0; face_index < multi_face_landmarks.size(); ++face_index) {
      const auto& landmarks = multi_face_landmarks[face_index];
      NSLog(@"\tNumber of landmarks for face[%d]: %d", face_index, landmarks.landmark_size());
      NSMutableArray<FaceLandmark *> *result = [NSMutableArray array];
      for (int i = 0; i < landmarks.landmark_size(); ++i) {
        NSLog(@"\t\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
              landmarks.landmark(i).y(), landmarks.landmark(i).z());
        auto* c = [[FaceLandmark alloc] init];
        if (c) {
          c.x = landmarks.landmark(i).x();
          c.y = landmarks.landmark(i).y();
          c.z = landmarks.landmark(i).z();
          [result addObject:c];
        }
      }
      // NOTE(review): this fires on a MediaPipe worker thread — confirm the
      // delegate is prepared for that (dispatch to main if it touches UI),
      // and consider a respondsToSelector: guard if didReceive: is optional.
      [_delegate didReceive: result];
    }
  }
}

#pragma mark - MPPInputSourceDelegate methods

/// Feeds one camera frame into the graph's input_video stream.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer {
  NSLog(@"sending imageBuffer to %s", kInputStream);
  BOOL sent = [self.mediapipeGraph sendPixelBuffer:imageBuffer
                                        intoStream:kInputStream
                                        packetType:MPPPacketTypePixelBuffer];
  if (sent) {
    NSLog(@"imageBuffer sent!");
  }
}

@end
FaceLandmark.h
#import <Foundation/Foundation.h>
/// Value object holding one face landmark's coordinates, copied from a
/// mediapipe NormalizedLandmark in ObjcppLib.mm's packet callback.
@interface FaceLandmark : NSObject
// Landmark x coordinate.
@property (nonatomic) double x;
// Landmark y coordinate.
@property (nonatomic) double y;
// Landmark z coordinate.
@property (nonatomic) double z;
@end
FaceLandmark.mm
#import <Foundation/Foundation.h>
#import "FaceLandmark.h"
// Simple value object for one face landmark; see FaceLandmark.h.
@implementation FaceLandmark
// FIX: the explicit `@synthesize x = _x;` (and y, z) directives were removed —
// the compiler auto-synthesizes each property with a matching underscore-
// prefixed ivar, so they were redundant.
@end
@sgowroji How many days should I wait for an answer from a googler approximately?
Hi @IceFloe, due to limited bandwidth we could not help you immediately, but the assigned engineer will investigate your issue very soon. Thanks!
I have the same problem
In fact, I cannot run the example apps on the Simulator.
So when I create an ios_framework
, the simulator also doesn't support it.
it looks like google team does not have time to review this issue :(
Hi man, I encountered this a few days ago while compiling mediapipe as a framework (here).
The cause of the problem is that you're using the Bazel script that came with the example to compile the ios_framework
. As you can see in the objc_library
section of your build script, it is setting the dependencies to []
for ios_i386
and ios_x86_64
as targets.
so just change to
# Suggested fix quoted in the thread: identical to the original objc_library
# rule except that the empty ios_i386/ios_x86_64 select() branches are
# commented out, so the graph and proto deps are also linked for simulator
# architectures.
objc_library(
name = "ObjcppLib",
srcs = [
"ObjcppLib.mm",
"FaceLandmark.mm",
],
hdrs = FRAMEWORK_HEADERS,
copts = ["-std=c++17"],
data = [
"//mediapipe/graphs/face_mesh:face_mesh_mobile_gpu.binarypb",
"//mediapipe/modules/face_detection:face_detection_front.tflite",
"//mediapipe/modules/face_landmark:face_landmark.tflite",
],
deps = [
"//mediapipe/objc:mediapipe_framework_ios",
"//mediapipe/objc:mediapipe_input_sources_ios",
"//mediapipe/objc:mediapipe_layer_renderer",
] + select({
# "//mediapipe:ios_i386": [], # Comment these out
# "//mediapipe:ios_x86_64": [], # Comment these out
"//conditions:default": [
"//mediapipe/graphs/face_mesh:mobile_calculators",
"//mediapipe/framework/formats:landmark_cc_proto",
],
}),
)
and run the build again — it should work in that case, I think.
Hi @IceFloe, are you still looking for a resolution to this issue? All MediaPipe solutions for Android and iOS have been tested on physical devices; in the emulator/simulator this behaviour is expected. Please build/run the solutions on a real device and respond if you still face the above error. Thank you!
I have not tried for a while; we can close the issue — it is not relevant any more.
Is there any reason why mediapipe/framework/formats/landmark.pb.h is not available for the x86_64 arch? I am trying to build an iOS framework based on the facemeshgpu example using this Bazel command: bazel build --copt=-fembed-bitcode --apple_bitcode=embedded --config=ios_arm64 --ios_multi_cpus=arm64,x86_64 --sandbox_debug mediapipe/examples/ios/facemeshgpu:FaceLandmarks
All works for iOS device, but for a simulator I am getting this error: