Lauszus / FaceRecognitionApp

Face Recognition Android App
GNU General Public License v2.0

ndk is null #24

Closed VinodKumarGaur closed 5 years ago

VinodKumarGaur commented 6 years ago

Build command failed. Error while executing process C:\Users\Webmobril\AppData\Local\Android\Sdk\ndk-bundle\ndk-build.cmd with arguments {NDK_PROJECT_PATH=null APP_BUILD_SCRIPT=F:\04.10.18\FaceRecognitionApp\app\src\main\cpp\Android.mk NDK_APPLICATION_MK=F:\04.10.18\FaceRecognitionApp\app\src\main\cpp\Application.mk APP_ABI=arm64-v8a NDK_ALL_ABIS=arm64-v8a NDK_DEBUG=1 APP_PLATFORM=android-16 NDK_OUT=F:/04.10.18/FaceRecognitionApp/app/build/intermediates/ndkBuild/debug/obj NDK_LIBS_OUT=F:\04.10.18\FaceRecognitionApp\app\build\intermediates\ndkBuild\debug\lib NDK_DEBUG=1 NDEBUG=null F:/04.10.18/FaceRecognitionApp/app/build/intermediates/ndkBuild/debug/obj/local/arm64-v8a/libface-lib.so}

[arm64-v8a] Compile++ : face-lib <= face-lib.cpp
In file included from F:/04.10.18/FaceRecognitionApp/app/src/main/cpp/face-lib.cpp:21:
In file included from F:/04.10.18/FaceRecognitionApp/app/src/main/cpp\include/opencv2/core.hpp:52:
F:/04.10.18/FaceRecognitionApp/app/src/main/cpp\include/opencv2/core/cvdef.h:91:10: fatal error: 'opencv2/core/hal/interface.h' file not found
#include "opencv2/core/hal/interface.h"
         ^~~~~~~~~~
1 error generated.
make: *** [F:/04.10.18/FaceRecognitionApp/app/build/intermediates/ndkBuild/debug/obj/local/arm64-v8a/objs-debug/face-lib/F_/04.10.18/FaceRecognitionApp/app/src/main/cpp/face-lib.o] Error 1
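The fatal error at the end of this log is an include-path problem rather than an NDK one: cvdef.h pulls in opencv2/core/hal/interface.h by its canonical path, and the log shows the OpenCV headers being picked up from a local copy under app/src/main/cpp/include (hence the <include/opencv2/...> form in face-lib.cpp below), from which the hal/ header apparently cannot be resolved. A minimal standalone sketch, assuming instead that the OpenCV Android SDK's own include root (sdk/native/jni/include) is on the compiler's search path; the file name and compile command are hypothetical, not this project's build setup:

    // check_opencv_include.cpp -- hypothetical smoke test, not part of the app.
    // Build sketch: clang++ -I<OpenCV-android-sdk>/sdk/native/jni/include check_opencv_include.cpp
    // With the SDK include root on the search path, the canonical include below works
    // and cvdef.h can resolve "opencv2/core/hal/interface.h" on its own.
    #include <opencv2/core.hpp> // rather than <include/opencv2/core.hpp>

    int main() {
        return CV_8U; // CV_8U comes from opencv2/core/hal/interface.h; evaluates to 0
    }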

Lauszus commented 6 years ago

Please follow the build instructions: https://github.com/Lauszus/FaceRecognitionApp#build-instructions.

VinodKumarGaur commented 6 years ago

Hi Kristian Sloth Lauszus,

I followed the instructions, but I still get the same error. Please help me out, as I am new to Android development.

Here is my app-level build.gradle:

apply plugin: 'com.android.application'

android {
    compileSdkVersion 27
    defaultConfig {
        applicationId 'com.lauszus.facerecognitionapp'
        minSdkVersion 15
        targetSdkVersion 27
        versionCode 5
        versionName '1.2.2'
        vectorDrawables.useSupportLibrary = true
        externalNativeBuild {
            cmake {
                cppFlags "-frtti -fexceptions"
                abiFilters 'armeabi-v7a'
            }
        }
        ndk {
            stl 'c++_static'
            //cppFlags '-std=c++11 -fexceptions -frtti'
            //cFlags ' -fexceptions -frtti'
            //cppFlags "-frtti -fexceptions"
            abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
        }
        setProperty('archivesBaseName', rootProject.name + '-' + defaultConfig.versionName)
    }
    splits {
        abi {
            enable true
            reset()
            universalApk true
            include 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
        }
    }
    project.ext.abiCodes = ['armeabi-v7a': 1, 'arm64-v8a': 2, 'x86': 3, 'x86_64': 4]
    applicationVariants.all { variant ->
        variant.outputs.each { output ->
            def baseAbiVersionCode = project.ext.abiCodes.get(output.getFilter(com.android.build.OutputFile.ABI))
            // Because abiCodes.get() returns null for ABIs that are not mapped by ext.abiCodes,
            // the following code does not override the version code for universal APKs.
            if (baseAbiVersionCode != null) {
                output.versionCodeOverride = baseAbiVersionCode * 1000000 + variant.versionCode
            }
        }
        if (variant.getBuildType().isMinifyEnabled()) {
            variant.assemble.doLast {
                variant.mappingFile.renameTo(variant.mappingFile.parent + "/$archivesBaseName-$variant.baseName-mapping.txt")
            }
        }
    }
    signingConfigs {
        release
    }
    buildTypes {
        /*debug {
            jniDebuggable true
            externalNativeBuild {
                ndkBuild {
                    arguments 'NDK_DEBUG=1', 'NDEBUG=null'
                }
            }
        }*/
        release {
            shrinkResources true
            minifyEnabled true
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
            signingConfig signingConfigs.release
        }
    }

    /*externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }*/

    sourceSets { main { jni.srcDirs = ['src/main/jniLibs'] } }

    externalNativeBuild {
        ndkBuild {
            path 'src/main/cpp/Android.mk'
        }
    }
    lintOptions {
        abortOnError false
    }
}

dependencies {
    implementation 'com.android.support:appcompat-v7:27.1.1'
    implementation 'com.android.support:design:27.1.1'
    implementation project(':opencv')
}

// These values are all set in my global gradle.properties file
/*if (project.hasProperty('storeFile') && project.hasProperty('storePassword') && project.hasProperty('keyAlias') && project.hasProperty('keyPassword')) {
    android.signingConfigs.release.storeFile = file(storeFile)
    android.signingConfigs.release.storePassword = storePassword
    android.signingConfigs.release.keyAlias = keyAlias
    android.signingConfigs.release.keyPassword = keyPassword
} else {
    android.buildTypes.release.signingConfig = null
}*/

=======================================================

Android.mk

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

ifndef OPENCV_ANDROID_SDK
  $(D:\opencv-3.4.1-android-sdk\OpenCV-android-sdk\sdk)
endif

OPENCV_INSTALL_MODULES := on
include $(D:\opencv-3.4.1-android-sdk\OpenCV-android-sdk/sdk/native/jni/OpenCV.mk)

ifndef EIGEN3_DIR
  $(D:\eigen3)
endif

LOCAL_MODULE := face-lib
LOCAL_SRC_FILES += $(LOCAL_PATH)/face-lib.cpp $(LOCAL_PATH)/FaceRecognitionLib/Facebase.cpp
LOCAL_SRC_FILES += $(LOCAL_PATH)/FaceRecognitionLib/Eigenfaces.cpp $(LOCAL_PATH)/FaceRecognitionLib/Fisherfaces.cpp
LOCAL_SRC_FILES += $(LOCAL_PATH)/FaceRecognitionLib/PCA.cpp $(LOCAL_PATH)/FaceRecognitionLib/LDA.cpp
LOCAL_C_INCLUDES += $(EIGEN3_DIR)
LOCAL_C_INCLUDES += $(LOCAL_PATH)/FaceRecognitionLib/RedSVD/include
LOCAL_LDLIBS += -llog -ldl
LOCAL_CPPFLAGS += -std=c++11 -frtti -fexceptions

include $(BUILD_SHARED_LIBRARY)

Application.mk

APP_PLATFORM := android-27
APP_ABI := armeabi-v7a arm64-v8a x86 x86_64

#APP_STL := gnustl_static
APP_STL := c++_static
APP_CPPFLAGS := -std=c++11 -frtti -fexceptions

ifeq ($(NDK_DEBUG),0)
  APP_CPPFLAGS += -DNDEBUG
endif
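The NDK_DEBUG/NDEBUG handling above is what feeds the #ifdef NDEBUG guards in face-lib.cpp further down: when -DNDEBUG is passed, the log macros compile to no-ops, otherwise they forward to the NDK's __android_log_print(). A minimal standalone sketch of that pattern (the tag string and function name here are made up for illustration, not the app's):

    #include <android/log.h>

    #ifdef NDEBUG
    #define LOGI(...) ((void)0) // release build: logging compiles away entirely
    #else
    #define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "Sketch", __VA_ARGS__))
    #endif

    void logTrainingStep(int component) {
        LOGI("training component %d", component); // becomes a no-op when NDEBUG is defined
    }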

==============================================================

face-lib.cpp

/* (license header trimmed) */

#include <jni.h> // header name lost in the paste; jni.h is needed for the JNIEXPORT/JNICALL declarations below

#include <Eigen/Dense> // http://eigen.tuxfamily.org
#include <include/opencv2/core.hpp>
#include <include/opencv2/core/eigen.hpp>
#include <FaceRecognitionLib/Eigenfaces.h>
#include <FaceRecognitionLib/Fisherfaces.h>
#include <FaceRecognitionLib/Tools.h>
#include <android/log.h>
#include <include/opencv2/core/hal/interface.h>

#ifdef NDEBUG
#define LOGD(...) ((void)0)
#define LOGI(...) ((void)0)
#define LOGE(...) ((void)0)
#define LOG_ASSERT(condition, ...) ((void)0)
#else
#define LOG_TAG "FaceRecognitionAppActivity/Native"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
#define LOG_ASSERT(condition, ...) if (!(condition)) __android_log_assert(#condition, LOG_TAG, __VA_ARGS__)
#endif

Eigenfaces eigenfaces;
Fisherfaces fisherfaces;

using namespace std;
using namespace cv;
using namespace Eigen;

#ifdef __cplusplus
extern "C" {
#endif

JNIEXPORT void JNICALL Java_com_lauszus_facerecognitionapp_NativeMethods_TrainFaces(JNIEnv *, jobject, jlong addrImages, jlong addrClasses) {
Mat *pImages = (Mat *) addrImages; // Each image is represented as a column vector
Mat *pClasses = (Mat *) addrClasses; // Classes are represented as a vector

LOG_ASSERT(pImages->type() == CV_8U, "Images must be an 8-bit matrix");
MatrixXi images;
cv2eigen(*pImages, images); // Copy from OpenCV Mat to Eigen matrix

//Facebase *pFacebase;
if (pClasses == NULL) { // If classes are NULL, then train Eigenfaces
    eigenfaces.train(images); // Train Eigenfaces
    LOGI("Eigenfacess numComponents: %d", eigenfaces.numComponents);
    //pFacebase = &eigenfaces;
} else {
    LOG_ASSERT(pClasses->type() == CV_32S && pClasses->cols == 1, "Classes must be a signed 32-bit vector");
    VectorXi classes;
    cv2eigen(*pClasses, classes); // Copy from OpenCV Mat to Eigen vector
    LOG_ASSERT(classes.minCoeff() == 1, "Minimum value in the list must be 1");
    fisherfaces.train(images, classes); // Train Fisherfaces
    LOGI("Fisherfaces numComponents: %d", fisherfaces.numComponents);
    //pFacebase = &fisherfaces;
}

/*
if (!pFacebase->V.hasNaN()) {
    for (int i = 0; i < pFacebase->numComponents; i++) { // Loop through eigenvectors
        for (int j = 0; j < 10; j++) // Print first 10 values
            LOGI("Eigenvector[%d]: %f", i, pFacebase->V(j, i));
    }
} else
    LOGE("Eigenvectors are not valid!");
*/

}

JNIEXPORT void JNICALL Java_com_lauszus_facerecognitionapp_NativeMethods_MeasureDist(JNIEnv *env, jobject, jlong addrImage, jfloatArray minDist, jintArray minDistIndex, jfloatArray faceDist, jboolean useEigenfaces) {
Facebase *pFacebase;
if (useEigenfaces) {
    LOGI("Using Eigenfaces");
    pFacebase = &eigenfaces;
} else {
    LOGI("Using Fisherfaces");
    pFacebase = &fisherfaces;
}

if (pFacebase->V.any()) { // Make sure that the eigenvector has been calculated
    Mat *pImage = (Mat *) addrImage; // Image is represented as a column vector

    VectorXi image;
    cv2eigen(*pImage, image); // Convert from OpenCV Mat to Eigen matrix

    LOGI("Project faces");
    VectorXf W = pFacebase->project(image); // Project onto subspace
    LOGI("Reconstructing faces");
    VectorXf face = pFacebase->reconstructFace(W);

    LOGI("Calculate normalized Euclidean distance");
    jfloat dist_face = pFacebase->euclideanDistFace(image, face);
    LOGI("Face distance: %f", dist_face);
    env->SetFloatArrayRegion(faceDist, 0, 1, &dist_face);

    VectorXf dist = pFacebase->euclideanDist(W);

    vector<size_t> sortedIdx = sortIndexes(dist);
    for (auto idx : sortedIdx)
        LOGI("dist[%zu]: %f", idx, dist(idx));

    int minIndex = (int) sortedIdx[0];
    env->SetFloatArrayRegion(minDist, 0, 1, &dist(minIndex));
    env->SetIntArrayRegion(minDistIndex, 0, 1, &minIndex);
}

}

/*
static inline void convertYUVToRGBA(uint8_t y, uint8_t u, uint8_t v, uint8_t *buf) __attribute__((always_inline));

static void convertYUVImageToRGBA(const Mat *pYUV, Mat *pRGB) {
const Size size = pRGB->size();
const int width = size.width;
const int height = size.height;
const int n_pixels = width * height;
const int rgba_channels = pRGB->channels();

// See: https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/yuv/YUVImage.cpp,
// https://wiki.videolan.org/YUV/#Semi-planar
// and https://en.wikipedia.org/wiki/YUV#Y.E2.80.B2UV420p_.28and_Y.E2.80.B2V12_or_YV12.29_to_RGB888_conversion
for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
        // U and V channels are interleaved as VUVUVU.
        // So V data starts at the end of Y channel and
        // U data starts right after V's start.
        const int yIndex = x + y * width;
        const uint8_t y_val = pYUV->data[yIndex];

        // Since U and V channels are interleaved, offsets need to be doubled.
        const int uvOffset = (y >> 1) * (width >> 1) + (x >> 1);
        const int vIndex = n_pixels + 2*uvOffset;
        const int uIndex = vIndex + 1;
        const uint8_t v_val = pYUV->data[vIndex];
        const uint8_t u_val = pYUV->data[uIndex];
        convertYUVToRGBA(y_val, u_val, v_val, &pRGB->data[rgba_channels * yIndex]);
    }
}

}

JNIEXPORT void JNICALL Java_com_lauszus_facerecognitionapp_NativeMethods_YUV2RGB(JNIEnv *, jobject, jlong addrYuv, jlong addrRgba) {
Mat *pYUV = (Mat *) addrYuv; // YUV 4:2:0 planar image, with 8-bit Y samples, followed by an interleaved V/U plane with 8-bit 2x2 sub-sampled chroma samples
Mat *pRGB = (Mat *) addrRgba; // RGBA image

convertYUVImageToRGBA(pYUV, pRGB);

}

JNIEXPORT void JNICALL Java_com_lauszus_facerecognitionapp_NativeMethods_HistEQ(JNIEnv *, jobject, jlong addrYuv, jlong addrRgba) {
Mat *pYUV = (Mat *) addrYuv; // YUV 4:2:0 planar image, with 8-bit Y samples, followed by an interleaved V/U plane with 8-bit 2x2 sub-sampled chroma samples
Mat *pRGB = (Mat *) addrRgba; // RGBA image

const Size size = pRGB->size();
const int width = size.width;
const int height = size.height;

// Step 1: Compute histogram of Y channel
uint32_t histogram[256];
memset(histogram, 0, sizeof(histogram));

for (int y = 0; y < height; y++) {
    for (int x = width / 2; x < width; x++) { // Only look at half the image
        const int yIndex = x + y * width;
        const uint8_t y_val = pYUV->data[yIndex];
        histogram[y_val]++;
    }
}

// Step 2: Compute CDF of histogram
uint32_t histogram_cdf[256];

histogram_cdf[0] = histogram[0];
for (int i = 1; i < 256; i++)
    histogram_cdf[i] = histogram_cdf[i - 1] + histogram[i]; // Calculate CDF

for (int i = 0; i < 256; i++)
    histogram_cdf[i] /= histogram_cdf[255] / 255; // Normalize CDF

// Step 3: Apply histogram equalization
for (int y = 0; y < height; y++) {
    for (int x = width / 2; x < width; x++) { // Image is flipped after this function
        const int yIndex = x + y * width;
        const uint8_t y_val = pYUV->data[yIndex];
        pYUV->data[yIndex] = (uint8_t) histogram_cdf[y_val];
    }
}

// Step 4: Convert from YUV to RGB
convertYUVImageToRGBA(pYUV, pRGB);

}

#define clamp(amt,low,high) ((amt)<(low)?(low):((amt)>(high)?(high):(amt)))

static inline void convertYUVToRGBA(uint8_t y, uint8_t u, uint8_t v, uint8_t *buf) {
const int rTmp = y + (int)(1.370705f * (v - 128));
const int gTmp = y - (int)(0.698001f * (v - 128)) - (int)(0.337633f * (u - 128));
const int bTmp = y + (int)(1.732446f * (u - 128));

buf[0] = (uint8_t) clamp(rTmp, 0, 255);
buf[1] = (uint8_t) clamp(gTmp, 0, 255);
buf[2] = (uint8_t) clamp(bTmp, 0, 255);
buf[3] = 255; // Alpha channel

} */

#ifdef __cplusplus
}
#endif
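As a closing aside, the convertYUVToRGBA() helper in the listing above is a plain fixed-coefficient YUV-to-RGB conversion. A self-contained sketch of the same arithmetic (coefficients copied from the listing; the function and file names here are made up), with one worked value: a neutral pixel with Y = 128 and U = V = 128 comes out as mid-grey (128, 128, 128) with full alpha:

    // yuv_to_rgba_sketch.cpp -- standalone illustration, not part of the app.
    #include <cassert>
    #include <cstdint>

    static void yuvToRgba(uint8_t y, uint8_t u, uint8_t v, uint8_t buf[4]) {
        // Same per-pixel formula as convertYUVToRGBA() above.
        const int r = y + (int)(1.370705f * (v - 128));
        const int g = y - (int)(0.698001f * (v - 128)) - (int)(0.337633f * (u - 128));
        const int b = y + (int)(1.732446f * (u - 128));
        buf[0] = (uint8_t)(r < 0 ? 0 : (r > 255 ? 255 : r)); // clamp to 0..255, like clamp() above
        buf[1] = (uint8_t)(g < 0 ? 0 : (g > 255 ? 255 : g));
        buf[2] = (uint8_t)(b < 0 ? 0 : (b > 255 ? 255 : b));
        buf[3] = 255; // opaque alpha
    }

    int main() {
        uint8_t rgba[4];
        yuvToRgba(128, 128, 128, rgba); // no chroma offset -> grey
        assert(rgba[0] == 128 && rgba[1] == 128 && rgba[2] == 128 && rgba[3] == 255);
        return 0;
    }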