Open raja259 opened 4 years ago
I have tested it on Samsung phone
facenet_hiroki.tflite is the FaceNet model. It works but it takes 3 or 4 seconds/inference. mobile_face_net.tflite is a trained model taken from this implementation of MobileFaceNet
@estebanuri I'm facing the same issue as @raja259. The app doesn't retain the data. Any ideas on how I can make it retain the data even after the app is closed? Btw, what's the use of the search button?
Hello. As @jenipharachel said, please let us know how can we retain data?
@estebanuri I'm facing the same issue as @raja259 The app doesnt retain the data. Any ideas on how I can make them retain them even upon closure of app.
I am planning to extend the application by adding some way to persist the data. The first thing that comes to mind is to use Room to save in sqlite.
Btw, whats the use of the search button? The purpose of the search button was to display the detailed information of the detected person, but that part is not yet developed.
Hi,
We are looking for the update .
Regards, GaneshC
I managed to save the training. Both local and firebase. Using the firebase is better, as installing on another device does not need to train again
@HugoFerreiraStack please share the code.
We can also use it
Yes ,Please share the code . As this is the only major implementation is pending in the package
I managed to save the training. Both local and firebase. Using the firebase is better, as installing on another device does not need to train again
Please share the code <3
public class TFLiteObjectDetectionAPIModel implements SimilarityClassifier ... .... ....
/**
 * Registers a named face, or — when {@code name} is null (used as a sentinel) —
 * restores the previously saved registry from Firebase Storage instead.
 *
 * After a normal registration the whole map is serialized to app-private
 * storage and mirrored to Firebase so another install can reuse the training.
 *
 * @param name label for the face, or null to trigger a restore from Firebase
 * @param rec  recognition result carrying the face embedding (ignored on restore)
 * @param det  activity providing Context-scoped file access (openFileOutput/openFileInput)
 */
public void register(String name, Recognition rec, DetectorActivity det) {
    if (name == null) {
        // Restore path: download the serialized HashMap<String, Recognition>
        // from Firebase Storage into a temp file, then deserialize it.
        try {
            FirebaseStorage storage = FirebaseStorage.getInstance();
            StorageReference storageRef = storage.getReference();
            StorageReference test2 = storageRef.child("test2.txt");
            final File localFile = File.createTempFile("test2", "txt");
            test2.getFile(localFile).addOnSuccessListener(new OnSuccessListener<FileDownloadTask.TaskSnapshot>() {
                @Override
                public void onSuccess(FileDownloadTask.TaskSnapshot taskSnapshot) {
                    // NOTE(review): Java-native deserialization of remote data is
                    // unsafe if the storage bucket is writable by untrusted parties.
                    // try-with-resources closes the stream (the original leaked it).
                    try (ObjectInputStream i = new ObjectInputStream(new FileInputStream(localFile))) {
                        @SuppressWarnings("unchecked")
                        HashMap<String, Recognition> registeredl =
                                (HashMap<String, Recognition>) i.readObject();
                        // Bug fix: size() was previously logged outside the null
                        // check and could NPE when the file held no map.
                        if (registeredl != null) {
                            registered = registeredl;
                            Log.d("Clique AQUI", "Clique Aqui Adicionado " + registeredl.size());
                        }
                    } catch (Exception e) {
                        Log.d("Clique AQUI", "Clique Aqui erro " + e.toString());
                    }
                }
            }).addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception exception) {
                    Log.d("Clique AQUI", "Clique Aqui erro " + exception.toString());
                }
            });
            Log.d("Clique AQUI", "Clique AQUI file leu: ");
        } catch (Exception e) {
            Log.d("Clique AQUI", "Clique AQUI file created: " + e.toString());
        }
        return;
    }

    // Normal path: remember the new face, persist the whole map locally,
    // then upload the file to Firebase.
    registered.put(name, rec);
    try {
        // try-with-resources guarantees the stream is closed even when
        // writeObject throws (the original only closed it on success).
        try (ObjectOutputStream o = new ObjectOutputStream(det.openFileOutput("test2.txt", 0))) {
            o.writeObject(registered);
        }
        Log.d("Clique AQUI", "Clique AQUI file created: ");
        FirebaseStorage storage = FirebaseStorage.getInstance();
        StorageReference storageRef = storage.getReference();
        StorageReference test2 = storageRef.child("test2.txt");
        test2.delete();
        test2.putStream(det.openFileInput("test2.txt"));
        Log.d("Clique AQUI", "Clique Aqui Enviou ");
    } catch (Exception e) {
        // Bug fix: the original also logged bytes.length on a byte[] that was
        // always null, throwing an NPE from inside this catch block and
        // masking the real error.
        Log.d("Clique AQUI", "Clique AQUI file created: " + e.toString());
    }
}
public class TFLiteObjectDetectionAPIModel implements SimilarityClassifier ... .... ....
public void register(String name, Recognition rec, DetectorActivity det) {
if(name==null){ try { //AQUI VC LE DO FIREBASE //HashMap<String, Recognition> registeredl= (HashMap<String, Recognition>) o.readObject(); //if(registeredl!=null)registered=registeredl; FirebaseStorage storage = FirebaseStorage.getInstance(); StorageReference storageRef = storage.getReference(); StorageReference test2 = storageRef.child("test2.txt"); File localFile = File.createTempFile("test2", "txt"); test2.getFile(localFile).addOnSuccessListener(new OnSuccessListener<FileDownloadTask.TaskSnapshot>() { @Override public void onSuccess(FileDownloadTask.TaskSnapshot taskSnapshot) { try { ObjectInputStream i = new ObjectInputStream(new FileInputStream(localFile)); HashMap<String, Recognition> registeredl= (HashMap<String, Recognition>) i.readObject(); if(registeredl!=null)registered=registeredl; Log.d("Clique AQUI","Clique Aqui Adicionado "+registeredl.size() ); } catch (Exception e) { Log.d("Clique AQUI","Clique Aqui erro "+e.toString() ); } } }).addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception exception) { Log.d("Clique AQUI","Clique Aqui erro "+exception.toString() ); } }); Log.d("Clique AQUI","Clique AQUI file leu: " ); Log.d("Clique AQUI","Clique AQUI leu " ); }catch (Exception e){ Log.d("Clique AQUI","Clique AQUI file created: " + e.toString()); } return ; } registered.put(name, rec); //Recuperar dados do Edit Text //pessoas.child(nome).setValue(pessoa); byte[] bytes=null; try { // file.createNewFile(); //write the bytes in file { ObjectOutputStream o = new ObjectOutputStream(det.openFileOutput("test2.txt",0)); /* 24 */ o.writeObject(registered); /* 25 */ o.close(); /* 26 */ Log.d("Clique AQUI","Clique AQUI file created: " ); /// file.delete(); Log.d("Clique AQUI","Clique AQUI delete " ); } //AQUI VC ENVIA PRO FIREBASE FirebaseStorage storage = FirebaseStorage.getInstance(); StorageReference storageRef = storage.getReference(); StorageReference test2 = storageRef.child("test2.txt"); test2.delete(); 
test2.putStream(det.openFileInput("test2.txt")); Log.d("Clique AQUI","Clique Aqui Enviou "); }catch (Exception e){ Log.d("Clique AQUI","Clique AQUI file created: " + e.toString()); Log.d("Clique AQUI","Clique AQUI file created: " + bytes.length); } }
I'm very grateful for the code, but I have this error "2020-08-02 14:02:04.710 29977-30115/org.tensorflow.lite.examples.facerecognition D/Clique AQUI: Clique AQUI file created: java.lang.NullPointerException: Attempt to invoke virtual method 'java.io.FileOutputStream android.content.Context.openFileOutput(java.lang.String, int)' on a null object reference". Can i get full local file save solution, sir <3
public class TFLiteObjectDetectionAPIModel implements SimilarityClassifier ... .... ....
public void register(String name, Recognition rec, DetectorActivity det) {
if(name==null){ try { //AQUI VC LE DO FIREBASE //HashMap<String, Recognition> registeredl= (HashMap<String, Recognition>) o.readObject(); //if(registeredl!=null)registered=registeredl; FirebaseStorage storage = FirebaseStorage.getInstance(); StorageReference storageRef = storage.getReference(); StorageReference test2 = storageRef.child("test2.txt"); File localFile = File.createTempFile("test2", "txt"); test2.getFile(localFile).addOnSuccessListener(new OnSuccessListener<FileDownloadTask.TaskSnapshot>() { @Override public void onSuccess(FileDownloadTask.TaskSnapshot taskSnapshot) { try { ObjectInputStream i = new ObjectInputStream(new FileInputStream(localFile)); HashMap<String, Recognition> registeredl= (HashMap<String, Recognition>) i.readObject(); if(registeredl!=null)registered=registeredl; Log.d("Clique AQUI","Clique Aqui Adicionado "+registeredl.size() ); } catch (Exception e) { Log.d("Clique AQUI","Clique Aqui erro "+e.toString() ); } } }).addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception exception) { Log.d("Clique AQUI","Clique Aqui erro "+exception.toString() ); } }); Log.d("Clique AQUI","Clique AQUI file leu: " ); Log.d("Clique AQUI","Clique AQUI leu " ); }catch (Exception e){ Log.d("Clique AQUI","Clique AQUI file created: " + e.toString()); } return ; } registered.put(name, rec); //Recuperar dados do Edit Text //pessoas.child(nome).setValue(pessoa); byte[] bytes=null; try { // file.createNewFile(); //write the bytes in file { ObjectOutputStream o = new ObjectOutputStream(det.openFileOutput("test2.txt",0)); /* 24 */ o.writeObject(registered); /* 25 */ o.close(); /* 26 */ Log.d("Clique AQUI","Clique AQUI file created: " ); /// file.delete(); Log.d("Clique AQUI","Clique AQUI delete " ); } //AQUI VC ENVIA PRO FIREBASE FirebaseStorage storage = FirebaseStorage.getInstance(); StorageReference storageRef = storage.getReference(); StorageReference test2 = storageRef.child("test2.txt"); test2.delete(); 
test2.putStream(det.openFileInput("test2.txt")); Log.d("Clique AQUI","Clique Aqui Enviou "); }catch (Exception e){ Log.d("Clique AQUI","Clique AQUI file created: " + e.toString()); Log.d("Clique AQUI","Clique AQUI file created: " + bytes.length); } }
I try to save hashmap local but it throws this error "filejava.io.NotSerializableException: org.tensorflow.lite.examples.detection.tflite.SimilarityClassifier$Recognition" here is the code: " try { ObjectOutputStream o = new ObjectOutputStream(new FileOutputStream(filePath)); o.writeObject(registered); o.close(); Log.d(TAG, "register: DATA SAVE COMPETED"); } catch (Exception e) { Log.d(TAG, "register: Lỗi ghi file" + e.toString()); } " please, save me <3
https://github.com/romeumellobr/painelbrasApp here you can see all the code. Face detection, mask detection and thermal detection with flir camera
https://github.com/romeumellobr/painelbrasApp here you can see all the code. Face detection, mask detection and thermal detection with flir camera
thank you, sir. I will try it out <3
@estebanuri I'm facing the same issue as @raja259 The app doesnt retain the data. Any ideas on how I can make them retain them even upon closure of app.
I am planning to extend the application by adding some way to persist the data. The first thing that comes to mind is to use Room to save in sqlite.
Btw, whats the use of the search button? The purpose of the search button was to display the detailed information of the detected person, but that part is not yet developed.
Hi Sir, anything new on retraining data? Regards, volam1999
https://github.com/romeumellobr/painelbrasApp here you can see all the code. Face detection, mask detection and thermal detection with flir camera
@HugoFerreiraStack Hey, If you managed to successfully implement a storage method on top of Esteban's version would you mind showing the source code to that?
I have tried to implement the code you linked previously but keep running into errors when trying to pass the "DetectorActivity det" parameter.
The repo you linked seems to just be a Face Mask detector. I see it claims to have facial recognition capabilities but this is all that works for me as there is no UI to change it even thought I see the ability to enter a users name in the .xml files
@OwenSlevin @volam1999 Hello guys, I've done project by this library, It saves cropped image and save to a database, so simple, but because of my company restrictions i cannot put codes here. Send me a message at Telegram to teach you how to implement this. t.me/LegionStudioDeveloper
@OwenSlevin @volam1999 Hello guys, I've done project by this library, It saves cropped image and save to a database, so simple, but because of my company restrictions i cannot put codes here. Send me a message at Telegram to teach you how to implement this. t.me/LegionStudioDeveloper
I try to contact you in Telegram but I can't. can you teach me through my email? thank you, sir
@OwenSlevin @volam1999 Hello guys, I've done project by this library, It saves cropped image and save to a database, so simple, but because of my company restrictions i cannot put codes here. Send me a message at Telegram to teach you how to implement this. t.me/LegionStudioDeveloper
Hello, I also need your help, thank you so much, I already use Telegram to send messages to you.
I tried saving the SimilarityClassifier.Recognition object to data base using room. But while retrieving data, the extras, which is an object, is being retrieved ad an array.
It is showing error in the line
for (Map.Entry<String, Recognition> entry : registered.entrySet()) {
final String name = entry.getKey();
**final float[] knownEmb = ((float[][]) entry.getValue().getExtra())[0];**
float distance = 0;
for (int i = 0; i < emb.length; i++) {
float diff = emb[i] - knownEmb[i];
distance += diff * diff;
}
distance = (float) Math.sqrt(distance);
if (ret == null || distance < ret.second) {
ret = new Pair<>(name, distance);
}
}
Did anybody complete database saving?
@estebanuri I'm facing the same issue as @raja259 The app doesnt retain the data. Any ideas on how I can make them retain them even upon closure of app.
I am planning to extend the application by adding some way to persist the data. The first thing that comes to mind is to use Room to save in sqlite.
Btw, whats the use of the search button? The purpose of the search button was to display the detailed information of the detected person, but that part is not yet developed.
@estebanuri, Sir, Did you complete saving person name and image in sqlite or firebase database? Please update it if you have done it. It's a request.
@estebanuri I'm facing the same issue as @raja259 The app doesnt retain the data. Any ideas on how I can make them retain them even upon closure of app.
I am planning to extend the application by adding some way to persist the data. The first thing that comes to mind is to use Room to save in sqlite.
Btw, whats the use of the search button? The purpose of the search button was to display the detailed information of the detected person, but that part is not yet developed.
@estebanuri, Sir, Did you complete saving person name and image in sqlite or firebase database? Please update it if you have done it. It's a request.
Did you find any solution. or anyone you find a way to store the images and name in sqlite database or firebase?
@estebanuri I can do that and commit if you'd allow me or it's fine if you've other ideas.
@estebanuri I can do that and commit if you'd allow me or it's fine if you've other ideas.
@PemmasaniRajesh , Please share the source code that you have done. Have you done it with sqlite or other databases?
@PemmasaniRajesh I'd really appreciate it too if you could post a way to save names and faces. I tried to implement my own solutions but had no success.
@estebanuri any updates on saving the data ?
@estebanuri I can do that and commit if you'd allow me or it's fine if you've other ideas.
@PemmasaniRajesh Please paste the code here and also commit request if possible
if you save the picture how you gonna call it back then ?
if you save the picture how you gonna call it back then ?
Well, the main issue here is storing the images, not retrieving them. Once an image is stored on the sdcard/storage, you can get the image file's path with the getExternalStorageDirectory() method and make a bitmap from the path to display with an ImageView or something. Again, retrieving already-saved images is not the problem here, only saving is. So if you've got any idea on that, please share it.
if you save the picture how you gonna call it back then ?
Well the main issue here is storing the images not retrieving them. Once an image is stored on the sdcard/storage you can get the image file's path with getexternalstoragedirectory method and make a bitmap from the path to display with imageview or something. Again, retrieving already saved images is not the problem here, only saving is. So if you've got any idea on that, please share it.
Because now I managed to save the captured face that appears when you click the plus symbol, but I don't know how to load it back when you reopen the app.
@deepsingh132 Do you got any idee how to call it back in the app so you don't need to take pic again ?
if you save the picture how you gonna call it back then ?
Well the main issue here is storing the images not retrieving them. Once an image is stored on the sdcard/storage you can get the image file's path with getexternalstoragedirectory method and make a bitmap from the path to display with imageview or something. Again, retrieving already saved images is not the problem here, only saving is. So if you've got any idea on that, please share it.
Because now i managed to save the captured face the appear when you click the plus symbol but I don’t know how to call it back when you reopen the app
Could you please elaborate on how you managed to save the images ? Share the code here if you can. For retrieving the images, there are various methods, like the one i mentioned above.
if you save the picture how you gonna call it back then ?
Well the main issue here is storing the images not retrieving them. Once an image is stored on the sdcard/storage you can get the image file's path with getexternalstoragedirectory method and make a bitmap from the path to display with imageview or something. Again, retrieving already saved images is not the problem here, only saving is. So if you've got any idea on that, please share it.
Because now i managed to save the captured face the appear when you click the plus symbol but I don’t know how to call it back when you reopen the app
Could you please elaborate on how you managed to save the images ? Share the code here if you can. For retrieving the images, there are various methods, like the one i mentioned above.
I saved the image but the issue still there that if i reopen the app doesn't recognized the face, My question is that how can i let the app call the pics back (like making it as a database for the app)
@Sami095 i don't think you understand the issue. Actually you didn't save the images at all in the first place. They are only stored temporarily and not on the phone storage. That's the problem we are facing. Now when you say you saved the images , you didn't saved them at all.
@Sami095 i don't think you understand the issue. Actually you didn't save the images at all in the first place. They are only stored temporarily and not on the phone storage. That's the problem we are facing. Now when you say you saved the images , you didn't saved them at all.
The code that I have written can save the captured face to the device storage, and that's it — it can't do anything else. The pictures will stay in storage if you close the app; they won't get deleted.
@Sami095 If you can upload the saving part maybe i, someone or @estebanuri can help you with further progress but for that, you'd have to show the code that saves the face images.
@estebanuri @Sami095 I found this face recognition android app which sort of saves the people's faces with their names and reuses them to recognize whenever the app is launched again. The code base is also quite similar to this one but it lacks in accuracy that this app has and also it's very big in terms of storage as it's currently greater than 90mb. You need to train at least 4 faces to get the face recognized with good accuracy otherwise it will just show up random matches with bad confidence. I think that's because of the models used in this app and the other app are somewhat different. But we could use it's storage function and implement that in this app.
@estebanuri @Sami095 I found this face recognition android app which sort of saves the people's faces with their names and reuses them to recognize whenever the app is launched again. The code base is also quite similar to this one but it lacks in accuracy that this app has and also it's very big in terms of storage as it's currently greater than 90mb. You need to train at least 4 faces to get the face recognized with good accuracy otherwise it will just show up random matches with bad confidence. I think that's because of the models used in this app and the other app are somewhat different. But we could use it's storage function and implement that in this app.
where is the part where it save the image to storage in that app? and did you find any solution ?
any updates on this? has anyone found a solution to save images locally?
https://github.com/romeumellobr/painelbrasApp here you can see all the code. Face detection, mask detection and thermal detection with flir camera
thank you, sir. I will try it out <3
Page not found
@estebanuri I'm facing the same issue as @raja259 The app doesnt retain the data. Any ideas on how I can make them retain them even upon closure of app.
I am planning to extend the application by adding some way to persist the data. The first thing that comes to mind is to use Room to save in sqlite.
Btw, whats the use of the search button? The purpose of the search button was to display the detailed information of the detected person, but that part is not yet developed.
Please do it very soon
@estebanuri any updates on saving the data ?
Please save data to Sqlite database
I managed to save the training. Both local and firebase. Using the firebase is better, as installing on another device does not need to train again
please share the code to add images from local storage
I managed to save the training. Both local and firebase. Using the firebase is better, as installing on another device does not need to train again
Can you share the code, i.e., for adding an image from local storage? I have been trying this for a few days with no result. I feel I missed something somewhere. Please help with adding images from local storage.
any updates on this? has anyone found a solution to save images locally?
did you got anything to add image from local storage.
Thanks for the Good App.
But when you add any picture it save it temporarily and it worked fine with back camera as long as you are on the app but once you exit out and come back you need to add these pictures again it does not saved permanently on local phone .
So request you to save it locally so next time if I want to scan any face I dont need to add it again and again.
Same front camera problem is there but back camera is working perfectly.
Just need to know which model you used in the app
Facenet.tflite or facenet_hiroki.tflite
Or you have used both the modes in the app
Because it is increases the app size
Thanks again for the good work
I am working on it day & night, but it is hard to understand how to store in ROOM, sqlite please help me
For Storing The Data Locally on Internal storage, just replace this with code in TFLiteObjectDetectionAPIModel class.
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/
;
import android.annotation.SuppressLint; import android.content.Context; import android.content.ContextWrapper; import android.content.res.AssetFileDescriptor; import android.content.res.AssetManager; import android.graphics.Bitmap; import android.graphics.RectF; import android.net.Uri; import android.os.Build; import android.os.Environment; import android.os.Trace; import android.util.Log; import android.util.Pair; import android.widget.Toast;
import com.google.gson.Gson;
import androidx.annotation.NonNull; import androidx.core.content.FileProvider;
import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.firebase.storage.FileDownloadTask; import com.google.firebase.storage.FirebaseStorage; import com.google.firebase.storage.StorageReference; import com.google.firebase.storage.UploadTask; import com.google.gson.reflect.TypeToken;
import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.lang.reflect.Type; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Vector;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.examples.detection.DetectorActivity; import org.tensorflow.lite.examples.detection.env.Logger;
/**
private static final String TAG = "Class TFLiteObjectDetectionAPIModel :";
private static final Logger LOGGER = new Logger();
//private static final int OUTPUT_SIZE = 512; private static final int OUTPUT_SIZE = 192;
// Only return this many results. private static final int NUM_DETECTIONS = 1;
// Float model private static final float IMAGE_MEAN = 128.0f; private static final float IMAGE_STD = 128.0f;
// Number of threads in the java app
private static final int NUM_THREADS = 4;
private boolean isModelQuantized;
// Config values.
private int inputSize;
// Pre-allocated buffers.
private Vector
private float[][] embeedings;
private ByteBuffer imgData;
private Interpreter tfLite;
// Face Mask Detector Output private float[][] output;
private HashMap<String, Recognition> registered = new HashMap<>();
/**
 * Registers a named face and immediately persists the full registry to
 * app-internal storage as a Gson JSON string, so it survives app restarts.
 *
 * @param name label to associate with the face
 * @param rec  recognition result carrying the face embedding
 * @param det  activity parameter kept for interface compatibility (unused here)
 */
@SuppressLint("LongLogTag")
public void register(String name, Recognition rec, DetectorActivity det) {
    registered.put(name, rec);
    if (!modelFolder.exists()) {
        modelFolder.mkdir();
    }
    File imageFile = new File(modelFolder.getAbsolutePath(), imageFileName);
    // try-with-resources closes both streams even when writeObject throws
    // (the original only closed them on the success path).
    try (FileOutputStream fos = new FileOutputStream(imageFile);
         ObjectOutputStream out = new ObjectOutputStream(fos)) {
        // The map is serialized as a JSON string because Recognition itself
        // is not Serializable, so the raw map cannot be written directly.
        Gson gson = new Gson();
        Type type = new TypeToken<HashMap<String, Recognition>>() {}.getType();
        String toStoreObject = gson.toJson(registered, type);
        out.writeObject(toStoreObject);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
// FirebaseStorage storage = FirebaseStorage.getInstance();
// StorageReference storageRef = storage.getReference();
// StorageReference test2 = storageRef.child(FileName);
// //test2.delete();
// //test2.putStream();
//
// Uri file = Uri.fromFile(new File(det.getFilesDir(), FileName));
//
//
// test2.putFile(file)
// .addOnSuccessListener(new OnSuccessListener
@SuppressLint("LongLogTag")
// Private no-arg constructor: instances must be obtained through the static
// create() factory, which wires up the interpreter, labels, and buffers.
private TFLiteObjectDetectionAPIModel() throws IOException {
Log.d("Class TFLiteObjectDetectionAPIModel :", "TFLiteObjectDetectionAPIModel Called");
}
/**
 * Memory-maps the TFLite model file stored in the app's assets.
 *
 * @param assets        asset manager used to open the model file
 * @param modelFilename name of the model file inside the assets folder
 * @return a read-only mapped buffer over the model bytes
 * @throws IOException if the asset cannot be opened or mapped
 */
@SuppressLint("LongLogTag")
private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
    throws IOException {
  Log.d("Class TFLiteObjectDetectionAPIModel :", "loadModelFile Called");
  AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
  // try-with-resources: the original leaked the FileInputStream/FileChannel.
  // The MappedByteBuffer remains valid after the channel is closed.
  try (FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
       FileChannel fileChannel = inputStream.getChannel()) {
    long startOffset = fileDescriptor.getStartOffset();
    long declaredLength = fileDescriptor.getDeclaredLength();
    return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
  }
}
/**
 * Initializes a native TensorFlow Lite session for classifying images and
 * restores any previously persisted face registrations from local storage.
 *
 * @param assetManager  The asset manager to be used to load assets.
 * @param modelFilename The filepath of the model GraphDef protocol buffer.
 * @param labelFilename The filepath of label file for classes.
 * @param inputSize     The size of image input (square, pixels per side).
 * @param isQuantized   Boolean representing model is quantized or not.
 * @param det           Activity used for Context-dependent file access.
 * @throws IOException if the label file or model cannot be read
 */
@SuppressLint({"LongLogTag", "ShowToast"})
public static SimilarityClassifier create(
    final AssetManager assetManager,
    final String modelFilename,
    final String labelFilename,
    final int inputSize,
    final boolean isQuantized, DetectorActivity det)
    throws IOException {
  final TFLiteObjectDetectionAPIModel tfLiteObjectDetectionAPIModel = new TFLiteObjectDetectionAPIModel();
  // Fixed: in the pasted version this folder check was fused into a // comment
  // on one line, so it never ran.
  if (!modelFolder.exists()) {
    modelFolder.mkdir();
  }
  // Restore previously registered faces from the JSON snapshot on disk.
  File imageGetFile = new File(modelFolder.getAbsolutePath(), imageFileName);
  // try-with-resources closes the stream even on a failed read
  // (the original only closed it on the success path).
  try (ObjectInputStream objectInputStream =
           new ObjectInputStream(new FileInputStream(imageGetFile))) {
    Gson gson = new Gson();
    Type type = new TypeToken<HashMap<String, Recognition>>() {}.getType();
    HashMap<String, Recognition> registered1 =
        gson.fromJson((String) objectInputStream.readObject(), type);
    if (registered1 != null) {
      tfLiteObjectDetectionAPIModel.registered = registered1;
      // Bug fix: the original logged registered1.toString() outside the null
      // check and NPE'd when no snapshot existed yet.
      Log.d(TAG, registered1.toString());
    }
    Log.d(TAG, tfLiteObjectDetectionAPIModel.registered.toString());
  } catch (Exception e) {
    // First run (no snapshot yet) or a corrupt file: start with an empty registry.
    e.printStackTrace();
  }
  // Load the label list shipped in assets.
  String actualFilename = labelFilename.split("file:///android_asset/")[1];
  InputStream labelsInput = assetManager.open(actualFilename);
  try (BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput))) {
    String line;
    while ((line = br.readLine()) != null) {
      LOGGER.w(line);
      tfLiteObjectDetectionAPIModel.labels.add(line);
    }
  }
  tfLiteObjectDetectionAPIModel.inputSize = inputSize;
  try {
    tfLiteObjectDetectionAPIModel.tfLite = new Interpreter(loadModelFile(assetManager, modelFilename));
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  tfLiteObjectDetectionAPIModel.isModelQuantized = isQuantized;
  // Pre-allocate buffers: 1 byte per channel for a quantized model,
  // 4 bytes (float) otherwise.
  int numBytesPerChannel;
  if (isQuantized) {
    numBytesPerChannel = 1; // Quantized
  } else {
    numBytesPerChannel = 4; // Floating point
  }
  tfLiteObjectDetectionAPIModel.imgData = ByteBuffer.allocateDirect(tfLiteObjectDetectionAPIModel.inputSize * tfLiteObjectDetectionAPIModel.inputSize * 3 * numBytesPerChannel);
  tfLiteObjectDetectionAPIModel.imgData.order(ByteOrder.nativeOrder());
  tfLiteObjectDetectionAPIModel.intValues = new int[tfLiteObjectDetectionAPIModel.inputSize * tfLiteObjectDetectionAPIModel.inputSize];
  tfLiteObjectDetectionAPIModel.tfLite.setNumThreads(NUM_THREADS);
  tfLiteObjectDetectionAPIModel.outputLocations = new float[1][NUM_DETECTIONS][4];
  tfLiteObjectDetectionAPIModel.outputClasses = new float[1][NUM_DETECTIONS];
  tfLiteObjectDetectionAPIModel.outputScores = new float[1][NUM_DETECTIONS];
  tfLiteObjectDetectionAPIModel.numDetections = new float[1];
  return tfLiteObjectDetectionAPIModel;
}
/**
 * Runs the embedding model on a face crop and returns the closest registered
 * match, or the placeholder label "?" when nothing is registered yet.
 *
 * @param bitmap     face crop; its pixels are read into a buffer of
 *                   inputSize x inputSize values (assumes the bitmap matches
 *                   the model input size — TODO confirm against callers)
 * @param storeExtra when true, the raw embedding is attached to the result so
 *                   the caller can register this face later
 * @return a single-element list with the best match and its distance
 */
@SuppressLint("LongLogTag")
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap, boolean storeExtra) {
  Log.d("Class TFLiteObjectDetectionAPIModel :", "recognizeImage Called");
  // Log this method so that it can be analyzed with systrace.
  Trace.beginSection("recognizeImage");
  Trace.beginSection("preprocessBitmap");
  // Preprocess the image data from 0-255 int to normalized float based
  // on the provided parameters.
  bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
  imgData.rewind();
  for (int i = 0; i < inputSize; ++i) {
    for (int j = 0; j < inputSize; ++j) {
      int pixelValue = intValues[i * inputSize + j];
      if (isModelQuantized) {
        // Quantized model: raw 0-255 channel bytes (R, G, B).
        imgData.put((byte) ((pixelValue >> 16) & 0xFF));
        imgData.put((byte) ((pixelValue >> 8) & 0xFF));
        imgData.put((byte) (pixelValue & 0xFF));
      } else {
        // Float model: normalize each channel with IMAGE_MEAN/IMAGE_STD.
        imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
        imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
        imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
      }
    }
  }
  Trace.endSection(); // preprocessBitmap
  // Copy the input data into TensorFlow.
  Trace.beginSection("feed");
  Object[] inputArray = {imgData};
  Trace.endSection();
  // Fixed: in the pasted version this declaration was fused into a comment,
  // leaving outputMap undeclared. Output index 0 receives the embedding.
  Map<Integer, Object> outputMap = new HashMap<>();
  embeedings = new float[1][OUTPUT_SIZE];
  outputMap.put(0, embeedings);
  // Run the inference call.
  Trace.beginSection("run");
  tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
  Trace.endSection();
  // Compare the fresh embedding against every registered face and keep the
  // nearest one (Euclidean distance computed in findNearest).
  float distance = Float.MAX_VALUE;
  String id = "0";
  String label = "?";
  if (registered.size() > 0) {
    final Pair<String, Float> nearest = findNearest(embeedings[0]);
    if (nearest != null) {
      final String name = nearest.first;
      label = name;
      distance = nearest.second;
      LOGGER.i("nearest: " + name + " - distance: " + distance);
    }
  }
  final int numDetectionsOutput = 1;
  final ArrayList<Recognition> recognitions = new ArrayList<>(numDetectionsOutput);
  Recognition rec = new Recognition(
      id,
      label,
      distance,
      new RectF());
  recognitions.add(rec);
  if (storeExtra) {
    rec.setExtra(embeedings);
  }
  Trace.endSection();
  return recognitions;
}
/**
 * Returns the (name, L2 distance) pair of the registered embedding nearest to
 * {@code emb}, or {@code null} when no registered entry could be compared.
 * Entries whose stored extra cannot be parsed are skipped.
 */
private Pair<String, Float> findNearest(float[] emb) {
Gson gson = new Gson();
Pair<String, Float> ret = null;
for (Map.Entry<String, Recognition> entry : registered.entrySet()) {
String name = entry.getKey();
float distance = 0;
try {
// The stored extra is a JSON-serialized float[1][N] embedding
// (the original in-memory cast is kept below for reference).
//final float[] knownEmb = ((float[][]) entry.getValue().getExtra())[0];
float[][] knownEmb2d = gson.fromJson(entry.getValue().getExtra().toString(), float[][].class);
final float[] knownEmb = knownEmb2d[0];
for (int i = 0; i < emb.length; i++) {
float diff = emb[i] - knownEmb[i];
distance += diff * diff;
}
} catch (Exception e) {
// FIX: e.getMessage() can be null, which makes Log.e throw; log toString().
Log.e("findNearest", e.toString());
// FIX: on failure the original fell through with a partial (often 0)
// distance, so a broken entry could wrongly win as nearest. Skip it.
continue;
}
distance = (float) Math.sqrt(distance);
if (ret == null || distance < ret.second) {
ret = new Pair<>(name, distance);
}
}
return ret;
}
@SuppressLint("LongLogTag")
@Override
public void enableStatLogging(final boolean logStats) {
// No-op beyond tracing: statistics logging is not implemented for this model.
final String tag = "Class TFLiteObjectDetectionAPIModel :";
Log.d(tag, "enableStatLogging Called");
}
@SuppressLint("LongLogTag")
@Override
public String getStatString() {
// No stats are collected; always reports an empty string.
final String tag = "Class TFLiteObjectDetectionAPIModel :";
Log.d(tag, "getStatString Called");
return "";
}
@SuppressLint("LongLogTag")
@Override
public void close() {
// Intentionally does not release the interpreter; only traces the call.
final String tag = "Class TFLiteObjectDetectionAPIModel :";
Log.d(tag, "close Called");
}
@SuppressLint("LongLogTag")
public void setNumThreads(int num_threads) {
// Forwards the thread count to the interpreter when one is loaded.
final String tag = "Class TFLiteObjectDetectionAPIModel :";
Log.d(tag, "setNumThreads Called");
if (tfLite == null) {
return;
}
tfLite.setNumThreads(num_threads);
}
@SuppressLint("LongLogTag")
@Override
public void setUseNNAPI(boolean isChecked) {
// Toggles NNAPI acceleration on the interpreter when one is loaded.
final String tag = "Class TFLiteObjectDetectionAPIModel :";
Log.d(tag, "setUseNNAPI Called");
if (tfLite == null) {
return;
}
tfLite.setUseNNAPI(isChecked);
}
}
public class TFLiteObjectDetectionAPIModel implements SimilarityClassifier ... .... ....
public void register(String name, Recognition rec, DetectorActivity det) {
if(name==null){ try { //AQUI VC LE DO FIREBASE //HashMap<String, Recognition> registeredl= (HashMap<String, Recognition>) o.readObject(); //if(registeredl!=null)registered=registeredl; FirebaseStorage storage = FirebaseStorage.getInstance(); StorageReference storageRef = storage.getReference(); StorageReference test2 = storageRef.child("test2.txt"); File localFile = File.createTempFile("test2", "txt"); test2.getFile(localFile).addOnSuccessListener(new OnSuccessListener<FileDownloadTask.TaskSnapshot>() { @Override public void onSuccess(FileDownloadTask.TaskSnapshot taskSnapshot) { try { ObjectInputStream i = new ObjectInputStream(new FileInputStream(localFile)); HashMap<String, Recognition> registeredl= (HashMap<String, Recognition>) i.readObject(); if(registeredl!=null)registered=registeredl; Log.d("Clique AQUI","Clique Aqui Adicionado "+registeredl.size() ); } catch (Exception e) { Log.d("Clique AQUI","Clique Aqui erro "+e.toString() ); } } }).addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception exception) { Log.d("Clique AQUI","Clique Aqui erro "+exception.toString() ); } }); Log.d("Clique AQUI","Clique AQUI file leu: " ); Log.d("Clique AQUI","Clique AQUI leu " ); }catch (Exception e){ Log.d("Clique AQUI","Clique AQUI file created: " + e.toString()); } return ; } registered.put(name, rec); //Recuperar dados do Edit Text //pessoas.child(nome).setValue(pessoa); byte[] bytes=null; try { // file.createNewFile(); //write the bytes in file { ObjectOutputStream o = new ObjectOutputStream(det.openFileOutput("test2.txt",0)); /* 24 */ o.writeObject(registered); /* 25 */ o.close(); /* 26 */ Log.d("Clique AQUI","Clique AQUI file created: " ); /// file.delete(); Log.d("Clique AQUI","Clique AQUI delete " ); } //AQUI VC ENVIA PRO FIREBASE FirebaseStorage storage = FirebaseStorage.getInstance(); StorageReference storageRef = storage.getReference(); StorageReference test2 = storageRef.child("test2.txt"); test2.delete(); 
test2.putStream(det.openFileInput("test2.txt")); Log.d("Clique AQUI","Clique Aqui Enviou "); }catch (Exception e){ Log.d("Clique AQUI","Clique AQUI file created: " + e.toString()); Log.d("Clique AQUI","Clique AQUI file created: " + bytes.length); } }
Hello, could you please send me the full source code? My Telegram: t.me/developershahzod. Thank you.
Thanks for the Good App.
However, when you add a picture it is only saved temporarily. It works fine with the back camera as long as you stay in the app, but once you exit and come back you need to add the pictures again — they are not saved permanently on the phone.
So I request that you save the data locally, so that the next time I want to scan a face I don't need to add it again and again.
The front camera has the same problem, but the back camera works perfectly.
Just need to know which model you used in the app
Facenet.tflite or facenet_hiroki.tflite
Or have you used both models in the app?
Because that increases the app size.
Thanks again for the good work