Closed — maheralzoubi97 closed this issue 1 month ago.
In landscape on Android or iOS? And what happens exactly
Does it show wrong results, or wrong boxes placements, shows nothing?
Please provide all the information about what happens so I can help you better
In landscape on Android,
it shows wrong results and wrong bounding-box placements.
This happens in the case of real-time object detection. @zezo357
Can you provide an example? It would help me greatly — both the correct and the incorrect results.
Most probably I will work on it after I finish the OpenCV work, since I am thinking of moving the camera-decoding logic to OpenCV to make it much faster.
Here is an image showing the problem:
Thank you
fixed by https://github.com/abdelaziz-mahdy/pytorch_lite/pull/81 and released
import 'package:camera/camera.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:pytorch_lite/pigeon.dart'; import 'package:pytorch_lite/pytorch_lite.dart';
import 'camera_view_singleton.dart';
/// [CameraView] sends each camera frame for inference.
class CameraView extends StatefulWidget {
  /// Callback to pass object-detection results after inference to the parent.
  final Function(List<ResultObjectDetection?> recognitions) resultsCallback;

  /// Callback to pass classification results after inference.
  final Function(String classification) resultsCallbackClassification;

  /// Constructor.
  const CameraView(this.resultsCallback, this.resultsCallbackClassification,
      {Key? key})
      : super(key: key);

  // FIX: return State<CameraView> instead of the library-private
  // _CameraViewState so the public API does not expose a private type
  // (lint: library_private_types_in_public_api).
  @override
  State<CameraView> createState() => _CameraViewState();
}
// FIX: extend State<CameraView> (not raw State) so `widget` is typed and
// `widget.resultsCallback` can be called.
class _CameraViewState extends State<CameraView> with WidgetsBindingObserver {
  /// List of available cameras on the device.
  // FIX: typed List<CameraDescription> instead of a raw List.
  late List<CameraDescription> cameras;

  /// Controller for the active camera; null until [initializeCamera] runs.
  CameraController? cameraController;

  /// True while an inference pass is in flight; frames arriving in the
  /// meantime are dropped rather than queued.
  bool predicting = false;

  ModelObjectDetection? _objectModel;
  ClassificationModel? _imageModel;

  bool classification = false;

  @override
  void initState() {
    super.initState();
    initStateAsync();
  }

  /// Loads the YOLOv8 object-detection model from assets.
  ///
  /// Errors are logged rather than rethrown so the UI can still build
  /// even when the model is unavailable on this platform.
  Future loadModel() async {
    String pathObjectDetectionModel = "assets/models/best2.torchscript";
    try {
      _objectModel = await PytorchLite.loadObjectDetectionModel(
        pathObjectDetectionModel,
        24,
        640,
        640,
        labelPath: "assets/labels/labels_objectDetection_Coco.txt",
        objectDetectionModelType: ObjectDetectionModelType.yolov8,
      );
    } catch (e) {
      if (e is PlatformException) {
        print("only supported for android, Error is $e");
      } else {
        print("Error is $e");
      }
    }
  }

  void initStateAsync() async {
    WidgetsBinding.instance.addObserver(this);
    await loadModel();
    // FIX: the camera was never initialized, so build() always rendered
    // an empty Container. Initialize it once the model has loaded.
    initializeCamera();
  }

  /// Initializes [cameraController] and starts the image stream that
  /// feeds inference.
  void initializeCamera() async {
    cameras = await availableCameras();
    // NOTE(review): assumes cameras[0] is the desired (back) camera —
    // confirm on multi-camera devices.
    cameraController =
        CameraController(cameras[0], ResolutionPreset.medium);
    await cameraController!.initialize();
    if (!mounted) return;
    await cameraController!.startImageStream(onLatestImageAvailable);
    // Trigger a rebuild now that the controller is ready.
    setState(() {});
  }

  @override
  Widget build(BuildContext context) {
    // Return an empty container while the camera is not initialized.
    if (cameraController == null || !cameraController!.value.isInitialized) {
      return Container();
    }
    // FIX: the original body ended here without returning a widget once
    // the camera was ready — a compile error under null safety. Show the
    // live preview.
    return CameraPreview(cameraController!);
  }

  /// Runs object detection on [cameraImage] and forwards the results to
  /// the parent via [widget.resultsCallback].
  Future<void> runObjectDetection(CameraImage cameraImage) async {
    if (_objectModel != null) {
      List<ResultObjectDetection?> objDetect =
          await _objectModel!.getImagePredictionFromBytesList(
        cameraImage.planes.map((e) => e.bytes).toList(),
        cameraImage.width,
        cameraImage.height,
        minimumScore: 0.3,
        iOUThreshold: 0.3,
      );
      // FIX: results were computed but never delivered to the UI.
      widget.resultsCallback(objDetect);
    }
  }
  // FIX: the original runObjectDetection was missing its closing brace,
  // which left the following methods nested inside it.

  /// Callback invoked with each frame [CameraImage]; performs inference.
  /// Frames that arrive while a pass is still running are dropped.
  onLatestImageAvailable(CameraImage cameraImage) async {
    if (predicting) {
      return;
    }
    predicting = true;
    // FIX: `predicting` was set but never reset, so only the first frame
    // would ever be processed. Run inference, then clear the flag.
    await runObjectDetection(cameraImage);
    predicting = false;
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) async {
    switch (state) {
      case AppLifecycleState.paused:
        // Stop feeding frames while the app is backgrounded.
        cameraController?.stopImageStream();
        break;
      case AppLifecycleState.resumed:
        // FIX: guard against a null controller before dereferencing with
        // `!` — resuming before initializeCamera completed would crash.
        if (cameraController != null &&
            !cameraController!.value.isStreamingImages) {
          await cameraController?.startImageStream(onLatestImageAvailable);
        }
        break;
      default:
    }
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    cameraController?.dispose();
    super.dispose();
  }
}
Could you please help me?