mrousavy / react-native-vision-camera

๐Ÿ“ธ A powerful, high-performance React Native Camera library.
https://react-native-vision-camera.com
MIT License
7.57k stars 1.1k forks source link

๐Ÿ› codeScanner callback results, the corners and frame,the points and size not correct on android landscape mode #3298

Closed i7soft closed 4 hours ago

i7soft commented 4 hours ago

What's happening?

i wanted to scan qrcode, i get the corners points and frame from onCodeScanned callback ,and draw on a canvas,but the size and points not correct iShot_2024-11-17_10 42 31

Reproduceable Code

import React, { useEffect, useRef, useState } from 'react';
import { Alert, Dimensions, Image, Modal, PixelRatio, SafeAreaView, View } from 'react-native';

import { styles } from './CameraScanner.styles';
import { RNHoleView } from 'react-native-hole-view';
import {
    Camera,
    CameraRuntimeError,
    Code,
    useCameraDevice,
    useCodeScanner,
    useSkiaFrameProcessor,
} from 'react-native-vision-camera';
import { useIsFocused } from '@react-navigation/native';
import { getWindowHeight, getWindowWidth, isIos } from '../../helpers';
import { useAppStateListener } from '../../hooks/useAppStateListener';
import { ICameraScannerProps } from '../../type';
import { Group, Points, Skia, vec } from '@shopify/react-native-skia';
// import Canvas from 'react-native-canvas';
import { Canvas, useCanvasRef, Circle } from "@shopify/react-native-skia";

export const CameraScanner = ({
    setIsCameraShown,
    onReadCode,
}: ICameraScannerProps) => {

    const device = useCameraDevice('back');
    const camera = useRef<Camera>(null);
    const canvasRef = useCanvasRef();
    const isFocused = useIsFocused();
    const [isCameraInitialized, setIsCameraInitialized] = useState(isIos);
    const [isActive, setIsActive] = useState(isIos);
    const [flash, setFlash] = useState<'on' | 'off'>(isIos ? 'off' : 'on');
    const { appState } = useAppStateListener();
    const [codeScanned, setCodeScanned] = useState('');
    const [scanResults, setScanResults] = useState([])
    const [scanResultsPoints, setScanResultsPoints] = useState([])
    const [snapshot,setSnapshot] = useState('')

    useEffect(() => {
        if (codeScanned) {
            onReadCode(codeScanned);
        }
    }, [codeScanned, onReadCode]);

    useEffect(() => {
        let timeout: NodeJS.Timeout;

        if (isCameraInitialized) {
            timeout = setTimeout(() => {
                setIsActive(true);
                setFlash('off');
            }, 1000);
        }
        setIsActive(false);
        return () => {
            clearTimeout(timeout);
        };
    }, [isCameraInitialized]);

    const onInitialized = () => {
        setIsCameraInitialized(true);
    };

    const codeScanner = useCodeScanner({
        codeTypes: ['qr'],
        onCodeScanned: async codes => {
            console.log(codes)
            if (codes.length > 0) {
                // if (codes[0].value) {
                //     setIsActive(false);
                //     setTimeout(() => setCodeScanned(codes[0]?.value), 500);
                // }

                // setIsActive(false);
                const photo = await camera.current!.takeSnapshot()
                console.log(photo)
                setSnapshot('file://'+photo.path)
                var qr_codes = codes.filter(item => item.type == 'qr')
                if (qr_codes.length > 0) {
                    handleCanvas( qr_codes)
                    // canvasRef.current.
                    setScanResults(qr_codes)
                }

            }
            return;
        },

    });

    const onCrossClick = () => {
        setIsCameraShown(false);
    };

    const onError = (error: CameraRuntimeError) => {
        Alert.alert('ๆ‰ซ็ ๅ‡บ้”™', error.message);
    };

    /* if (device == null) {
        Alert.alert('ๆ‰ซ็ ๅ‡บ้”™', 'ๅŽ็ฝฎๆ‘„ๅƒๅคดไธๅฏ็”จ');
    } */

    function handleCanvas(codes){

        var windowSize = Dimensions.get('window')

        console.log(windowSize)

        var pixelRatio = PixelRatio.get()
        var paths = []

        for (var i = 0; i < codes.length; i++){
            var item = codes[i];
            if (item.type == 'qr') {

                var corners = item.corners;

                var points = []

                for (var i2 = 0; i2 < corners.length; i2++) {
                    var corner = corners[i2]
                    points.push(vec(corner.x * pixelRatio, corner.y * pixelRatio))
                }

                points.push(vec(corners[0].x * pixelRatio, corners[0].y*pixelRatio))

                paths.push(points)

            }

        }
        setScanResultsPoints(paths);

    }

    if (isFocused && device) {
        return (<View style={{ width: '100%', height: '100%', backgroundColor: 'black',position:'relative' }}>

            <Image source={{ uri: snapshot }} style={{ position: 'absolute', width: '100%', height: '100%' }} />

            <Canvas style={{ position: 'absolute', width: '100%', height: '100%', backgroundColor: 'rgba(0,0,0,0.5)' }} ref={canvasRef}>

                    {scanResultsPoints.map(function (points, i) {
                        return <Points
                            key={i}
                            points={points}
                            mode="polygon"
                            color="blue"
                            style="stroke"
                            strokeWidth={3}

                        />;
                    })}

            </Canvas>

            <View style={[styles.cameraControls, { backgroundColor: undefined }]} />
            {scanResults.length==0 && <Camera
                ref={camera}
                torch={flash}
                onInitialized={onInitialized}
                onError={onError}
                photo={false}
                style={[styles.fullScreenCamera]}
                device={device}
                codeScanner={codeScanner}
                // frameProcessor={frameProcessor}
                isActive={
                    isActive &&
                    isFocused &&
                    appState === 'active' &&
                    isCameraInitialized
                }
            />}

        </View>
        );
    }
};

Relevant log output

console.log(Dimensions.get('window')) 
 {"fontScale": 1, "height": 702.9213031219331, "scale": 1.1125000715255737, "width": 1197.3032938087147}

console.log(await camera.current!.takeSnapshot())
{"height": 755, "isMirrored": false, "orientation": "landscape-right", "path": "/data/user/0/com.zgcqnx.pe/cache/mrousavy8446094293175562017.jpg", "width": 1332}

Camera Device

{
  "formats": [],
  "sensorOrientation": "landscape-left",
  "hardwareLevel": "limited",
  "maxZoom": 4,
  "minZoom": 1,
  "maxExposure": 32,
  "supportsLowLightBoost": true,
  "neutralZoom": 1,
  "physicalDevices": [
    "wide-angle-camera"
  ],
  "supportsFocus": true,
  "supportsRawCapture": false,
  "isMultiCam": false,
  "minFocusDistance": 0.09775171065493646,
  "minExposure": -32,
  "name": "0 (BACK) androidx.camera.camera2",
  "hasFlash": true,
  "hasTorch": true,
  "position": "back",
  "id": "0"
}

Device

EPWHQ Android 13

VisionCamera Version

4.6.1

Can you reproduce this issue in the VisionCamera Example app?

Yes, I can reproduce the same issue in the Example app here

Additional information

maintenance-hans[bot] commented 4 hours ago

Guten Tag, Hans here. It seems like you are experiencing an issue with the codeScanner callback in app landscape mode. However, I notice that you didn't provide certain key logs that mrousavy will need to diagnose further. Please use adb logcat to gather runtime logs while reproducing the issue, and provide those for better assistance. Remember, if you'd like to support this project and help mrousavy dedicate more time to it, consider sponsoring here. Cheers! ๐Ÿป

Note: If you think I made a mistake by closing this issue, please ping @mrousavy to take a look.