Problem
I have created a screen in React Native using the react-native-camera library that detects faces and then captures an image. However, it captures the entire frame instead of just the face. How can I capture only the face area?
Code
import { Dimensions, View, Alert, TouchableOpacity, Text } from 'react-native'
import React, { useRef, useState, useEffect } from 'react'
import { RNCamera } from 'react-native-camera'
import ImageEditor from "@react-native-community/image-editor"
import { SCREEN } from '../../../constants'
import { styles } from './styles'
// import ImagePicker from 'react-native-image-crop-picker'
const { width: windowWidth, height: windowHeight } = Dimensions.get('window')
const FaceDetection = ({ navigation }) => {
  const [box, setBox] = useState(null)
  const cameraRef = useRef()
  const [alertShown, setAlertShown] = useState(false)
  const [borderColor, setBorderColor] = useState('red')
  const [showCaptureButton, setShowCaptureButton] = useState(false)
  const [capturedImage, setCapturedImage] = useState(null)
  const alertTimeoutRef = useRef(null)

  useEffect(() => {
    if (alertShown) {
      // Show the capture button after a 2-second delay
      const timer = setTimeout(() => {
        setShowCaptureButton(true)
      }, 2000)
      return () => clearTimeout(timer)
    } else {
      setShowCaptureButton(false)
    }
  }, [alertShown])
  const handleFacesDetected = ({ faces }) => {
    if (faces.length > 0) {
      const face = faces[0]

      const isFaceProperlyVisible = () => {
        const minFaceWidth = 50
        const minFaceHeight = 50
        const maxFaceWidth = windowWidth - 50
        const maxFaceHeight = windowHeight - 50
        const isWithinBounds =
          face.bounds.origin.x >= 0 &&
          face.bounds.origin.y >= 0 &&
          face.bounds.origin.x + face.bounds.size.width <= windowWidth &&
          face.bounds.origin.y + face.bounds.size.height <= windowHeight
        // const isOccluded = face.bounds.size.width < minFaceWidth || face.bounds.size.height < minFaceHeight
        // console.log(isOccluded)
        const isFaceStraight = Math.abs(face.yawAngle) < 15 // Adjust angle threshold as needed
        return (
          face.bounds.size.width >= minFaceWidth &&
          face.bounds.size.height >= minFaceHeight &&
          face.bounds.size.width <= maxFaceWidth &&
          face.bounds.size.height <= maxFaceHeight &&
          isWithinBounds &&
          isFaceStraight
          // && !isOccluded
        )
      }

      const properlyVisible = isFaceProperlyVisible()
      setBox({
        width: face.bounds.size.width,
        height: face.bounds.size.height,
        x: face.bounds.origin.x,
        y: face.bounds.origin.y,
      })

      if (properlyVisible) {
        if (!alertShown) {
          setAlertShown(true)
          alertTimeoutRef.current = setTimeout(() => {
            setBorderColor('green')
            Alert.alert("Face Detected", "Your face is detected!")
          }, 2000)
        }
      } else {
        if (alertTimeoutRef.current) {
          clearTimeout(alertTimeoutRef.current)
          alertTimeoutRef.current = null
        }
        setBorderColor('red')
        setAlertShown(false)
      }
    } else {
      if (alertTimeoutRef.current) {
        clearTimeout(alertTimeoutRef.current)
        alertTimeoutRef.current = null
      }
      setBox(null)
      setAlertShown(false)
      setBorderColor('red')
    }
  }
  const takePicture = async () => {
    if (cameraRef.current) {
      const options = { quality: 0.5, base64: true }
      const data = await cameraRef.current.takePictureAsync(options)
      setCapturedImage(data.uri)
      navigation.navigate(SCREEN.capturedImage, { capturedImage: data.uri })
    }
  }
  return (
    <View style={styles.container}>
      <RNCamera
        ref={cameraRef}
        style={styles.cameraContainer}
        type={RNCamera.Constants.Type.front}
        captureAudio={false}
        onFacesDetected={handleFacesDetected}
        faceDetectionMode={RNCamera.Constants.FaceDetection.Mode.fast}
      />
      {box && (
        <View
          style={[
            styles.bound,
            {
              width: box.width,
              height: box.height,
              left: box.x,
              top: box.y,
              borderColor: borderColor,
            },
          ]}
        />
      )}
      {showCaptureButton && (
        <TouchableOpacity style={styles.captureButton} onPress={takePicture}>
          <Text style={styles.captureButtonText}>Capture Image</Text>
        </TouchableOpacity>
      )}
    </View>
  )
}
export default FaceDetection
I just want to capture the face area, not the full photo.
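From reading the image-editor docs, my guess is that I need to scale the detected face box from preview (screen) coordinates to the photo's pixel size and then crop the captured photo with ImageEditor.cropImage, which I already import. The helper name cropFaceFromPhoto below is just a placeholder of mine, and this is an untested sketch that assumes the preview and the photo share the same aspect ratio and ignores front-camera mirroring:

// Untested sketch: scale the last detected face box from preview coordinates
// to the captured photo's pixel size, then crop with ImageEditor.cropImage.
// `photo` is the object resolved by takePictureAsync (it includes uri, width, height).
const cropFaceFromPhoto = async (photo, faceBox) => {
  const scaleX = photo.width / windowWidth
  const scaleY = photo.height / windowHeight
  const cropData = {
    offset: { x: Math.max(0, faceBox.x * scaleX), y: Math.max(0, faceBox.y * scaleY) },
    size: { width: faceBox.width * scaleX, height: faceBox.height * scaleY },
  }
  // Depending on the @react-native-community/image-editor version, the result
  // is either a URI string or an object with a `uri` field.
  const result = await ImageEditor.cropImage(photo.uri, cropData)
  return typeof result === 'string' ? result : result.uri
}

In takePicture I would then call something like const faceUri = await cropFaceFromPhoto(data, box) and navigate with faceUri instead of data.uri. Is this scaling approach the right direction, or does the preview crop/letterbox the photo so that a plain width/height ratio is wrong?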