Video Stream - Copy this React + Tailwind component into your project
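The component assumes face-api.js and sweetalert2 are installed (npm install face-api.js sweetalert2), and that the face-api.js models are already loaded before the modelsLoaded prop turns true. A minimal loader sketch, assuming the weight files are served from a /models path (loadFaceModels and the path are illustrative, not part of the component):

import * as faceapi from "face-api.js";

// Loads the four nets the component relies on: the tiny face detector,
// 68-point landmarks, recognition descriptors, and age/gender estimation.
// "/models" is an assumed location; point it at wherever you host the weights.
export async function loadFaceModels(modelPath = "/models") {
  await Promise.all([
    faceapi.nets.tinyFaceDetector.loadFromUri(modelPath),
    faceapi.nets.faceLandmark68Net.loadFromUri(modelPath),
    faceapi.nets.faceRecognitionNet.loadFromUri(modelPath),
    faceapi.nets.ageGenderNet.loadFromUri(modelPath),
  ]);
}

The component itself: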
import React, { useEffect } from "react";
import TooltipOverlay from "./TooltipOverlay";
import Swal from "sweetalert2";
import * as faceapi from "face-api.js";

function VideoStream({
  selectedCamera,
  modelsLoaded,
  isCameraSelected,
  videoRef,
  canvasRef,
  employeeData,
  setEmployeeData,
  tooltipPositions,
  setTooltipPositions,
  tooltipPlacements,
  setTooltipPlacements,
  detectionInfo,
  setDetectionInfo,
  config,
  setIdentifiedLogs,
  identifiedLogs,
  setUnrecognizedCounter,
  setRecentDetections,
  recentDetections,
  setMarkingLog,
  markingLog,
  isEmployeeInMarkingLog,
  updateMarkingLog,
  setLogs,
  setLastAlertTime,
  lastAlertTime,
  unrecognizedCounter,
}) {
  useEffect(() => {
    // Hoisted so the effect cleanup can clear it; a cleanup function
    // returned from a "play" event handler would be silently ignored.
    let intervalId;

    // Attach the webcam stream for the chosen device to the <video> element.
    const startVideo = (deviceId) => {
      navigator.mediaDevices
        .getUserMedia({ video: { deviceId: { exact: deviceId } } })
        .then((stream) => {
          if (videoRef.current) {
            videoRef.current.srcObject = stream;
          }
        })
        .catch(() => {
          Swal.fire("Error", "Could not access the webcam.", "error");
        });
    };

    const handleVideoOnPlay = () => {
      if (!modelsLoaded) {
        Swal.fire("Error", "The models have not loaded yet.", "error");
        return;
      }

      // Run the full detection pipeline once per second.
      intervalId = setInterval(async () => {
        if (videoRef.current && canvasRef.current) {
          const videoWidth = videoRef.current.videoWidth;
          const videoHeight = videoRef.current.videoHeight;

          if (!videoWidth || !videoHeight) {
            console.warn("Invalid video dimensions. Retrying...");
            return;
          }

          const detections = await faceapi
            .detectAllFaces(
              videoRef.current,
              new faceapi.TinyFaceDetectorOptions()
            )
            .withFaceLandmarks()
            .withFaceDescriptors()
            .withAgeAndGender();

          const displaySize = { width: videoWidth, height: videoHeight };
          faceapi.matchDimensions(canvasRef.current, displaySize);
          const resizedDetections = faceapi.resizeResults(
            detections,
            displaySize
          );

          const ctx = canvasRef.current.getContext("2d");
          ctx.clearRect(
            0,
            0,
            canvasRef.current.width,
            canvasRef.current.height
          );
          faceapi.draw.drawDetections(canvasRef.current, resizedDetections);
          faceapi.draw.drawFaceLandmarks(canvasRef.current, resizedDetections);

          resizedDetections.forEach(async (detection, index) => {
            const { age, gender, genderProbability } = detection;
            setDetectionInfo((prevInfo) => ({
              ...prevInfo,
              [index]: {
                age: faceapi.utils.round(age, 0),
                gender,
                genderProbability: faceapi.utils.round(
                  genderProbability * 100,
                  0
                ),
              },
            }));

            const descriptor = detection.descriptor;

            try {
              // Ask the backend whether this descriptor matches a known employee.
              const response = await fetch(`${config.apiUrl}/verify`, {
                method: "POST",
                headers: {
                  "Content-Type": "application/json",
                },
                body: JSON.stringify({ descriptor }),
              });

              if (response.status === 500) {
                throw new Error("Server Error");
              }

              const result = await response.json();
              const box = detection.detection.box;

              if (result.success) {
                const now = Date.now();
                const detectionKey = result.employee.rut;

                // Require 3 sightings within a 10-second window before logging,
                // so a single noisy frame cannot register attendance.
                setRecentDetections((prevDetections) => {
                  const prevEntry = prevDetections[detectionKey] || {
                    count: 0,
                    timestamp: now,
                  };

                  if (now - prevEntry.timestamp < 10000) {
                    prevEntry.count += 1;
                  } else {
                    prevEntry.count = 1;
                    prevEntry.timestamp = now;
                  }

                  if (prevEntry.count >= 3) {
                    setIdentifiedLogs((prevLogs) => {
                      const exists = prevLogs.some(
                        (log) => log.rut === result.employee.rut
                      );
                      if (!exists) {
                        const dateTime = new Date();
                        const newEntry = {
                          nombre: result.employee.nombre,
                          apellido: result.employee.apellido,
                          rut: result.employee.rut,
                          area: result.employee.area,
                          telefono: result.employee.telefono,
                          email: result.employee.email,
                          fecha: dateTime.toLocaleDateString(),
                          hora: dateTime.toLocaleTimeString(),
                        };
                        prevLogs = [...prevLogs, newEntry];
                        console.log("logs before saving: ", prevLogs);
                        localStorage.setItem(
                          "localDetecteds",
                          JSON.stringify(prevLogs)
                        );

                        // Register attendance on the server once per employee.
                        if (!isEmployeeInMarkingLog(result.employee.rut)) {
                          updateMarkingLog(newEntry);
                          fetch(`${config.apiUrl}/mark-entry`, {
                            method: "POST",
                            headers: {
                              "Content-Type": "application/json",
                            },
                            body: JSON.stringify({
                              rut: newEntry.rut,
                              nombre: newEntry.nombre,
                              apellido: newEntry.apellido,
                              area: newEntry.area,
                              telefono: newEntry.telefono,
                              email: newEntry.email,
                              fecha: newEntry.fecha,
                              hora: newEntry.hora,
                            }),
                          })
                            .then((response) => response.json())
                            .catch(() =>
                              Swal.fire(
                                "Error",
                                "Could not register attendance on the server.",
                                "error"
                              )
                            );
                        }
                      }
                      return prevLogs;
                    });
                    delete prevDetections[detectionKey];
                  }

                  return { ...prevDetections, [detectionKey]: prevEntry };
                });

                setEmployeeData((prevData) => ({
                  ...prevData,
                  [index]: result.employee,
                }));

                setTooltipPositions((prevPositions) => ({
                  ...prevPositions,
                  [index]: {
                    top: box.y,
                    left: box.x,
                    width: box.width,
                    height: box.height,
                  },
                }));

                // matchScore is a descriptor distance, so lower means more certain.
                const certaintyPercentage = (1 - result.matchScore) * 100;
                setDetectionInfo((prevInfo) => ({
                  ...prevInfo,
                  [index]: {
                    ...prevInfo[index],
                    matchScore: faceapi.utils.round(certaintyPercentage, 2),
                  },
                }));

                // Keep the tooltip on-screen when the face is near the top edge.
                if (box.y < 60) {
                  if (box.x < displaySize.width / 2) {
                    setTooltipPlacements((prevPlacements) => ({
                      ...prevPlacements,
                      [index]: "right",
                    }));
                  } else {
                    setTooltipPlacements((prevPlacements) => ({
                      ...prevPlacements,
                      [index]: "left",
                    }));
                  }
                } else {
                  setTooltipPlacements((prevPlacements) => ({
                    ...prevPlacements,
                    [index]: "top",
                  }));
                }

                // Green overlay for a recognized face.
                ctx.strokeStyle = "green";
                ctx.lineWidth = 2;
                ctx.strokeRect(box.x, box.y, box.width, box.height);
                ctx.fillStyle = "rgba(0, 255, 0, 0.2)";
                ctx.fillRect(box.x, box.y, box.width, box.height);
              } else {
                setUnrecognizedCounter((prev) => prev + 1);

                setTooltipPositions((prevPositions) => ({
                  ...prevPositions,
                  [index]: {
                    top: box.y,
                    left: box.x,
                    width: box.width,
                    height: box.height,
                  },
                }));

                if (box.y < 60) {
                  if (box.x < displaySize.width / 2) {
                    setTooltipPlacements((prevPlacements) => ({
                      ...prevPlacements,
                      [index]: "right",
                    }));
                  } else {
                    setTooltipPlacements((prevPlacements) => ({
                      ...prevPlacements,
                      [index]: "left",
                    }));
                  }
                } else {
                  setTooltipPlacements((prevPlacements) => ({
                    ...prevPlacements,
                    [index]: "top",
                  }));
                }

                // Red overlay for an unrecognized face.
                ctx.strokeStyle = "red";
                ctx.lineWidth = 2;
                ctx.strokeRect(box.x, box.y, box.width, box.height);
                ctx.fillStyle = "rgba(255, 0, 0, 0.2)";
                ctx.fillRect(box.x, box.y, box.width, box.height);

                setEmployeeData((prevData) => ({
                  ...prevData,
                  [index]: null,
                }));
              }
            } catch (error) {
              console.error("Verification error:", error);
            }
          });
        }
      }, 1000);
    };

    if (isCameraSelected) {
      startVideo(selectedCamera);
      videoRef.current.addEventListener("play", handleVideoOnPlay);
    }

    return () => {
      clearInterval(intervalId);
      if (videoRef.current) {
        videoRef.current.removeEventListener("play", handleVideoOnPlay);
      }
    };
  }, [isCameraSelected, selectedCamera, modelsLoaded]);

  return (
    <div style={{ position: "relative", width: "720px", height: "560px" }}>
      <video
        ref={videoRef}
        autoPlay
        muted
        style={{
          position: "absolute",
          borderRadius: "15px",
          width: "100%",
          height: "80vh",
        }}
      />
      <canvas
        ref={canvasRef}
        style={{ position: "absolute", top: 0, left: 0 }}
      />
      <TooltipOverlay
        tooltipPositions={tooltipPositions}
        tooltipPlacements={tooltipPlacements}
        employeeData={employeeData}
        detectionInfo={detectionInfo}
      />
    </div>
  );
}

export default VideoStream;
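For reference, here is a minimal sketch of a parent that wires up the component's many props. Everything in it is illustrative: AttendancePage, the config.apiUrl value, and the hardcoded selectedCamera / modelsLoaded values stand in for your own camera-selection and model-loading logic (see loadFaceModels above). The backend is assumed to expose POST /verify responding with { success, employee, matchScore }, where matchScore is a face-api.js descriptor distance, which is why the component converts it to a certainty via (1 - matchScore) * 100.

import React, { useRef, useState, useCallback } from "react";
import VideoStream from "./VideoStream";

// Hypothetical backend base URL; the component appends /verify and /mark-entry.
const config = { apiUrl: "http://localhost:3001/api" };

function AttendancePage() {
  const videoRef = useRef(null);
  const canvasRef = useRef(null);

  // Per-face UI state, keyed by detection index.
  const [employeeData, setEmployeeData] = useState({});
  const [tooltipPositions, setTooltipPositions] = useState({});
  const [tooltipPlacements, setTooltipPlacements] = useState({});
  const [detectionInfo, setDetectionInfo] = useState({});

  // Attendance bookkeeping.
  const [identifiedLogs, setIdentifiedLogs] = useState([]);
  const [recentDetections, setRecentDetections] = useState({});
  const [markingLog, setMarkingLog] = useState([]);
  const [logs, setLogs] = useState([]);
  const [lastAlertTime, setLastAlertTime] = useState(0);
  const [unrecognizedCounter, setUnrecognizedCounter] = useState(0);

  // Helpers the component expects, keyed by the employee's RUT.
  const isEmployeeInMarkingLog = useCallback(
    (rut) => markingLog.some((entry) => entry.rut === rut),
    [markingLog]
  );
  const updateMarkingLog = useCallback((entry) => {
    setMarkingLog((prev) => [...prev, entry]);
  }, []);

  return (
    <VideoStream
      selectedCamera="your-camera-device-id" // from navigator.mediaDevices.enumerateDevices()
      modelsLoaded={true} // set true only after loadFaceModels() resolves
      isCameraSelected={true}
      videoRef={videoRef}
      canvasRef={canvasRef}
      employeeData={employeeData}
      setEmployeeData={setEmployeeData}
      tooltipPositions={tooltipPositions}
      setTooltipPositions={setTooltipPositions}
      tooltipPlacements={tooltipPlacements}
      setTooltipPlacements={setTooltipPlacements}
      detectionInfo={detectionInfo}
      setDetectionInfo={setDetectionInfo}
      config={config}
      identifiedLogs={identifiedLogs}
      setIdentifiedLogs={setIdentifiedLogs}
      unrecognizedCounter={unrecognizedCounter}
      setUnrecognizedCounter={setUnrecognizedCounter}
      recentDetections={recentDetections}
      setRecentDetections={setRecentDetections}
      markingLog={markingLog}
      setMarkingLog={setMarkingLog}
      isEmployeeInMarkingLog={isEmployeeInMarkingLog}
      updateMarkingLog={updateMarkingLog}
      setLogs={setLogs}
      lastAlertTime={lastAlertTime}
      setLastAlertTime={setLastAlertTime}
    />
  );
}

export default AttendancePage;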
