From 582e1a8f46a4ba3ce75253d2b52e3d5b30fb775d Mon Sep 17 00:00:00 2001 From: Carlo Ferriolo Date: Fri, 10 Apr 2026 12:30:14 +0200 Subject: [PATCH 1/2] [OOBE] add new page for webcam analysis This patch adds the integration of a new page which includes the same logic used before for Sample Integrity, Quality Inspection and Crowd and Fall Detection, improved by the use of a webcam instead of images and a carousel. This gives the possibility to choose a more practical application for anyone who wants to use the OOBE in a real environment. Signed-off-by: Carlo Ferriolo --- oobe/src/App.tsx | 15 + oobe/src/pages/CrowdAndFallDetection.tsx | 17 +- .../src/pages/CrowdAndFallDetectionWebcam.tsx | 359 ++++++++++++++++++ oobe/src/pages/QualityInspection.tsx | 17 +- oobe/src/pages/QualityInspectionWebcam.tsx | 327 ++++++++++++++++ oobe/src/pages/SampleIntegrityCheck.tsx | 17 +- oobe/src/pages/SampleIntegrityCheckWebcam.tsx | 300 +++++++++++++++ 7 files changed, 1049 insertions(+), 3 deletions(-) create mode 100644 oobe/src/pages/CrowdAndFallDetectionWebcam.tsx create mode 100644 oobe/src/pages/QualityInspectionWebcam.tsx create mode 100644 oobe/src/pages/SampleIntegrityCheckWebcam.tsx diff --git a/oobe/src/App.tsx b/oobe/src/App.tsx index f3ecd16..0c98ccf 100644 --- a/oobe/src/App.tsx +++ b/oobe/src/App.tsx @@ -21,6 +21,9 @@ import SmartClinical from "./pages/SmartClinical"; import QualityInspection from "./pages/QualityInspection"; import HighResolutionVisuals from "./pages/HighResolutionVisuals"; import CrowdAndFallDetection from "./pages/CrowdAndFallDetection"; +import QualityInspectionWebcam from "./pages/QualityInspectionWebcam"; +import SampleIntegrityCheckWebcam from "./pages/SampleIntegrityCheckWebcam"; +import CrowdAndFallDetectionWebcam from "./pages/CrowdAndFallDetectionWebcam"; const HIDE_SIDEBAR_ROUTES = [ "/medical-alert-management", @@ -92,14 +95,26 @@ function App() { path="/quality-inspection" element={} /> + } + /> } /> + } + /> } /> + } + /> } diff --git 
a/oobe/src/pages/CrowdAndFallDetection.tsx b/oobe/src/pages/CrowdAndFallDetection.tsx index 6dcde33..5d5eb16 100644 --- a/oobe/src/pages/CrowdAndFallDetection.tsx +++ b/oobe/src/pages/CrowdAndFallDetection.tsx @@ -1,6 +1,6 @@ import { Container, Image, Button, Alert } from "react-bootstrap"; import { useState, useEffect, useRef, useLayoutEffect } from "react"; -import { useNavigate } from "react-router-dom"; +import { useLocation, useNavigate } from "react-router-dom"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faX, faExclamationTriangle } from "@fortawesome/free-solid-svg-icons"; import { FormattedMessage, defineMessages } from "react-intl"; @@ -187,6 +187,14 @@ const CrowdAndFallDetection = ({ apiClient }: CrowdAndFallDetectionProps) => { return () => clearInterval(timer); }, []); + const location = useLocation(); + + useEffect(() => { + if (location.state?.autoStart) { + setStatus("analysis"); + } + }, [location.state]); + useEffect(() => { if (status !== "analysis") return; const processImage = async () => { @@ -382,6 +390,13 @@ const CrowdAndFallDetection = ({ apiClient }: CrowdAndFallDetectionProps) => {
+ + + +
+ + ); +}; + +export default CrowdAndFallDetectionWebcam; diff --git a/oobe/src/pages/QualityInspection.tsx b/oobe/src/pages/QualityInspection.tsx index 8fe7f24..ac95e95 100644 --- a/oobe/src/pages/QualityInspection.tsx +++ b/oobe/src/pages/QualityInspection.tsx @@ -1,7 +1,7 @@ import { Container, Image, Button, Alert } from "react-bootstrap"; import { logo } from "../assets/images"; import "./QualityInspection.scss"; -import { useNavigate } from "react-router-dom"; +import { useLocation, useNavigate } from "react-router-dom"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faX } from "@fortawesome/free-solid-svg-icons"; import { FormattedMessage, useIntl } from "react-intl"; @@ -122,6 +122,14 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => { if (currentImage) processImage(); }, [apiClient, currentImage, status, intl, analysisMode]); + const location = useLocation(); + + useEffect(() => { + if (location.state?.autoStart) { + setStatus("analysis"); + } + }, [location.state]); + const handleBBoxColor = (categoryId: number) => { switch (categoryId) { case 0: @@ -297,6 +305,13 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => {
+ + + + +
+ + ); +}; + +export default QualityInspectionWebcam; diff --git a/oobe/src/pages/SampleIntegrityCheck.tsx b/oobe/src/pages/SampleIntegrityCheck.tsx index 0c836df..dbf6977 100644 --- a/oobe/src/pages/SampleIntegrityCheck.tsx +++ b/oobe/src/pages/SampleIntegrityCheck.tsx @@ -1,7 +1,7 @@ import { Container, Image, Button, Alert } from "react-bootstrap"; import { logo } from "../assets/images"; import "./SampleIntegrityCheck.scss"; -import { useNavigate } from "react-router-dom"; +import { useLocation, useNavigate } from "react-router-dom"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faX } from "@fortawesome/free-solid-svg-icons"; import { FormattedMessage, useIntl } from "react-intl"; @@ -125,6 +125,14 @@ const SampleIntegrityCheck = ({ apiClient }: SampleIntegrityCheckProps) => { if (currentImage) processImage(); }, [apiClient, currentImage, status, intl]); + const location = useLocation(); + + useEffect(() => { + if (location.state?.autoStart) { + setStatus("analysis"); + } + }, [location.state]); + const handleBBoxColor = (categoryId: number) => { switch (categoryId) { case 1: @@ -286,6 +294,13 @@ const SampleIntegrityCheck = ({ apiClient }: SampleIntegrityCheckProps) => {
+ + + +
+ + ); +}; + +export default SampleIntegrityCheckWebcam; From 390b1ed24cb18dcc5c2aff51ec192d5637d60dec Mon Sep 17 00:00:00 2001 From: Carlo Ferriolo Date: Mon, 20 Apr 2026 15:08:37 +0200 Subject: [PATCH 2/2] [OOBE] add new button for cpu, gpu and npu analysis This commit makes a few changes regarding the functioning of the main and webcam analysis. The webcam pages already offered the possibility to access the camera for the analysis, but no mode (cpu, gpu or npu) could be chosen. To address this, three more buttons have been added which start the live webcam analysis with the corresponding endpoint and AI model. Furthermore, to leverage the processor units, the same work has been done for the main analysis, which does not require the webcam but only the carousel part. The new endpoints created for blister and people detection are: /people-detect-cpu /people-detect-gpu /people-detect-npu /blister-pack-detect-cpu /blister-pack-detect-gpu /blister-pack-detect-npu while the ones corresponding to the PCB do not change. 
Signed-off-by: Carlo Ferriolo --- oobe/src/api/APIClient.ts | 49 ++- oobe/src/i18n/langs/en.json | 36 +- oobe/src/pages/CrowdAndFallDetection.scss | 35 ++ oobe/src/pages/CrowdAndFallDetection.tsx | 381 +++++++++-------- .../src/pages/CrowdAndFallDetectionWebcam.tsx | 390 ++++++++++-------- oobe/src/pages/QualityInspection.scss | 1 + oobe/src/pages/QualityInspection.tsx | 358 +++++++--------- oobe/src/pages/QualityInspectionWebcam.tsx | 348 +++++++++------- oobe/src/pages/SampleIntegrityCheck.scss | 20 +- oobe/src/pages/SampleIntegrityCheck.tsx | 360 ++++++++-------- oobe/src/pages/SampleIntegrityCheckWebcam.tsx | 342 +++++++++------ 11 files changed, 1231 insertions(+), 1089 deletions(-) diff --git a/oobe/src/api/APIClient.ts b/oobe/src/api/APIClient.ts index be261f1..26ada17 100644 --- a/oobe/src/api/APIClient.ts +++ b/oobe/src/api/APIClient.ts @@ -23,12 +23,6 @@ export type PersonResult = { score: number; }; -interface BackendPersonResult { - category_id: number; - bbox: number[]; - score: number; -} - export type InverterStatus = "ready" | "fault"; export type SmartUpdate = | { field: "plantStatus"; value: string } @@ -110,7 +104,12 @@ type FaceRecognitionMessage = { data: FaceRecognitionUpdate[]; }; -export type AnalysisMode = "cpu" | "npu"; +export type AnalysisMode = "cpu" | "gpu" | "npu"; + +export interface DetectionResponse { + results: T[]; + inferenceTime: number; +} export class APIClient { private config: Config; @@ -160,9 +159,12 @@ export class APIClient { }; } - async getBlisterPackResult(imageFile: File): Promise { - const response = await this.axiosInstance.post( - "/blister-pack-detect", + async getBlisterPackResult( + imageFile: File, + mode: AnalysisMode, + ): Promise> { + const response = await this.axiosInstance.post( + `/blister-pack-detect-${mode}`, imageFile, { headers: { @@ -170,13 +172,14 @@ export class APIClient { }, }, ); - return response.data.map( - (item): BlisterPackResult => ({ + return { + results: response.data.items.map((item) 
=> ({ categoryId: item.category_id, bbox: item.bbox, score: item.score, - }), - ); + })), + inferenceTime: response.data.inferenceTime, + }; } async exitApp(): Promise { @@ -469,9 +472,12 @@ export class APIClient { }; } - async getPersonResult(imageFile: File): Promise { - const response = await this.axiosInstance.post( - "/people-detect", + async getPersonResult( + imageFile: File, + mode: AnalysisMode, + ): Promise> { + const response = await this.axiosInstance.post( + `/people-detect-${mode}`, imageFile, { headers: { @@ -480,13 +486,14 @@ export class APIClient { }, ); - return response.data.map( - (item): PersonResult => ({ + return { + results: response.data.items.map((item) => ({ categoryId: item.category_id, bbox: item.bbox, score: item.score, - }), - ); + })), + inferenceTime: response.data.inferenceTime, + }; } disconnectWebSocket(wsType?: string) { diff --git a/oobe/src/i18n/langs/en.json b/oobe/src/i18n/langs/en.json index b7c7a83..d0bf9b5 100644 --- a/oobe/src/i18n/langs/en.json +++ b/oobe/src/i18n/langs/en.json @@ -80,6 +80,15 @@ "components.BloodCountMedical.unit": { "defaultMessage": "Unit" }, + "components.CrowdAndFallDetection.cpuAnalysisButton": { + "defaultMessage": "CPU Analysis" + }, + "components.CrowdAndFallDetection.gpuAnalysisButton": { + "defaultMessage": "GPU Analysis" + }, + "components.CrowdAndFallDetection.npuAnalysisButton": { + "defaultMessage": "NPU Analysis" + }, "components.DeviceDetailsCard.cpuArchitecture": { "defaultMessage": "CPU architecture" }, @@ -122,32 +131,23 @@ "components.GeolocalizationCard.title": { "defaultMessage": "Geolocalization" }, - "components.QualityInspection.analyzeNextMessage": { - "defaultMessage": "This is a demo environment, the camera feed is simulated. 
Click ‘CPU Analysis’ or 'NPU Analysis' to run inference" - }, "components.QualityInspection.cpuAnalysisButton": { "defaultMessage": "CPU Analysis" }, + "components.QualityInspection.gpuAnalysisButton": { + "defaultMessage": "GPU Analysis" + }, "components.QualityInspection.npuAnalysisButton": { "defaultMessage": "NPU Analysis" }, - "components.QualityInspection.startAnalysisButton": { - "defaultMessage": "Start analysis" - }, - "components.QualityInspection.startAnalysisMessage": { - "defaultMessage": "This is a demo environment, the camera feed is simulated. Click ‘Start Analysis’ to run inference." - }, - "components.SampleIntegrityCheck.analyzeNextButton": { - "defaultMessage": "Analyze next object" - }, - "components.SampleIntegrityCheck.analyzeNextMessage": { - "defaultMessage": "This is a demo environment, the camera feed is simulated. Click ‘Analyze next object’ to run inference." + "components.SampleIntegrityCheck.cpuAnalysisButton": { + "defaultMessage": "CPU Analysis" }, - "components.SampleIntegrityCheck.startAnalysisMessage": { - "defaultMessage": "This is a demo environment, the camera feed is simulated. Click ‘Start Analysis’ to run inference." 
+ "components.SampleIntegrityCheck.gpuAnalysisButton": { + "defaultMessage": "GPU Analysis" }, - "components.SampleIntegrityCheck.startButton": { - "defaultMessage": "Start analysis" + "components.SampleIntegrityCheck.npuAnalysisButton": { + "defaultMessage": "NPU Analysis" }, "components.Sidebar.dashboard": { "defaultMessage": "Dashboard" diff --git a/oobe/src/pages/CrowdAndFallDetection.scss b/oobe/src/pages/CrowdAndFallDetection.scss index 260417b..1acca0e 100644 --- a/oobe/src/pages/CrowdAndFallDetection.scss +++ b/oobe/src/pages/CrowdAndFallDetection.scss @@ -37,6 +37,41 @@ } } + .greeting-button { + background-color: #e2e5f3; + border: none; + border-radius: 6px; + color: #1a1a1a; + font-size: 1rem; + min-width: 200px; + height: 52px; + + &:hover { + background-color: #d1d5e8; + } + } + + .analyze-cpu-button, + .analyze-gpu-button, + .analyze-npu-button { + @extend .greeting-button; + height: 52px; + transition: all 0.3s ease; + + &.active-analysis { + color: #919191 !important; + background-color: #e2e5f3 !important; + opacity: 0.6; + cursor: wait; + } + + &:disabled:not(.active-analysis) { + opacity: 0.8; + background-color: #ffffff; + color: #ccc; + } + } + @media (max-width: 768px) { overflow-y: auto !important; .vh-100 { diff --git a/oobe/src/pages/CrowdAndFallDetection.tsx b/oobe/src/pages/CrowdAndFallDetection.tsx index 5d5eb16..764881b 100644 --- a/oobe/src/pages/CrowdAndFallDetection.tsx +++ b/oobe/src/pages/CrowdAndFallDetection.tsx @@ -1,11 +1,11 @@ import { Container, Image, Button, Alert } from "react-bootstrap"; import { useState, useEffect, useRef, useLayoutEffect } from "react"; -import { useLocation, useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router-dom"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faX, faExclamationTriangle } from "@fortawesome/free-solid-svg-icons"; import { FormattedMessage, defineMessages } from "react-intl"; import ImageCarousel from "./ImageCarousel"; 
-import { APIClient } from "../api/APIClient"; +import { APIClient, type AnalysisMode } from "../api/APIClient"; import { logo, elevator_second_floor_1, @@ -126,9 +126,11 @@ const urlToFile = async (url: string): Promise => { const CrowdAndFallDetection = ({ apiClient }: CrowdAndFallDetectionProps) => { const [results, setResults] = useState([]); + const [analysisMode, setAnalysisMode] = useState("cpu"); + const [inferenceTime, setInferenceTime] = useState(null); const [error, setError] = useState(null); const [status, setStatus] = useState<"greeting" | "analysis" | "result">( - "greeting", + "analysis", ); const [currentTime, setCurrentTime] = useState(new Date()); const [analysisTime, setAnalysisTime] = useState(null); @@ -187,38 +189,25 @@ const CrowdAndFallDetection = ({ apiClient }: CrowdAndFallDetectionProps) => { return () => clearInterval(timer); }, []); - const location = useLocation(); - - useEffect(() => { - if (location.state?.autoStart) { - setStatus("analysis"); - } - }, [location.state]); - useEffect(() => { if (status !== "analysis") return; const processImage = async () => { try { setResults([]); + setInferenceTime(null); const file = await urlToFile(currentImage); - const data = await apiClient.getPersonResult(file); - setResults(data); + const data = await apiClient.getPersonResult(file, analysisMode); + setResults(data.results); + setInferenceTime(data.inferenceTime); setAnalysisTime(new Date()); setStatus("result"); } catch { setError("Error"); - setStatus("greeting"); + setStatus("analysis"); } }; processImage(); - }, [apiClient, currentImage, status]); - - const changeImage = (dir: number) => { - const idx = imageOptions.indexOf(currentImage); - const next = (idx + dir + imageOptions.length) % imageOptions.length; - setCurrentImage(imageOptions[next]); - setStatus("analysis"); - }; + }, [apiClient, currentImage, status, analysisMode]); return ( { SECO -
+
{formatFullDate(currentTime)}
- {status === "greeting" ? ( -
- -
- ) : ( -
-
-
- {status === "result" && - results.map((r, i) => { - const scaleX = - imgRect.width / (imageRef.current?.naturalWidth || 1); - const scaleY = - imgRect.height / (imageRef.current?.naturalHeight || 1); - const isFall = r.categoryId === 1; - const boxColor = isFall - ? "#FF0000" - : DETECTION_COLORS[i % DETECTION_COLORS.length]; + {status === "result" && + results.map((r, i) => { + const scaleX = + imgRect.width / (imageRef.current?.naturalWidth || 1); + const scaleY = + imgRect.height / (imageRef.current?.naturalHeight || 1); + const isFall = r.categoryId === 1; + const boxColor = isFall + ? "#FF0000" + : DETECTION_COLORS[i % DETECTION_COLORS.length]; - return ( -
- ); - })} -
- + return ( +
+ ); + })}
+ +
-
- { - setCurrentImage(img); - setStatus("analysis"); - }} +
+ { + setCurrentImage(img); + setStatus("analysis"); + }} + /> +
+
+ +
+
+
+

+ -

+
-
-
-
-

- -

+ {status === "result" && inferenceTime !== null && ( +
+ Inference time: {inferenceTime.toFixed(2)} ms
+ )} -
- {status === "analysis" ? ( -
-
- -
- ) : ( -
- {results.length === 0 ? ( -
-

- -

-
- ) : ( - results.map((r, i) => ( -
-
- - #{i + 1} - - - - -
-
- - {analysisTime ? formatFullDate(analysisTime) : ""} +
+ {status === "analysis" ? ( +
+
+ +
+ ) : ( +
+ {results.length === 0 ? ( +
+

+ +

+
+ ) : ( + results.map((r, i) => ( +
+
+ #{i + 1} + + + +
+
+ + {analysisTime ? formatFullDate(analysisTime) : ""} + + {r.categoryId === 1 && ( + + + - {r.categoryId === 1 && ( - - - - - )} -
+ )}
- )) - )} -
- )} -
- -
-
- - - - 0 ? "text-primary" : "text-white"}`} - style={{ lineHeight: 1 }} - > - {results.length.toString().padStart(2, "0")} - -
-
- - - +
+ )) + )}
+ )} +
+ +
+
+ + + + 0 ? "text-primary" : "text-white"}`} + style={{ lineHeight: 1 }} + > + {results.length.toString().padStart(2, "0")} + +
+
+ + + +
- )} +
{error && ( { const canvasRef = useRef(null); const imageRef = useRef(null); const containerRef = useRef(null); + const isAnalyzingRef = useRef(false); - const [status, setStatus] = useState("greeting"); + const [status, setStatus] = useState("idle"); const [results, setResults] = useState([]); + const [inferenceTime, setInferenceTime] = useState(null); + const [analysisMode, setAnalysisMode] = useState("cpu"); const [error, setError] = useState(null); const [captured, setCaptured] = useState(null); const [currentTime] = useState(new Date()); const navigate = useNavigate(); - const apiClient = new APIClient(); + const apiClient = useMemo(() => new APIClient(), []); React.useEffect(() => { - navigator.mediaDevices.getUserMedia({ video: true }).then((stream) => { - streamRef.current = stream; - if (videoRef.current) videoRef.current.srcObject = stream; - }); return () => { if (streamRef.current) { streamRef.current.getTracks().forEach((track) => track.stop()); @@ -51,43 +51,77 @@ const CrowdAndFallDetectionWebcam = () => { }; }, []); + const startWebcam = async () => { + try { + const stream = await navigator.mediaDevices.getUserMedia({ video: true }); + streamRef.current = stream; + setStatus("greeting"); + } catch { + setError( + "Failed to access webcam. 
Please ensure you have given permission.", + ); + } + }; + React.useEffect(() => { if (status === "greeting" && videoRef.current && streamRef.current) { videoRef.current.srcObject = streamRef.current; } }, [status]); - const dataURLtoFile = (dataurl: string, filename: string) => { - const arr = dataurl.split(","); - const mime = arr[0].match(/:(.*?);/)![1]; - const bstr = atob(arr[1]); - const u8arr = new Uint8Array(bstr.length); - for (let i = 0; i < bstr.length; i++) u8arr[i] = bstr.charCodeAt(i); - return new File([u8arr], filename, { type: mime }); - }; + React.useEffect(() => { + let active = true; + const analyzeLoop = async () => { + if ( + status !== "greeting" || + !streamRef.current || + !videoRef.current || + !canvasRef.current + ) + return; + if (isAnalyzingRef.current) return; + + isAnalyzingRef.current = true; + try { + const ctx = canvasRef.current.getContext("2d", { alpha: false }); + if (ctx) { + ctx.drawImage(videoRef.current, 0, 0, 980, 720); - const handleCapture = async () => { - if (videoRef.current && canvasRef.current) { - const ctx = canvasRef.current.getContext("2d"); - if (ctx) { - ctx.drawImage(videoRef.current, 0, 0, 960, 720); - const dataUrl = canvasRef.current.toDataURL("image/png"); - setCaptured(dataUrl); - setStatus("analysis"); - setError(null); - - try { - const file = dataURLtoFile(dataUrl, "webcam.png"); - const data = await apiClient.getPersonResult(file); - setResults(data); - setStatus("result"); - } catch { - setError("Backend rejected the image format or analysis failed."); - setStatus("greeting"); + const blob = await new Promise((resolve) => + canvasRef.current?.toBlob((b) => resolve(b), "image/jpeg", 0.6), + ); + + if (blob && active && status === "greeting") { + const file = new File([blob], "webcam.jpg", { type: "image/jpeg" }); + const data = await apiClient.getPersonResult(file, analysisMode); + if (active && status === "greeting") { + const url = URL.createObjectURL(blob); + setCaptured((prev) => { + if (prev && 
prev.startsWith("blob:")) URL.revokeObjectURL(prev); + return url; + }); + setResults(data.results); + setInferenceTime(data.inferenceTime); + } + } + } + } catch (err) { + console.error("Loop analysis error:", err); + } finally { + isAnalyzingRef.current = false; + if (active && status === "greeting") { + setTimeout(analyzeLoop, 50); } } + }; + + if (status === "greeting") { + analyzeLoop(); } - }; + return () => { + active = false; + }; + }, [status, analysisMode, apiClient]); const formatFullDate = (date: Date): string => { const time = date.toLocaleTimeString("en-GB", { hour12: false }); @@ -99,30 +133,58 @@ const CrowdAndFallDetectionWebcam = () => { return `${time} - ${dayMonthYear}`; }; - const handleRestartWebcam = () => { - setCaptured(null); - setResults([]); - setError(null); - setStatus("greeting"); - }; - - const handleCaptureAndAnalysis = () => { - setCaptured(null); - setResults([]); - setError(null); - setStatus("greeting"); - setTimeout(() => handleCapture(), 100); + const handleAnalysisClick = (mode: AnalysisMode) => { + setAnalysisMode(mode); + if (status === "idle") { + startWebcam(); + } }; const handleReturnPage = () => { - if (videoRef.current && videoRef.current.srcObject) { - (videoRef.current.srcObject as MediaStream) - .getTracks() - .forEach((track) => track.stop()); + if (streamRef.current) { + streamRef.current.getTracks().forEach((track) => track.stop()); } navigate("/crowd-and-fall-detection", { state: { autoStart: true } }); }; + const renderBBoxes = () => { + const media = imageRef.current || videoRef.current; + const container = containerRef.current; + if (!media || !container || results.length === 0) return null; + + const rect = media.getBoundingClientRect(); + const containerRect = container.getBoundingClientRect(); + + const scaleX = rect.width / 980; + const scaleY = rect.height / 720; + + const offsetX = rect.left - containerRect.left; + const offsetY = rect.top - containerRect.top; + + return results.map((r, i) => { + const 
isFall = r.categoryId === 1; + const boxColor = isFall + ? "#FF0000" + : DETECTION_COLORS[i % DETECTION_COLORS.length]; + return ( +
+ ); + }); + }; + return ( {
+ {status === "idle" && ( +
+

Webcam inactive.

+

Click an analysis mode to start.

+
+ )} + {status === "greeting" && ( +
@@ -274,35 +304,41 @@ const CrowdAndFallDetectionWebcam = () => {
+ {inferenceTime !== null && ( +
+ Inference time: {inferenceTime.toFixed(2)} ms +
+ )} +
- {status === "analysis" ? ( -
-
- Scanning in progress... -
- ) : ( -
- {results.length === 0 ? ( -
-

- No people detected. -

-
- ) : ( - results.map((r, i) => ( -
-
- #{i + 1} - Person detected -
+
+ {results.length === 0 ? ( +
+

+ No people detected. +

+
+ ) : ( + results.map((r, i) => ( +
+
+ #{i + 1} + Person detected
- )) - )} -
- )} +
+ )) + )} +
@@ -327,29 +363,43 @@ const CrowdAndFallDetectionWebcam = () => {
-
+
+
diff --git a/oobe/src/pages/QualityInspection.scss b/oobe/src/pages/QualityInspection.scss index c989d52..a8aded2 100644 --- a/oobe/src/pages/QualityInspection.scss +++ b/oobe/src/pages/QualityInspection.scss @@ -65,6 +65,7 @@ } .analyze-cpu-button, + .analyze-gpu-button, .analyze-npu-button { @extend .greeting-button; height: 52px; diff --git a/oobe/src/pages/QualityInspection.tsx b/oobe/src/pages/QualityInspection.tsx index ac95e95..e2c6908 100644 --- a/oobe/src/pages/QualityInspection.tsx +++ b/oobe/src/pages/QualityInspection.tsx @@ -1,7 +1,7 @@ import { Container, Image, Button, Alert } from "react-bootstrap"; import { logo } from "../assets/images"; import "./QualityInspection.scss"; -import { useLocation, useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router-dom"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faX } from "@fortawesome/free-solid-svg-icons"; import { FormattedMessage, useIntl } from "react-intl"; @@ -66,7 +66,7 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => { const [analysisMode, setAnalysisMode] = useState("cpu"); const [inferenceTime, setInferenceTime] = useState(null); const [error, setError] = useState(null); - const [status, setStatus] = useState("greeting"); + const [status, setStatus] = useState("analysis"); const [currentImage, setCurrentImage] = useState(pcbMissingHole00); const [scale, setScale] = useState({ x: 1, y: 1 }); @@ -89,16 +89,6 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => { return () => window.removeEventListener("resize", handleImageLoad); }, []); - useEffect(() => { - if (status !== "analysis") return; - - const timer = setTimeout(() => { - if (status === "analysis") setStatus("result"); - }, 2000); - - return () => clearTimeout(timer); - }, [status, currentImage]); - useEffect(() => { if (status !== "analysis") return; const processImage = async () => { @@ -109,6 +99,7 @@ const QualityInspection = ({ apiClient }: 
QualityInspectionProps) => { const data = await apiClient.getDefectResult(file, analysisMode); setDefectResults(data.results); setInferenceTime(data.inferenceTime); + setStatus("result"); } catch { setError( intl.formatMessage({ @@ -122,14 +113,6 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => { if (currentImage) processImage(); }, [apiClient, currentImage, status, intl, analysisMode]); - const location = useLocation(); - - useEffect(() => { - if (location.state?.autoStart) { - setStatus("analysis"); - } - }, [location.state]); - const handleBBoxColor = (categoryId: number) => { switch (categoryId) { case 0: @@ -147,21 +130,15 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => { className="quality-inspection-container vh-100 d-flex flex-column p-4 bg-black text-white" >
- {status !== "greeting" && ( - - )} +
- SECO Logo + SECO Logo
@@ -177,192 +154,161 @@ const QualityInspection = ({ apiClient }: QualityInspectionProps) => { )} -
-

- {status === "greeting" ? ( - - ) : ( - +
+
+ {status === "result" && + defectResults.map((defect, index) => ( +
+ ))} + Sample - )} -

-
- - {status !== "greeting" && ( -
-
-
- {status === "result" && - defectResults.map((defect, index) => ( -
- ))} - Sample -
+
-
- { - setCurrentImage(img); - setDefectResults([]); - }} - /> -
+
+ { + setCurrentImage(img); + setStatus("analysis"); + }} + />
+
-
-
- {status === "analysis" && ( -
- )} -

- {status === "analysis" ? ( +
+
+ {status === "analysis" && ( +
+ )} +

+ {status === "analysis" ? ( + + ) : ( +
- ) : ( -
- - - {status === "result" && inferenceTime !== null && ( -
- Inference time:{" "} - {inferenceTime.toFixed(2)} ms -
- )} -
- - + {status === "result" && inferenceTime !== null && ( +
+ Inference time:{" "} + {inferenceTime.toFixed(2)} ms
+ )} -
- - -
+
+ +
- )} -

-
-
- - - - -
+
+ + +
+
+ )} +

-
- )} - {status === "greeting" && ( -
- +
+ + + + + + +
- )} +
); }; diff --git a/oobe/src/pages/QualityInspectionWebcam.tsx b/oobe/src/pages/QualityInspectionWebcam.tsx index 7fadd7a..5538005 100644 --- a/oobe/src/pages/QualityInspectionWebcam.tsx +++ b/oobe/src/pages/QualityInspectionWebcam.tsx @@ -1,4 +1,4 @@ -import React, { useRef, useState } from "react"; +import React, { useRef, useState, useMemo } from "react"; import { Container, Button, Alert, Image } from "react-bootstrap"; import { useNavigate } from "react-router-dom"; import { APIClient, type AnalysisMode } from "../api/APIClient"; @@ -15,68 +15,98 @@ const QualityInspectionWebcam = () => { const videoRef = useRef(null); const canvasRef = useRef(null); const imageRef = useRef(null); - const [status, setStatus] = useState("greeting"); + const containerRef = useRef(null); + const isAnalyzingRef = useRef(false); + + const [status, setStatus] = useState("idle"); const [defectResults, setDefectResults] = useState([]); const [inferenceTime, setInferenceTime] = useState(null); const [analysisMode, setAnalysisMode] = useState("cpu"); const [error, setError] = useState(null); const [captured, setCaptured] = useState(null); - const [scale] = useState({ x: 1, y: 1 }); const [currentTime] = useState(new Date()); const navigate = useNavigate(); - const apiClient = new APIClient(); + const apiClient = useMemo(() => new APIClient(), []); React.useEffect(() => { - navigator.mediaDevices.getUserMedia({ video: true }).then((stream) => { - streamRef.current = stream; - if (videoRef.current) videoRef.current.srcObject = stream; - }); return () => { - if (videoRef.current && videoRef.current.srcObject) { - (videoRef.current.srcObject as MediaStream) - .getTracks() - .forEach((track) => track.stop()); + if (streamRef.current) { + streamRef.current.getTracks().forEach((track) => track.stop()); } }; }, []); + const startWebcam = async () => { + try { + const stream = await navigator.mediaDevices.getUserMedia({ video: true }); + streamRef.current = stream; + setStatus("greeting"); + } 
catch { + setError( + "Failed to access webcam. Please ensure you have given permission.", + ); + } + }; + React.useEffect(() => { if (status === "greeting" && videoRef.current && streamRef.current) { videoRef.current.srcObject = streamRef.current; } }, [status]); - const dataURLtoFile = (dataurl: string, filename: string) => { - const arr = dataurl.split(","); - const mime = arr[0].match(/:(.*?);/)![1]; - const bstr = atob(arr[1]); - const u8arr = new Uint8Array(bstr.length); - for (let i = 0; i < bstr.length; i++) u8arr[i] = bstr.charCodeAt(i); - return new File([u8arr], filename, { type: mime }); - }; + React.useEffect(() => { + let active = true; + const analyzeLoop = async () => { + if ( + status !== "greeting" || + !streamRef.current || + !videoRef.current || + !canvasRef.current + ) + return; + if (isAnalyzingRef.current) return; + + isAnalyzingRef.current = true; + try { + const ctx = canvasRef.current.getContext("2d", { alpha: false }); + if (ctx) { + ctx.drawImage(videoRef.current, 0, 0, 960, 720); - const handleCapture = async (mode: AnalysisMode) => { - if (videoRef.current && canvasRef.current) { - const ctx = canvasRef.current.getContext("2d"); - if (ctx) { - ctx.drawImage(videoRef.current, 0, 0, 960, 720); - const dataUrl = canvasRef.current.toDataURL("image/png"); - setCaptured(dataUrl); - setStatus("analysis"); - setError(null); - try { - const file = dataURLtoFile(dataUrl, "webcam.png"); - const data = await apiClient.getDefectResult(file, mode); - setDefectResults(data.results); - setInferenceTime(data.inferenceTime); - setStatus("result"); - } catch { - setError("Backend rejected the image format or analysis failed."); - setStatus("greeting"); + const blob = await new Promise((resolve) => + canvasRef.current?.toBlob((b) => resolve(b), "image/jpeg", 0.7), + ); + + if (blob && active && status === "greeting") { + const file = new File([blob], "webcam.jpg", { type: "image/jpeg" }); + const data = await apiClient.getDefectResult(file, analysisMode); 
+ if (active && status === "greeting") { + const url = URL.createObjectURL(blob); + setCaptured((prev) => { + if (prev && prev.startsWith("blob:")) URL.revokeObjectURL(prev); + return url; + }); + setDefectResults(data.results); + setInferenceTime(data.inferenceTime); + } + } + } + } catch (err) { + console.error("Loop analysis error:", err); + } finally { + isAnalyzingRef.current = false; + if (active && status === "greeting") { + setTimeout(analyzeLoop, 100); } } + }; + + if (status === "greeting") { + analyzeLoop(); } - }; + return () => { + active = false; + }; + }, [status, analysisMode, apiClient]); const formatFullDate = (date: Date): string => { const time = date.toLocaleTimeString("en-GB", { hour12: false }); @@ -88,27 +118,16 @@ const QualityInspectionWebcam = () => { return `${time} - ${dayMonthYear}`; }; - const handleRestartWebcam = () => { - setCaptured(null); - setDefectResults([]); - setError(null); - setStatus("greeting"); - }; - - const handleCaptureAndAnalysis = (mode: AnalysisMode) => { - setCaptured(null); - setDefectResults([]); - setError(null); + const handleAnalysisClick = (mode: AnalysisMode) => { setAnalysisMode(mode); - setStatus("greeting"); - setTimeout(() => handleCapture(mode), 100); + if (status === "idle") { + startWebcam(); + } }; const handleReturnPage = () => { - if (videoRef.current && videoRef.current.srcObject) { - (videoRef.current.srcObject as MediaStream) - .getTracks() - .forEach((track) => track.stop()); + if (streamRef.current) { + streamRef.current.getTracks().forEach((track) => track.stop()); } navigate("/quality-inspection", { state: { autoStart: true } }); }; @@ -124,6 +143,38 @@ const QualityInspectionWebcam = () => { } }; + const renderBBoxes = () => { + const media = imageRef.current || videoRef.current; + const container = containerRef.current; + if (!media || !container || defectResults.length === 0) return null; + + const rect = media.getBoundingClientRect(); + const containerRect = 
container.getBoundingClientRect(); + + const scaleX = rect.width / 960; + const scaleY = rect.height / 720; + + const offsetX = rect.left - containerRect.left; + const offsetY = rect.top - containerRect.top; + + return defectResults.map((defect, index) => ( +
+ )); + }; + return ( {
+ {status === "idle" && ( +
+

Webcam inactive.

+

Click an analysis mode to start.

+
+ )} + {status === "greeting" && ( +