diff --git a/src/pages/CameraModal.js b/src/pages/CameraModal.js
index e69de29..7ee63d9 100644
--- a/src/pages/CameraModal.js
+++ b/src/pages/CameraModal.js
@@ -0,0 +1,618 @@
+import React, { useState, useRef, useEffect, useCallback } from 'react';
+
+// --- HELPER FUNCTIONS ---
+function dataURLtoFile(dataUrl, filename) {
+ const arr = dataUrl.split(',');
+ const mime = arr[0].match(/:(.*?);/)[1];
+ const bstr = atob(arr[1]);
+ let n = bstr.length;
+ const u8arr = new Uint8Array(n);
+ while (n--) {
+ u8arr[n] = bstr.charCodeAt(n);
+ }
+ return new File([u8arr], filename, { type: mime });
+}
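+// e.g. dataURLtoFile("data:image/jpeg;base64,...", "photo.jpg") -> File { name: "photo.jpg", type: "image/jpeg" }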
+
+const SPINNER_ANIMATION_STYLE = `
+ @keyframes spin {
+ 0% { transform: rotate(0deg); }
+ 100% { transform: rotate(360deg); }
+ }
+`;
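+// Injected through a <style> tag in the modal markup below so the spinner's "spin" keyframes are defined.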
+
+const cameraModalStyles = {
+ modalOverlay: {
+ position: "fixed", top: 0, left: 0, right: 0, bottom: 0,
+ backgroundColor: "rgba(0, 0, 0, 0.8)", zIndex: 1000,
+ display: "flex", alignItems: "center", justifyContent: "center",
+ },
+ modalContent: {
+ backgroundColor: "#f8f9fa",
+ borderRadius: "16px",
+ width: "100%",
+ height: "100%",
+ maxWidth: "800px",
+ maxHeight: "95vh",
+ display: "flex",
+ flexDirection: "column",
+ overflow: "hidden",
+ position: "relative",
+ },
+ button: {
+ backgroundColor: "#007aff", color: "white", padding: "14px 22px",
+ borderRadius: "12px", border: "none", fontSize: "16px",
+ fontWeight: "600", cursor: "pointer", margin: "8px 0",
+ textAlign: "center", transition: "background-color .2s ease",
+ },
+ secondaryButton: {
+ backgroundColor: '#6b7280', color: 'white'
+ },
+ cameraContainer: {
+ position: "relative", width: "100%", height: "100%",
+ backgroundColor: "black", display: "flex", alignItems: "center", justifyContent: "center",
+ },
+ video: {
+ width: "100%", height: "100%", objectFit: "cover",
+ },
+ canvas: {
+ position: 'absolute', top: 0, left: 0, width: '100%', height: '100%',
+ pointerEvents: 'none',
+ },
+ cameraControls: {
+ position: "absolute",
+ bottom: "30px",
+ left: 0,
+ right: 0,
+ display: "flex",
+ justifyContent: "center",
+ alignItems: "center",
+ zIndex: 11,
+ },
+ backButton: {
+ position: "absolute", top: "20px", left: "20px",
+ backgroundColor: "rgba(0, 0, 0, 0.6)", color: "white",
+ border: "none", borderRadius: "50%", width: "44px", height: "44px",
+ fontSize: "24px", cursor: "pointer", zIndex: 11,
+ display: "flex", alignItems: "center", justifyContent: "center",
+ },
+ captureButton: {
+ backgroundColor: "white",
+ border: "4px solid rgba(255, 255, 255, 0.3)",
+ borderRadius: "50%", width: "70px", height: "70px",
+ cursor: "pointer", transition: "transform 0.1s",
+ },
+ previewContainer: {
+ display: "flex", flexDirection: "column", alignItems: "center",
+ justifyContent: "center", padding: 20, flex: 1, backgroundColor: "#f8f9fa",
+ },
+ previewImage: {
+ maxHeight: '60vh',
+ maxWidth: "100%", borderRadius: 12,
+ marginBottom: 20, boxShadow: "0 4px 12px rgba(0,0,0,0.15)",
+ },
+ loadingOverlay: {
+ position: "absolute", inset: 0, background: "rgba(0,0,0,0.8)",
+ display: "flex", flexDirection: "column", alignItems: "center",
+ justifyContent: "center", zIndex: 20,
+ },
+ spinner: {
+ border: "4px solid #f3f3f3", borderTop: "4px solid #007aff",
+ borderRadius: "50%", width: "44px", height: "44px",
+ animation: "spin 1s linear infinite",
+ },
+ instructionText: {
+ position: "absolute", top: "80px", left: "20px", right: "20px",
+ textAlign: "center", color: "white", fontSize: "14px",
+ background: "rgba(0,0,0,0.7)", padding: "12px", borderRadius: "8px",
+ zIndex: 12, fontWeight: "500",
+ }
+};
+
+const useCamera = ({ videoRef, canvasRef }) => {
+ const streamRef = useRef(null);
+ const guideRectRef = useRef({ x: 0, y: 0, width: 0, height: 0, radius: 20 });
+ const animationFrameId = useRef(null);
+
+ const stopCameraStream = useCallback(() => {
+ console.log("Stopping camera stream...");
+
+ // Cancel animation frame
+ if (animationFrameId.current) {
+ cancelAnimationFrame(animationFrameId.current);
+ animationFrameId.current = null;
+ }
+
+ // Stop all tracks
+ if (streamRef.current) {
+ streamRef.current.getTracks().forEach((track) => {
+ console.log(`Stopping track: ${track.kind}`);
+ track.stop();
+ });
+ streamRef.current = null;
+ }
+
+ // Clear video element
+ if (videoRef.current) {
+ videoRef.current.srcObject = null;
+ videoRef.current.load();
+ }
+ }, [videoRef]);
+
+ const drawCameraGuide = (ctx, rect, canvasWidth, canvasHeight) => {
+ const { x, y, width, height, radius } = rect;
+
+ // Clear canvas
+ ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+
+ ctx.save();
+
+ // Draw overlay
+ ctx.fillStyle = "rgba(0, 0, 0, 0.5)";
+ ctx.fillRect(0, 0, canvasWidth, canvasHeight);
+
+ // Cut out the guide rectangle
+ ctx.globalCompositeOperation = 'destination-out';
+ ctx.beginPath();
+ ctx.roundRect(x, y, width, height, radius);
+ ctx.fill();
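+ // "destination-out" erases the dim overlay wherever the rounded rect is drawn, leaving a clear window.
+ // Note: ctx.roundRect needs a fairly recent browser (Chromium 99+, Firefox 112+, Safari 16+).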
+
+ // Reset composite operation
+ ctx.globalCompositeOperation = 'source-over';
+
+ // Draw corner indicators
+ const cornerSize = 25;
+ const cornerThickness = 4;
+ ctx.strokeStyle = "#00ff88";
+ ctx.lineWidth = cornerThickness;
+ ctx.lineCap = "round";
+
+ ctx.beginPath();
+ // Top-left corner
+ ctx.moveTo(x, y + cornerSize);
+ ctx.lineTo(x, y);
+ ctx.lineTo(x + cornerSize, y);
+
+ // Top-right corner
+ ctx.moveTo(x + width - cornerSize, y);
+ ctx.lineTo(x + width, y);
+ ctx.lineTo(x + width, y + cornerSize);
+
+ // Bottom-left corner
+ ctx.moveTo(x, y + height - cornerSize);
+ ctx.lineTo(x, y + height);
+ ctx.lineTo(x + cornerSize, y + height);
+
+ // Bottom-right corner
+ ctx.moveTo(x + width - cornerSize, y + height);
+ ctx.lineTo(x + width, y + height);
+ ctx.lineTo(x + width, y + height - cornerSize);
+
+ ctx.stroke();
+ ctx.restore();
+ };
+
+ const setupCanvasAndDrawLoop = useCallback(() => {
+ const video = videoRef.current;
+ const canvas = canvasRef.current;
+ if (!video || !canvas) {
+ console.log("Video or canvas not available for setup");
+ return;
+ }
+
+ console.log("Setting up canvas and draw loop");
+
+ const ctx = canvas.getContext("2d");
+ const container = canvas.parentElement;
+
+ if (!container) return;
+
+ const viewportWidth = container.clientWidth;
+ const viewportHeight = container.clientHeight;
+
+ // Set canvas size with device pixel ratio
+ const pixelRatio = window.devicePixelRatio || 1;
+ canvas.width = viewportWidth * pixelRatio;
+ canvas.height = viewportHeight * pixelRatio;
+ canvas.style.width = viewportWidth + 'px';
+ canvas.style.height = viewportHeight + 'px';
+ ctx.scale(pixelRatio, pixelRatio);
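+ // After scale(), drawing uses CSS-pixel coordinates while the larger backing store keeps the overlay sharp on high-DPI screens.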
+
+ // Calculate guide rectangle (portrait ratio for ID cards)
+ const portraitRatio = 0.63; // Width/Height ratio for ID cards
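+ // (An ID-1 card is 53.98 mm × 85.6 mm, so in portrait orientation width/height ≈ 0.63.)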
+ let rectWidth = viewportWidth * 0.85;
+ let rectHeight = rectWidth / portraitRatio;
+
+ // Ensure it fits in viewport
+ if (rectHeight > viewportHeight * 0.7) {
+ rectHeight = viewportHeight * 0.7;
+ rectWidth = rectHeight * portraitRatio;
+ }
+
+ guideRectRef.current = {
+ x: (viewportWidth - rectWidth) / 2,
+ y: (viewportHeight - rectHeight) / 2,
+ width: rectWidth,
+ height: rectHeight,
+ radius: 12,
+ };
+
+ console.log("Guide rect:", guideRectRef.current);
+
+ const drawLoop = () => {
+ if (!canvas.parentElement || !streamRef.current) {
+ console.log("Stopping draw loop - canvas removed or stream stopped");
+ return;
+ }
+
+ drawCameraGuide(ctx, guideRectRef.current, viewportWidth, viewportHeight);
+ animationFrameId.current = requestAnimationFrame(drawLoop);
+ };
+
+ drawLoop();
+ }, [videoRef, canvasRef]);
+
+ const startCamera = useCallback(async () => {
+ console.log("Starting camera...");
+
+ // Stop any existing stream first
+ stopCameraStream();
+
+ // Wait for cleanup
+ await new Promise(resolve => setTimeout(resolve, 100));
+
+ try {
+ const constraints = {
+ video: {
+ facingMode: "environment",
+ width: { ideal: 1920, max: 1920 },
+ height: { ideal: 1080, max: 1080 }
+ },
+ audio: false,
+ };
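+ // "environment" requests the rear-facing camera where one exists; browsers treat it as a preference and fall back otherwise.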
+
+ console.log("Requesting camera access...");
+ const newStream = await navigator.mediaDevices.getUserMedia(constraints);
+ streamRef.current = newStream;
+
+ console.log("Camera stream obtained");
+
+ if (videoRef.current) {
+ videoRef.current.srcObject = newStream;
+
+ // Wait for video to be ready
+ await new Promise((resolve, reject) => {
+ const video = videoRef.current;
+ if (!video) return reject(new Error("Video element not found"));
+
+ const onLoadedMetadata = () => {
+ console.log("Video metadata loaded");
+ video.removeEventListener('loadedmetadata', onLoadedMetadata);
+ video.removeEventListener('error', onError);
+ resolve();
+ };
+
+ const onError = (e) => {
+ console.error("Video error:", e);
+ video.removeEventListener('loadedmetadata', onLoadedMetadata);
+ video.removeEventListener('error', onError);
+ reject(e);
+ };
+
+ if (video.readyState >= 1) { // HAVE_METADATA
+ console.log("Video already loaded");
+ resolve();
+ } else {
+ video.addEventListener('loadedmetadata', onLoadedMetadata);
+ video.addEventListener('error', onError);
+ }
+
+ video.play().catch(reject);
+ });
+
+ // Wait a bit more for video to start playing
+ await new Promise(resolve => setTimeout(resolve, 200));
+
+ console.log("Setting up canvas...");
+ setupCanvasAndDrawLoop();
+ }
+ } catch (err) {
+ console.error("Failed to access camera:", err);
+ alert("Tidak dapat mengakses kamera. Pastikan Anda telah memberikan izin kamera.");
+ throw err;
+ }
+ }, [videoRef, stopCameraStream, setupCanvasAndDrawLoop]);
+
+ const captureImage = useCallback(() => {
+ const video = videoRef.current;
+ const canvas = canvasRef.current;
+
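+ // readyState >= 2 (HAVE_CURRENT_DATA) means the current frame is available to draw onto a canvas.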
+ if (!video || video.readyState < 2) {
+ console.log("Video not ready for capture");
+ return null;
+ }
+
+ console.log("Capturing and cropping image...");
+
+ const guide = guideRectRef.current;
+ if (!canvas) return null;
+
+ // Get actual dimensions
+ const canvasRect = canvas.getBoundingClientRect();
+ const videoWidth = video.videoWidth;
+ const videoHeight = video.videoHeight;
+
+ console.log("Video dimensions:", videoWidth, "x", videoHeight);
+ console.log("Canvas dimensions:", canvasRect.width, "x", canvasRect.height);
+ console.log("Guide rect:", guide);
+
+ // Calculate how the video is displayed (object-fit: cover)
+ const videoAspectRatio = videoWidth / videoHeight;
+ const canvasAspectRatio = canvasRect.width / canvasRect.height;
+
+ let displayWidth, displayHeight, offsetX = 0, offsetY = 0;
+ let scaleX, scaleY;
+
+ if (videoAspectRatio > canvasAspectRatio) {
+ // Video is wider - it will be cropped horizontally
+ displayHeight = canvasRect.height;
+ displayWidth = displayHeight * videoAspectRatio;
+ offsetX = (canvasRect.width - displayWidth) / 2;
+ offsetY = 0;
+ } else {
+ // Video is taller - it will be cropped vertically
+ displayWidth = canvasRect.width;
+ displayHeight = displayWidth / videoAspectRatio;
+ offsetX = 0;
+ offsetY = (canvasRect.height - displayHeight) / 2;
+ }
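+ // Example: a 1920x1080 stream in a 400x700 container displays at ~1244x700 with offsetX ≈ -422 px.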
+
+ // Calculate scale factors from displayed video to actual video
+ scaleX = videoWidth / displayWidth;
+ scaleY = videoHeight / displayHeight;
+
+ // Calculate crop coordinates in video space
+ const cropX = Math.max(0, (guide.x - offsetX) * scaleX);
+ const cropY = Math.max(0, (guide.y - offsetY) * scaleY);
+ const cropWidth = Math.min(videoWidth - cropX, guide.width * scaleX);
+ const cropHeight = Math.min(videoHeight - cropY, guide.height * scaleY);
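+ // Defensive clamping: keeps the crop rectangle inside the real video frame even if rounding pushes the mapped guide outside it.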
+
+ console.log("Crop coordinates:", { cropX, cropY, cropWidth, cropHeight });
+ console.log("Scale factors:", { scaleX, scaleY });
+ console.log("Display info:", { displayWidth, displayHeight, offsetX, offsetY });
+
+ // Create crop canvas
+ const cropCanvas = document.createElement("canvas");
+ cropCanvas.width = Math.round(cropWidth);
+ cropCanvas.height = Math.round(cropHeight);
+ const cropCtx = cropCanvas.getContext("2d");
+
+ // Draw cropped portion
+ cropCtx.drawImage(
+ video,
+ Math.round(cropX), Math.round(cropY), Math.round(cropWidth), Math.round(cropHeight),
+ 0, 0, Math.round(cropWidth), Math.round(cropHeight)
+ );
+
+ console.log("Image cropped successfully");
+ return cropCanvas.toDataURL("image/jpeg", 0.9);
+ }, [videoRef, canvasRef]);
+
+ useEffect(() => {
+ return () => {
+ console.log("useCamera cleanup");
+ stopCameraStream();
+ };
+ }, [stopCameraStream]);
+
+ return { startCamera, stopCameraStream, captureImage };
+};
+
+const CameraModal = ({ isOpen, onClose, onCapture }) => {
+ const [step, setStep] = useState("camera");
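+ // "camera": live viewfinder with the guide overlay; "preview": review the cropped capture and confirm or retake.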
+ const [capturedImage, setCapturedImage] = useState(null);
+ const [loading, setLoading] = useState(false);
+ const [cameraReady, setCameraReady] = useState(false);
+ const videoRef = useRef(null);
+ const canvasRef = useRef(null);
+
+ const { startCamera, stopCameraStream, captureImage } = useCamera({ videoRef, canvasRef });
+
+ // Handle modal open/close and step changes
+ useEffect(() => {
+ if (!isOpen) {
+ console.log("Modal closed - cleaning up");
+ stopCameraStream();
+ setStep("camera");
+ setCapturedImage(null);
+ setLoading(false);
+ setCameraReady(false);
+ return;
+ }
+
+ if (step === "camera" && isOpen) {
+ console.log("Starting camera for step:", step);
+ setCameraReady(false);
+ setLoading(true);
+
+ const startCameraAsync = async () => {
+ try {
+ await startCamera();
+ setCameraReady(true);
+ console.log("Camera ready");
+ } catch (error) {
+ console.error("Failed to start camera:", error);
+ onClose();
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ // Small delay to ensure DOM is ready
+ const timer = setTimeout(startCameraAsync, 150);
+ return () => clearTimeout(timer);
+ }
+ }, [isOpen, step, startCamera, stopCameraStream, onClose]);
+
+ const handleCaptureClick = async () => {
+ if (!cameraReady) return;
+
+ console.log("Capture button clicked");
+ setLoading(true);
+
+ try {
+ const imageData = captureImage();
+ if (imageData) {
+ setCapturedImage(imageData);
+ stopCameraStream(); // Stop camera after capture
+
+ // Short delay for smooth transition
+ setTimeout(() => {
+ setLoading(false);
+ setStep("preview");
+ }, 300);
+ } else {
+ setLoading(false);
+ alert("Gagal mengambil gambar. Silakan coba lagi.");
+ }
+ } catch (error) {
+ console.error("Capture error:", error);
+ setLoading(false);
+ alert("Terjadi error saat mengambil gambar.");
+ }
+ };
+
+ const handleRetake = () => {
+ console.log("Retake button clicked");
+ setCapturedImage(null);
+ setLoading(false);
+ setCameraReady(false);
+ setStep("camera"); // This will trigger camera restart via useEffect
+ };
+
+ const handleUsePhoto = () => {
+ if (capturedImage) {
+ const file = dataURLtoFile(capturedImage, `capture-${Date.now()}.jpg`);
+ onCapture(file);
+ }
+ // Reset state
+ setStep("camera");
+ setCapturedImage(null);
+ setCameraReady(false);
+ };
+
+ const handleClose = () => {
+ console.log("Close button clicked");
+ stopCameraStream();
+ onClose();
+ };
+
+ if (!isOpen) return null;
+
+ return (
+ <div style={cameraModalStyles.modalOverlay}>
+ {/* NOTE: markup below is a reconstruction based on the style objects and handlers; the exact original JSX and button labels may differ. */}
+ <style>{SPINNER_ANIMATION_STYLE}</style>
+ <div style={cameraModalStyles.modalContent}>
+ {step === "camera" && (
+ <div style={cameraModalStyles.cameraContainer}>
+ <video ref={videoRef} style={cameraModalStyles.video} autoPlay playsInline muted />
+ <canvas ref={canvasRef} style={cameraModalStyles.canvas} />
+ <button style={cameraModalStyles.backButton} onClick={handleClose} aria-label="Tutup">×</button>
+ <div style={cameraModalStyles.instructionText}>
+ Posisikan dokumen dalam area hijau dan tekan tombol untuk mengambil gambar
+ </div>
+ <div style={cameraModalStyles.cameraControls}>
+ <button
+ style={cameraModalStyles.captureButton}
+ onClick={handleCaptureClick}
+ disabled={!cameraReady}
+ aria-label="Ambil gambar"
+ />
+ </div>
+ </div>
+ )}
+
+ {step === "preview" && capturedImage && (
+ <div style={cameraModalStyles.previewContainer}>
+ <h3>Pratinjau Hasil Crop</h3>
+ <img src={capturedImage} alt="Pratinjau hasil crop" style={cameraModalStyles.previewImage} />
+ <button style={cameraModalStyles.button} onClick={handleUsePhoto}>Gunakan Foto</button>
+ <button
+ style={{ ...cameraModalStyles.button, ...cameraModalStyles.secondaryButton }}
+ onClick={handleRetake}
+ >
+ Ambil Ulang
+ </button>
+ </div>
+ )}
+
+ {loading && (
+ <div style={cameraModalStyles.loadingOverlay}>
+ <div style={cameraModalStyles.spinner} />
+ <p style={{ color: "white", marginTop: "16px" }}>
+ {step === "camera" && !cameraReady ? "Memuat kamera..." : "Memproses foto..."}
+ </p>
+ </div>
+ )}
+ </div>
+ </div>
+ );
+};
+
+export default CameraModal;
\ No newline at end of file
diff --git a/src/pages/InputDataPage.js b/src/pages/InputDataPage.js
index d04367b..e6fe5ee 100644
--- a/src/pages/InputDataPage.js
+++ b/src/pages/InputDataPage.js
@@ -1,5 +1,6 @@
import React, { useState, useRef, useEffect, useCallback } from 'react';
import { useParams, Link, useLocation } from 'react-router-dom';
+import CameraModal from './CameraModal';
// --- MOCK API LOGIC ---
const fetchEntries = async (dataTypeId) => {
@@ -11,622 +12,7 @@ const fetchEntries = async (dataTypeId) => {
});
};
-// --- HELPER FUNCTIONS ---
-function dataURLtoFile(dataUrl, filename) {
- const arr = dataUrl.split(',');
- const mime = arr[0].match(/:(.*?);/)[1];
- const bstr = atob(arr[1]);
- let n = bstr.length;
- const u8arr = new Uint8Array(n);
- while (n--) {
- u8arr[n] = bstr.charCodeAt(n);
- }
- return new File([u8arr], filename, { type: mime });
-}
-
-const SPINNER_ANIMATION_STYLE = `
- @keyframes spin {
- 0% { transform: rotate(0deg); }
- 100% { transform: rotate(360deg); }
- }
-`;
-
-const cameraModalStyles = {
- modalOverlay: {
- position: "fixed", top: 0, left: 0, right: 0, bottom: 0,
- backgroundColor: "rgba(0, 0, 0, 0.8)", zIndex: 1000,
- display: "flex", alignItems: "center", justifyContent: "center",
- },
- modalContent: {
- backgroundColor: "#f8f9fa",
- borderRadius: "16px",
- width: "100%",
- height: "100%",
- maxWidth: "800px",
- maxHeight: "95vh",
- display: "flex",
- flexDirection: "column",
- overflow: "hidden",
- position: "relative",
- },
- button: {
- backgroundColor: "#007aff", color: "white", padding: "14px 22px",
- borderRadius: "12px", border: "none", fontSize: "16px",
- fontWeight: "600", cursor: "pointer", margin: "8px 0",
- textAlign: "center", transition: "background-color .2s ease",
- },
- secondaryButton: {
- backgroundColor: '#6b7280', color: 'white'
- },
- cameraContainer: {
- position: "relative", width: "100%", height: "100%",
- backgroundColor: "black", display: "flex", alignItems: "center", justifyContent: "center",
- },
- video: {
- width: "100%", height: "100%", objectFit: "cover",
- },
- canvas: {
- position: 'absolute', top: 0, left: 0, width: '100%', height: '100%',
- pointerEvents: 'none',
- },
- cameraControls: {
- position: "absolute",
- bottom: "30px",
- left: 0,
- right: 0,
- display: "flex",
- justifyContent: "center",
- alignItems: "center",
- zIndex: 11,
- },
- backButton: {
- position: "absolute", top: "20px", left: "20px",
- backgroundColor: "rgba(0, 0, 0, 0.6)", color: "white",
- border: "none", borderRadius: "50%", width: "44px", height: "44px",
- fontSize: "24px", cursor: "pointer", zIndex: 11,
- display: "flex", alignItems: "center", justifyContent: "center",
- },
- captureButton: {
- backgroundColor: "white",
- border: "4px solid rgba(255, 255, 255, 0.3)",
- borderRadius: "50%", width: "70px", height: "70px",
- cursor: "pointer", transition: "transform 0.1s",
- },
- previewContainer: {
- display: "flex", flexDirection: "column", alignItems: "center",
- justifyContent: "center", padding: 20, flex: 1, backgroundColor: "#f8f9fa",
- },
- previewImage: {
- maxHeight: '60vh',
- maxWidth: "100%", borderRadius: 12,
- marginBottom: 20, boxShadow: "0 4px 12px rgba(0,0,0,0.15)",
- },
- loadingOverlay: {
- position: "absolute", inset: 0, background: "rgba(0,0,0,0.8)",
- display: "flex", flexDirection: "column", alignItems: "center",
- justifyContent: "center", zIndex: 20,
- },
- spinner: {
- border: "4px solid #f3f3f3", borderTop: "4px solid #007aff",
- borderRadius: "50%", width: "44px", height: "44px",
- animation: "spin 1s linear infinite",
- },
- instructionText: {
- position: "absolute", top: "80px", left: "20px", right: "20px",
- textAlign: "center", color: "white", fontSize: "14px",
- background: "rgba(0,0,0,0.7)", padding: "12px", borderRadius: "8px",
- zIndex: 12, fontWeight: "500",
- }
-};
-
-const useCamera = ({ videoRef, canvasRef }) => {
- const streamRef = useRef(null);
- const guideRectRef = useRef({ x: 0, y: 0, width: 0, height: 0, radius: 20 });
- const animationFrameId = useRef(null);
-
- const stopCameraStream = useCallback(() => {
- console.log("Stopping camera stream...");
-
- // Cancel animation frame
- if (animationFrameId.current) {
- cancelAnimationFrame(animationFrameId.current);
- animationFrameId.current = null;
- }
-
- // Stop all tracks
- if (streamRef.current) {
- streamRef.current.getTracks().forEach((track) => {
- console.log(`Stopping track: ${track.kind}`);
- track.stop();
- });
- streamRef.current = null;
- }
-
- // Clear video element
- if (videoRef.current) {
- videoRef.current.srcObject = null;
- videoRef.current.load();
- }
- }, [videoRef]);
-
- const drawCameraGuide = (ctx, rect, canvasWidth, canvasHeight) => {
- const { x, y, width, height, radius } = rect;
-
- // Clear canvas
- ctx.clearRect(0, 0, canvasWidth, canvasHeight);
-
- ctx.save();
-
- // Draw overlay
- ctx.fillStyle = "rgba(0, 0, 0, 0.5)";
- ctx.fillRect(0, 0, canvasWidth, canvasHeight);
-
- // Cut out the guide rectangle
- ctx.globalCompositeOperation = 'destination-out';
- ctx.beginPath();
- ctx.roundRect(x, y, width, height, radius);
- ctx.fill();
-
- // Reset composite operation
- ctx.globalCompositeOperation = 'source-over';
-
- // Draw corner indicators
- const cornerSize = 25;
- const cornerThickness = 4;
- ctx.strokeStyle = "#00ff88";
- ctx.lineWidth = cornerThickness;
- ctx.lineCap = "round";
-
- ctx.beginPath();
- // Top-left corner
- ctx.moveTo(x, y + cornerSize);
- ctx.lineTo(x, y);
- ctx.lineTo(x + cornerSize, y);
-
- // Top-right corner
- ctx.moveTo(x + width - cornerSize, y);
- ctx.lineTo(x + width, y);
- ctx.lineTo(x + width, y + cornerSize);
-
- // Bottom-left corner
- ctx.moveTo(x, y + height - cornerSize);
- ctx.lineTo(x, y + height);
- ctx.lineTo(x + cornerSize, y + height);
-
- // Bottom-right corner
- ctx.moveTo(x + width - cornerSize, y + height);
- ctx.lineTo(x + width, y + height);
- ctx.lineTo(x + width, y + height - cornerSize);
-
- ctx.stroke();
- ctx.restore();
- };
-
- const setupCanvasAndDrawLoop = useCallback(() => {
- const video = videoRef.current;
- const canvas = canvasRef.current;
- if (!video || !canvas) {
- console.log("Video or canvas not available for setup");
- return;
- }
-
- console.log("Setting up canvas and draw loop");
-
- const ctx = canvas.getContext("2d");
- const container = canvas.parentElement;
-
- if (!container) return;
-
- const viewportWidth = container.clientWidth;
- const viewportHeight = container.clientHeight;
-
- // Set canvas size with device pixel ratio
- const pixelRatio = window.devicePixelRatio || 1;
- canvas.width = viewportWidth * pixelRatio;
- canvas.height = viewportHeight * pixelRatio;
- canvas.style.width = viewportWidth + 'px';
- canvas.style.height = viewportHeight + 'px';
- ctx.scale(pixelRatio, pixelRatio);
-
- // Calculate guide rectangle (portrait ratio for ID cards)
- const portraitRatio = 0.63; // Width/Height ratio for ID cards
- let rectWidth = viewportWidth * 0.85;
- let rectHeight = rectWidth / portraitRatio;
-
- // Ensure it fits in viewport
- if (rectHeight > viewportHeight * 0.7) {
- rectHeight = viewportHeight * 0.7;
- rectWidth = rectHeight * portraitRatio;
- }
-
- guideRectRef.current = {
- x: (viewportWidth - rectWidth) / 2,
- y: (viewportHeight - rectHeight) / 2,
- width: rectWidth,
- height: rectHeight,
- radius: 12,
- };
-
- console.log("Guide rect:", guideRectRef.current);
-
- const drawLoop = () => {
- if (!canvas.parentElement || !streamRef.current) {
- console.log("Stopping draw loop - canvas removed or stream stopped");
- return;
- }
-
- drawCameraGuide(ctx, guideRectRef.current, viewportWidth, viewportHeight);
- animationFrameId.current = requestAnimationFrame(drawLoop);
- };
-
- drawLoop();
- }, [videoRef, canvasRef]);
-
- const startCamera = useCallback(async () => {
- console.log("Starting camera...");
-
- // Stop any existing stream first
- stopCameraStream();
-
- // Wait for cleanup
- await new Promise(resolve => setTimeout(resolve, 100));
-
- try {
- const constraints = {
- video: {
- facingMode: "environment",
- width: { ideal: 1920, max: 1920 },
- height: { ideal: 1080, max: 1080 }
- },
- audio: false,
- };
-
- console.log("Requesting camera access...");
- const newStream = await navigator.mediaDevices.getUserMedia(constraints);
- streamRef.current = newStream;
-
- console.log("Camera stream obtained");
-
- if (videoRef.current) {
- videoRef.current.srcObject = newStream;
-
- // Wait for video to be ready
- await new Promise((resolve, reject) => {
- const video = videoRef.current;
- if (!video) return reject(new Error("Video element not found"));
-
- const onLoadedMetadata = () => {
- console.log("Video metadata loaded");
- video.removeEventListener('loadedmetadata', onLoadedMetadata);
- video.removeEventListener('error', onError);
- resolve();
- };
-
- const onError = (e) => {
- console.error("Video error:", e);
- video.removeEventListener('loadedmetadata', onLoadedMetadata);
- video.removeEventListener('error', onError);
- reject(e);
- };
-
- if (video.readyState >= 1) { // HAVE_METADATA
- console.log("Video already loaded");
- resolve();
- } else {
- video.addEventListener('loadedmetadata', onLoadedMetadata);
- video.addEventListener('error', onError);
- }
-
- video.play().catch(reject);
- });
-
- // Wait a bit more for video to start playing
- await new Promise(resolve => setTimeout(resolve, 200));
-
- console.log("Setting up canvas...");
- setupCanvasAndDrawLoop();
- }
- } catch (err) {
- console.error("Failed to access camera:", err);
- alert("Tidak dapat mengakses kamera. Pastikan Anda telah memberikan izin kamera.");
- throw err;
- }
- }, [videoRef, stopCameraStream, setupCanvasAndDrawLoop]);
-
- const captureImage = useCallback(() => {
- const video = videoRef.current;
- const canvas = canvasRef.current;
-
- if (!video || video.readyState < 2) {
- console.log("Video not ready for capture");
- return null;
- }
-
- console.log("Capturing and cropping image...");
-
- const guide = guideRectRef.current;
- if (!canvas) return null;
-
- // Get actual dimensions
- const canvasRect = canvas.getBoundingClientRect();
- const videoWidth = video.videoWidth;
- const videoHeight = video.videoHeight;
-
- console.log("Video dimensions:", videoWidth, "x", videoHeight);
- console.log("Canvas dimensions:", canvasRect.width, "x", canvasRect.height);
- console.log("Guide rect:", guide);
-
- // Calculate how the video is displayed (object-fit: cover)
- const videoAspectRatio = videoWidth / videoHeight;
- const canvasAspectRatio = canvasRect.width / canvasRect.height;
-
- let displayWidth, displayHeight, offsetX = 0, offsetY = 0;
- let scaleX, scaleY;
-
- if (videoAspectRatio > canvasAspectRatio) {
- // Video is wider - it will be cropped horizontally
- displayHeight = canvasRect.height;
- displayWidth = displayHeight * videoAspectRatio;
- offsetX = (canvasRect.width - displayWidth) / 2;
- offsetY = 0;
- } else {
- // Video is taller - it will be cropped vertically
- displayWidth = canvasRect.width;
- displayHeight = displayWidth / videoAspectRatio;
- offsetX = 0;
- offsetY = (canvasRect.height - displayHeight) / 2;
- }
-
- // Calculate scale factors from displayed video to actual video
- scaleX = videoWidth / displayWidth;
- scaleY = videoHeight / displayHeight;
-
- // Calculate crop coordinates in video space
- const cropX = Math.max(0, (guide.x - offsetX) * scaleX);
- const cropY = Math.max(0, (guide.y - offsetY) * scaleY);
- const cropWidth = Math.min(videoWidth - cropX, guide.width * scaleX);
- const cropHeight = Math.min(videoHeight - cropY, guide.height * scaleY);
-
- console.log("Crop coordinates:", { cropX, cropY, cropWidth, cropHeight });
- console.log("Scale factors:", { scaleX, scaleY });
- console.log("Display info:", { displayWidth, displayHeight, offsetX, offsetY });
-
- // Create crop canvas
- const cropCanvas = document.createElement("canvas");
- cropCanvas.width = Math.round(cropWidth);
- cropCanvas.height = Math.round(cropHeight);
- const cropCtx = cropCanvas.getContext("2d");
-
- // Draw cropped portion
- cropCtx.drawImage(
- video,
- Math.round(cropX), Math.round(cropY), Math.round(cropWidth), Math.round(cropHeight),
- 0, 0, Math.round(cropWidth), Math.round(cropHeight)
- );
-
- console.log("Image cropped successfully");
- return cropCanvas.toDataURL("image/jpeg", 0.9);
- }, [videoRef, canvasRef]);
-
- useEffect(() => {
- return () => {
- console.log("useCamera cleanup");
- stopCameraStream();
- };
- }, [stopCameraStream]);
-
- return { startCamera, stopCameraStream, captureImage };
-};
-
-const CameraModal = ({ isOpen, onClose, onCapture }) => {
- const [step, setStep] = useState("camera");
- const [capturedImage, setCapturedImage] = useState(null);
- const [loading, setLoading] = useState(false);
- const [cameraReady, setCameraReady] = useState(false);
- const videoRef = useRef(null);
- const canvasRef = useRef(null);
-
- const { startCamera, stopCameraStream, captureImage } = useCamera({ videoRef, canvasRef });
-
- // Handle modal open/close and step changes
- useEffect(() => {
- if (!isOpen) {
- console.log("Modal closed - cleaning up");
- stopCameraStream();
- setStep("camera");
- setCapturedImage(null);
- setLoading(false);
- setCameraReady(false);
- return;
- }
-
- if (step === "camera" && isOpen) {
- console.log("Starting camera for step:", step);
- setCameraReady(false);
- setLoading(true);
-
- const startCameraAsync = async () => {
- try {
- await startCamera();
- setCameraReady(true);
- console.log("Camera ready");
- } catch (error) {
- console.error("Failed to start camera:", error);
- onClose();
- } finally {
- setLoading(false);
- }
- };
-
- // Small delay to ensure DOM is ready
- const timer = setTimeout(startCameraAsync, 150);
- return () => clearTimeout(timer);
- }
- }, [isOpen, step, startCamera, stopCameraStream, onClose]);
-
- const handleCaptureClick = async () => {
- if (!cameraReady) return;
-
- console.log("Capture button clicked");
- setLoading(true);
-
- try {
- const imageData = captureImage();
- if (imageData) {
- setCapturedImage(imageData);
- stopCameraStream(); // Stop camera after capture
-
- // Short delay for smooth transition
- setTimeout(() => {
- setLoading(false);
- setStep("preview");
- }, 300);
- } else {
- setLoading(false);
- alert("Gagal mengambil gambar. Silakan coba lagi.");
- }
- } catch (error) {
- console.error("Capture error:", error);
- setLoading(false);
- alert("Terjadi error saat mengambil gambar.");
- }
- };
-
- const handleRetake = () => {
- console.log("Retake button clicked");
- setCapturedImage(null);
- setLoading(false);
- setCameraReady(false);
- setStep("camera"); // This will trigger camera restart via useEffect
- };
-
- const handleUsePhoto = () => {
- if (capturedImage) {
- const file = dataURLtoFile(capturedImage, `capture-${Date.now()}.jpg`);
- onCapture(file);
- }
- // Reset state
- setStep("camera");
- setCapturedImage(null);
- setCameraReady(false);
- };
-
- const handleClose = () => {
- console.log("Close button clicked");
- stopCameraStream();
- onClose();
- };
-
- if (!isOpen) return null;
-
- return (
- <div style={cameraModalStyles.modalOverlay}>
- <style>{SPINNER_ANIMATION_STYLE}</style>
- <div style={cameraModalStyles.modalContent}>
- {step === "camera" && (
- <div style={cameraModalStyles.cameraContainer}>
- <video ref={videoRef} style={cameraModalStyles.video} autoPlay playsInline muted />
- <canvas ref={canvasRef} style={cameraModalStyles.canvas} />
- <button style={cameraModalStyles.backButton} onClick={handleClose} aria-label="Tutup">×</button>
- <div style={cameraModalStyles.instructionText}>
- Posisikan dokumen dalam area hijau dan tekan tombol untuk mengambil gambar
- </div>
- <div style={cameraModalStyles.cameraControls}>
- <button
- style={cameraModalStyles.captureButton}
- onClick={handleCaptureClick}
- disabled={!cameraReady}
- aria-label="Ambil gambar"
- />
- </div>
- </div>
- )}
-
- {step === "preview" && capturedImage && (
- <div style={cameraModalStyles.previewContainer}>
- <h3>Pratinjau Hasil Crop</h3>
- <img src={capturedImage} alt="Pratinjau hasil crop" style={cameraModalStyles.previewImage} />
- <button style={cameraModalStyles.button} onClick={handleUsePhoto}>Gunakan Foto</button>
- <button
- style={{ ...cameraModalStyles.button, ...cameraModalStyles.secondaryButton }}
- onClick={handleRetake}
- >
- Ambil Ulang
- </button>
- </div>
- )}
-
- {loading && (
- <div style={cameraModalStyles.loadingOverlay}>
- <div style={cameraModalStyles.spinner} />
- <p style={{ color: "white", marginTop: "16px" }}>
- {step === "camera" && !cameraReady ? "Memuat kamera..." : "Memproses foto..."}
- </p>
- </div>
- )}
- </div>
- </div>
- );
-};
-
-// Icons (same as before)
+// Icons
const BackIcon = () => ();
const UploadIcon = () => ();
const CameraIcon = () => ();
@@ -634,7 +20,6 @@ const ImageIcon = () => ();
const CheckCircleIcon = () => ();
-// Rest of your InputDataPage component remains the same...
export default function InputDataPage() {
const location = useLocation();
const expectation = location.state?.expectation || {};
@@ -692,7 +77,6 @@ export default function InputDataPage() {
setIsCameraOpen(false);
};
- // Rest of your component methods remain the same...
const removeFile = (index) => {
setFilesToUpload(prevFiles => prevFiles.filter((_, i) => i !== index));
};