Vassshhh
2025-09-24 23:35:59 +07:00
parent 2fc708dcf6
commit f30087d4f2
2 changed files with 620 additions and 618 deletions

View File

@@ -0,0 +1,618 @@
import React, { useState, useRef, useEffect, useCallback } from 'react';
// --- HELPER FUNCTIONS ---
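// Converts a base64 data URL (as produced by canvas.toDataURL) into a File object,
// so a captured photo can go through the same upload path as a regular file input.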
function dataURLtoFile(dataUrl, filename) {
const arr = dataUrl.split(',');
const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(arr[1]);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n--) {
u8arr[n] = bstr.charCodeAt(n);
}
return new File([u8arr], filename, { type: mime });
}
const SPINNER_ANIMATION_STYLE = `
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
`;
const cameraModalStyles = {
modalOverlay: {
position: "fixed", top: 0, left: 0, right: 0, bottom: 0,
backgroundColor: "rgba(0, 0, 0, 0.8)", zIndex: 1000,
display: "flex", alignItems: "center", justifyContent: "center",
},
modalContent: {
backgroundColor: "#f8f9fa",
borderRadius: "16px",
width: "100%",
height: "100%",
maxWidth: "800px",
maxHeight: "95vh",
display: "flex",
flexDirection: "column",
overflow: "hidden",
position: "relative",
},
button: {
backgroundColor: "#007aff", color: "white", padding: "14px 22px",
borderRadius: "12px", border: "none", fontSize: "16px",
fontWeight: "600", cursor: "pointer", margin: "8px 0",
textAlign: "center", transition: "background-color .2s ease",
},
secondaryButton: {
backgroundColor: '#6b7280', color: 'white'
},
cameraContainer: {
position: "relative", width: "100%", height: "100%",
backgroundColor: "black", display: "flex", alignItems: "center", justifyContent: "center",
},
video: {
width: "100%", height: "100%", objectFit: "cover",
},
canvas: {
position: 'absolute', top: 0, left: 0, width: '100%', height: '100%',
pointerEvents: 'none',
},
cameraControls: {
position: "absolute",
bottom: "30px",
left: 0,
right: 0,
display: "flex",
justifyContent: "center",
alignItems: "center",
zIndex: 11,
},
backButton: {
position: "absolute", top: "20px", left: "20px",
backgroundColor: "rgba(0, 0, 0, 0.6)", color: "white",
border: "none", borderRadius: "50%", width: "44px", height: "44px",
fontSize: "24px", cursor: "pointer", zIndex: 11,
display: "flex", alignItems: "center", justifyContent: "center",
},
captureButton: {
backgroundColor: "white",
border: "4px solid rgba(255, 255, 255, 0.3)",
borderRadius: "50%", width: "70px", height: "70px",
cursor: "pointer", transition: "transform 0.1s",
},
previewContainer: {
display: "flex", flexDirection: "column", alignItems: "center",
justifyContent: "center", padding: 20, flex: 1, backgroundColor: "#f8f9fa",
},
previewImage: {
maxHeight: '60vh',
maxWidth: "100%", borderRadius: 12,
marginBottom: 20, boxShadow: "0 4px 12px rgba(0,0,0,0.15)",
},
loadingOverlay: {
position: "absolute", inset: 0, background: "rgba(0,0,0,0.8)",
display: "flex", flexDirection: "column", alignItems: "center",
justifyContent: "center", zIndex: 20,
},
spinner: {
border: "4px solid #f3f3f3", borderTop: "4px solid #007aff",
borderRadius: "50%", width: "44px", height: "44px",
animation: "spin 1s linear infinite",
},
instructionText: {
position: "absolute", top: "80px", left: "20px", right: "20px",
textAlign: "center", color: "white", fontSize: "14px",
background: "rgba(0,0,0,0.7)", padding: "12px", borderRadius: "8px",
zIndex: 12, fontWeight: "500",
}
};
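// useCamera owns the MediaStream lifecycle for the supplied <video>/<canvas> refs:
// it starts and stops the rear camera, draws the document guide overlay in a
// requestAnimationFrame loop, and crops captures to the guide rectangle.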
const useCamera = ({ videoRef, canvasRef }) => {
const streamRef = useRef(null);
const guideRectRef = useRef({ x: 0, y: 0, width: 0, height: 0, radius: 20 });
const animationFrameId = useRef(null);
const stopCameraStream = useCallback(() => {
console.log("Stopping camera stream...");
// Cancel animation frame
if (animationFrameId.current) {
cancelAnimationFrame(animationFrameId.current);
animationFrameId.current = null;
}
// Stop all tracks
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => {
console.log(`Stopping track: ${track.kind}`);
track.stop();
});
streamRef.current = null;
}
// Clear video element
if (videoRef.current) {
videoRef.current.srcObject = null;
videoRef.current.load();
}
}, [videoRef]);
const drawCameraGuide = (ctx, rect, canvasWidth, canvasHeight) => {
const { x, y, width, height, radius } = rect;
// Clear canvas
ctx.clearRect(0, 0, canvasWidth, canvasHeight);
ctx.save();
// Draw overlay
ctx.fillStyle = "rgba(0, 0, 0, 0.5)";
ctx.fillRect(0, 0, canvasWidth, canvasHeight);
// Cut out the guide rectangle
ctx.globalCompositeOperation = 'destination-out';
ctx.beginPath();
ctx.roundRect(x, y, width, height, radius);
ctx.fill();
// Reset composite operation
ctx.globalCompositeOperation = 'source-over';
// Draw corner indicators
const cornerSize = 25;
const cornerThickness = 4;
ctx.strokeStyle = "#00ff88";
ctx.lineWidth = cornerThickness;
ctx.lineCap = "round";
ctx.beginPath();
// Top-left corner
ctx.moveTo(x, y + cornerSize);
ctx.lineTo(x, y);
ctx.lineTo(x + cornerSize, y);
// Top-right corner
ctx.moveTo(x + width - cornerSize, y);
ctx.lineTo(x + width, y);
ctx.lineTo(x + width, y + cornerSize);
// Bottom-left corner
ctx.moveTo(x, y + height - cornerSize);
ctx.lineTo(x, y + height);
ctx.lineTo(x + cornerSize, y + height);
// Bottom-right corner
ctx.moveTo(x + width - cornerSize, y + height);
ctx.lineTo(x + width, y + height);
ctx.lineTo(x + width, y + height - cornerSize);
ctx.stroke();
ctx.restore();
};
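// Sizes the overlay canvas backing store by devicePixelRatio so the guide stays
// crisp on high-DPI screens, centers a portrait guide rectangle (width/height ≈ 0.63,
// roughly an ID card held upright), and starts the draw loop.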
const setupCanvasAndDrawLoop = useCallback(() => {
const video = videoRef.current;
const canvas = canvasRef.current;
if (!video || !canvas) {
console.log("Video or canvas not available for setup");
return;
}
console.log("Setting up canvas and draw loop");
const ctx = canvas.getContext("2d");
const container = canvas.parentElement;
if (!container) return;
const viewportWidth = container.clientWidth;
const viewportHeight = container.clientHeight;
// Set canvas size with device pixel ratio
const pixelRatio = window.devicePixelRatio || 1;
canvas.width = viewportWidth * pixelRatio;
canvas.height = viewportHeight * pixelRatio;
canvas.style.width = viewportWidth + 'px';
canvas.style.height = viewportHeight + 'px';
ctx.scale(pixelRatio, pixelRatio);
// Calculate guide rectangle (portrait ratio for ID cards)
const portraitRatio = 0.63; // Width/Height ratio for ID cards
let rectWidth = viewportWidth * 0.85;
let rectHeight = rectWidth / portraitRatio;
// Ensure it fits in viewport
if (rectHeight > viewportHeight * 0.7) {
rectHeight = viewportHeight * 0.7;
rectWidth = rectHeight * portraitRatio;
}
guideRectRef.current = {
x: (viewportWidth - rectWidth) / 2,
y: (viewportHeight - rectHeight) / 2,
width: rectWidth,
height: rectHeight,
radius: 12,
};
console.log("Guide rect:", guideRectRef.current);
const drawLoop = () => {
if (!canvas.parentElement || !streamRef.current) {
console.log("Stopping draw loop - canvas removed or stream stopped");
return;
}
drawCameraGuide(ctx, guideRectRef.current, viewportWidth, viewportHeight);
animationFrameId.current = requestAnimationFrame(drawLoop);
};
drawLoop();
}, [videoRef, canvasRef]);
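// Requests the rear ("environment") camera, waits for loadedmetadata so the video
// reports real dimensions, then sets up the overlay. The short setTimeout delays give
// the previous stream time to release and the video time to start rendering.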
const startCamera = useCallback(async () => {
console.log("Starting camera...");
// Stop any existing stream first
stopCameraStream();
// Wait for cleanup
await new Promise(resolve => setTimeout(resolve, 100));
try {
const constraints = {
video: {
facingMode: "environment",
width: { ideal: 1920, max: 1920 },
height: { ideal: 1080, max: 1080 }
},
audio: false,
};
console.log("Requesting camera access...");
const newStream = await navigator.mediaDevices.getUserMedia(constraints);
streamRef.current = newStream;
console.log("Camera stream obtained");
if (videoRef.current) {
videoRef.current.srcObject = newStream;
// Wait for video to be ready
await new Promise((resolve, reject) => {
const video = videoRef.current;
if (!video) return reject(new Error("Video element not found"));
const onLoadedMetadata = () => {
console.log("Video metadata loaded");
video.removeEventListener('loadedmetadata', onLoadedMetadata);
video.removeEventListener('error', onError);
resolve();
};
const onError = (e) => {
console.error("Video error:", e);
video.removeEventListener('loadedmetadata', onLoadedMetadata);
video.removeEventListener('error', onError);
reject(e);
};
if (video.readyState >= 1) { // HAVE_METADATA
console.log("Video already loaded");
resolve();
} else {
video.addEventListener('loadedmetadata', onLoadedMetadata);
video.addEventListener('error', onError);
}
video.play().catch(reject);
});
// Wait a bit more for video to start playing
await new Promise(resolve => setTimeout(resolve, 200));
console.log("Setting up canvas...");
setupCanvasAndDrawLoop();
}
} catch (err) {
console.error("Failed to access camera:", err);
alert("Tidak dapat mengakses kamera. Pastikan Anda telah memberikan izin kamera.");
throw err;
}
}, [videoRef, stopCameraStream, setupCanvasAndDrawLoop]);
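// Maps the on-screen guide rectangle back into intrinsic video pixels. Because the
// <video> uses object-fit: cover, the displayed frame is scaled and center-cropped;
// the math below recomputes that scale and offset so the saved crop matches exactly
// what the user saw inside the guide.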
const captureImage = useCallback(() => {
const video = videoRef.current;
const canvas = canvasRef.current;
if (!video || video.readyState < 2) {
console.log("Video not ready for capture");
return null;
}
console.log("Capturing and cropping image...");
const guide = guideRectRef.current;
if (!canvas) return null;
// Get actual dimensions
const canvasRect = canvas.getBoundingClientRect();
const videoWidth = video.videoWidth;
const videoHeight = video.videoHeight;
console.log("Video dimensions:", videoWidth, "x", videoHeight);
console.log("Canvas dimensions:", canvasRect.width, "x", canvasRect.height);
console.log("Guide rect:", guide);
// Calculate how the video is displayed (object-fit: cover)
const videoAspectRatio = videoWidth / videoHeight;
const canvasAspectRatio = canvasRect.width / canvasRect.height;
let displayWidth, displayHeight, offsetX = 0, offsetY = 0;
let scaleX, scaleY;
if (videoAspectRatio > canvasAspectRatio) {
// Video is wider - it will be cropped horizontally
displayHeight = canvasRect.height;
displayWidth = displayHeight * videoAspectRatio;
offsetX = (canvasRect.width - displayWidth) / 2;
offsetY = 0;
} else {
// Video is taller - it will be cropped vertically
displayWidth = canvasRect.width;
displayHeight = displayWidth / videoAspectRatio;
offsetX = 0;
offsetY = (canvasRect.height - displayHeight) / 2;
}
// Calculate scale factors from displayed video to actual video
scaleX = videoWidth / displayWidth;
scaleY = videoHeight / displayHeight;
// Calculate crop coordinates in video space
const cropX = Math.max(0, (guide.x - offsetX) * scaleX);
const cropY = Math.max(0, (guide.y - offsetY) * scaleY);
const cropWidth = Math.min(videoWidth - cropX, guide.width * scaleX);
const cropHeight = Math.min(videoHeight - cropY, guide.height * scaleY);
console.log("Crop coordinates:", { cropX, cropY, cropWidth, cropHeight });
console.log("Scale factors:", { scaleX, scaleY });
console.log("Display info:", { displayWidth, displayHeight, offsetX, offsetY });
// Create crop canvas
const cropCanvas = document.createElement("canvas");
cropCanvas.width = Math.round(cropWidth);
cropCanvas.height = Math.round(cropHeight);
const cropCtx = cropCanvas.getContext("2d");
// Draw cropped portion
cropCtx.drawImage(
video,
Math.round(cropX), Math.round(cropY), Math.round(cropWidth), Math.round(cropHeight),
0, 0, Math.round(cropWidth), Math.round(cropHeight)
);
console.log("Image cropped successfully");
return cropCanvas.toDataURL("image/jpeg", 0.9);
}, [videoRef, canvasRef]);
useEffect(() => {
return () => {
console.log("useCamera cleanup");
stopCameraStream();
};
}, [stopCameraStream]);
return { startCamera, stopCameraStream, captureImage };
};
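// Full-screen modal with two steps: "camera" (live preview with guide overlay) and
// "preview" (review the cropped shot). Props: isOpen, onClose, and onCapture(file),
// which receives the cropped image as a File.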
const CameraModal = ({ isOpen, onClose, onCapture }) => {
const [step, setStep] = useState("camera");
const [capturedImage, setCapturedImage] = useState(null);
const [loading, setLoading] = useState(false);
const [cameraReady, setCameraReady] = useState(false);
const videoRef = useRef(null);
const canvasRef = useRef(null);
const { startCamera, stopCameraStream, captureImage } = useCamera({ videoRef, canvasRef });
// Handle modal open/close and step changes
useEffect(() => {
if (!isOpen) {
console.log("Modal closed - cleaning up");
stopCameraStream();
setStep("camera");
setCapturedImage(null);
setLoading(false);
setCameraReady(false);
return;
}
if (step === "camera" && isOpen) {
console.log("Starting camera for step:", step);
setCameraReady(false);
setLoading(true);
const startCameraAsync = async () => {
try {
await startCamera();
setCameraReady(true);
console.log("Camera ready");
} catch (error) {
console.error("Failed to start camera:", error);
onClose();
} finally {
setLoading(false);
}
};
// Small delay to ensure DOM is ready
const timer = setTimeout(startCameraAsync, 150);
return () => clearTimeout(timer);
}
}, [isOpen, step, startCamera, stopCameraStream, onClose]);
const handleCaptureClick = async () => {
if (!cameraReady) return;
console.log("Capture button clicked");
setLoading(true);
try {
const imageData = captureImage();
if (imageData) {
setCapturedImage(imageData);
stopCameraStream(); // Stop camera after capture
// Short delay for smooth transition
setTimeout(() => {
setLoading(false);
setStep("preview");
}, 300);
} else {
setLoading(false);
alert("Gagal mengambil gambar. Silakan coba lagi.");
}
} catch (error) {
console.error("Capture error:", error);
setLoading(false);
alert("Terjadi error saat mengambil gambar.");
}
};
const handleRetake = () => {
console.log("Retake button clicked");
setCapturedImage(null);
setLoading(false);
setCameraReady(false);
setStep("camera"); // This will trigger camera restart via useEffect
};
const handleUsePhoto = () => {
if (capturedImage) {
const file = dataURLtoFile(capturedImage, `capture-${Date.now()}.jpg`);
onCapture(file);
}
// Reset state
setStep("camera");
setCapturedImage(null);
setCameraReady(false);
};
const handleClose = () => {
console.log("Close button clicked");
stopCameraStream();
onClose();
};
if (!isOpen) return null;
return (
<div style={cameraModalStyles.modalOverlay}>
<style>{SPINNER_ANIMATION_STYLE}</style>
<div style={cameraModalStyles.modalContent}>
{step === "camera" && (
<div style={cameraModalStyles.cameraContainer}>
<video
ref={videoRef}
autoPlay
playsInline
muted
style={cameraModalStyles.video}
/>
<canvas
ref={canvasRef}
style={cameraModalStyles.canvas}
/>
<button
onClick={handleClose}
style={cameraModalStyles.backButton}
aria-label="Tutup Kamera"
>
&times;
</button>
<div style={cameraModalStyles.instructionText}>
Posisikan dokumen dalam area hijau dan tekan tombol untuk mengambil gambar
</div>
<div style={cameraModalStyles.cameraControls}>
<button
onClick={handleCaptureClick}
style={{
...cameraModalStyles.captureButton,
opacity: cameraReady ? 1 : 0.5,
transform: loading ? 'scale(0.95)' : 'scale(1)'
}}
disabled={!cameraReady || loading}
aria-label="Ambil Gambar"
/>
</div>
</div>
)}
{step === "preview" && capturedImage && (
<div style={cameraModalStyles.previewContainer}>
<h2 style={{
fontSize: '1.25rem',
fontWeight: 'bold',
marginBottom: '1rem',
color: '#1f2937'
}}>
Pratinjau Hasil Crop
</h2>
<img
src={capturedImage}
alt="Hasil crop"
style={cameraModalStyles.previewImage}
/>
<div style={{
display: 'flex',
gap: '1rem',
justifyContent: 'center',
width: '100%',
maxWidth: '400px'
}}>
<button
style={{
...cameraModalStyles.button,
...cameraModalStyles.secondaryButton,
flex: 1
}}
onClick={handleRetake}
>
Ambil Ulang
</button>
<button
style={{
...cameraModalStyles.button,
flex: 1
}}
onClick={handleUsePhoto}
>
Gunakan Foto
</button>
</div>
</div>
)}
{loading && (
<div style={cameraModalStyles.loadingOverlay}>
<div style={cameraModalStyles.spinner} />
<p style={{
marginTop: '1rem',
color: 'white',
fontWeight: '500'
}}>
{step === "camera" && !cameraReady ? "Memuat kamera..." : "Memproses foto..."}
</p>
</div>
)}
</div>
</div>
);
};
export default CameraModal;
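// Usage sketch (assumed parent wiring, consistent with the handlers referenced in the
// page diff below — names like isCameraOpen/handleCapture are illustrative):
//
//   const [isCameraOpen, setIsCameraOpen] = useState(false);
//   const handleCapture = (file) => {
//     setFilesToUpload(prev => [...prev, file]); // e.g. queue the File for upload
//     setIsCameraOpen(false);
//   };
//   <CameraModal
//     isOpen={isCameraOpen}
//     onClose={() => setIsCameraOpen(false)}
//     onCapture={handleCapture}
//   />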

View File

@@ -1,5 +1,6 @@
import React, { useState, useRef, useEffect, useCallback } from 'react';
import { useParams, Link, useLocation } from 'react-router-dom';
import CameraModal from './CameraModal';
// --- MOCK API LOGIC ---
const fetchEntries = async (dataTypeId) => {
@@ -11,622 +12,7 @@ const fetchEntries = async (dataTypeId) => {
});
};
[~615 deleted lines — the previously inlined dataURLtoFile helper, spinner keyframes, camera modal styles, useCamera hook, and CameraModal component, identical to the new CameraModal file above]
// Icons
const BackIcon = () => (<svg xmlns="http://www.w3.org/2000/svg" className="h-6 w-6" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth="2"><path strokeLinecap="round" strokeLinejoin="round" d="M15 19l-7-7 7-7" /></svg>);
const UploadIcon = () => (<svg xmlns="http://www.w3.org/2000/svg" className="h-6 w-6 mr-2" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth="2"><path strokeLinecap="round" strokeLinejoin="round" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-8l-4-4m0 0L8 8m4-4v12" /></svg>);
const CameraIcon = () => (<svg xmlns="http://www.w3.org/2000/svg" className="h-6 w-6 mr-2" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth="2"><path strokeLinecap="round" strokeLinejoin="round" d="M3 9a2 2 0 012-2h.93a2 2 0 001.664-.89l.812-1.22A2 2 0 0110.07 4h3.86a2 2 0 011.664.89l.812 1.22A2 2 0 0018.07 7H19a2 2 0 012 2v9a2 2 0 01-2 2H5a2 2 0 01-2-2V9z" /><path strokeLinecap="round" strokeLinejoin="round" d="M15 13a3 3 0 11-6 0 3 3 0 016 0z" /></svg>);
@@ -634,7 +20,6 @@ const ImageIcon = () => (<svg xmlns="http://www.w3.org/2000/svg" className="h-16
const TrashIcon = () => (<svg xmlns="http://www.w3.org/2000/svg" className="h-5 w-5" viewBox="0 0 20 20" fill="currentColor"><path fillRule="evenodd" d="M9 2a1 1 0 00-.894.553L7.382 4H4a1 1 0 000 2v10a2 2 0 002 2h8a2 2 0 002-2V6a1 1 0 100-2h-3.382l-.724-1.447A1 1 0 0011 2H9zM7 8a1 1 0 012 0v6a1 1 0 11-2 0V8zm5-1a1 1 0 00-1 1v6a1 1 0 102 0V8a1 1 0 00-1-1z" clipRule="evenodd" /></svg>);
const CheckCircleIcon = () => (<svg xmlns="http://www.w3.org/2000/svg" className="h-5 w-5 mr-2" viewBox="0 0 20 20" fill="currentColor"><path fillRule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zm3.707-9.293a1 1 0 00-1.414-1.414L9 10.586 7.707 9.293a1 1 0 00-1.414 1.414l2 2a1 1 0 001.414 0l4-4z" clipRule="evenodd" /></svg>);
// Rest of your InputDataPage component remains the same...
export default function InputDataPage() {
const location = useLocation();
const expectation = location.state?.expectation || {};
@@ -692,7 +77,6 @@ export default function InputDataPage() {
setIsCameraOpen(false);
};
// Rest of your component methods remain the same...
const removeFile = (index) => {
setFilesToUpload(prevFiles => prevFiles.filter((_, i) => i !== index));
};