/* main.js - websnap
 *
 * Fullscreen webcam app: watches the user's hand via MediaPipe HandLandmarker
 * and downloads a full-resolution PNG snapshot whenever a "peace sign"
 * (index + middle up, ring + pinky down, fingers spread) is held steadily.
 */
import { HandLandmarker, FilesetResolver } from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest";

/* --- DOM elements (assumed present in the host HTML — TODO confirm ids) --- */
const video = document.getElementById("video");
const canvas = document.getElementById("canvas");
const overlay = document.getElementById("overlay");
const octx = overlay.getContext("2d");
const ctx = canvas.getContext("2d");
const flash = document.getElementById("flash");

/* --- capture state --- */
let lastShot = 0;                 // Date.now() of the last photo, drives the cooldown
let peaceFrames = 0;              // consecutive frames the peace sign has been held
const PEACE_FRAMES_REQUIRED = 6;  /* ~100 ms at 60 fps */
const SHOT_COOLDOWN_MS = 2500;    // minimum gap between photos

/* Request fullscreen, tolerating vendor-prefixed APIs (older WebKit/Gecko). */
function requestFS() {
  const el = document.documentElement;
  (el.requestFullscreen || el.webkitRequestFullscreen || el.mozRequestFullScreen)?.call(el);
}
requestFS();
/* Browsers typically only honour fullscreen requests inside a user gesture,
   so retry on the first click/tap and on every double-click. */
document.addEventListener("click", requestFS, { once: true });
document.addEventListener("touchend", requestFS, { once: true });
document.addEventListener("dblclick", requestFS);

/* --- camera setup --- */
const stream = await navigator.mediaDevices.getUserMedia({
  /* ideal res: 4096x2160 ; 4k */
  video: { width: { ideal: 4096 }, height: { ideal: 2160 }, facingMode: "user" }
});
video.srcObject = stream;
/* FIX: explicitly start playback — assigning srcObject alone only renders
   frames if the <video> element has the autoplay attribute. play() is a
   no-op when playback has already begun. */
try {
  await video.play();
} catch {
  /* autoplay policy may defer playback until a user gesture; the detect
     loop below simply waits until readyState >= 2 */
}

/* --- MediaPipe hand landmarker --- */
const vision = await FilesetResolver.forVisionTasks(
  "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision/wasm"
);
const handLandmarker = await HandLandmarker.createFromOptions(vision, {
  baseOptions: {
    modelAssetPath: "https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task",
    delegate: "GPU" /* faster inference → more consistent detections */
  },
  runningMode: "VIDEO",
  numHands: 1,
  minHandDetectionConfidence: 0.3, /* default 0.5 — loosens initial detection */
  minHandPresenceConfidence: 0.3,  /* default 0.5 — keeps tracking through partial occlusion */
  minTrackingConfidence: 0.3       /* default 0.5 — less likely to drop the hand mid-gesture */
});

/* Flash overlay: add the class, then remove it after two animation frames so
   the CSS effect can retrigger on the next shot. */
function triggerFlash() {
  flash.classList.add("pop");
  requestAnimationFrame(() => requestAnimationFrame(() => flash.classList.remove("pop")));
}

/* FIX: the original called the FileSaver.js global `saveAs`, which is never
   imported or defined in this module — each capture would throw
   ReferenceError unless the host page happened to load FileSaver. Use the
   standard object-URL + <a download> pattern instead (same user-visible
   result, no external dependency). */
function downloadBlob(blob, filename) {
  const url = URL.createObjectURL(blob);
  const a = document.createElement("a");
  a.href = url;
  a.download = filename;
  a.click();
  URL.revokeObjectURL(url);
}

/* Capture the current video frame at native resolution and download it as PNG. */
function takePhoto() {
  triggerFlash();
  const w = video.videoWidth;
  const h = video.videoHeight;
  canvas.width = w;
  canvas.height = h;
  ctx.drawImage(video, 0, 0, w, h);
  canvas.toBlob((blob) => {
    if (!blob) return; // toBlob yields null on failure (e.g. zero-sized canvas)
    downloadBlob(blob, "websnap-" + Date.now() + ".png");
  }, "image/png");
}

/* Per-frame loop: run hand detection on the video and fire takePhoto() when a
   peace sign has been held for PEACE_FRAMES_REQUIRED consecutive frames.
   `timestamp` is the rAF DOMHighResTimeStamp, which detectForVideo requires
   to be monotonically increasing. */
function detectLoop(timestamp) {
  /* keep the overlay canvas backing store in sync with its CSS size */
  const dw = overlay.offsetWidth;
  const dh = overlay.offsetHeight;
  if (overlay.width !== dw || overlay.height !== dh) {
    overlay.width = dw;
    overlay.height = dh;
  }
  octx.clearRect(0, 0, dw, dh);

  /* wait until the video has decodable frames */
  if (video.readyState < 2) {
    requestAnimationFrame(detectLoop);
    return;
  }

  const result = handLandmarker.detectForVideo(video, timestamp);
  if (result.landmarks.length) {
    const lm = result.landmarks[0];
    /* hand scale: wrist(0) → middle MCP(9) distance in normalised coords.
       normalising against hand size makes thresholds work at any distance */
    const scale = Math.hypot(lm[9].x - lm[0].x, lm[9].y - lm[0].y);
    const minBend = scale * 0.5;
    const fingerUp = (tip, pip) => (pip.y - tip.y) > minBend;
    const fingerDown = (tip, pip) => (tip.y - pip.y) > -minBend * 0.3; /* relaxed */
    const indexUp = fingerUp(lm[8], lm[6]);
    const middleUp = fingerUp(lm[12], lm[10]);
    const ringDown = fingerDown(lm[16], lm[14]);
    const pinkyDown = fingerDown(lm[20], lm[18]);
    /* index↔middle tip gap, normalised so it works at any distance */
    const fingerGap = Math.hypot(lm[8].x - lm[12].x, lm[8].y - lm[12].y) / scale;
    const peace = indexUp && middleUp && ringDown && pinkyDown && fingerGap > 0.35;

    /* require gesture to hold for several frames to kill noisy false positives */
    if (peace) {
      peaceFrames++;
      if (peaceFrames >= PEACE_FRAMES_REQUIRED && Date.now() - lastShot > SHOT_COOLDOWN_MS) {
        takePhoto();
        lastShot = Date.now();
        peaceFrames = 0;
      }
    } else {
      peaceFrames = 0;
    }
  }
  requestAnimationFrame(detectLoop);
}
requestAnimationFrame(detectLoop);