// face_interop.js — TasQ web face verification bridge.
//
// Liveness : MediaPipe FaceLandmarker (blend shapes) — blink OR smile.
// Comparison: face-api.js (128-D face descriptors via faceRecognitionNet).
//
// Pinned CDN versions — update these constants when upgrading:
//   @mediapipe/tasks-vision 0.10.21
//   @vladmandic/face-api 1
|
'use strict';

// ── Shared state ─────────────────────────────────────────────────────────────
// True while a liveness overlay session is active. Set at the top of
// runWebLiveness(); cleared by the detection loop's finish handler and by
// cancelWebLiveness(). Checked between async steps to abort early on cancel.
let _livenessRunning = false;

// The overlay <div> currently attached to document.body, if any —
// for cancelWebLiveness() to clean up eagerly
let _activeOverlay = null;

// ── face-api.js — lazy-loaded for face descriptor comparison ─────────────────
// Set once the face-api script and all three models have loaded successfully.
let _faceApiLoaded = false;
// In-flight loader promise; reset to null on failure so a later call retries.
let _faceApiPromise = null;
|
|
|
|
/// Lazily load the face-api.js script (if not already on the page) and its
/// three models (tiny detector, 68-point landmarks, recognition net).
/// Safe to call concurrently: all callers share one in-flight promise, which
/// is reset to null on failure so a subsequent call can retry.
/// Throws if the script or any model fails to load.
async function _ensureFaceApi() {
  if (_faceApiLoaded) return;
  if (!_faceApiPromise) {
    _faceApiPromise = (async () => {
      if (!window.faceapi) {
        // Inject the pinned CDN script and wait for it to attach window.faceapi.
        await new Promise((res, rej) => {
          const s = document.createElement('script');
          s.src = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api@1/dist/face-api.js';
          s.onload = res;
          s.onerror = () => rej(new Error('Failed to load face-api.js'));
          document.head.appendChild(s);
        });
      }
      const MODEL_URL = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api@1/model/';
      // The three model downloads are independent — fetch them in parallel
      // instead of serially to cut cold-start latency.
      await Promise.all([
        faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
        faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
        faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL),
      ]);
      _faceApiLoaded = true;
    })().catch(e => { _faceApiPromise = null; throw e; });
  }
  await _faceApiPromise;
}
|
|
|
|
/// Called by Dart when the verification dialog opens. Kicks off the face-api
/// download in the background so descriptors are ready for comparison once
/// liveness succeeds.
async function initFaceApi() {
  // Fire-and-forget warm-up: failures are only logged here, because the
  // getFaceDescriptor* entry points await the loader themselves and will
  // surface any real error to the caller.
  const warmup = _ensureFaceApi();
  warmup.catch(e => console.warn('[face-api bg]', e));
  return true; // always succeeds; real errors surface in getFaceDescriptor*
}
|
|
|
|
// ── MediaPipe FaceLandmarker — lazy-loaded for liveness ──────────────────────
// Update _MP_VER when a newer release is available on jsDelivr.
const _MP_VER = '0.10.21';
const _MP_CDN = `https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@${_MP_VER}`;
// Landmarker model asset (float16 variant) hosted on Google's model bucket.
const _MP_MODEL = 'https://storage.googleapis.com/mediapipe-models/' +
  'face_landmarker/face_landmarker/float16/1/face_landmarker.task';

// Singleton landmarker instance; created on first _ensureMediaPipe() call.
let _faceLandmarker = null;
// In-flight init promise; reset to null on failure so callers can retry.
let _mpPromise = null;
|
|
|
|
/// Lazily initialize the MediaPipe FaceLandmarker singleton.
///
/// Concurrent callers share one in-flight promise; on failure the promise is
/// reset to null so a later call can retry from scratch.
///
/// @param {function(string)=} onStatus  Optional progress callback for UI
///     status text (called at the two slow phases: download and model init).
/// Throws if both the GPU and CPU delegates fail to initialize.
async function _ensureMediaPipe(onStatus) {
  if (_faceLandmarker) return;
  if (!_mpPromise) {
    _mpPromise = (async () => {
      onStatus?.('Loading face detection…');
      // Dynamic import of the pinned ESM bundle from jsDelivr.
      const { FaceLandmarker, FilesetResolver } =
        await import(`${_MP_CDN}/vision_bundle.mjs`);

      onStatus?.('Initializing model…');
      const fileset = await FilesetResolver.forVisionTasks(`${_MP_CDN}/wasm`);

      // Blend shapes are required: liveness reads eyeBlink*/mouthSmile* scores.
      const opts = {
        outputFaceBlendshapes: true,
        runningMode: 'VIDEO',
        numFaces: 1,
      };
      const mkLandmarker = (delegate) =>
        FaceLandmarker.createFromOptions(fileset, {
          baseOptions: { modelAssetPath: _MP_MODEL, delegate },
          ...opts,
        });

      // Prefer GPU for throughput; fall back to CPU if unavailable.
      try {
        _faceLandmarker = await mkLandmarker('GPU');
      } catch {
        _faceLandmarker = await mkLandmarker('CPU');
      }
    })().catch(e => { _mpPromise = null; throw e; });
  }
  await _mpPromise;
}
|
|
|
|
// ── Liveness overlay — MediaPipe, blink OR smile ─────────────────────────────
/// Creates a fullscreen overlay appended to document.body so that both the
/// <video> element and MediaPipe's WebAssembly context live in the top-level
/// browsing context — NOT inside Flutter's CanvasKit cross-origin iframe —
/// which is the root cause that prevented face-api EAR blink detection on web.
///
/// @param {number} requiredBlinks  Number of blinks that completes liveness.
/// Resolves with { dataUrl, blinkCount } on success, or null on cancel/error.
async function runWebLiveness(requiredBlinks) {
  _livenessRunning = true;

  // Instruction shown while detecting; also restored after the "no face"
  // prompt. Interpolates requiredBlinks (was previously hard-coded "3").
  const detectInstruction = `Blink ${requiredBlinks} times · or · Smile with teeth`;

  // ── Inject spinner CSS once ────────────────────────────────────────────────
  if (!document.getElementById('_tq_kf')) {
    const s = document.createElement('style');
    s.id = '_tq_kf';
    s.textContent = '@keyframes _tqspin{to{transform:rotate(360deg)}}';
    document.head.appendChild(s);
  }

  // ── Build overlay ──────────────────────────────────────────────────────────
  const overlay = _el('div', null,
      'position:fixed;inset:0;z-index:99999;background:rgba(0,0,0,0.92);' +
      'display:flex;flex-direction:column;align-items:center;' +
      'justify-content:center;gap:16px;font-family:Roboto,sans-serif;');

  const titleEl = _el('h2', 'Face Verification',
      'color:#fff;margin:0;font-size:20px;font-weight:500;letter-spacing:.3px;');

  // Status line (shown during model/camera loading)
  const statusEl = _el('p', '',
      'color:rgba(255,255,255,0.5);margin:0;font-size:13px;min-height:18px;');

  // Camera box
  const cameraWrap = _el('div', null,
      'width:320px;height:240px;border-radius:14px;overflow:hidden;' +
      'background:#0a0a0a;border:2px solid rgba(255,255,255,0.18);' +
      'position:relative;flex-shrink:0;');

  const video = document.createElement('video');
  video.autoplay = true;
  video.muted = true;
  video.setAttribute('playsinline', '');
  // Mirror so user sees themselves naturally; raw frame for capture is un-mirrored.
  video.style.cssText =
      'width:100%;height:100%;object-fit:cover;transform:scaleX(-1);display:none;';
  cameraWrap.appendChild(video);

  // Spinner inside camera box (shown while loading)
  const spinnerWrap = _el('div', null,
      'position:absolute;inset:0;display:flex;align-items:center;' +
      'justify-content:center;background:#0a0a0a;');
  const spinnerRing = _el('div', null,
      'width:38px;height:38px;border-radius:50%;' +
      'border:3px solid rgba(255,255,255,0.12);border-top-color:#4A6FA5;' +
      'animation:_tqspin 0.75s linear infinite;');
  spinnerWrap.appendChild(spinnerRing);
  cameraWrap.appendChild(spinnerWrap);

  // Instruction text
  const instrEl = _el('p', 'Initializing…',
      'color:rgba(255,255,255,0.88);margin:0;font-size:15px;text-align:center;' +
      'max-width:310px;line-height:1.5;');

  // Blink progress (hidden until camera is live)
  const progressGroup = _el('div', null,
      'display:none;flex-direction:column;align-items:center;gap:6px;width:280px;');
  const progressTrack = _el('div', null,
      'width:100%;background:rgba(255,255,255,0.15);border-radius:4px;height:6px;overflow:hidden;');
  const progressBar = _el('div', null,
      'height:100%;width:0%;background:#4A6FA5;transition:width 0.18s ease;border-radius:4px;');
  progressTrack.appendChild(progressBar);
  const blinkLabelEl = _el('p', '',
      'color:rgba(255,255,255,0.5);margin:0;font-size:13px;');
  progressGroup.append(progressTrack, blinkLabelEl);

  // Smile badge (hidden until smile detected)
  const smileBadge = _el('div', '😊 Smile detected!',
      'display:none;color:#4ade80;font-size:14px;font-weight:500;');

  // Cancel button
  const cancelBtn = _el('button', 'Cancel',
      'padding:9px 30px;border-radius:24px;' +
      'border:1px solid rgba(255,255,255,0.35);background:transparent;' +
      'color:#fff;cursor:pointer;font-size:14px;margin-top:4px;');

  overlay.append(titleEl, statusEl, cameraWrap, instrEl, progressGroup, smileBadge, cancelBtn);
  document.body.appendChild(overlay);
  _activeOverlay = overlay;

  // Helper to show status under title
  const setStatus = (msg) => { statusEl.textContent = msg; };

  // Shared terminal handler for unrecoverable setup errors: shows the message
  // in red and resolves null once the user dismisses via Cancel. Also resets
  // _livenessRunning so module state stays consistent on the error path.
  const failAndAwaitCancel = (message) => {
    instrEl.textContent = message;
    instrEl.style.color = '#ff6b6b';
    return new Promise(res => {
      cancelBtn.onclick = () => {
        _livenessRunning = false;
        _finishLiveness(overlay, null, res);
      };
    });
  };

  // ── Initialize MediaPipe ───────────────────────────────────────────────────
  try {
    await _ensureMediaPipe(setStatus);
    statusEl.textContent = '';
  } catch (err) {
    console.error('[MediaPipe init]', err);
    return failAndAwaitCancel('Failed to load face detection. Please retry.');
  }

  // Dart may have cancelled while the model was loading.
  if (!_livenessRunning) { _finishLiveness(overlay, null, () => {}); return null; }

  // ── Start camera ───────────────────────────────────────────────────────────
  setStatus('Starting camera…');
  let stream;
  try {
    stream = await navigator.mediaDevices.getUserMedia({
      video: { facingMode: 'user', width: { ideal: 640 }, height: { ideal: 480 } },
    });
    video.srcObject = stream;
    await video.play();
    video.style.display = '';
    spinnerWrap.style.display = 'none';
    statusEl.textContent = '';
  } catch (err) {
    console.error('[Camera]', err);
    return failAndAwaitCancel('Camera access denied.\nPlease allow camera access and retry.');
  }

  // Dart may have cancelled while the permission prompt was up.
  if (!_livenessRunning) {
    if (stream) stream.getTracks().forEach(t => t.stop());
    _finishLiveness(overlay, null, () => {});
    return null;
  }

  // ── Detection loop ─────────────────────────────────────────────────────────
  instrEl.textContent = detectInstruction;
  blinkLabelEl.textContent = `Blinks: 0 / ${requiredBlinks}`;
  progressGroup.style.display = 'flex';

  // Blend-shape thresholds
  const BLINK_CLOSE = 0.45; // eyeBlinkLeft/Right above → eyes closing
  const BLINK_OPEN  = 0.28; // eyeBlinkLeft/Right below → eyes reopened (hysteresis)
  const SMILE_MIN   = 0.60; // mouthSmileLeft/Right above → smiling
  const SMILE_HOLD  = 6;    // consecutive frames smile must persist to confirm

  return new Promise(resolve => {
    let blinkCount = 0;
    let smileFrames = 0;
    let eyeWasClosed = false;
    let lastVideoTime = -1;
    let noFaceFrames = 0;
    let done = false;

    // Single exit point: stops the camera, tears down the overlay, and
    // resolves with `result` ({ dataUrl, blinkCount } or null). Idempotent.
    const finish = (result) => {
      if (done) return;
      done = true;
      _livenessRunning = false;
      if (stream) stream.getTracks().forEach(t => t.stop());
      _finishLiveness(overlay, result, resolve);
    };

    cancelBtn.onclick = () => finish(null);

    const detect = () => {
      if (!_livenessRunning || done) return;

      // Only run inference on a genuinely new frame (currentTime advanced).
      if (video.readyState >= 2 && video.currentTime !== lastVideoTime) {
        lastVideoTime = video.currentTime;
        try {
          const result = _faceLandmarker.detectForVideo(video, performance.now());

          if (result.faceBlendshapes && result.faceBlendshapes.length > 0) {
            noFaceFrames = 0;
            const cats = result.faceBlendshapes[0].categories;
            const bs = (name) => cats.find(c => c.categoryName === name)?.score ?? 0;

            const avgBlink = (bs('eyeBlinkLeft') + bs('eyeBlinkRight')) / 2;
            const avgSmile = (bs('mouthSmileLeft') + bs('mouthSmileRight')) / 2;

            // ── Blink: hysteresis — count a blink only on the close→reopen
            //    transition, so one long closure can't register twice. ────────
            if (avgBlink > BLINK_CLOSE && !eyeWasClosed) {
              eyeWasClosed = true;
            } else if (avgBlink < BLINK_OPEN && eyeWasClosed) {
              eyeWasClosed = false;
              blinkCount++;
              blinkLabelEl.textContent =
                  `Blinks: ${blinkCount} / ${requiredBlinks}`;
              progressBar.style.width =
                  `${Math.min((blinkCount / requiredBlinks) * 100, 100)}%`;
              window.dispatchEvent(new CustomEvent('faceLivenessProgress',
                  { detail: { blinkCount, required: requiredBlinks } }));
              if (blinkCount >= requiredBlinks) {
                instrEl.textContent = '✓ Liveness confirmed!';
                finish({ dataUrl: _captureFrame(video), blinkCount });
                return;
              }
            }

            // ── Smile: must persist SMILE_HOLD frames; brief dips decay the
            //    counter instead of resetting it outright. ────────────────────
            if (avgSmile > SMILE_MIN) {
              smileFrames++;
              smileBadge.style.display = '';
              if (smileFrames >= SMILE_HOLD) {
                instrEl.textContent = '✓ Liveness confirmed!';
                finish({ dataUrl: _captureFrame(video), blinkCount });
                return;
              }
            } else {
              smileFrames = Math.max(0, smileFrames - 1);
              if (smileFrames === 0) smileBadge.style.display = 'none';
            }

            // Restore instruction after "no face" message
            if (instrEl.textContent.startsWith('Position')) {
              instrEl.textContent = detectInstruction;
            }
          } else {
            noFaceFrames++;
            if (noFaceFrames > 40) {
              instrEl.textContent = 'Position your face in the camera';
            }
          }
        } catch (e) {
          // A single bad frame should not kill the loop; log and keep going.
          console.error('[detect frame]', e);
        }
      }

      requestAnimationFrame(detect);
    };

    requestAnimationFrame(detect);
  });
}
|
|
|
|
/// Cancel any running liveness check. Removes the overlay immediately so the
/// user is not left with an orphaned overlay if Dart disposes the dialog.
function cancelWebLiveness() {
  _livenessRunning = false;
  // Guard clauses: nothing to tear down, or the overlay was already detached.
  if (!_activeOverlay) return;
  if (!document.body.contains(_activeOverlay)) return;
  document.body.removeChild(_activeOverlay);
  _activeOverlay = null;
}
|
|
|
|
// ── Face comparison — face-api.js ─────────────────────────────────────────────

/// Shared implementation for the two public descriptor entry points: fetch an
/// image via faceapi.fetchImage, run tiny detector + landmarks + recognition
/// net, and return the 128-D descriptor as a plain number[] — or null when no
/// face is found or on any error. `label` only tags the console.error message
/// so the two wrappers remain distinguishable in logs.
async function _faceDescriptorFrom(imageSource, label) {
  await _ensureFaceApi();
  try {
    const img = await faceapi.fetchImage(imageSource);
    const det = await faceapi
        .detectSingleFace(img, new faceapi.TinyFaceDetectorOptions())
        .withFaceLandmarks()
        .withFaceDescriptor();
    return det ? Array.from(det.descriptor) : null;
  } catch (e) {
    console.error(`[getFaceDescriptor/${label}]`, e);
    return null;
  }
}

/// Descriptor for a captured frame (data: URL, e.g. from _captureFrame).
async function getFaceDescriptorFromDataUrl(imageDataUrl) {
  return _faceDescriptorFrom(imageDataUrl, 'dataUrl');
}

/// Descriptor for a remote image URL (fetched via faceapi.fetchImage).
async function getFaceDescriptorFromUrl(imageUrl) {
  return _faceDescriptorFrom(imageUrl, 'url');
}
|
|
|
|
/// Euclidean distance between two face descriptors (lower = more similar).
/// Returns 1.0 — the "no match" sentinel — for null or mismatched-length
/// inputs.
///
/// Computed inline rather than via faceapi.euclideanDistance so comparison
/// cannot throw if it is called before the lazily-loaded face-api script has
/// attached the `faceapi` global. The result is identical for valid inputs.
function compareFaceDescriptors(desc1, desc2) {
  if (!desc1 || !desc2 || desc1.length !== desc2.length) return 1.0;
  let sumSq = 0;
  for (let i = 0; i < desc1.length; i++) {
    const d = desc1[i] - desc2[i];
    sumSq += d * d;
  }
  return Math.sqrt(sumSq);
}
|
|
|
|
// ── DOM helpers ───────────────────────────────────────────────────────────────

/// Build an element of `tag`, optionally applying inline CSS and text
/// content. Returns the new, not-yet-attached element.
function _el(tag, text, css) {
  const node = document.createElement(tag);
  if (css) node.style.cssText = css;
  if (text) node.textContent = text;
  return node;
}
|
|
|
|
/// Snapshot the current (un-mirrored) video frame to a JPEG data URL.
/// Falls back to 640×480 dimensions if the video metadata is unavailable.
function _captureFrame(video) {
  const width = video.videoWidth || 640;
  const height = video.videoHeight || 480;
  const canvas = document.createElement('canvas');
  canvas.width = width;
  canvas.height = height;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(video, 0, 0);
  return canvas.toDataURL('image/jpeg', 0.85);
}
|
|
|
|
/// Tear down a liveness session: clears the module-level overlay handle,
/// detaches the overlay from the DOM (if still attached), and resolves the
/// caller's promise with `result` ({ dataUrl, blinkCount } or null).
function _finishLiveness(overlay, result, resolve) {
  // Reset the session flag here so every exit path leaves consistent module
  // state — the init/camera error-path cancel handlers call this function
  // directly and previously left _livenessRunning stuck at true.
  _livenessRunning = false;
  _activeOverlay = null;
  if (document.body.contains(overlay)) document.body.removeChild(overlay);
  resolve(result);
}
|
|
|