// Face liveness detection and face comparison — web implementation
// (face-api.js + a JS-managed camera overlay, via dart:js_interop).
import 'dart:convert';
import 'dart:js_interop';
import 'dart:typed_data';

import 'package:flutter/material.dart';
// ─── JS interop bindings ───────────────────────────────────────────────────
/// Starts loading the face-api.js models in the JS layer.
///
/// Resolves quickly while loading continues in the background — see the
/// comment in `_initialize`; confirm against the JS implementation.
@JS()
external JSPromise<JSBoolean> initFaceApi();

/// Runs liveness detection via a JS-managed fullscreen overlay.
/// No containerId needed — the JS code appends the overlay to document.body
/// directly, which avoids CanvasKit iframe cross-origin restrictions.
///
/// Resolves with a `_LivenessJSResult`-shaped object on success, or null
/// when the user cancels from the overlay (inferred from the null handling
/// in `_runLiveness`).
@JS()
external JSPromise<JSAny?> runWebLiveness(JSNumber requiredBlinks);

/// Tears down the JS overlay / camera. Called both on explicit cancel and
/// on dialog dispose; presumably idempotent — confirm in the JS code.
@JS()
external void cancelWebLiveness();

/// Computes a face descriptor for the image encoded in [dataUrl].
/// Resolves to null when no usable face is found (inferred from the null
/// check in [compareFaces]).
@JS()
external JSPromise<JSAny?> getFaceDescriptorFromDataUrl(JSString dataUrl);

/// Computes a face descriptor for the image fetched from [url].
/// Unused in this file; kept for callers elsewhere.
@JS()
external JSPromise<JSAny?> getFaceDescriptorFromUrl(JSString url);

/// Returns the distance between two face descriptors
/// (0 = identical; see the conversion comment in [compareFaces]).
@JS()
external JSNumber compareFaceDescriptors(JSAny desc1, JSAny desc2);
// ─── JS result type ────────────────────────────────────────────────────────
/// Typed view over the plain JS object resolved by [runWebLiveness].
extension type _LivenessJSResult(JSObject _) implements JSObject {
  /// Data URL of the captured frame ("data:image/...;base64,<payload>").
  external JSString get dataUrl;

  /// Presumably the number of blinks the overlay counted; not read on the
  /// Dart side — confirm semantics against the JS implementation.
  external JSNumber get blinkCount;
}
// ─── Public API ─────────────────────────────────────────────────────────────
/// Result from a face liveness check.
class FaceLivenessResult {
  FaceLivenessResult({required this.imageBytes, this.imagePath});

  /// Raw bytes of the captured face image.
  final Uint8List imageBytes;

  /// Optional filesystem path of the capture; never set by this web code.
  final String? imagePath;
}
/// Run face liveness detection on web using face-api.js.
/// Shows a dialog with camera preview and blink detection.
///
/// Resolves with the captured result, or null when the user cancels.
/// [requiredBlinks] is forwarded to the JS overlay's blink counter.
Future<FaceLivenessResult?> runFaceLiveness(
  BuildContext context, {
  int requiredBlinks = 3,
}) {
  // No `await` in this body, so the function is not `async`: the
  // showDialog future is returned directly (Effective Dart: avoid
  // needlessly `async` functions).
  return showDialog<FaceLivenessResult>(
    context: context,
    // The dialog owns the flow; dismissal happens only via its buttons.
    barrierDismissible: false,
    builder: (ctx) => _WebLivenessDialog(requiredBlinks: requiredBlinks),
  );
}
/// Compare a captured face photo with enrolled face photo bytes.
/// Uses face-api.js face descriptors on web.
/// Returns similarity score 0.0 (no match) to 1.0 (perfect match).
///
/// Returns 0.0 when either image has no detectable face or any interop
/// step throws — a failed comparison is deliberately treated as no match.
Future<double> compareFaces(
  Uint8List capturedBytes,
  Uint8List enrolledBytes,
) async {
  try {
    // NOTE(review): the MIME is hard-coded to jpeg even though callers may
    // pass other formats; browsers generally sniff image payloads — confirm.
    String toDataUrl(Uint8List bytes) =>
        'data:image/jpeg;base64,${base64Encode(bytes)}';

    // The two descriptor computations are independent, so run them
    // concurrently instead of awaiting them back to back.
    final descriptors = await Future.wait([
      getFaceDescriptorFromDataUrl(toDataUrl(capturedBytes).toJS).toDart,
      getFaceDescriptorFromDataUrl(toDataUrl(enrolledBytes).toJS).toDart,
    ]);
    final desc1Result = descriptors[0];
    final desc2Result = descriptors[1];

    // A null descriptor means no face was found in that image.
    if (desc1Result == null || desc2Result == null) return 0.0;

    final distance = compareFaceDescriptors(
      desc1Result,
      desc2Result,
    ).toDartDouble;

    // face-api.js distance: 0 = identical, ~0.6 = threshold, 1+ = very different
    // Convert to similarity score: 1.0 = perfect match, 0.0 = no match
    return (1.0 - distance).clamp(0.0, 1.0);
  } catch (_) {
    // Best-effort: models not loaded, bad image data, or a JS error all
    // report as "no match" rather than propagating.
    return 0.0;
  }
}
// ─── Web Liveness Dialog ────────────────────────────────────────────────────
/// Flutter-side host for the JS liveness flow.
///
/// The JS overlay owns the camera view; this dialog only shows loading
/// progress and errors, and offers cancel/retry.
class _WebLivenessDialog extends StatefulWidget {
  const _WebLivenessDialog({required this.requiredBlinks});

  /// Number of blinks the overlay must detect before capturing a frame.
  final int requiredBlinks;

  @override
  State<_WebLivenessDialog> createState() {
    return _WebLivenessDialogState();
  }
}
/// UI phases of the Flutter-side dialog. While detection runs, the JS
/// overlay covers the screen, so the dialog itself only ever displays a
/// loading spinner or an error message.
enum _WebLivenessState { loading, error }
/// State driving the liveness flow:
/// kick off face-api loading → hand control to the JS fullscreen overlay
/// (`runWebLiveness`) → pop with the captured frame, null on cancel, or
/// show an error state with a retry option.
class _WebLivenessDialogState extends State<_WebLivenessDialog> {
  _WebLivenessState _state = _WebLivenessState.loading;
  String _statusText = 'Loading face detection models…';
  String? _errorText;

  // Guards against popping this route twice (e.g. the JS promise resolves
  // after the user already cancelled).
  bool _popped = false;

  @override
  void initState() {
    super.initState();
    // Defer until after the first frame so the Navigator/context is usable.
    WidgetsBinding.instance.addPostFrameCallback((_) => _initialize());
  }

  /// Starts face-api loading, then launches the JS liveness overlay.
  Future<void> _initialize() async {
    // initFaceApi() immediately returns true and starts background loading of
    // face-api.js (needed for compareFaces later). MediaPipe is initialized
    // inside runWebLiveness() itself, with progress shown in the JS overlay.
    try {
      await initFaceApi().toDart;
    } catch (e) {
      // Previously an initFaceApi rejection became an unhandled async
      // error; surface it in the dialog's error state instead.
      _setError('Liveness detection failed: $e');
      return;
    }
    if (!mounted) return;
    _runLiveness();
  }

  /// Runs the JS overlay and pops this dialog with its outcome.
  Future<void> _runLiveness() async {
    try {
      // runWebLiveness opens its own fullscreen JS overlay so the camera video
      // element lives in the top-level document — not inside a CanvasKit iframe.
      final result = await runWebLiveness(widget.requiredBlinks.toJS).toDart;

      // A null result means the user cancelled from the JS overlay.
      if (result == null) {
        if (mounted && !_popped) {
          _popped = true;
          Navigator.of(context).pop(null);
        }
        return;
      }

      final jsResult = result as _LivenessJSResult;
      final dataUrl = jsResult.dataUrl.toDart;
      // Strip the "data:image/...;base64," prefix; a malformed data URL
      // throws here and is reported through the catch below.
      final base64Data = dataUrl.split(',')[1];
      // base64Decode already returns a Uint8List, so it can be passed to
      // FaceLivenessResult directly — the old Uint8List.fromList copy was
      // redundant.
      final bytes = base64Decode(base64Data);

      if (mounted && !_popped) {
        _popped = true;
        Navigator.of(context).pop(FaceLivenessResult(imageBytes: bytes));
      }
    } catch (e) {
      _setError('Liveness detection failed: $e');
    }
  }

  /// Switches the dialog into the error state showing [message].
  void _setError(String message) {
    if (!mounted) return;
    setState(() {
      _state = _WebLivenessState.error;
      _errorText = message;
    });
  }

  /// Cancels the JS overlay and closes the dialog with a null result.
  void _cancel() {
    if (_popped) return;
    _popped = true;
    cancelWebLiveness();
    Navigator.of(context).pop(null);
  }

  /// Resets to the loading state and restarts the whole flow.
  void _retry() {
    setState(() {
      _state = _WebLivenessState.loading;
      _statusText = 'Loading face detection models…';
      _errorText = null;
    });
    _initialize();
  }

  @override
  void dispose() {
    // Ensure the overlay/camera is torn down even when the route is
    // removed without going through _cancel.
    cancelWebLiveness();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);
    final colors = theme.colorScheme;

    // Body is either the error message or a spinner + status line.
    Widget content;
    if (_state == _WebLivenessState.error) {
      content = Column(
        mainAxisSize: MainAxisSize.min,
        children: [
          Icon(Icons.error_outline, color: colors.error, size: 40),
          const SizedBox(height: 12),
          Text(
            _errorText ?? 'An error occurred.',
            style: theme.textTheme.bodyMedium?.copyWith(color: colors.error),
            textAlign: TextAlign.center,
          ),
        ],
      );
    } else {
      content = Column(
        mainAxisSize: MainAxisSize.min,
        children: [
          const SizedBox(
            width: 28,
            height: 28,
            child: CircularProgressIndicator(strokeWidth: 2.5),
          ),
          const SizedBox(height: 14),
          Text(
            _statusText,
            style: theme.textTheme.bodyMedium,
            textAlign: TextAlign.center,
          ),
        ],
      );
    }

    return AlertDialog(
      shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(28)),
      title: Row(
        children: [
          Icon(Icons.face, color: colors.primary),
          const SizedBox(width: 12),
          const Expanded(child: Text('Face Verification')),
        ],
      ),
      content: Padding(
        padding: const EdgeInsets.symmetric(vertical: 8),
        child: content,
      ),
      actions: [
        // Error state offers retry; loading state only allows cancel.
        if (_state == _WebLivenessState.error) ...[
          TextButton(onPressed: _cancel, child: const Text('Cancel')),
          FilledButton(onPressed: _retry, child: const Text('Retry')),
        ] else
          TextButton(onPressed: _cancel, child: const Text('Cancel')),
      ],
    );
  }
}