// tasq/lib/services/face_verification_mobile.dart
import 'dart:io';
import 'dart:math';
import 'dart:typed_data';
import 'package:flutter/material.dart';
import 'package:flutter_liveness_check/flutter_liveness_check.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
/// Result from a face liveness check.
class FaceLivenessResult {
  /// Raw bytes of the captured face photo, read back from disk.
  final Uint8List imageBytes;

  /// On-disk path of the captured photo, if still available.
  final String? imagePath;

  // const: all fields are final and the class is a plain value holder.
  const FaceLivenessResult({required this.imageBytes, this.imagePath});
}
/// Run face liveness detection on mobile using flutter_liveness_check.
/// Navigates to the LivenessCheckScreen and returns the captured photo.
///
/// Returns `null` when the user cancels, the screen errors out, or the
/// captured file no longer exists by the time it is read back.
///
/// [requiredBlinks] is the number of blinks the user must perform to pass
/// the challenge.
Future<FaceLivenessResult?> runFaceLiveness(
  BuildContext context, {
  int requiredBlinks = 3,
}) async {
  String? capturedPath;
  // Resolve theme values before the async gap; `context` is only used
  // before the push completes.
  final colors = Theme.of(context).colorScheme;
  final textTheme = Theme.of(context).textTheme;
  await Navigator.of(context).push(
    MaterialPageRoute(
      builder: (ctx) => LivenessCheckScreen(
        config: LivenessCheckConfig(
          callbacks: LivenessCheckCallbacks(
            onPhotoTaken: (path) {
              capturedPath = path;
              // Package never calls onSuccess in v1.0.3 — pop here
              // so the screen doesn't hang after photo capture.
              Navigator.of(ctx).pop();
            },
            // Don't pop in onCancel/onError — the package's AppBar
            // already calls Navigator.pop() after invoking these.
          ),
          // Remove the default placeholder from inside the camera circle;
          // it is shown below via customBottomWidget instead.
          placeholder: null,
          // Map the app's color scheme onto the package screen.
          theme: LivenessCheckTheme(
            backgroundColor: colors.surface,
            overlayColor: colors.surface.withAlpha(230),
            primaryColor: colors.primary,
            borderColor: colors.primary,
            textColor: colors.onSurface,
            errorColor: colors.error,
            successColor: colors.tertiary,
          ),
          settings: LivenessCheckSettings(
            requiredBlinkCount: requiredBlinks,
            requireSmile: false,
            // Navigation is handled manually in onPhotoTaken above.
            autoNavigateOnSuccess: false,
            // Must be false so that customBottomWidget is shown.
            showTryAgainButton: false,
          ),
          // Challenge instruction rendered below the camera circle.
          customBottomWidget: Padding(
            padding: const EdgeInsets.fromLTRB(24, 4, 24, 24),
            child: Text(
              'Blink $requiredBlinks times or smile naturally to continue',
              textAlign: TextAlign.center,
              style: textTheme.bodyLarge?.copyWith(color: colors.onSurface),
            ),
          ),
        ),
      ),
    ),
  );
  // No photo captured: the user backed out or the check never completed.
  if (capturedPath == null) return null;
  final file = File(capturedPath!);
  // The file may have been cleaned up between capture and this read.
  if (!await file.exists()) return null;
  final bytes = await file.readAsBytes();
  return FaceLivenessResult(imageBytes: bytes, imagePath: capturedPath);
}
/// Compare a captured face photo with enrolled face photo bytes.
///
/// Uses Google ML Kit face contour comparison.
/// Returns similarity score 0.0 (no match) to 1.0 (perfect match).
/// Any detection or I/O failure is treated as "no match" (0.0).
Future<double> compareFaces(
  Uint8List capturedBytes,
  Uint8List enrolledBytes,
) async {
  final detector = FaceDetector(
    options: FaceDetectorOptions(
      enableContours: true,
      performanceMode: FaceDetectorMode.accurate,
    ),
  );
  // ML Kit's InputImage wants file paths, so stage both byte buffers in
  // temp files. The unique suffix avoids clashes if two comparisons run
  // concurrently (the previous fixed names could overwrite each other).
  final suffix = DateTime.now().microsecondsSinceEpoch;
  final tempDir = Directory.systemTemp;
  final capturedFile = File('${tempDir.path}/face_captured_$suffix.jpg');
  final enrolledFile = File('${tempDir.path}/face_enrolled_$suffix.jpg');
  try {
    await capturedFile.writeAsBytes(capturedBytes);
    await enrolledFile.writeAsBytes(enrolledBytes);
    // Process both images.
    final capturedInput = InputImage.fromFilePath(capturedFile.path);
    final enrolledInput = InputImage.fromFilePath(enrolledFile.path);
    final capturedFaces = await detector.processImage(capturedInput);
    final enrolledFaces = await detector.processImage(enrolledInput);
    if (capturedFaces.isEmpty || enrolledFaces.isEmpty) return 0.0;
    return _compareContours(capturedFaces.first, enrolledFaces.first);
  } catch (_) {
    // Best-effort: report any failure as no match rather than crashing.
    return 0.0;
  } finally {
    // Cleanup runs on every path (success, no-face, or error) so the temp
    // files never leak; previously they leaked when processImage threw.
    await capturedFile.delete().catchError((_) => capturedFile);
    await enrolledFile.delete().catchError((_) => enrolledFile);
    await detector.close();
  }
}
/// Average similarity across the major face contours shared by both faces.
///
/// Only contour types that are present with at least one point on *both*
/// faces contribute to the score; if nothing is comparable, returns 0.0.
double _compareContours(Face face1, Face face2) {
  const contourTypes = [
    FaceContourType.face,
    FaceContourType.leftEye,
    FaceContourType.rightEye,
    FaceContourType.noseBridge,
    FaceContourType.noseBottom,
    FaceContourType.upperLipTop,
    FaceContourType.lowerLipBottom,
  ];
  final scores = <double>[];
  for (final type in contourTypes) {
    final a = face1.contours[type];
    final b = face2.contours[type];
    // Skip contours missing or empty on either face.
    if (a == null || b == null) continue;
    if (a.points.isEmpty || b.points.isEmpty) continue;
    scores.add(_comparePointSets(a.points, b.points));
  }
  if (scores.isEmpty) return 0.0;
  return scores.reduce((x, y) => x + y) / scores.length;
}
/// Similarity between two contour point lists.
///
/// Both lists are normalized to the unit square, then the mean Euclidean
/// distance over the first `min` pairs is mapped to a score in [0, 1]:
/// zero distance gives 1.0, and a mean distance of 0.4 or more clamps to 0.0.
double _comparePointSets(List<Point<int>> points1, List<Point<int>> points2) {
  final a = _normalizePoints(points1);
  final b = _normalizePoints(points2);
  final pairs = min(a.length, b.length);
  if (pairs == 0) return 0.0;
  var sum = 0.0;
  for (var i = 0; i < pairs; i++) {
    // Euclidean distance between corresponding normalized points.
    sum += a[i].distanceTo(b[i]);
  }
  // Convert distance to similarity: 0 distance → 1.0 score.
  return max(0.0, 1.0 - (sum / pairs) * 2.5);
}
/// Scales integer contour points into the unit square `[0,1] × [0,1]`.
///
/// The bounding box of the input is mapped so its min corner becomes (0, 0)
/// and its max corner becomes (1, 1), making contours comparable regardless
/// of absolute position or scale in the image. Returns an empty list for
/// empty input or for a degenerate (zero-width or zero-height) bounding box.
List<Point<double>> _normalizePoints(List<Point<int>> points) {
  if (points.isEmpty) return [];
  var minX = points.first.x.toDouble();
  var maxX = minX;
  var minY = points.first.y.toDouble();
  var maxY = minY;
  for (final p in points.skip(1)) {
    final x = p.x.toDouble();
    final y = p.y.toDouble();
    if (x < minX) minX = x;
    if (x > maxX) maxX = x;
    if (y < minY) minY = y;
    if (y > maxY) maxY = y;
  }
  final width = maxX - minX;
  final height = maxY - minY;
  // Degenerate contours (all points collinear along an axis) can't be
  // normalized to the unit square.
  if (width == 0 || height == 0) return [];
  return [
    for (final p in points)
      Point<double>((p.x - minX) / width, (p.y - minY) / height),
  ];
}