Less talk, straight to the code; the other files (the face-api.js build and the model weight files) are in the attached resources.
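(Quick note on running it: the page expects face-api.js next to it and the model weights under /models, and getUserMedia only works in a secure context, so open it via HTTPS or localhost rather than file://. A minimal serving/upload sketch is included after the code.)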
<!DOCTYPE html>
<html>
<head>
<title>H5 Liveness Detection Photo Capture</title>
<script src="./face-api.js"></script>
<script src="https://cdn.bootcdn.net/ajax/libs/vue/2.6.11/vue.min.js"></script>
</head>
<body>
<div id="app" v-cloak>
<video id="video" width="640" height="480" autoplay></video>
<canvas id="canvas"></canvas>
<div @click="resetDetection">Restart detection</div>
</div>
<script>
new Vue({
el: '#app',
data: {
detectionParams: {
detectionInterval: 1000, // detection interval (ms)
faceAngleThreshold: 0.5, // max face-angle deviation (degrees)
blinkEARThreshold: 0.25, // EAR below this counts as a blink
minBlinkCount: 2 // minimum number of blinks required
},
// state tracking
isLive: false,
blinkCount: 0,
lastBlinkTime: 0,
okfine: false, // whether liveness detection has finished
},
mounted() {
// start detection
this.startDetection();
},
methods: {
async startDetection() {
// load the model files before detecting:
// tinyFaceDetector for face detection, faceLandmark68Net for the 68 facial landmarks
// (ssdMobilenetv1 is an alternative detector; the loop below uses TinyFaceDetector)
await Promise.all([
faceapi.nets.tinyFaceDetector.loadFromUri('/models'),
faceapi.nets.faceLandmark68Net.loadFromUri('/models'),
faceapi.nets.ssdMobilenetv1.loadFromUri('/models')
]);
// initialize the camera
const video = document.getElementById('video');
navigator.mediaDevices.getUserMedia({ video: true })
.then(stream => video.srcObject = stream)
.catch(err => console.error('camera access failed:', err));
// start the detection loop
setInterval(async () => {
const detections = await faceapi.detectAllFaces(
video, new faceapi.TinyFaceDetectorOptions()
).withFaceLandmarks();
if (detections.length > 0 && !this.okfine) {
const landmarks = detections[0].landmarks;
this.checkFaceAngle(landmarks); // face-angle check (result is informational only here)
this.checkBlink(landmarks); // liveness check via blinking
if (this.isLive) {
this.takePhoto(); // take the photo automatically
this.okfine = true;
}
}
}, this.detectionParams.detectionInterval);
},
// face angle check (simplified pose estimation from the eye and nose landmarks)
checkFaceAngle(landmarks) {
const nose = landmarks.getNose();
const leftEye = landmarks.getLeftEye();
const rightEye = landmarks.getRightEye();
// estimate the tilt angle with a simplified geometric method
const dx = rightEye[3].x - leftEye[0].x;
const dy = nose[3].y - ((leftEye[4].y + rightEye[4].y) / 2);
const angleX = Math.atan2(dy, dx) * 180 / Math.PI;
return Math.abs(angleX) < this.detectionParams.faceAngleThreshold;
},
// liveness check (based on blink count)
checkBlink(landmarks) {
const eyeL = landmarks.getLeftEye();
const eyeR = landmarks.getRightEye();
const earL = this.eyeAspectRatio(eyeL);
const earR = this.eyeAspectRatio(eyeR);
const avgEAR = (earL + earR) / 2;
if (avgEAR < this.detectionParams.blinkEARThreshold &&
Date.now() - this.lastBlinkTime > 500) {
this.blinkCount++;
this.lastBlinkTime = Date.now();
console.log('blinkCount:', this.blinkCount, 'minBlinkCount:', this.detectionParams.minBlinkCount);
this.isLive = this.blinkCount >= this.detectionParams.minBlinkCount;
}
},
// compute the Eye Aspect Ratio (EAR):
// EAR = (|p2-p6| + |p3-p5|) / (2 * |p1-p4|) over the 6 eye landmarks;
// it stays roughly constant while the eye is open and drops sharply during a blink
eyeAspectRatio(eye) {
// the landmark points are {x, y} objects, so pass plain [x, y] arrays to euclideanDistance
const A = faceapi.euclideanDistance([eye[1].x, eye[1].y], [eye[5].x, eye[5].y]);
const B = faceapi.euclideanDistance([eye[2].x, eye[2].y], [eye[4].x, eye[4].y]);
const C = faceapi.euclideanDistance([eye[0].x, eye[0].y], [eye[3].x, eye[3].y]);
const ear = (A + B) / (2 * C);
// guard against a degenerate horizontal distance
if (!isFinite(ear)) {
return 0;
}
return ear;
},
// take the photo and upload it
takePhoto() {
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.getContext('2d').drawImage(video, 0, 0);
// convert to a Blob and upload
canvas.toBlob(blob => {
const formData = new FormData();
formData.append('photo', blob, 'user_photo.jpg');
console.log(formData)
// fetch('/upload', { method: 'POST', body: formData });
}, 'image/jpeg', 0.95);
},
// reset state so detection can run again from scratch
resetDetection() {
this.okfine = false;
this.isLive = false;
this.blinkCount = 0;
},
}
})
</script>
</body>
</html>
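For completeness, here is a minimal sketch of the server side this page assumes: it serves the static files (including the /models weights) and accepts the 'photo' field posted by takePhoto(). Node.js with Express and multer is my assumption here, not part of the original resources; adapt it to whatever backend you actually use.

// server.js - hypothetical Express + multer setup (not from the original post)
const express = require('express');
const multer = require('multer');

const app = express();
const upload = multer({ dest: 'uploads/' }); // uploaded photos land in ./uploads

// serve index.html, face-api.js and the /models weight files from this folder
app.use(express.static(__dirname));

// matches the commented-out fetch('/upload', { method: 'POST', body: formData })
app.post('/upload', upload.single('photo'), (req, res) => {
console.log('received', req.file.originalname, req.file.size, 'bytes ->', req.file.path);
res.json({ ok: true });
});

app.listen(3000, () => console.log('open http://localhost:3000'));

To try it: npm install express multer, run node server.js, open http://localhost:3000, allow camera access, and blink a couple of times.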