The face detection feature of WeChat Mini Programs, combined with Bluetooth and an ESP32, can be used to build some fun toys.
This article covers only the Mini Program's face detection part.
1. Face detection uses the camera, so camera use must be declared in the Mini Program's user privacy settings.
After the privacy declaration is updated it still has to pass review, which takes about a day.
2. The permissions must also be declared in app.json:
"permission":{
"scope.bluetooth":{
"desc": "获取蓝牙用于硬件链接"
},
"scope.camera":{
"desc": "获取摄像头用于人脸检测"
}
},
Without this declaration, debugging on a real device does not work.
The developer-tools simulator will keep reporting an error like
invalid app.json permission["scope.bluetooth"] / app.json permission["scope.camera"]
which can be ignored.
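Besides the app.json declaration, the camera scope may still be unauthorized at runtime on a user's phone. Below is a minimal sketch (my own addition, not from the original project) that checks and requests camera authorization with the standard wx.getSetting / wx.authorize APIs before starting face detection:

// Sketch only: ensureCameraAuth is a hypothetical helper, not part of the original code.
function ensureCameraAuth(onReady) {
  wx.getSetting({
    success(res) {
      if (res.authSetting['scope.camera']) {
        onReady(); // camera already authorized
        return;
      }
      wx.authorize({
        scope: 'scope.camera',
        success: onReady,
        fail() {
          // the user refused; wx.openSetting() can reopen the settings page, but only from a user tap
          wx.showModal({ title: 'Notice', content: 'Camera permission is required for face detection' });
        }
      });
    }
  });
}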
Now straight to the code.
face.wxml
<block>
<view class="work-log" style="height: 300rpx;" ></view>
<view class="work-log" style="height: 200rpx;" >
<view class="work-eyeview"><view class="work-eye" style="height: {{eyeLetfHeight}};width:{{eyeLetfWidth}}"></view></view>
<view class="work-eyeview"><view class="work-eye" style="height: {{eyeRightHeight}};width:{{eyeRightWidth}}"></view></view>
</view>
</block>
face.wxss
page{
width: 100%;
position: fixed;
top:0;
left:0;
background-color: black;
}
.work-log {
width: 96%;
flex-direction:row;
display: flex;
margin-top: 15rpx;
margin-bottom: 5rpx;
margin-left: 2%;
}
.work-eyeview {
width: 50%;
height: 100%;
background-color: black;
display: flex;
justify-content: center;
align-items: center;
}
.work-eye {
width: 30%;
height: 100%;
box-shadow: 4px 4px 15px #c0bfbf;
background-color: #eef1f7;
border-radius: 10rpx;
}
face.json
{
"disableScroll": true,
"renderer": "webview",
"navigationBarTitleText": "人脸三维识别",
"pageOrientation": "auto",
"navigationStyle":"custom"
}
face.js
Component({
session: undefined, // global VKSession object
data:{
originx:"1%",
originy:"1%",
eyeLeftHeight:"100%",
eyeLeftWidth:"30%",
eyeRightHeight:"100%",
eyeRightWidth:"30%"
},
methods: {
onReady(){
// initialize the VKSession
this.init();
},
onHide :function(){
// closeBle() tears down the Bluetooth link in the full project (not covered in this article)
this.closeBle();
},
onUnload :function(){
this.closeBle();
},
// Initialization logic: once VKSession.start() completes, run the update/render loop
init() {
// VKSession 配置
const session = this.session = wx.createVKSession({
track: {face: {mode: 1}},
version: 'v2',
});
try {
session.start(err => {
if (err) return console.error('VK error: ', err);
// set the camera position: 1 = front camera, 0 = rear camera
const config = session.config
config.cameraPosition = 1;
session.config = config;
console.log('VKSession.start ok,version:', session.version)
// VKSession EVENT resize
session.on('resize', () => {
})
// enable 3D recognition
session.update3DMode({open3d: true})
// VKSession EVENT addAnchors
session.on('addAnchors', anchors => {
console.log("addAnchor", anchors)
})
// VKSession EVENT updateAnchors
session.on('updateAnchors', anchors => {
var anchor = anchors[0]; // the first detected face
// key point indices: 43 = midpoint between the two eyes, 46 = nose tip
var centerPoint = anchor.points[46]; // nose tip, used as the face center
// eyelid indices: 72 = left upper eyelid, 73 = left lower eyelid, 75 = right upper eyelid, 76 = right lower eyelid
// (points 52/55 and 58/61 are passed below as the eye-corner points of each eye)
//console.log(centerPoint); // nose tip
var eyeLeftLen = this.calen(this.calculateEye(anchor.points[72],anchor.points[73],anchor.points[52],anchor.points[55]));
var eyeRightLen = this.calen(this.calculateEye(anchor.points[75],anchor.points[76],anchor.points[58],anchor.points[61]));
this.setData({
originx: centerPoint.x * 100 + "%",
originy: centerPoint.y * 100 + "%",
eyeLeftHeight: eyeLeftLen + "%",
eyeLeftWidth: (70 - (eyeLeftLen / 100) * 40) + "%",
eyeRightHeight: eyeRightLen + "%",
eyeRightWidth: (70 - (eyeRightLen / 100) * 40) + "%"
})
})
// VKSession removeAnchors
// fired repeatedly while the tracked face is lost
session.on('removeAnchors', anchors => {
console.log("removeAnchors",anchors);
this.setData({
originx:"1%",
originy:"1%"
})
});
console.log('ready to initloop')
// after start() completes, begin the update/render loop
this.initLoop();
});
} catch(e) {
console.error(e);
}
},
// Map the eye width/height ratio to an on-screen eye-height percentage:
// a larger ratio (eye more closed) gives a smaller value, limited to roughly 3-100.
calen(eyelen){
var l = 105 - eyelen;
if(l>100){
return 100;
}else if (l < 5){
return 3;
}else{
return l;
}
},
// Eye openness ratio: distance between the eye corners divided by the gap
// between the upper and lower eyelid (the ratio grows as the eye closes).
calculateEye(up,dow,left,right){
var ylen = this.calculateDistance(up.x,up.y,dow.x,dow.y);
var xlen = this.calculateDistance(right.x,right.y,left.x,left.y);
return xlen/ylen;
},
calculateDistance(x1, y1, x2, y2) {
var dx = x2 - x1;
var dy = y2 - y1;
return Math.sqrt(dx * dx + dy * dy);
},
// Frame-rate limiting logic
initLoop() {
// cap how often frames are pulled (30 fps)
let fps = 30
let fpsInterval = 1000 / fps
let last = Date.now()
const session = this.session;
// per-frame callback
const onFrame = timestamp => {
try {
let now = Date.now()
const mill = now - last
// enough time has elapsed since the last processed frame
if (mill > fpsInterval) {
last = now - (mill % fpsInterval); // re-align the reference time
session.getVKFrame(1,1); // fetch a 1x1 VK frame; its contents are not used directly here
}
} catch(e) {
console.error(e);
}
session.requestAnimationFrame(onFrame)
}
session.requestAnimationFrame(onFrame)
},
},
})
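One note on the code above: closeBle(), called from onHide/onUnload, is not defined in this snippet; in the full project it presumably tears down the Bluetooth link, which this article does not cover. To keep the page self-contained, a placeholder along these lines could be added to methods (an assumption on my part, not the author's code):

// Hypothetical stub: the real closeBle() handles Bluetooth teardown in the full project.
// Stopping the VKSession here is my own assumption so the page cleans up after itself.
closeBle() {
  if (this.session) {
    this.session.stop(); // stop the VK session when leaving the page
    this.session = undefined;
  }
},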
The final effect: the two eyes on the screen mirror your own eyes, including blink detection (they close as you blink).
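If you want an explicit blink event out of this (for example to later drive the ESP32 over Bluetooth), one possible extension, not shown in the article, is to threshold the width/height ratio returned by calculateEye and debounce it; the threshold below is an untuned guess:

// Sketch only: detectBlink is a hypothetical helper and CLOSED_RATIO an untuned guess.
detectBlink(ratio) {
  const CLOSED_RATIO = 8; // assumed: the ratio grows as the eyelids close
  const now = Date.now();
  if (ratio > CLOSED_RATIO && !this.eyeClosed) {
    this.eyeClosed = true; // the eye just closed
  } else if (ratio <= CLOSED_RATIO && this.eyeClosed) {
    this.eyeClosed = false; // the eye reopened: count one blink
    if (now - (this.lastBlink || 0) > 300) { // 300 ms debounce
      this.lastBlink = now;
      console.log('blink detected');
      // here you could notify the ESP32, e.g. via wx.writeBLECharacteristicValue
    }
  }
},

It would be called from the updateAnchors handler with the raw ratio computed by calculateEye for each eye.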