opencv/samples/dnn/js_face_recognition.html

<!DOCTYPE html>
<html>
<head>
<script async src="../../opencv.js" type="text/javascript"></script>
<script src="../../utils.js" type="text/javascript"></script>
<script type='text/javascript'>
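// netDet: YuNet face detector (cv.FaceDetectorYN), netRecogn: SFace recognition
// network (cv.dnn), persons: map from a person's name to their feature vector.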
var netDet = undefined, netRecogn = undefined;
var persons = {};
//! [Run face detection model]
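// YuNet returns one row of 15 floats per detected face:
// [x, y, w, h, then (x, y) for five landmarks (right eye, left eye, nose tip,
// right mouth corner, left mouth corner), then a confidence score].
// Boxes are clamped to the image below; landmarks outside the image are set to -1.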
function detectFaces(img) {
netDet.setInputSize(new cv.Size(img.cols, img.rows));
var out = new cv.Mat();
netDet.detect(img, out);
var faces = [];
for (var i = 0, n = out.data32F.length; i < n; i += 15) {
var left = out.data32F[i];
var top = out.data32F[i + 1];
var right = (out.data32F[i] + out.data32F[i + 2]);
var bottom = (out.data32F[i + 1] + out.data32F[i + 3]);
left = Math.min(Math.max(0, left), img.cols - 1);
top = Math.min(Math.max(0, top), img.rows - 1);
right = Math.min(Math.max(0, right), img.cols - 1);
bottom = Math.min(Math.max(0, bottom), img.rows - 1);
if (left < right && top < bottom) {
faces.push({
x: left,
y: top,
width: right - left,
height: bottom - top,
x1: out.data32F[i + 4] < 0 || out.data32F[i + 4] > img.cols - 1 ? -1 : out.data32F[i + 4],
y1: out.data32F[i + 5] < 0 || out.data32F[i + 5] > img.rows - 1 ? -1 : out.data32F[i + 5],
x2: out.data32F[i + 6] < 0 || out.data32F[i + 6] > img.cols - 1 ? -1 : out.data32F[i + 6],
y2: out.data32F[i + 7] < 0 || out.data32F[i + 7] > img.rows - 1 ? -1 : out.data32F[i + 7],
x3: out.data32F[i + 8] < 0 || out.data32F[i + 8] > img.cols - 1 ? -1 : out.data32F[i + 8],
y3: out.data32F[i + 9] < 0 || out.data32F[i + 9] > img.rows - 1 ? -1 : out.data32F[i + 9],
x4: out.data32F[i + 10] < 0 || out.data32F[i + 10] > img.cols - 1 ? -1 : out.data32F[i + 10],
y4: out.data32F[i + 11] < 0 || out.data32F[i + 11] > img.rows - 1 ? -1 : out.data32F[i + 11],
x5: out.data32F[i + 12] < 0 || out.data32F[i + 12] > img.cols - 1 ? -1 : out.data32F[i + 12],
y5: out.data32F[i + 13] < 0 || out.data32F[i + 13] > img.rows - 1 ? -1 : out.data32F[i + 13],
confidence: out.data32F[i + 14]
})
}
}
out.delete();
return faces;
};
//! [Run face detection model]
//! [Get 128 floating points feature vector]
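// SFace takes a 112x112 RGB face crop: blobFromImage scales by 1.0, subtracts
// no mean, swaps R and B (the input Mat is BGR) and does not crop. The network
// outputs a 128-dimensional feature vector describing the face.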
function face2vec(face) {
var blob = cv.blobFromImage(face, 1.0, {width: 112, height: 112}, [0, 0, 0, 0], true, false);
netRecogn.setInput(blob);
var vec = netRecogn.forward();
blob.delete();
return vec;
};
//! [Get 128 floating points feature vector]
//! [Recognize]
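// Compare the query vector against every enrolled person using a dot product;
// the highest score above the threshold wins, otherwise 'unknown' is returned.
// (If your OpenCV.js build exposes cv.FaceRecognizerSF, its match() method with
// cosine or L2 distance is an alternative to this raw dot product.)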
function recognize(face) {
var vec = face2vec(face);
var bestMatchName = 'unknown';
var bestMatchScore = 30; // Minimum dot-product score needed to accept a match.
for (var name in persons) {
var personVec = persons[name];
var score = vec.dot(personVec);
if (score > bestMatchScore) {
bestMatchScore = score;
bestMatchName = name;
}
}
vec.delete();
return bestMatchName;
};
//! [Recognize]
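// Fetch both ONNX models from the opencv_zoo repository and write them into
// Emscripten's virtual filesystem via utils.createFileFromUrl, so that they
// can be opened below by plain filename.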
function loadModels(callback) {
var utils = new Utils('');
var detectModel = 'https://media.githubusercontent.com/media/opencv/opencv_zoo/main/models/face_detection_yunet/face_detection_yunet_2023mar.onnx';
var recognModel = 'https://media.githubusercontent.com/media/opencv/opencv_zoo/main/models/face_recognition_sface/face_recognition_sface_2021dec.onnx';
document.getElementById('status').innerHTML = 'Downloading YuNet model';
utils.createFileFromUrl('face_detection_yunet_2023mar.onnx', detectModel, () => {
document.getElementById('status').innerHTML = 'Downloading SFace model';
utils.createFileFromUrl('face_recognition_sface_2021dec.onnx', recognModel, () => {
document.getElementById('status').innerHTML = '';
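// FaceDetectorYN arguments: model path, config path (empty for ONNX), input
// size (overwritten per frame in detectFaces), score threshold 0.9,
// NMS threshold 0.3, top_k 5000.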
netDet = new cv.FaceDetectorYN("face_detection_yunet_2023mar.onnx", "", new cv.Size(320, 320), 0.9, 0.3, 5000);
netRecogn = cv.readNet('face_recognition_sface_2021dec.onnx');
callback();
});
});
};
function main() {
if(!cv.FaceDetectorYN){
alert(`Error: This sample requires OpenCV.js built with FaceDetectorYN support. Please rebuild OpenCV.js with FaceDetectorYN or use the latest version.`);
return;
}
// Create a camera object.
var output = document.getElementById('output');
var camera = document.createElement("video");
camera.setAttribute("width", output.width);
camera.setAttribute("height", output.height);
// Ask the user for permission to use the camera.
navigator.mediaDevices.getUserMedia({video: true, audio: false})
.then(function(stream) {
camera.srcObject = stream;
camera.onloadedmetadata = function(e) {
camera.play();
};
});
//! [Open a camera stream]
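// cv.VideoCapture reads frames from the <video> element as 8-bit RGBA;
// frameBGR holds the BGR copy that is fed to the networks.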
var cap = new cv.VideoCapture(camera);
var frame = new cv.Mat(camera.height, camera.width, cv.CV_8UC4);
var frameBGR = new cv.Mat(camera.height, camera.width, cv.CV_8UC3);
//! [Open a camera stream]
//! [Add a person]
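// Detect faces in the current frame, take the first one, store its feature
// vector under the entered name and show a 112x112 thumbnail in the table.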
document.getElementById('addPersonButton').onclick = function() {
var rects = detectFaces(frameBGR);
if (rects.length > 0) {
var face = frameBGR.roi(rects[0]);
var name = prompt('Say your name:');
var cell = document.getElementById("targetNames").insertCell(0);
cell.innerHTML = name;
var vec = face2vec(face);
persons[name] = vec.clone(); // Keep an owned copy of the feature vector.
vec.delete();
var canvas = document.createElement("canvas");
canvas.setAttribute("width", 112);
canvas.setAttribute("height", 112);
var cell = document.getElementById("targetImgs").insertCell(0);
cell.appendChild(canvas);
var faceResized = new cv.Mat(canvas.height, canvas.width, cv.CV_8UC3);
cv.resize(face, faceResized, {width: canvas.width, height: canvas.height});
cv.cvtColor(faceResized, faceResized, cv.COLOR_BGR2RGB);
cv.imshow(canvas, faceResized);
faceResized.delete();
face.delete();
}
};
//! [Add a person]
//! [Define frames processing]
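// Main loop: read a frame, detect faces, draw boxes and landmarks, recognize
// each face, then schedule the next iteration to approximate the target FPS.
// If processing takes longer than the frame budget, the computed delay is
// negative and setTimeout fires as soon as possible.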
var isRunning = false;
const FPS = 30; // Target number of frames processed per second.
function captureFrame() {
var begin = Date.now();
cap.read(frame); // Read a frame from camera
cv.cvtColor(frame, frameBGR, cv.COLOR_RGBA2BGR);
var faces = detectFaces(frameBGR);
faces.forEach(function(rect) {
cv.rectangle(frame, {x: rect.x, y: rect.y}, {x: rect.x + rect.width, y: rect.y + rect.height}, [0, 255, 0, 255]);
// Draw the five facial landmarks (skipped when marked invalid with -1).
if (rect.x1 >= 0 && rect.y1 >= 0)
cv.circle(frame, {x: rect.x1, y: rect.y1}, 2, [255, 0, 0, 255], 2);
if (rect.x2 >= 0 && rect.y2 >= 0)
cv.circle(frame, {x: rect.x2, y: rect.y2}, 2, [0, 0, 255, 255], 2);
if (rect.x3 >= 0 && rect.y3 >= 0)
cv.circle(frame, {x: rect.x3, y: rect.y3}, 2, [0, 255, 0, 255], 2);
if (rect.x4 >= 0 && rect.y4 >= 0)
cv.circle(frame, {x: rect.x4, y: rect.y4}, 2, [255, 0, 255, 255], 2);
if (rect.x5 >= 0 && rect.y5 >= 0)
cv.circle(frame, {x: rect.x5, y: rect.y5}, 2, [0, 255, 255, 255], 2);
var face = frameBGR.roi(rect);
var name = recognize(face);
face.delete();
cv.putText(frame, name, {x: rect.x, y: rect.y}, cv.FONT_HERSHEY_SIMPLEX, 1.0, [0, 255, 0, 255]);
});
cv.imshow(output, frame);
// Loop this function.
if (isRunning) {
var delay = 1000 / FPS - (Date.now() - begin);
setTimeout(captureFrame, delay);
}
};
//! [Define frames processing]
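// The Start/Stop button lazily loads both models on first start, then toggles
// the capture loop; "Add a person" is only enabled while the loop is running.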
document.getElementById('startStopButton').onclick = function toggle() {
if (isRunning) {
isRunning = false;
document.getElementById('startStopButton').innerHTML = 'Start';
document.getElementById('addPersonButton').disabled = true;
} else {
function run() {
isRunning = true;
captureFrame();
document.getElementById('startStopButton').innerHTML = 'Stop';
document.getElementById('startStopButton').disabled = false;
document.getElementById('addPersonButton').disabled = false;
}
if (netDet == undefined || netRecogn == undefined) {
document.getElementById('startStopButton').disabled = true;
loadModels(run); // Load the models, then start the pipeline.
} else {
run();
}
}
};
document.getElementById('startStopButton').disabled = false;
};
</script>
</head>
<body onload="cv['onRuntimeInitialized']=()=>{ main() }">
<button id="startStopButton" type="button" disabled="true">Start</button>
<div id="status"></div>
<canvas id="output" width=640 height=480 style="max-width: 100%"></canvas>
<table>
<tr id="targetImgs"></tr>
<tr id="targetNames"></tr>
</table>
<button id="addPersonButton" type="button" disabled="true">Add a person</button>
</body>
</html>