-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathindex.html
More file actions
98 lines (81 loc) · 3.05 KB
/
index.html
File metadata and controls
98 lines (81 loc) · 3.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Live Face Detector</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
/* Diagnostic text filled in by the script below when the camera or the
   FaceDetector API is unavailable. */
.error-msg {
color: red;
}
</style>
</head>
<body>
<div class="main-container">
<!-- Hidden (0x0) video element: serves only as the camera frame source;
     the script copies its frames onto the canvas below for display. -->
<video width="0" height="0" autoplay></video>
<!-- Visible surface: each tick the current video frame is drawn here and
     face/landmark rectangles are stroked on top. -->
<canvas width="640" height="480"></canvas>
<div class="error-msg"></div>
</div>
<script>
// Live face detector: mirrors the (hidden) camera <video> onto the <canvas>
// ~10x/second and draws boxes around faces/landmarks found by the
// experimental FaceDetector API. Diagnostic messages go to .error-msg.
const errorMsg = document.querySelector('.error-msg');
if ('FaceDetector' in window) {
  // Initializing vars & consts
  const video = document.querySelector('video');
  const canvas = document.querySelector('canvas');
  const context = canvas.getContext('2d');
  const faceDetector = new FaceDetector();

  // Get access to the camera. The original swallowed getUserMedia
  // rejections (denied permission, no camera) — surface them instead.
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia({ video: true })
      .then(function (stream) {
        video.srcObject = stream;
        return video.play(); // play() returns a promise; chain so the catch sees it
      })
      .catch(function (err) {
        errorMsg.textContent = 'Could not access the camera: ' + err.message;
      });
  } else {
    errorMsg.textContent = 'getUserMedia is not supported in this browser.';
  }

  // Draw face highlighters: green box per face, red boxes on eyes,
  // blue boxes on other landmarks (mouth/nose).
  function drawFaces(faces) {
    faces.forEach(face => {
      const { width, height, top, left } = face.boundingBox;
      context.strokeStyle = '#00FF00';
      context.lineWidth = 5;
      context.strokeRect(left, top, width, height);
      face.landmarks.forEach(landmark => {
        context.lineWidth = 1;
        if (landmark.type === 'eye') {
          context.strokeStyle = '#FF0000';
          context.strokeRect(landmark.location.x - 20, landmark.location.y - 20, 40, 20);
        } else {
          context.strokeStyle = '#0000FF';
          context.strokeRect(landmark.location.x - 40, landmark.location.y - 40, 80, 40);
        }
      });
    });
  }

  // Run one detection pass over the current canvas frame.
  // `detecting` skips ticks while a previous pass is still in flight
  // (detection can take longer than the 100 ms interval), and the
  // try/catch stops detect() rejections from becoming unhandled
  // promise rejections every tick.
  let detecting = false;
  async function detect() {
    if (detecting) return;
    detecting = true;
    try {
      const faces = await faceDetector.detect(canvas);
      drawFaces(faces);
    } catch (err) {
      errorMsg.textContent = 'Face detection failed: ' + err.message;
    } finally {
      detecting = false;
    }
  }

  setInterval(() => {
    context.drawImage(video, 0, 0, canvas.width, canvas.height);
    detect(); // fire-and-forget; detect() handles its own errors
  }, 100);
} else {
  // Find if user is using Chrome, and if yes, check version number. Face Detection API has been in the browser since M57
  const chromeVer = /Chrome\/(\d+)/.exec(navigator.userAgent);
  if (chromeVer && parseInt(chromeVer[1], 10) > 56) {
    errorMsg.textContent = "Looks like your version of Chrome supports FaceDetector API but you haven't enabled it (since experimental features in Chrome are disabled by default). You can enable it by going to the URL 'chrome://flags#enable-experimental-web-platform-features' and enabling the 'Experimental Web Platform features'"
  } else {
    errorMsg.textContent = "Looks like your browser does not support FaceDetector API. If you're on Chrome, please make sure you've the latest update and enable the #enable-experimental-web-platform-features flag from chrome://flags";
  }
}
</script>
<!-- Global site tag (gtag.js) - Google Analytics -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-116311938-1"></script>
<script>
// Google's canonical analytics bootstrap. NOTE(review): keep as-is —
// gtag() must push the live `arguments` object (not a copied array);
// gtag.js relies on it when draining the dataLayer queue.
window.dataLayer = window.dataLayer || [];
function gtag() { dataLayer.push(arguments); }
gtag('js', new Date());
gtag('config', 'UA-116311938-1');
</script>
</body>
</html>