
Commit e4de171

Add face landmarks detection live video demo (#972)
* Add face landmarks detection live video demo
* Fix typos
1 parent 60c37a2 commit e4de171

File tree

14 files changed: +8478 -0 lines changed

+40
@@ -0,0 +1,40 @@
# Demos

Try our demos and get inspired by what you can do with face-landmarks-detection models!

## Table of Contents

1. [Live Camera Demo](#live-camera-demo)
2. [Upload a Video Demo](#upload-a-video-demo)
3. [How to Run a Demo](#how-to-run-a-demo)

-------------------------------------------------------------------------------

## Live Camera Demo

This demo uses your camera to get a live video stream and tracks your face in real time.
You can try out the different runtimes to see the difference. It works on laptops, iPhones, and Android phones.

[MediaPipeFaceMesh model entry](https://storage.googleapis.com/tfjs-models/demos/face-landmarks-detection/index.html?model=mediapipe_face_mesh)

## Upload a Video Demo

This demo lets you upload a video (in .mp4 format) to run the model on.
Once the video is processed, the demo automatically downloads the video annotated with the detected face landmarks.

[MediaPipeFaceMesh model entry](https://storage.googleapis.com/tfjs-models/demos/face-landmarks-detection-upload-video/index.html?model=mediapipe_face_mesh)

## How to Run a Demo

If you want to run any of the demos locally, follow these steps:

1. Go to the demo folder, e.g. `cd live_video`
2. Remove caches and previous builds: `rm -rf .cache dist node_modules`
3. Build the dependency: `yarn build-dep`
4. Install dependencies: `yarn`
5. Run the demo: `yarn watch`
6. The demo runs at `localhost:1234`. (Remember to provide the model URL parameter, e.g. `localhost:1234/?model=mediapipe_face_mesh`.)
@@ -0,0 +1,16 @@
{
  "presets": [
    [
      "env",
      {
        "esmodules": false,
        "targets": {
          "browsers": [
            "> 3%"
          ]
        }
      }
    ]
  ],
  "plugins": ["@babel/plugin-transform-runtime"]
}
@@ -0,0 +1,63 @@
<!-- Copyright 2022 Google LLC. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================-->
<!DOCTYPE html>
<html>

<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=1.0, user-scalable=no">
  <style>
    body {
      margin: 0;
    }

    #stats {
      position: relative;
      width: 100%;
      height: 80px;
    }

    #main {
      position: relative;
      margin: 0;
    }

    #canvas-wrapper {
      position: relative;
    }
  </style>
</head>

<body>
  <div id="stats"></div>
  <div id="main">
    <div class="container">
      <div class="canvas-wrapper">
        <canvas id="output"></canvas>
        <video id="video" playsinline style="
          -webkit-transform: scaleX(-1);
          transform: scaleX(-1);
          visibility: hidden;
          width: auto;
          height: auto;
          ">
        </video>
      </div>
    </div>
  </div>
</body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.7.6/dat.gui.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/stats.js/r16/Stats.min.js"></script>
<script src="src/index.js"></script>

</html>
@@ -0,0 +1,65 @@
{
  "name": "facelandmarksdetection_demo",
  "version": "0.0.1",
  "description": "Demo for face landmark detection api",
  "main": "index.js",
  "license": "Apache-2.0",
  "private": true,
  "engines": {
    "node": ">=8.9.0"
  },
  "dependencies": {
    "@mediapipe/face_mesh": "~0.4.0",
    "@tensorflow-models/face-landmarks-detection": "file:../../dist",
    "@tensorflow/tfjs-backend-wasm": "^3.12.0",
    "@tensorflow/tfjs-backend-webgl": "^3.12.0",
    "@tensorflow/tfjs-converter": "^3.12.0",
    "@tensorflow/tfjs-core": "^3.12.0",
    "scatter-gl": "0.0.8"
  },
  "scripts": {
    "watch": "cross-env NODE_ENV=development parcel index.html --no-hmr --open",
    "build": "cross-env NODE_ENV=production parcel build index.html --public-url ./",
    "lint": "eslint .",
    "build-dep": "cd ../../ && yarn && yarn build",
    "link-core": "yalc link @tensorflow/tfjs-core",
    "link-webgl": "yalc link @tensorflow/tfjs-backend-webgl"
  },
  "browser": {
    "crypto": false
  },
  "devDependencies": {
    "@babel/core": "^7.7.5",
    "@babel/plugin-transform-runtime": "^7.7.6",
    "@babel/preset-env": "^7.7.6",
    "babel-plugin-external-helpers": "^6.22.0",
    "babel-preset-env": "^1.7.0",
    "clang-format": "~1.2.2",
    "cross-env": "^5.2.0",
    "eslint": "^4.19.1",
    "eslint-config-google": "^0.9.1",
    "parcel-bundler": "1.12.5",
    "parcel-plugin-static-files-copy": "^2.5.1",
    "yalc": "~1.0.0-pre.50"
  },
  "resolutions": {
    "is-svg": "4.3.1"
  },
  "eslintConfig": {
    "extends": "google",
    "rules": {
      "require-jsdoc": 0,
      "valid-jsdoc": 0
    },
    "env": {
      "es6": true
    },
    "parserOptions": {
      "ecmaVersion": 8,
      "sourceType": "module"
    }
  },
  "eslintIgnore": [
    "dist/"
  ]
}
@@ -0,0 +1,93 @@
/**
 * @license
 * Copyright 2022 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */

import {VIDEO_SIZE} from './shared/params';
import {drawResults, isMobile} from './shared/util';

export class Camera {
  constructor() {
    this.video = document.getElementById('video');
    this.canvas = document.getElementById('output');
    this.ctx = this.canvas.getContext('2d');
  }

  /**
   * Initialize a Camera instance and wait for the camera stream to be ready.
   * @param cameraParam From app `STATE.camera`.
   */
  static async setupCamera(cameraParam) {
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
      throw new Error(
          'Browser API navigator.mediaDevices.getUserMedia not available');
    }

    const {targetFPS, sizeOption} = cameraParam;
    const $size = VIDEO_SIZE[sizeOption];
    const videoConfig = {
      'audio': false,
      'video': {
        facingMode: 'user',
        // Only set the video to a specified size on large screens; on
        // mobile devices, accept the default size.
        width: isMobile() ? VIDEO_SIZE['360 X 270'].width : $size.width,
        height: isMobile() ? VIDEO_SIZE['360 X 270'].height : $size.height,
        frameRate: {
          ideal: targetFPS,
        },
      },
    };

    const stream = await navigator.mediaDevices.getUserMedia(videoConfig);

    const camera = new Camera();
    camera.video.srcObject = stream;

    await new Promise((resolve) => {
      camera.video.onloadedmetadata = () => {
        resolve(camera.video);
      };
    });

    camera.video.play();

    const videoWidth = camera.video.videoWidth;
    const videoHeight = camera.video.videoHeight;
    // Must set the two lines below, otherwise the video element doesn't show.
    camera.video.width = videoWidth;
    camera.video.height = videoHeight;

    camera.canvas.width = videoWidth;
    camera.canvas.height = videoHeight;
    const canvasContainer = document.querySelector('.canvas-wrapper');
    canvasContainer.style = `width: ${videoWidth}px; height: ${videoHeight}px`;

    // Because the image from the camera is mirrored, flip it horizontally.
    camera.ctx.translate(camera.video.videoWidth, 0);
    camera.ctx.scale(-1, 1);

    return camera;
  }

  drawCtx() {
    this.ctx.drawImage(
        this.video, 0, 0, this.video.videoWidth, this.video.videoHeight);
  }

  drawResults(faces, triangulateMesh, boundingBox) {
    drawResults(this.ctx, faces, triangulateMesh, boundingBox);
  }
}
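
For context, here is a minimal sketch (not part of this commit) of how the `Camera` class above might be driven from the demo's main loop. It assumes the face-landmarks-detection `createDetector`/`estimateFaces` API, a `'640 X 480'` key in `VIDEO_SIZE`, and hard-coded camera settings; the real demo wires these up from its GUI-backed `STATE` in `src/index.js` and `src/shared/params.js`.

```js
// Sketch only: the settings below are assumptions, not values from this commit.
import '@tensorflow/tfjs-backend-webgl';
import * as faceLandmarksDetection from '@tensorflow-models/face-landmarks-detection';
import {Camera} from './camera';

async function run() {
  // Open the webcam via the Camera helper defined above.
  const camera = await Camera.setupCamera({targetFPS: 60, sizeOption: '640 X 480'});

  // Create a MediaPipeFaceMesh detector on the tfjs runtime (one of the
  // runtimes the live demo lets you switch between).
  const detector = await faceLandmarksDetection.createDetector(
      faceLandmarksDetection.SupportedModels.MediaPipeFaceMesh,
      {runtime: 'tfjs'});

  const renderFrame = async () => {
    const faces = await detector.estimateFaces(camera.video, {flipHorizontal: false});
    camera.drawCtx();  // draw the current (mirrored) video frame
    camera.drawResults(faces, /* triangulateMesh= */ true, /* boundingBox= */ false);
    requestAnimationFrame(renderFrame);
  };
  renderFrame();
}

run();
```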
