minimal__modules__track_one_face.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Beyond Reality Face SDK - BRFv5 - Face Tracking for Browser/JavaScript - Minimal Webcam Example</title>
<style>
html, body { width: 100%; height: 100%; background-color: #ffffff; margin: 0; padding: 0; overflow: hidden; }
</style>
</head>
<body>
<!--
This is a minimal modules example. It is much shorter than minimal_no_modules.html,
because most of the functionality is already provided by the imported modules.
-->
<video id="_webcam" style="display: none;" playsinline></video>
<canvas id="_imageData"></canvas>
<script type="module">
import { brfv5 } from './js/brfv5/brfv5__init.js'
import { loadBRFv5Model } from './js/brfv5/brfv5__init.js'
import { configureCameraInput } from './js/brfv5/brfv5__configure.js'
import { configureFaceTracking } from './js/brfv5/brfv5__configure.js'
import { configureNumFacesToTrack } from './js/brfv5/brfv5__configure.js'
import { startCamera } from './js/utils/utils__camera.js'
import { drawInputMirrored } from './js/utils/utils__canvas.js'
import { drawCircles } from './js/utils/utils__canvas.js'
import { drawRect, drawRects } from './js/utils/utils__canvas.js'
const _appId = 'brfv5.browser.minimal.modules' // (mandatory): 8 to 64 characters, a-z . 0-9 allowed
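// A quick sanity check for the appId constraints noted above (illustrative only,
// not part of the SDK; isValidAppId is a local helper).
const isValidAppId = (id) => /^[a-z0-9.]{8,64}$/.test(id)
if(!isValidAppId(_appId)) { console.warn('_appId should be 8 to 64 characters of a-z, 0-9 and "."') }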
const _webcam = document.getElementById('_webcam')
const _imageData = document.getElementById('_imageData')
// These variables will be set from the camera stream and the library.
let _brfv5Manager = null
let _brfv5Config = null
let _width = 0
let _height = 0
// loadBRFv5Model and startCamera run in parallel as Promises. Both call
// configureTracking, which only executes once both Promises have resolved.
// Once configured, trackFaces does the tracking work and draws the results.
startCamera(_webcam, { width: 640, height: 480, frameRate: 30, facingMode: 'user' }).then(({ video }) => {
console.log('startCamera: done: ' + video.videoWidth + 'x' + video.videoHeight)
_width = video.videoWidth
_height = video.videoHeight
_imageData.width = _width
_imageData.height = _height
configureTracking()
}).catch((e) => { if(e) { console.error('Camera failed: ', e) } })
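// Optional fallback (illustrative only, not called in this example): if the 640x480
// request fails, retrying with just { facingMode: 'user' } lets the browser pick any
// available resolution. This assumes startCamera forwards the constraints to
// getUserMedia as-is.
const startCameraWithLooseConstraints = () => {
  return startCamera(_webcam, { facingMode: 'user' }).then(({ video }) => {
    _width = video.videoWidth
    _height = video.videoHeight
    _imageData.width = _width
    _imageData.height = _height
    configureTracking()
  })
}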
loadBRFv5Model('68l', 8, './js/brfv5/models/', _appId,
(progress) => { console.log(progress) }).then(({ brfv5Manager, brfv5Config }) => {
console.log('loadBRFv5Model: done')
_brfv5Manager = brfv5Manager
_brfv5Config = brfv5Config
configureTracking()
}).catch((e) => { console.error('BRFv5 failed: ', e) })
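// Above: '68l' selects the 68 landmark model and 8 is the number of model chunks
// to load (the model ships split into chunks; see brfv5__init.js and the SDK
// readme for the available model names and chunk counts).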
const configureTracking = () => {
if(_brfv5Config !== null && _width > 0) {
configureCameraInput(_brfv5Config, _width, _height)
configureNumFacesToTrack(_brfv5Config, 1)
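// The next call sets 3 tracking passes and enables free rotation
// (see brfv5__configure.js for the parameter names).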
configureFaceTracking(_brfv5Config, 3, true)
_brfv5Manager.configure(_brfv5Config)
trackFaces()
}
}
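// To track more than one face, pass a higher count to configureNumFacesToTrack
// above; trackFaces below already iterates over all returned faces.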
const trackFaces = () => {
if(!_brfv5Manager || !_brfv5Config || !_imageData) { return }
const ctx = _imageData.getContext('2d')
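// Draw the current webcam frame mirrored onto the canvas, then feed those exact
// pixels to BRFv5 for tracking.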
drawInputMirrored(ctx, _width, _height, _webcam)
_brfv5Manager.update(ctx.getImageData(0, 0, _width, _height))
let doDrawFaceDetection = !_brfv5Config.enableFaceTracking
if(_brfv5Config.enableFaceTracking) {
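// Scale the drawn landmark radius relative to a 480px reference resolution.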
const sizeFactor = Math.min(_width, _height) / 480.0
const faces = _brfv5Manager.getFaces()
for(let i = 0; i < faces.length; i++) {
const face = faces[i]
if(face.state === brfv5.BRFv5State.FACE_TRACKING) {
drawRect(ctx, _brfv5Config.faceTrackingConfig.regionOfInterest, '#00a0ff', 2.0)
drawCircles(ctx, face.landmarks, '#00a0ff', 2.0 * sizeFactor)
drawRect(ctx, face.bounds, '#ffffff', 1.0)
} else {
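// This face is not in the FACE_TRACKING state yet, so draw the
// face detection results below instead.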
doDrawFaceDetection = true
}
}
}
if(doDrawFaceDetection) {
// Only draw face detection results if face detection was actually performed.
drawRect( ctx, _brfv5Config.faceDetectionConfig.regionOfInterest, '#ffffff', 2.0)
drawRects(ctx, _brfv5Manager.getDetectedRects(), '#00a0ff', 1.0)
drawRects(ctx, _brfv5Manager.getMergedRects(), '#ffffff', 3.0)
}
requestAnimationFrame(trackFaces)
}
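// requestAnimationFrame drives the update loop above. Where supported,
// HTMLVideoElement.requestVideoFrameCallback could be used instead to update
// only when a new camera frame is actually available (not done in this example).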
</script>
</body>
</html>