In this part of the docs we show how to use our detection service with a webcam.

```npm i @exadel/compreface-js-sdk```

4. Create your component and copy/paste the following code. NOTE: We use a functional component; the video tag connects to your webcam, the hidden canvas is used to capture frames, and the other two canvases are used to draw the bounding box and some extra data.

```
import { useRef } from 'react';
import { CompreFace } from '@exadel/compreface-js-sdk';

function App() {
  const videoTag = useRef(null);
  const canvas1 = useRef(null);
  const canvas2 = useRef(null);
  const canvas3 = useRef(null);

  const handleVideoStart = () => {
    console.log("Click is working")
  }

  return (
    <div>
      <video ref={videoTag} width="640" height="480" autoPlay muted></video>
      <canvas ref={canvas1} width="640" id="canvas" height="480" style={{ display: 'none' }}></canvas>
      <canvas ref={canvas2} width="640" id="canvas2" height="480" style={{ position: 'absolute' }}></canvas>
      <canvas ref={canvas3} width="640" height="480" style={{ position: 'absolute' }}></canvas>

      <div>
        <button onClick={handleVideoStart}>Start video</button>
      </div>
    </div>
  );
}

export default App;
```

5. Add the ability to start the webcam when the user clicks the "Start video" button. Put the following code into the ```handleVideoStart()``` function. ```navigator.mediaDevices``` is a built-in read-only browser property that gives access to the webcam; ```getUserMedia()``` resolves with a ```MediaStream``` that we assign to the video element's ```srcObject```.

```
navigator.mediaDevices.getUserMedia({ video: true })
  .then(stream => videoTag.current.srcObject = stream)
  .catch(error => console.error(error))
```
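
In this demo the stream stays open until the page is closed. If you also want a way to stop the camera, a minimal sketch using the standard MediaStream API could look like the following (this is not part of the original demo, and ```handleVideoStop``` is just an illustrative name):

```
// Optional sketch: stop the webcam by stopping every track of the
// MediaStream that was assigned to the video element's srcObject.
const handleVideoStop = () => {
  const stream = videoTag.current && videoTag.current.srcObject;
  if (stream) {
    stream.getTracks().forEach(track => track.stop()); // releases the camera
    videoTag.current.srcObject = null;
  }
}
```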

6. Initialize the CompreFace instances and handle the video event that is fired when the webcam starts working. Your code should look like the following example. The ```play``` event listener fires when the webcam starts playing, and this is where we use the CompreFace SDK. NOTE: the ```next_frame``` custom event creates a kind of recursion: every time a frame has been processed, the event is dispatched again so the next frame is captured and sent to the detection service.

```
import { useRef } from 'react';
import { CompreFace } from '@exadel/compreface-js-sdk';

function App() {
  const videoTag = useRef(null);
  const canvas1 = useRef(null);
  const canvas2 = useRef(null);
  const canvas3 = useRef(null);

  const handleVideoStart = () => {
    navigator.mediaDevices.getUserMedia({ video: true })
      .then(stream => videoTag.current.srcObject = stream)
      .catch(error => console.error(error))

    videoTag.current.addEventListener('play', () => {
      // CompreFace init
      let server = "http://localhost";
      let port = 8000;
      let detection_key = "your_api_key_for_detection_service";

      let core = new CompreFace(server, port);
      let detection_service = core.initFaceDetectionService(detection_key);
      // end of CompreFace init

      let ctx1 = canvas1.current.getContext('2d');
      let ctx2 = canvas2.current.getContext('2d');
      let ctx3 = canvas3.current.getContext('2d');

      document.addEventListener('next_frame', () => {
        ctx1.drawImage(videoTag.current, 0, 0, 640, 480)
        canvas1.current.toBlob(blob => {
          detection_service.detect(blob, { limit: 1, face_plugins: 'age,gender' })
            .then(res => {
              /*
                Here we will call the draw function (added in the next step),
                which draws a square around the face in front of the webcam.
              */
            })
            .catch(error => console.log(error))
        }, 'image/jpeg', 0.95)
      })

      const evt = new Event("next_frame", { "bubbles": true, "cancelable": false });
      document.dispatchEvent(evt);
    })
  }

  return (
    <div>
      <video ref={videoTag} width="640" height="480" autoPlay muted></video>
      <canvas ref={canvas1} width="640" id="canvas" height="480" style={{ display: 'none' }}></canvas>
      <canvas ref={canvas2} width="640" id="canvas2" height="480" style={{ position: 'absolute' }}></canvas>
      <canvas ref={canvas3} width="640" height="480" style={{ position: 'absolute' }}></canvas>

      <div>
        <button onClick={handleVideoStart}>Start video</button>
      </div>
    </div>
  );
}

export default App;
```
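
For reference, ```detect()``` resolves with a JSON response. Below is only an illustrative sketch of the fields this demo reads (the example values are made up, and the exact shape may differ between CompreFace versions and enabled plugins):

```
// Illustrative only: the fields used by the draw function in the next step.
const exampleResponse = {
  result: [
    {
      box: { x_min: 180, y_min: 90, x_max: 460, y_max: 420, probability: 0.99 }, // face bounding box
      age: [25, 32],   // returned by the 'age' face plugin
      gender: 'male'   // returned by the 'gender' face plugin
    }
  ]
};
```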

7. Add the draw function. NOTE: the extra canvas element is used to show extra info related to the detected face (probability, gender, and age range).

```
const drawFace = (canvasElement, faceData, extraCanvas) => {
  // Request the next frame before drawing the current one.
  const evt = new Event("next_frame", { "bubbles": true, "cancelable": false });
  document.dispatchEvent(evt);
  let box = faceData.result[0].box;

  canvasElement.clearRect(0, 0, 640, 480);
  extraCanvas.clearRect(0, 0, 640, 480);

  canvasElement.strokeStyle = 'green';
  extraCanvas.strokeStyle = "blue";
  extraCanvas.fillStyle = "white";

  extraCanvas.lineWidth = 5;
  canvasElement.lineWidth = 5;

  // Bounding box around the detected face.
  canvasElement.strokeRect(box.x_min, box.y_min, box.x_max - box.x_min, box.y_max - box.y_min);
  // Probability, gender and age range above the box.
  extraCanvas.fillText(
    Number.parseFloat(box.probability).toPrecision(5) + ' ' + faceData.result[0].gender + ' ' + faceData.result[0].age[0] + '-' + faceData.result[0].age[1],
    box.x_min,
    box.y_min - 10
  )
}
```
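
Note that ```drawFace()``` dispatches the ```next_frame``` event before drawing, which is what keeps the loop from step 6 running: every processed frame triggers the capture and detection of the next one. In the final code it is called from the ```then()``` callback of the detection request, roughly like this:

```
detection_service.detect(blob, { limit: 1, face_plugins: 'age,gender' })
  .then(res => drawFace(ctx2, res, ctx3)) // box on canvas2, text label on canvas3
  .catch(error => console.log(error))
```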

8. The final code should look like this.

```
import { useRef } from 'react';
import { CompreFace } from '@exadel/compreface-js-sdk';

function App() {
  const videoTag = useRef(null);
  const canvas1 = useRef(null);
  const canvas2 = useRef(null);
  const canvas3 = useRef(null);

  const drawFace = (canvasElement, faceData, extraCanvas) => {
    const evt = new Event("next_frame", { "bubbles": true, "cancelable": false });
    document.dispatchEvent(evt);
    let box = faceData.result[0].box;

    canvasElement.clearRect(0, 0, 640, 480);
    extraCanvas.clearRect(0, 0, 640, 480);

    canvasElement.strokeStyle = 'green';
    extraCanvas.strokeStyle = "blue";
    extraCanvas.fillStyle = "white";

    extraCanvas.lineWidth = 5;
    canvasElement.lineWidth = 5;

    canvasElement.strokeRect(box.x_min, box.y_min, box.x_max - box.x_min, box.y_max - box.y_min);
    extraCanvas.fillText(
      Number.parseFloat(box.probability).toPrecision(5) + ' ' + faceData.result[0].gender + ' ' + faceData.result[0].age[0] + '-' + faceData.result[0].age[1],
      box.x_min,
      box.y_min - 10
    )
  }

  const handleVideoStart = () => {
    navigator.mediaDevices.getUserMedia({ video: true })
      .then(stream => videoTag.current.srcObject = stream)
      .catch(error => console.error(error))

    videoTag.current.addEventListener('play', () => {
      // CompreFace init
      let server = "http://localhost";
      let port = 8000;
      let detection_key = "your_api_key_for_detection_service";

      let core = new CompreFace(server, port);
      let detection_service = core.initFaceDetectionService(detection_key);
      // end of CompreFace init

      let ctx1 = canvas1.current.getContext('2d');
      let ctx2 = canvas2.current.getContext('2d');
      let ctx3 = canvas3.current.getContext('2d');

      document.addEventListener('next_frame', () => {
        ctx1.drawImage(videoTag.current, 0, 0, 640, 480)
        canvas1.current.toBlob(blob => {
          detection_service.detect(blob, { limit: 1, face_plugins: 'age,gender' })
            .then(res => {
              drawFace(ctx2, res, ctx3)
            })
            .catch(error => console.log(error))
        }, 'image/jpeg', 0.95)
      })

      const evt = new Event("next_frame", { "bubbles": true, "cancelable": false });
      document.dispatchEvent(evt);
    })
  }

  return (
    <div>
      <video ref={videoTag} width="640" height="480" autoPlay muted></video>
      <canvas ref={canvas1} width="640" id="canvas" height="480" style={{ display: 'none' }}></canvas>
      <canvas ref={canvas2} width="640" id="canvas2" height="480" style={{ position: 'absolute' }}></canvas>
      <canvas ref={canvas3} width="640" height="480" style={{ position: 'absolute' }}></canvas>

      <div>
        <button onClick={handleVideoStart}>Start video</button>
      </div>
    </div>
  );
}

export default App;
```

## Reference

### CompreFace Global Object
