-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: detection.js
More file actions
106 lines (99 loc) · 2.96 KB
/
detection.js
File metadata and controls
106 lines (99 loc) · 2.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
import React from "react";
import ReactDOM from "react-dom";
import * as cocoSsd from "@tensorflow-models/coco-ssd";
import "@tensorflow/tfjs";
import "./style.css";
class App1 extends React.Component {
videoRef = React.createRef();
canvasRef = React.createRef();
componentDidMount() {
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
const webCamPromise = navigator.mediaDevices
.getUserMedia({
audio: false,
video: {
facingMode: "user"
}
})
.then(stream => {
window.stream = stream;
this.videoRef.current.srcObject = stream;
return new Promise((resolve, reject) => {
this.videoRef.current.onloadedmetadata = () => {
resolve();
};
});
});
const modelPromise = cocoSsd.load();
Promise.all([modelPromise, webCamPromise])
.then(values => {
this.detectFrame(this.videoRef.current, values[0]);
})
.catch(error => {
console.error(error);
});
}
}
detectFrame = (video, model) => {
model.detect(video).then(predictions => {
this.renderPredictions(predictions);
requestAnimationFrame(() => {
this.detectFrame(video, model);
});
});
};
renderPredictions = predictions => {
const ctx = this.canvasRef.current.getContext("2d");
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
// Font options.
const font = "16px sans-serif";
ctx.font = font;
ctx.textBaseline = "top";
predictions.forEach(prediction => {
const x = prediction.bbox[0];
const y = prediction.bbox[1];
const width = prediction.bbox[2];
const height = prediction.bbox[3];
// Draw the bounding box.
ctx.strokeStyle = "#00FFFF";
ctx.lineWidth = 4;
ctx.strokeRect(x, y, width, height);
// Draw the label background.
ctx.fillStyle = "#00FFFF";
const textWidth = ctx.measureText(prediction.class).width;
const textHeight = parseInt(font, 10); // base 10
ctx.fillRect(x, y, textWidth + 4, textHeight + 4);
});
predictions.forEach(prediction => {
const x = prediction.bbox[0];
const y = prediction.bbox[1];
// Draw the text last to ensure it's on top.
ctx.fillStyle = "#000000";
ctx.fillText(prediction.class, x, y);
});
};
render() {
return (
<div>
<video
className="size"
autoPlay
playsInline
muted
ref={this.videoRef}
width="600"
height="500"
/>
<canvas
className="size"
ref={this.canvasRef}
width="600"
height="500"
/>
</div>
);
}
}
// Consumers import this component and mount it themselves.
export default App1;