forked from jjhbw/barcode-scanner-webassembly
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathindex.html
226 lines (178 loc) · 5.96 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>barcode scanner wasm</title>
<style>
body {
margin: 0;
padding: 0;
background-color: black;
}
.wrapper {
display: flex;
align-items: center;
justify-content: center;
min-height: 100vh;
}
canvas {
/* transform: rotate(-90deg); */
}
</style>
</head>
<!-- We create some DOM elements necessary for grabbing webcam frames -->
<body>
<div class="wrapper">
<!-- The live video element is kept effectively invisible by its 1x1 size (it is display:block, not display:none), so only the canvas is visible to the user. -->
<video id="live" width="1" height="1" autoplay style="display:block;" playsinline> </video>
<canvas id="canvas"> </canvas>
</div>
</body>
<!-- Import the javascript bundle produced by Emscripten (defines the global `Module`) -->
<script src="a.out.js"></script>
<script>
// The main 'application code' tying it all together.
// Execute the application code when the WebAssembly module is ready.
Module.onRuntimeInitialized = async _ => {
// wrap all C functions using cwrap. Note that we have to provide cwrap with the function signature.
// scan_image(buffer_ptr, width, height) -> void; results come back via Module['processResult'].
// create_buffer(width, height) -> pointer into the Wasm heap.
// destroy_buffer(pointer) -> void.
const api = {
scan_image: Module.cwrap('scan_image', '', ['number', 'number', 'number']),
create_buffer: Module.cwrap('create_buffer', 'number', ['number', 'number']),
destroy_buffer: Module.cwrap('destroy_buffer', '', ['number']),
};
const video = document.getElementById("live");
const canvas = document.getElementById("canvas");
const ctx = canvas.getContext('2d');
// settings for the getUserMedia call
const constraints = {
video: {
// the browser will try to honor this resolution, but it may end up being lower.
width: {
min: 1280,
max: 3840,
ideal: 3840
},
// width: 1920,
// width: 1280,
// height: 720,
// 1280x720 for iphone
// empty for ipad
// Require the rear camera; getUserMedia rejects (OverconstrainedError) if
// no environment-facing camera exists — TODO confirm that is the intent.
facingMode: {
exact: "environment"
}
}
};
// True when the page loads in a portrait-shaped viewport; used by the
// orientationchange handler to decide how to resize the canvas.
const startingAtVerticalView = window.innerHeight > window.innerWidth;
// Canvas dimensions captured once the stream opens (set in the getUserMedia
// callback below), so rotation can restore them.
let heightOriginal;
let widthOriginal;
// Aspect ratio (width / height) of the webcam stream actually granted.
let ratio;
// self executing function here
// (function() {
// heightOriginal = document.body.clientHeight;
// widthOriginal = document.body.clientWidth;
// })();
// Recompute the canvas dimensions when the device rotates, keeping the canvas
// aspect ratio in step with the webcam feed's `ratio` so frames are not drawn
// distorted. (The original assigned `currenntViewIsVertical`, a typo'd,
// undeclared, never-read variable — an implicit global that throws in strict
// mode; it has been removed.)
window.addEventListener("orientationchange", function() {
  console.log("the orientation of the device is changed w: " + document.body.clientWidth + ",h: " + document.body.clientHeight);
  // True when we are rotating *away from* a portrait-shaped canvas.
  const fromVertical = canvas.height > canvas.width;
  const oldHeight = canvas.height;
  const oldWidth = canvas.width;
  if (startingAtVerticalView) {
    if (fromVertical) {
      // portrait -> landscape: fit the feed inside the old width.
      const newHeight = Math.min(oldWidth, oldWidth * ratio);
      canvas.height = newHeight;
      canvas.width = Math.min(oldWidth, newHeight / ratio);
      return;
    }
    // landscape -> portrait: restore the dimensions captured at startup.
    canvas.width = widthOriginal;
    canvas.height = heightOriginal;
    return;
  }
  if (fromVertical) {
    // portrait -> landscape on a device that started in landscape:
    // restore the startup dimensions.
    canvas.width = widthOriginal;
    canvas.height = heightOriginal;
    return;
  }
  // landscape -> portrait: empirically chosen scale factor — TODO confirm on more devices.
  const scaleUp = 1.95;
  canvas.width = oldHeight * scaleUp;
  canvas.height = oldWidth * scaleUp;
});
// Open the webcam stream and, once it is live, size the canvas to the feed's
// aspect ratio and start the 250 ms scan loop.
navigator.mediaDevices.getUserMedia(constraints).then((stream) => {
  // stream is a MediaStream object
  video.srcObject = stream;
  // play() returns a promise; surface autoplay-policy failures instead of
  // leaving an unhandled rejection.
  video.play().catch((err) => console.error('video.play() failed:', err));
  // Ask the track which resolution the browser actually granted, so the
  // canvas can match the feed's aspect ratio.
  const track = stream.getVideoTracks()[0];
  const actualSettings = track.getSettings();
  const { width, height } = actualSettings;
  console.log(actualSettings.width, actualSettings.height);
  ratio = width / height;
  if (startingAtVerticalView) {
    // Portrait: fill the viewport height, clamp width to the viewport.
    canvas.height = document.body.clientHeight;
    canvas.width = Math.min(canvas.height * ratio, document.body.clientWidth);
  } else {
    // Landscape: fill the viewport width, clamp height to the viewport.
    canvas.width = document.body.clientWidth;
    canvas.height = Math.min(canvas.width / ratio, document.body.clientHeight);
  }
  // Remember the startup dimensions so the orientationchange handler can restore them.
  widthOriginal = canvas.width;
  heightOriginal = canvas.height;
  // Every 250 ms, draw the contents of the video to the canvas and run the
  // detector. (The interval id was previously stored in an unused binding.)
  setInterval(detectSymbols, 250);
}).catch((e) => {
  // Rethrowing here (as the original did) only turns the error into an
  // unhandled promise rejection; log it (e.g. permission denied, no rear
  // camera) so the failure is actually visible.
  console.error('getUserMedia failed:', e);
});
// Grab one frame from the video, convert it to 8-bit grayscale, copy it into
// the Wasm heap, and run the scanner. Results arrive asynchronously via
// Module['processResult'].
function detectSymbols() {
  // grab a frame from the media source and draw it to the canvas
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  // get the image data (RGBA bytes) from the canvas
  const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
  // Convert RGBA to grayscale with an integer luma approximation:
  // y = (66*R + 129*G + 25*B + 4096) >> 8. A preallocated typed array avoids
  // growing a plain JS array of width*height elements on every frame, as the
  // original did.
  const d = image.data;
  const grayData = new Uint8Array(image.width * image.height);
  for (let i = 0, j = 0; i < d.length; i += 4, j++) {
    grayData[j] = (d[i] * 66 + d[i + 1] * 129 + d[i + 2] * 25 + 4096) >> 8;
  }
  // put the data into an allocated buffer on the wasm heap.
  const p = api.create_buffer(image.width, image.height);
  Module.HEAP8.set(grayData, p);
  // call the scanner function
  api.scan_image(p, image.width, image.height);
  // Clean up. (Not strictly necessary here — we could reuse the buffer — but
  // it demonstrates managing Wasm heap memory from the JS environment.)
  api.destroy_buffer(p);
}
// drawPoly expects a flat array of coordinates forming a polygon
// (e.g. [x1,y1,x2,y2,...]) and strokes its closed outline in red on the
// given 2D context.
function drawPoly(ctx, poly) {
  ctx.beginPath();
  ctx.moveTo(poly[0], poly[1]);
  // `let` keeps the loop index local — the original leaked `item` as an
  // implicit global, which throws a ReferenceError in strict mode.
  for (let item = 2; item < poly.length - 1; item += 2) {
    ctx.lineTo(poly[item], poly[item + 1]);
  }
  ctx.lineWidth = 2;
  ctx.strokeStyle = "#FF0000";
  ctx.closePath();
  ctx.stroke();
}
/**
 * Render the string decoded from a barcode as red 15px text on the canvas.
 * @param {CanvasRenderingContext2D} ctx - target drawing context
 * @param {string} data - decoded barcode contents
 * @param {number} x - text anchor x coordinate
 * @param {number} y - text anchor y coordinate
 */
function renderData(ctx, data, x, y) {
  Object.assign(ctx, { font: "15px Arial", fillStyle: "red" });
  ctx.fillText(data, x, y);
}
// set the function that should be called whenever a barcode is detected.
// The Wasm side invokes this with the symbol type, the decoded string, and a
// flat polygon of bounding coordinates.
Module['processResult'] = function (symbol, data, polygon) {
  for (const entry of ["Data liberated from WASM heap:", symbol, data, polygon]) {
    console.log(entry);
  }
  // outline the detected symbol on the canvas
  drawPoly(ctx, polygon);
  // label it just above the polygon's first vertex
  renderData(ctx, data, polygon[0], polygon[1] - 10);
};
}
</script>
</html>