This repository has been archived by the owner on Dec 7, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 30
/
ws_client_new.html
245 lines (187 loc) · 9.62 KB
/
ws_client_new.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
<!DOCTYPE html>
<!--
Here we combine two things:
https://github.com/elsampsa/websocket-mse-demo/blob/master/ws_client.html
https://stackoverflow.com/questions/54186634/sending-periodic-metadata-in-fragmented-live-mp4-stream/
-->
<html>
<head>
<title>WebSocket MSE demo</title>
</head>
<body>
<!-- muted attribute required for chrome autoplay-->
<video id="stream_live"
width="640" height="480"
controls="false" autoplay="true"
muted="muted"
preload="auto">
Your browser does not support the video tag.
</video>
</body>
<script>
// ---- user-tunable parameters --------------------------------------------
var verbose = false;  // set true to saturate the console with debug output
var buffering_sec = 1;  // target live-buffer depth in seconds; use some reasonable value
var buffering_sec_seek = buffering_sec * 0.9;
// ..seek the stream once playback is this far away from the last available timestamp
var buffering_sec_seek_distance = buffering_sec * 0.5;
// ..and jump to this distance from the last available timestamp

// ---- internal parameters ------------------------------------------------
// MIME type / codec string handed to MediaSource.addSourceBuffer.
// See https://wiki.whatwg.org/wiki/Video_type_parameters
// If your stream has audio, remember to include it in these definitions,
// otherwise your MSE pipeline goes sour.
var mimeType = "video/mp4";
var codecs = "avc1.4D401F";
var codecPars = mimeType + ';codecs="' + codecs + '"';

// ---- mutable state -------------------------------------------------------
var stream_started = false; // is the source_buffer "updateend" callback active or not
var ms = new MediaSource(); // the media source instance
var queue = [];             // fifo for incoming media packets
var stream_live;            // the HTMLMediaElement (i.e. the <video> element)
var ws;                     // the websocket connection
var seeked = false;         // whether we have seeked manually once already
var cc = 0;                 // running count of buffers appended
var source_buffer;          // SourceBuffer instance
var pass = 0;               // init progress: 0=await ftyp, 1=await moov, 2=await good moof, 3=streaming
// *** MP4 Box manipulation functions ***
// taken from here: https://stackoverflow.com/questions/54186634/sending-periodic-metadata-in-fragmented-live-mp4-stream/
function toInt(arr, index) { // From bytes to big-endian 32-bit integer. Input: Uint8Array, index
  // Honor the view's byteOffset: arr may be a Uint8Array view into a larger
  // ArrayBuffer, in which case DataView(arr.buffer, 0) would read from the
  // wrong position. For full-buffer views this is byte-identical behavior.
  var dv = new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
  return dv.getInt32(index, false); // false => big endian
}
function toString(arr, fr, to) { // From bytes to string. Input: Uint8Array, start index, stop index.
  // One char per byte (charCode == byte value), same as the classic
  // String.fromCharCode.apply(null, slice) trick but without apply():
  // https://developers.google.com/web/updates/2012/06/How-to-convert-ArrayBuffer-to-and-from-String
  var bytes = arr.slice(fr, to);
  var out = "";
  for (var k = 0; k < bytes.length; k++) {
    out += String.fromCharCode(bytes[k]);
  }
  return out;
}
function getBox(arr, i) { // Parse the MP4 box header at index i. Returns [length, name].
  var boxLength = toInt(arr, i);        // first 4 bytes: box size
  var boxName = toString(arr, i + 4, i + 8); // next 4 bytes: fourcc name
  return [boxLength, boxName];
}
function getSubBox(arr, box_name) { // Input: Uint8Array (a whole box), child box name.
  // Scans the direct children of the box starting at arr[0] and returns a
  // copy (slice) of the child named box_name, or null if absent. If several
  // children share the name, the LAST one wins (original behavior kept).
  // Fix: res / main_length / name / l were implicit globals; now scoped.
  var res = getBox(arr, 0);
  var main_length = res[0]; // enclosing box's total length
  var i = 8;                // skip the (size 4) + (name 4) header
  var sub_box = null;
  while (i < main_length) {
    res = getBox(arr, i);
    var l = res[0];
    var name = res[1];
    if (box_name == name) {
      // slice() copies, so the returned box starts at offset 0 of a fresh buffer
      sub_box = arr.slice(i, i + l);
    }
    i = i + l;
  }
  return sub_box;
}
function hasFirstSampleFlag(arr) { // Input: Uint8Array holding a moof box.
  // Returns true iff the fragment's trun box has the
  // first-sample-flags-present flag (0x000004) set, i.e. the fragment can
  // start decoding. Box layout: [moof [mfhd] [traf [tfhd] [tfdt] [trun]]]
  var traf = getSubBox(arr, "traf");
  if (traf == null) { return false; }
  var trun = getSubBox(traf, "trun");
  if (trun == null) { return false; }
  // ISO/IEC 14496-12:2012(E) .. pages 5 and 57
  // bytes: (size 4), (name 4), (version 1 + tr_flags 3)
  var flags = trun.slice(10, 13);
  // Fix: f was an implicit global; now a local.
  var f = flags[1] & 4; // flags[1] is the low byte of tr_flags; bit 0x04 = first-sample-flags-present
  return f == 4;
}
// consider these callbacks:
// - putPacket : called when websocket receives data
// - loadPacket : called when source_buffer is ready for more data
// Both operate on a common fifo
function putPacket(arr) {
  // Receives an ArrayBuffer. Called when the websocket gets more data.
  // - Until the init segment (ftyp, then moov) and a moof carrying the
  //   first-sample flag have been seen (pass reaches 3), packets are dropped.
  // - First packet after the queue runs dry is written directly to
  //   source_buffer; everything else goes to the queue for loadPacket().
  // Fix: res / main_length / name / df / seek_to / data were implicit
  // globals; the redundant "data = arr" alias is gone.
  var memview = new Uint8Array(arr);
  if (verbose) { console.log("got", arr.byteLength, "bytes. Values=", memview[0], memview[1], memview[2], memview[3], memview[4]); }
  var res = getBox(memview, 0);
  var name = res[1]; // this box's name (its length, res[0], is not needed here)
  if ((name == "ftyp") && (pass == 0)) {
    pass = pass + 1;
    console.log("got ftyp");
  }
  else if ((name == "moov") && (pass == 1)) {
    pass = pass + 1;
    console.log("got moov");
  }
  else if ((name == "moof") && (pass == 2)) {
    if (hasFirstSampleFlag(memview)) {
      pass = pass + 1;
      console.log("got that special moof");
    }
    else {
      return; // a moof without the flag cannot start decoding: drop it
    }
  }
  else if (pass < 3) {
    return; // still waiting for the init segment: drop everything else
  }
  // Keep the latency to a minimum: if playback lags too far behind the
  // newest buffered timestamp, seek closer to the live edge.
  var latest = stream_live.duration;
  if ((stream_live.duration >= buffering_sec) &&
    ((latest - stream_live.currentTime) > buffering_sec_seek)) {
    console.log("seek from ", stream_live.currentTime, " to ", latest);
    var df = (stream_live.duration - stream_live.currentTime); // this much away from the last available frame
    if (df > buffering_sec_seek) {
      var seek_to = stream_live.duration - buffering_sec_seek_distance;
      stream_live.currentTime = seek_to;
    }
  }
  if (!stream_started) {
    // first packet (or the queue just drained): feed source_buffer directly
    if (verbose) { console.log("Streaming started: ", memview[0], memview[1], memview[2], memview[3], memview[4]); }
    source_buffer.appendBuffer(arr);
    stream_started = true;
    cc = cc + 1;
    return;
  }
  queue.push(arr); // add to the end; loadPacket() pops from the front
  if (verbose) { console.log("queue push:", queue.length); }
}
function loadPacket() { // "updateend" handler: called when source_buffer is ready for more
  if (source_buffer.updating) {
    return; // previous append still in flight; we will be called again
  }
  // really, really ready
  if (queue.length > 0) {
    // Fix: inp was an implicit global; now a local.
    var inp = queue.shift(); // pop from the beginning
    if (verbose) { console.log("queue pop:", queue.length); }
    var memview = new Uint8Array(inp);
    if (verbose) { console.log(" ==> writing buffer with", memview[0], memview[1], memview[2], memview[3]); }
    source_buffer.appendBuffer(inp);
    cc = cc + 1;
  }
  else {
    // the queue runs empty, so putPacket() feeds the next packet directly
    stream_started = false;
  }
}
function opened() { // MediaSource "sourceopen" handler: the object is ready to go
  // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/duration
  ms.duration = buffering_sec;
  source_buffer = ms.addSourceBuffer(codecPars);
  // https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/mode
  // 'sequence': timestamps are generated from append order, which suits a
  // live stream ('segments' would honor the timestamps embedded in the data).
  // Fix: removed the unused "myMode" local that read the mode before setting it.
  source_buffer.mode = 'sequence';
  source_buffer.addEventListener("updateend", loadPacket);
  ws = new WebSocket("ws://localhost:8089/ws/");
  ws.binaryType = "arraybuffer";
  ws.onmessage = function (event) {
    putPacket(event.data);
  };
}
function startup() {
  // Wire everything together once the page has loaded.
  ms.addEventListener('sourceopen', opened, false);
  // grab the <video> element and point it at our MediaSource
  stream_live = document.getElementById('stream_live');
  stream_live.src = window.URL.createObjectURL(ms);
}
// kick things off once the document has finished loading
window.onload = function () {
  startup();
};
</script>
</html>