segment-parser.js
(function(window) {
var
videojs = window.videojs,
FlvTag = videojs.Hls.FlvTag,
H264Stream = videojs.Hls.H264Stream,
AacStream = videojs.Hls.AacStream,
MetadataStream = videojs.Hls.MetadataStream,
MP2T_PACKET_LENGTH,
STREAM_TYPES;
/**
* An object that incrementally transmuxes MPEG2 Transport Stream
* chunks into an FLV.
*/
videojs.Hls.SegmentParser = function() {
var
self = this,
parseTSPacket,
streamBuffer = new Uint8Array(MP2T_PACKET_LENGTH),
streamBufferByteCount = 0,
h264Stream = new H264Stream(),
aacStream = new AacStream(),
h264HasTimeStampOffset = false,
aacHasTimeStampOffset = false,
timeStampOffset;
// expose the stream metadata
self.stream = {
// the mapping between transport stream programs and the PIDs
// that form their elementary streams
programMapTable: {}
};
// allow in-band metadata to be observed
self.metadataStream = new MetadataStream();
// For information on the FLV format, see
// http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
// Technically, this function returns the header and a metadata FLV tag
// if duration is greater than zero
// @param duration {number} the duration in seconds
// @return {object} the bytes of the FLV header as a Uint8Array
self.getFlvHeader = function(duration, audio, video) { // :ByteArray {
var
headBytes = new Uint8Array(3 + 1 + 1 + 4),
head = new DataView(headBytes.buffer),
metadata,
result;
// default arguments
duration = duration || 0;
audio = audio === undefined ? true : audio;
video = video === undefined ? true : video;
// signature
head.setUint8(0, 0x46); // 'F'
head.setUint8(1, 0x4c); // 'L'
head.setUint8(2, 0x56); // 'V'
// version
head.setUint8(3, 0x01);
// flags
head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
// data offset, should be 9 for FLV v1
head.setUint32(5, headBytes.byteLength);
// init the first FLV tag
if (duration <= 0) {
// no duration available so just write the first field of the first
// FLV tag
result = new Uint8Array(headBytes.byteLength + 4);
result.set(headBytes);
result.set([0, 0, 0, 0], headBytes.byteLength);
return result;
}
// write out the duration metadata tag
metadata = new FlvTag(FlvTag.METADATA_TAG);
metadata.pts = metadata.dts = 0;
metadata.writeMetaDataDouble("duration", duration);
result = new Uint8Array(headBytes.byteLength + metadata.byteLength);
result.set(headBytes);
result.set(metadata.finalize(), headBytes.byteLength);
return result;
};
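// e.g. getFlvHeader(0) produces the 9-byte FLV header followed by the
// 4-byte PreviousTagSize0 field (13 bytes total), while a positive
// duration appends a metadata tag carrying that duration instead.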
self.flushTags = function() {
h264Stream.finishFrame();
};
/**
* Returns whether a call to `getNextTag()` will be successful.
* @return {number} the number of transmuxed FLV tags that are ready;
* truthy when at least one tag is available
*/
self.tagsAvailable = function() { // :int {
return h264Stream.tags.length + aacStream.tags.length;
};
/**
* Returns the next tag in decoder-timestamp (DTS) order.
* @return {object} the next tag to be decoded.
*/
self.getNextTag = function() {
var tag;
if (!h264Stream.tags.length) {
// only audio tags remain
tag = aacStream.tags.shift();
} else if (!aacStream.tags.length) {
// only video tags remain
tag = h264Stream.tags.shift();
} else if (aacStream.tags[0].dts < h264Stream.tags[0].dts) {
// audio should be decoded next
tag = aacStream.tags.shift();
} else {
// video should be decoded next
tag = h264Stream.tags.shift();
}
return tag.finalize();
};
self.parseSegmentBinaryData = function(data) { // :ByteArray) {
var
dataPosition = 0,
dataSlice;
// To avoid an extra copy, we will stash overflow data, and only
// reconstruct the first packet. The rest of the packets will be
// parsed directly from data
if (streamBufferByteCount > 0) {
if (data.byteLength + streamBufferByteCount < MP2T_PACKET_LENGTH) {
// the current data is less than a single m2ts packet, so stash it
// until we receive more
streamBuffer.set(data, streamBufferByteCount);
streamBufferByteCount += data.byteLength;
return;
} else {
// we have enough data for an m2ts packet
// process it immediately
dataSlice = data.subarray(0, MP2T_PACKET_LENGTH - streamBufferByteCount);
streamBuffer.set(dataSlice, streamBufferByteCount);
parseTSPacket(streamBuffer);
// reset the buffer
streamBuffer = new Uint8Array(MP2T_PACKET_LENGTH);
streamBufferByteCount = 0;
}
}
while (true) {
// Make sure we are TS aligned
while(dataPosition < data.byteLength && data[dataPosition] !== 0x47) {
// If there is no sync byte skip forward until we find one
// TODO if we find a sync byte, look 188 bytes in the future (if
// possible). If there is not a sync byte there, keep looking
dataPosition++;
}
// base case: not enough data to parse an m2ts packet
if (data.byteLength - dataPosition < MP2T_PACKET_LENGTH) {
if (data.byteLength - dataPosition > 0) {
// there are bytes remaining, save them for next time
streamBuffer.set(data.subarray(dataPosition),
streamBufferByteCount);
streamBufferByteCount += data.byteLength - dataPosition;
}
return;
}
// attempt to parse an m2ts packet
if (parseTSPacket(data.subarray(dataPosition, dataPosition + MP2T_PACKET_LENGTH))) {
dataPosition += MP2T_PACKET_LENGTH;
} else {
// If there was an error parsing a TS packet, it could be
// because we are not TS packet aligned. Step forward by one
// byte and allow the loop above to find the next sync byte
videojs.log('error parsing m2ts packet, attempting to re-align');
dataPosition++;
}
}
};
/**
* Parses a video/mp2t packet and appends the underlying video and
* audio data onto h264stream and aacStream, respectively.
* @param data {Uint8Array} the bytes of an MPEG2-TS packet,
* including the sync byte.
* @return {boolean} whether a valid packet was encountered
*/
// TODO add more testing to make sure we don't walk past the end of a TS
// packet!
parseTSPacket = function(data) { // :ByteArray):Boolean {
var
offset = 0, // :uint
end = offset + MP2T_PACKET_LENGTH, // :uint
// Payload Unit Start Indicator
pusi = !!(data[offset + 1] & 0x40), // mask: 0100 0000
// packet identifier (PID), a unique identifier for the elementary
// stream this packet describes
pid = (data[offset + 1] & 0x1F) << 8 | data[offset + 2], // mask: 0001 1111
// adaptation_field_control, whether this header is followed by an
// adaptation field, a payload, or both
afflag = (data[offset + 3] & 0x30 ) >>> 4,
patTableId, // :int
patCurrentNextIndicator, // Boolean
patSectionLength, // :uint
pesPacketSize, // :int,
dataAlignmentIndicator, // :Boolean,
ptsDtsIndicator, // :int
pesHeaderLength, // :int
pts, // :uint
dts, // :uint
pmtCurrentNextIndicator, // :Boolean
pmtProgramDescriptorsLength,
pmtSectionLength, // :uint
streamType, // :int
elementaryPID, // :int
ESInfolength; // :int
// the Continuity Counter could be used as a sanity check and for
// corrupt stream detection
// cc = (data[offset + 3] & 0x0F);
// move past the header
offset += 4;
// if an adaptation field is present, its length is specified by
// the fifth byte of the TS packet. The adaptation field is used
// to specify some forms of timing and control data that we do
// not currently use.
if (afflag > 0x01) {
offset += data[offset] + 1;
}
// Handle a Program Association Table (PAT). PATs map PIDs to
// individual programs. If this transport stream was being used
// for television broadcast, a program would probably be
// equivalent to a channel. In HLS, it would be very unusual to
// create an mp2t stream with multiple programs.
if (0x0000 === pid) {
// The PAT may be split into multiple sections and those
// sections may be split into multiple packets. If a PAT
// section starts in this packet, PUSI will be true and the
// first byte of the payload will indicate the offset from
// the current position to the start of the section.
if (pusi) {
offset += 1 + data[offset];
}
patTableId = data[offset];
if (patTableId !== 0x00) {
videojs.log('The table_id of the PAT should be 0x00 but was ' +
patTableId.toString(16));
}
// the current_next_indicator specifies whether this PAT is
// currently applicable or is part of the next table to become
// active
patCurrentNextIndicator = !!(data[offset + 5] & 0x01);
if (patCurrentNextIndicator) {
// section_length specifies the number of bytes following
// its position to the end of this section
patSectionLength = (data[offset + 1] & 0x0F) << 8 | data[offset + 2];
// move past the rest of the PSI header to the first program
// map table entry
offset += 8;
// we don't handle streams with more than one program, so
// raise an exception if we encounter one
// section_length = rest of header + (n * entry length) + CRC
// = 5 + (n * 4) + 4
if ((patSectionLength - 5 - 4) / 4 !== 1) {
throw new Error("TS has more than 1 program");
}
// the Program Map Table (PMT) associates the underlying
// video and audio streams with a unique PID
self.stream.pmtPid = (data[offset + 2] & 0x1F) << 8 | data[offset + 3];
}
} else if (pid === self.stream.programMapTable[STREAM_TYPES.h264] ||
pid === self.stream.programMapTable[STREAM_TYPES.adts] ||
pid === self.stream.programMapTable[STREAM_TYPES.metadata]) {
if (pusi) {
// this check could be commented out for speed
if (0x00 !== data[offset + 0] || 0x00 !== data[offset + 1] || 0x01 !== data[offset + 2]) {
// look for PES start code
throw new Error("PES did not begin with start code");
}
// var sid:int = data[offset+3]; // StreamID
pesPacketSize = (data[offset + 4] << 8) | data[offset + 5];
dataAlignmentIndicator = (data[offset + 6] & 0x04) !== 0;
ptsDtsIndicator = data[offset + 7];
pesHeaderLength = data[offset + 8]; // TODO sanity check header length
offset += 9; // Skip past PES header
// PTS and DTS are normally stored as 33-bit numbers.
// JavaScript does not have an integer type larger than 32 bits,
// BUT we need to convert from the 90kHz clock to a millisecond
// timescale anyway, so what we do instead is drop the least
// significant bit (the same as dividing by two); then we can
// divide by 45 (45 * 2 = 90) to get milliseconds.
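// e.g. a PTS of 90,000 ticks (one second on the 90kHz clock) becomes
// 45,000 once the low bit is dropped, and 45,000 / 45 = 1,000 ms.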
if (ptsDtsIndicator & 0xC0) {
// the PTS and DTS are not written out directly. For information on
// how they are encoded, see
// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
pts = (data[offset + 0] & 0x0E) << 28
| (data[offset + 1] & 0xFF) << 21
| (data[offset + 2] & 0xFE) << 13
| (data[offset + 3] & 0xFF) << 6
| (data[offset + 4] & 0xFE) >>> 2;
pts /= 45;
dts = pts;
if (ptsDtsIndicator & 0x40) {// DTS
dts = (data[offset + 5] & 0x0E ) << 28
| (data[offset + 6] & 0xFF ) << 21
| (data[offset + 7] & 0xFE ) << 13
| (data[offset + 8] & 0xFF ) << 6
| (data[offset + 9] & 0xFE ) >>> 2;
dts /= 45;
}
}
// skip past the "optional" portion of the PES header
offset += pesHeaderLength;
// align the metadata stream PTS values with the start of
// the other elementary streams
if (!self.metadataStream.timestampOffset) {
self.metadataStream.timestampOffset = pts;
}
if (pid === self.stream.programMapTable[STREAM_TYPES.h264]) {
if (!h264HasTimeStampOffset) {
h264HasTimeStampOffset = true;
if (timeStampOffset === undefined) {
timeStampOffset = pts;
}
h264Stream.setTimeStampOffset(timeStampOffset);
}
h264Stream.setNextTimeStamp(pts,
dts,
dataAlignmentIndicator);
} else if (pid === self.stream.programMapTable[STREAM_TYPES.adts]) {
if (!aacHasTimeStampOffset) {
aacHasTimeStampOffset = true;
if (timeStampOffset === undefined) {
timeStampOffset = pts;
}
aacStream.setTimeStampOffset(timeStampOffset);
}
aacStream.setNextTimeStamp(pts,
pesPacketSize,
dataAlignmentIndicator);
}
}
if (pid === self.stream.programMapTable[STREAM_TYPES.adts]) {
aacStream.writeBytes(data, offset, end - offset);
} else if (pid === self.stream.programMapTable[STREAM_TYPES.h264]) {
h264Stream.writeBytes(data, offset, end - offset);
} else if (pid === self.stream.programMapTable[STREAM_TYPES.metadata]) {
self.metadataStream.push({
pts: pts,
dts: dts,
data: data.subarray(offset)
});
}
} else if (self.stream.pmtPid === pid) {
// similarly to the PAT, jump to the first byte of the section
if (pusi) {
offset += 1 + data[offset];
}
if (data[offset] !== 0x02) {
videojs.log('The table_id of a PMT should be 0x02 but was ' +
data[offset].toString(16));
}
// whether this PMT is currently applicable or is part of the
// next table to become active
pmtCurrentNextIndicator = !!(data[offset + 5] & 0x01);
if (pmtCurrentNextIndicator) {
// overwrite any existing program map table
self.stream.programMapTable = {};
// section_length specifies the number of bytes following
// its position to the end of this section
pmtSectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2];
// subtract the length of the program info descriptors
pmtProgramDescriptorsLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11];
pmtSectionLength -= pmtProgramDescriptorsLength;
// skip the CRC and PSI data we don't care about
// rest of header + CRC = 9 + 4
pmtSectionLength -= 13;
// align offset to the first entry in the PMT
offset += 12 + pmtProgramDescriptorsLength;
// iterate through the entries
while (0 < pmtSectionLength) {
// the type of data carried in the PID this entry describes
streamType = data[offset + 0];
// the PID for this entry
elementaryPID = (data[offset + 1] & 0x1F) << 8 | data[offset + 2];
if (streamType === STREAM_TYPES.h264 &&
self.stream.programMapTable[streamType] &&
self.stream.programMapTable[streamType] !== elementaryPID) {
throw new Error("Program has more than 1 video stream");
} else if (streamType === STREAM_TYPES.adts &&
self.stream.programMapTable[streamType] &&
self.stream.programMapTable[streamType] !== elementaryPID) {
throw new Error("Program has more than 1 audio stream");
}
// add the stream type entry to the map
self.stream.programMapTable[streamType] = elementaryPID;
// TODO add support for MP3 audio
// the length of the entry descriptor
ESInfolength = (data[offset + 3] & 0x0F) << 8 | data[offset + 4];
// capture the stream descriptor for metadata streams
if (streamType === STREAM_TYPES.metadata) {
self.metadataStream.descriptor = new Uint8Array(data.subarray(offset + 5, offset + 5 + ESInfolength));
}
// move to the first byte after the end of this entry
offset += 5 + ESInfolength;
pmtSectionLength -= 5 + ESInfolength;
}
}
// We could test the CRC here to detect corruption with extra CPU cost
} else if (0x0011 === pid) {
// Service Description Table
} else if (0x1FFF === pid) {
// NULL packet
} else {
videojs.log("Unknown PID parsing TS packet: " + pid);
}
return true;
};
self.getTags = function() {
return h264Stream.tags;
};
self.stats = {
h264Tags: function() {
return h264Stream.tags.length;
},
aacTags: function() {
return aacStream.tags.length;
}
};
};
// MPEG2-TS constants
videojs.Hls.SegmentParser.MP2T_PACKET_LENGTH = MP2T_PACKET_LENGTH = 188;
videojs.Hls.SegmentParser.STREAM_TYPES = STREAM_TYPES = {
h264: 0x1b,
adts: 0x0f,
metadata: 0x15
};
})(window);
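// Example usage (an illustrative sketch; `segmentBytes` is assumed to be a
// Uint8Array of MPEG2-TS segment data obtained elsewhere):
//
//   var parser = new videojs.Hls.SegmentParser(),
//       tags = [parser.getFlvHeader()];
//   parser.parseSegmentBinaryData(segmentBytes);
//   parser.flushTags();
//   while (parser.tagsAvailable()) {
//     tags.push(parser.getNextTag()); // finalized FLV tags in DTS order
//   }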