54ca5993 by David LaPalomento

Translate H.264 Annex B streams to AVC elementary streams

Fragmented MP4s expect the H264 data in mdats to be a sequence of NAL units, each preceded by its length. MPEG2-TS streams transmit H264 data using start codes to signal the beginning of a NAL unit. When parsing NALs to build the sample table, reformat them to remove the start codes and add length fields. Pulled the video-related program stream parsing into its own stream object because Transmuxer was getting too busy.
1 parent 31e49fcd
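
The translation this commit performs can be sketched as follows. This is a rough illustration rather than the committed implementation: the helper name annexBToAvc and its plain-array input are hypothetical, and the NAL units are assumed to already have their Annex B start codes stripped (as NalByteStream emits them). Each unit is copied into the output prefixed by its byte length as a 32-bit big-endian integer, which is the layout a fragmented MP4 expects inside an mdat.

// rough sketch (not part of this commit): convert start-code-delimited NAL
// units into the length-prefixed form used for AVC sample data
var annexBToAvc = function(nalUnits) {
  var totalLength = 0, offset = 0, i, result, view;

  // every NAL unit gains a four-byte length field in place of its start code
  for (i = 0; i < nalUnits.length; i++) {
    totalLength += 4 + nalUnits[i].byteLength;
  }
  result = new Uint8Array(totalLength);
  view = new DataView(result.buffer);

  for (i = 0; i < nalUnits.length; i++) {
    // write the NAL unit length as a 32-bit big-endian integer...
    view.setUint32(offset, nalUnits[i].byteLength);
    offset += 4;
    // ...followed by the NAL unit bytes themselves
    result.set(nalUnits[i], offset);
    offset += nalUnits[i].byteLength;
  }
  return result;
};

// annexBToAvc([new Uint8Array([0x09, 0xf0]), new Uint8Array([0x08, 0x01])])
// produces <00 00 00 02 09 f0 00 00 00 02 08 01>
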
@@ -365,7 +365,7 @@ stsd = function(track) {
365 track.profileIdc, // AVCProfileIndication 365 track.profileIdc, // AVCProfileIndication
366 track.profileCompatibility, // profile_compatibility 366 track.profileCompatibility, // profile_compatibility
367 track.levelIdc, // AVCLevelIndication 367 track.levelIdc, // AVCLevelIndication
368 0xff // lengthSizeMinusOne 368 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
369 ].concat([ 369 ].concat([
370 track.sps.length // numOfSequenceParameterSets 370 track.sps.length // numOfSequenceParameterSets
371 ]).concat(sequenceParameterSets).concat([ 371 ]).concat(sequenceParameterSets).concat([
@@ -438,7 +438,7 @@ traf = function(track) {
438 0x00, 0x00, 0x00, 0x00 // baseMediaDecodeTime 438 0x00, 0x00, 0x00, 0x00 // baseMediaDecodeTime
439 ])), 439 ])),
440 trun(track, 440 trun(track,
441 sdtp.length + 441 sampleDependencyTable.length +
442 16 + // tfhd 442 16 + // tfhd
443 16 + // tfdt 443 16 + // tfdt
444 8 + // traf header 444 8 + // traf header
...
@@ -14,7 +14,10 @@
14 (function(window, videojs, undefined) { 14 (function(window, videojs, undefined) {
15 'use strict'; 15 'use strict';
16 16
17 var PacketStream, ParseStream, ProgramStream, Transmuxer, AacStream, H264Stream, NalByteStream, MP2T_PACKET_LENGTH, H264_STREAM_TYPE, ADTS_STREAM_TYPE, mp4; 17 var
18 PacketStream, ParseStream, ProgramStream, VideoSegmentStream,
19 Transmuxer, AacStream, H264Stream, NalByteStream,
20 MP2T_PACKET_LENGTH, H264_STREAM_TYPE, ADTS_STREAM_TYPE, mp4;
18 21
19 MP2T_PACKET_LENGTH = 188; // bytes 22 MP2T_PACKET_LENGTH = 188; // bytes
20 H264_STREAM_TYPE = 0x1b; 23 H264_STREAM_TYPE = 0x1b;
@@ -402,7 +405,6 @@ AacStream.prototype = new videojs.Hls.Stream();
402 NalByteStream = function() { 405 NalByteStream = function() {
403 var 406 var
404 i = 6, 407 i = 6,
405 // the first NAL unit is prefixed by an extra zero byte
406 syncPoint = 1, 408 syncPoint = 1,
407 buffer; 409 buffer;
408 NalByteStream.prototype.init.call(this); 410 NalByteStream.prototype.init.call(this);
@@ -419,15 +421,36 @@ NalByteStream = function() {
419 buffer = swapBuffer; 421 buffer = swapBuffer;
420 } 422 }
421 423
422 // scan for synchronization byte sequences (0x00 00 01) 424 // Rec. ITU-T H.264, Annex B
425 // scan for NAL unit boundaries
423 426
424 // a match looks like this: 427 // a match looks like this:
425 // 0 0 1 .. NAL .. 0 0 1 428 // 0 0 1 .. NAL .. 0 0 1
426 // ^ sync point ^ i 429 // ^ sync point ^ i
430 // or this:
431 // 0 0 1 .. NAL .. 0 0 0
432 // ^ sync point ^ i
427 while (i < buffer.byteLength) { 433 while (i < buffer.byteLength) {
428 switch (buffer[i]) { 434 switch (buffer[i]) {
429 case 0: 435 case 0:
430 i++; 436 // skip past non-sync sequences
437 if (buffer[i - 1] !== 0) {
438 i += 2;
439 break;
440 } else if (buffer[i - 2] !== 0) {
441 i++;
442 break;
443 }
444
445 // deliver the NAL unit
446 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
447
448 // drop trailing zeroes
449 do {
450 i++;
451 } while (buffer[i] !== 1);
452 syncPoint = i - 2;
453 i += 3;
431 break; 454 break;
432 case 1: 455 case 1:
433 // skip past non-sync sequences 456 // skip past non-sync sequences
@@ -463,8 +486,8 @@ NalByteStream = function() {
463 NalByteStream.prototype = new videojs.Hls.Stream(); 486 NalByteStream.prototype = new videojs.Hls.Stream();
464 487
465 /** 488 /**
466 * Accepts a ProgramStream and emits data events with parsed 489 * Accepts input from a ProgramStream and produces H.264 NAL unit data
467 * AAC Audio Frames of the individual packets. 490 * events.
468 */ 491 */
469 H264Stream = function() { 492 H264Stream = function() {
470 var 493 var
@@ -657,98 +680,30 @@ H264Stream = function() {
657 }; 680 };
658 H264Stream.prototype = new videojs.Hls.Stream(); 681 H264Stream.prototype = new videojs.Hls.Stream();
659 682
660 683 /**
661 Transmuxer = function() { 684 * @param track {object} track metadata configuration
685 */
686 VideoSegmentStream = function(track) {
662 var 687 var
663 self = this,
664 sequenceNumber = 0, 688 sequenceNumber = 0,
665 videoSamples = [], 689 nalUnits = [],
666 videoSamplesSize = 0, 690 nalUnitsLength = 0;
667 track, 691 VideoSegmentStream.prototype.init.call(this);
668 config,
669 pps,
670
671 packetStream, parseStream, programStream, aacStream, h264Stream,
672
673 flushVideo;
674
675 Transmuxer.prototype.init.call(this);
676
677 // set up the parsing pipeline
678 packetStream = new PacketStream();
679 parseStream = new ParseStream();
680 programStream = new ProgramStream();
681 aacStream = new AacStream();
682 h264Stream = new H264Stream();
683
684 packetStream.pipe(parseStream);
685 parseStream.pipe(programStream);
686 programStream.pipe(aacStream);
687 programStream.pipe(h264Stream);
688
689 // handle incoming data events
690 h264Stream.on('data', function(data) {
691 // if this chunk starts a new access unit, flush the data we've been buffering
692 if (data.nalUnitType === 'access_unit_delimiter_rbsp' &&
693 videoSamples.length) {
694 //flushVideo();
695 }
696 // record the track config
697 if (data.nalUnitType === 'seq_parameter_set_rbsp' &&
698 !config) {
699 config = data.config;
700
701 track.width = config.width;
702 track.height = config.height;
703 track.sps = [data.data];
704 track.profileIdc = config.profileIdc;
705 track.levelIdc = config.levelIdc;
706 track.profileCompatibility = config.profileCompatibility;
707
708 // generate an init segment once all the metadata is available
709 if (pps) {
710 self.trigger('data', {
711 data: videojs.mp4.initSegment([track])
712 });
713 }
714 }
715 if (data.nalUnitType === 'pic_parameter_set_rbsp' &&
716 !pps) {
717 pps = data.data;
718 track.pps = [data.data];
719
720 if (config) {
721 self.trigger('data', {
722 data: videojs.mp4.initSegment([track])
723 });
724 }
725 }
726 692
727 // buffer video until we encounter a new access unit (aka the next frame) 693 this.push = function(data) {
728 videoSamples.push(data); 694 // buffer video until end() is called
729 videoSamplesSize += data.data.byteLength; 695 nalUnits.push(data);
730 }); 696 nalUnitsLength += data.data.byteLength;
731 programStream.on('data', function(data) { 697 };
732 var i;
733 if (data.type === 'metadata') {
734 i = data.tracks.length;
735 while (i--) {
736 if (data.tracks[i].type === 'video') {
737 track = data.tracks[i];
738 break;
739 }
740 }
741 }
742 });
743 698
744 // helper functions 699 this.end = function() {
745 flushVideo = function() { 700 var startUnit, currentNal, moof, mdat, boxes, i, data, view, sample;
746 var startUnit, currentNal, moof, mdat, boxes, i, data, sample;
747 701
748 // concatenate the video data and construct the mdat 702 // concatenate the video data and construct the mdat
749 // first, we have to build the index from byte locations to 703 // first, we have to build the index from byte locations to
750 // samples (i.e. frames) in the video data 704 // samples (that is, frames) in the video data
751 data = new Uint8Array(videoSamplesSize); 705 data = new Uint8Array(nalUnitsLength + (4 * nalUnits.length));
706 view = new DataView(data.buffer);
752 track.samples = []; 707 track.samples = [];
753 sample = { 708 sample = {
754 size: 0, 709 size: 0,
@@ -761,8 +716,8 @@ Transmuxer = function() {
761 } 716 }
762 }; 717 };
763 i = 0; 718 i = 0;
764 while (videoSamples.length) { 719 while (nalUnits.length) {
765 currentNal = videoSamples[0]; 720 currentNal = nalUnits[0];
766 // flush the sample we've been building when a new sample is started 721 // flush the sample we've been building when a new sample is started
767 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') { 722 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
768 if (startUnit) { 723 if (startUnit) {
@@ -785,20 +740,23 @@ Transmuxer = function() {
785 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') { 740 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
786 // the current sample is a key frame 741 // the current sample is a key frame
787 sample.flags.dependsOn = 2; 742 sample.flags.dependsOn = 2;
788
789 } 743 }
744 sample.size += 4; // space for the NAL length
790 sample.size += currentNal.data.byteLength; 745 sample.size += currentNal.data.byteLength;
791 746
747 view.setUint32(i, currentNal.data.byteLength);
748 i += 4;
792 data.set(currentNal.data, i); 749 data.set(currentNal.data, i);
793 i += currentNal.data.byteLength; 750 i += currentNal.data.byteLength;
794 videoSamples.shift(); 751
752 nalUnits.shift();
795 } 753 }
796 // record the last sample 754 // record the last sample
797 if (track.samples.length) { 755 if (track.samples.length) {
798 sample.duration = track.samples[track.samples.length - 1].duration; 756 sample.duration = track.samples[track.samples.length - 1].duration;
799 } 757 }
800 track.samples.push(sample); 758 track.samples.push(sample);
801 videoSamplesSize = 0; 759 nalUnitsLength = 0;
802 mdat = mp4.mdat(data); 760 mdat = mp4.mdat(data);
803 761
804 moof = mp4.moof(sequenceNumber, [track]); 762 moof = mp4.moof(sequenceNumber, [track]);
@@ -813,10 +771,89 @@ Transmuxer = function() {
813 boxes.set(moof); 771 boxes.set(moof);
814 boxes.set(mdat, moof.byteLength); 772 boxes.set(mdat, moof.byteLength);
815 773
816 self.trigger('data', { 774 this.trigger('data', boxes);
817 data: boxes
818 });
819 }; 775 };
776 };
777 VideoSegmentStream.prototype = new videojs.Hls.Stream();
778
779 Transmuxer = function() {
780 var
781 self = this,
782 track,
783 config,
784 pps,
785
786 packetStream, parseStream, programStream, aacStream, h264Stream, videoSegmentStream;
787
788 Transmuxer.prototype.init.call(this);
789
790 // set up the parsing pipeline
791 packetStream = new PacketStream();
792 parseStream = new ParseStream();
793 programStream = new ProgramStream();
794 aacStream = new AacStream();
795 h264Stream = new H264Stream();
796
797 packetStream.pipe(parseStream);
798 parseStream.pipe(programStream);
799 programStream.pipe(aacStream);
800 programStream.pipe(h264Stream);
801
802 // handle incoming data events
803 h264Stream.on('data', function(data) {
804 // record the track config
805 if (data.nalUnitType === 'seq_parameter_set_rbsp' &&
806 !config) {
807 config = data.config;
808
809 track.width = config.width;
810 track.height = config.height;
811 track.sps = [data.data];
812 track.profileIdc = config.profileIdc;
813 track.levelIdc = config.levelIdc;
814 track.profileCompatibility = config.profileCompatibility;
815
816 // generate an init segment once all the metadata is available
817 if (pps) {
818 self.trigger('data', {
819 data: videojs.mp4.initSegment([track])
820 });
821 }
822 }
823 if (data.nalUnitType === 'pic_parameter_set_rbsp' &&
824 !pps) {
825 pps = data.data;
826 track.pps = [data.data];
827
828 if (config) {
829 self.trigger('data', {
830 data: videojs.mp4.initSegment([track])
831 });
832 }
833 }
834 });
835 // hook up the video segment stream once track metadata is delivered
836 programStream.on('data', function(data) {
837 var i, triggerData = function(segment) {
838 self.trigger('data', {
839 data: segment
840 });
841 };
842 if (data.type === 'metadata') {
843 i = data.tracks.length;
844 while (i--) {
845 if (data.tracks[i].type === 'video') {
846 track = data.tracks[i];
847 if (!videoSegmentStream) {
848 videoSegmentStream = new VideoSegmentStream(track);
849 h264Stream.pipe(videoSegmentStream);
850 videoSegmentStream.on('data', triggerData);
851 }
852 break;
853 }
854 }
855 }
856 });
820 857
821 // feed incoming data to the front of the parsing pipeline 858 // feed incoming data to the front of the parsing pipeline
822 this.push = function(data) { 859 this.push = function(data) {
@@ -826,9 +863,7 @@ Transmuxer = function() {
826 this.end = function() { 863 this.end = function() {
827 programStream.end(); 864 programStream.end();
828 h264Stream.end(); 865 h264Stream.end();
829 if (videoSamples.length) { 866 videoSegmentStream.end();
830 flushVideo();
831 }
832 }; 867 };
833 }; 868 };
834 Transmuxer.prototype = new videojs.Hls.Stream(); 869 Transmuxer.prototype = new videojs.Hls.Stream();
@@ -841,6 +876,7 @@ window.videojs.mp2t = {
841 PacketStream: PacketStream, 876 PacketStream: PacketStream,
842 ParseStream: ParseStream, 877 ParseStream: ParseStream,
843 ProgramStream: ProgramStream, 878 ProgramStream: ProgramStream,
879 VideoSegmentStream: VideoSegmentStream,
844 Transmuxer: Transmuxer, 880 Transmuxer: Transmuxer,
845 AacStream: AacStream, 881 AacStream: AacStream,
846 H264Stream: H264Stream 882 H264Stream: H264Stream
...
@@ -29,6 +29,8 @@ var
29 programStream, 29 programStream,
30 H264Stream = videojs.mp2t.H264Stream, 30 H264Stream = videojs.mp2t.H264Stream,
31 h264Stream, 31 h264Stream,
32 VideoSegmentStream = videojs.mp2t.VideoSegmentStream,
33 videoSegmentStream,
32 Transmuxer = videojs.mp2t.Transmuxer, 34 Transmuxer = videojs.mp2t.Transmuxer,
33 transmuxer, 35 transmuxer,
34 36
@@ -782,6 +784,103 @@ test('parses nal unit types', function() {
782 equal(data.nalUnitType, 'slice_layer_without_partitioning_rbsp_idr', 'identified a key frame'); 784 equal(data.nalUnitType, 'slice_layer_without_partitioning_rbsp_idr', 'identified a key frame');
783 }); 785 });
784 786
787 // MP4 expects H264 (aka AVC) data to be in storage format. Storage
788 // format is optimized for reliable, random-access media in contrast
789 // to the byte stream format that retransmits metadata regularly to
790 // allow decoders to quickly begin operation from wherever in the
791 // broadcast they begin receiving.
792 // Details on the byte stream format can be found in Annex B of
793 // Recommendation ITU-T H.264.
794 // The storage format is described in ISO/IEC 14496-15
795 test('strips byte stream framing during parsing', function() {
796 var data = [];
797 h264Stream.on('data', function(event) {
798 data.push(event);
799 });
800
801 h264Stream.push({
802 type: 'video',
803 data: new Uint8Array([
804 // -- NAL unit start
805 // zero_byte
806 0x00,
807 // start_code_prefix_one_3bytes
808 0x00, 0x00, 0x01,
809 // nal_unit_type (picture parameter set)
810 0x08,
811 // fake data
812 0x01, 0x02, 0x03, 0x04,
813 0x05, 0x06, 0x07,
814 // trailing_zero_8bits * 5
815 0x00, 0x00, 0x00, 0x00,
816 0x00,
817
818 // -- NAL unit start
819 // zero_byte
820 0x00,
821 // start_code_prefix_one_3bytes
822 0x00, 0x00, 0x01,
823 // nal_unit_type (access_unit_delimiter_rbsp)
824 0x09,
825 // fake data
826 0x06, 0x05, 0x04, 0x03,
827 0x02, 0x01, 0x00
828 ])
829 });
830 h264Stream.end();
831
832 equal(data.length, 2, 'parsed two NAL units');
833 deepEqual(new Uint8Array([
834 0x08,
835 0x01, 0x02, 0x03, 0x04,
836 0x05, 0x06, 0x07
837 ]), data[0].data, 'parsed the first NAL unit');
838 deepEqual(new Uint8Array([
839 0x09,
840 0x06, 0x05, 0x04, 0x03,
841 0x02, 0x01, 0x00
842 ]), data[1].data, 'parsed the second NAL unit');
843 });
844
845 module('VideoSegmentStream', {
846 setup: function() {
847 videoSegmentStream = new VideoSegmentStream({});
848 }
849 });
850
851 // see ISO/IEC 14496-15, Section 5 "AVC elementary streams and sample definitions"
852 test('concatenates NAL units into AVC elementary streams', function() {
853 var segment, boxes;
854 videoSegmentStream.on('data', function(data) {
855 segment = data;
856 });
857 videoSegmentStream.push({
858 data: new Uint8Array([
859 0x08,
860 0x01, 0x02, 0x03
861 ])
862 });
863 videoSegmentStream.push({
864 data: new Uint8Array([
865 0x08,
866 0x04, 0x03, 0x02, 0x01, 0x00
867 ])
868 });
869 videoSegmentStream.end();
870
871 ok(segment, 'generated a data event');
872 boxes = videojs.inspectMp4(segment);
873 equal(boxes[1].byteLength,
874 (4 + 4) + (4 + 6),
875 'wrote the correct number of bytes');
876 deepEqual(new Uint8Array(segment.subarray(boxes[0].size + 8)), new Uint8Array([
877 0, 0, 0, 4,
878 0x08, 0x01, 0x02, 0x03,
879 0, 0, 0, 6,
880 0x08, 0x04, 0x03, 0x02, 0x01, 0x00
881 ]), 'wrote an AVC stream into the mdat');
882 });
883
785 module('Transmuxer', { 884 module('Transmuxer', {
786 setup: function() { 885 setup: function() {
787 transmuxer = new Transmuxer(); 886 transmuxer = new Transmuxer();
@@ -832,12 +931,17 @@ test('buffers video samples until ended', function() {
832 equal(boxes.length, 2, 'generated two boxes'); 931 equal(boxes.length, 2, 'generated two boxes');
833 equal(boxes[0].type, 'moof', 'the first box is a moof'); 932 equal(boxes[0].type, 'moof', 'the first box is a moof');
834 equal(boxes[1].type, 'mdat', 'the second box is a mdat'); 933 equal(boxes[1].type, 'mdat', 'the second box is a mdat');
835 deepEqual(new Uint8Array(samples[0].data.subarray(samples[0].data.length - 10)), 934 deepEqual(new Uint8Array(samples[0].data.subarray(boxes[0].size + 8)),
836 new Uint8Array([ 935 new Uint8Array([
936 0, 0, 0, 2,
837 0x09, 0x01, 937 0x09, 0x01,
938 0, 0, 0, 2,
838 0x00, 0x02, 939 0x00, 0x02,
940 0, 0, 0, 2,
839 0x09, 0x03, 941 0x09, 0x03,
942 0, 0, 0, 2,
840 0x00, 0x04, 943 0x00, 0x04,
944 0, 0, 0, 2,
841 0x00, 0x05]), 945 0x00, 0x05]),
842 'concatenated NALs into an mdat'); 946 'concatenated NALs into an mdat');
843 }); 947 });
@@ -873,8 +977,8 @@ validateTrack = function(track, metadata) {
873 equal(mdia.boxes[2].type, 'minf', 'wrote the media info'); 977 equal(mdia.boxes[2].type, 'minf', 'wrote the media info');
874 }; 978 };
875 979
876 validateTrackFragment = function(track, metadata) { 980 validateTrackFragment = function(track, segment, metadata) {
877 var tfhd, trun, sdtp, i, sample; 981 var tfhd, trun, sdtp, i, j, sample, nalUnitType;
878 equal(track.type, 'traf', 'wrote a track fragment'); 982 equal(track.type, 'traf', 'wrote a track fragment');
879 equal(track.boxes.length, 4, 'wrote four track fragment children'); 983 equal(track.boxes.length, 4, 'wrote four track fragment children');
880 tfhd = track.boxes[0]; 984 tfhd = track.boxes[0];
@@ -884,18 +988,15 @@ validateTrackFragment = function(track, metadata) {
884 equal(track.boxes[1].type, 988 equal(track.boxes[1].type,
885 'tfdt', 989 'tfdt',
886 'wrote a track fragment decode time box'); 990 'wrote a track fragment decode time box');
887 ok(track.boxes[1].baseMediaDecodeTime >= 0, 'base decode time is valid'); 991 ok(track.boxes[1].baseMediaDecodeTime >= 0, 'base decode time is non-negative');
888 992
889 trun = track.boxes[2]; 993 trun = track.boxes[2];
890 ok(trun.dataOffset >= 0, 'set data offset'); 994 ok(trun.dataOffset >= 0, 'set data offset');
891 equal(trun.dataOffset, 995 equal(trun.dataOffset,
892 trun.size + 996 metadata.mdatOffset + 8,
893 16 + // mfhd size 997 'trun data offset is the size of the moof');
894 8 + // moof header size
895 8, // mdat header size
896 'uses movie fragment relative addressing');
897 ok(trun.samples.length > 0, 'generated media samples'); 998 ok(trun.samples.length > 0, 'generated media samples');
898 for (i = 0; i < trun.samples.length; i++) { 999 for (i = 0, j = trun.dataOffset; i < trun.samples.length; i++) {
899 sample = trun.samples[i]; 1000 sample = trun.samples[i];
900 ok(sample.duration > 0, 'wrote a positive duration for sample ' + i); 1001 ok(sample.duration > 0, 'wrote a positive duration for sample ' + i);
901 ok(sample.size > 0, 'wrote a positive size for sample ' + i); 1002 ok(sample.size > 0, 'wrote a positive size for sample ' + i);
@@ -903,11 +1004,17 @@ validateTrackFragment = function(track, metadata) {
903 'wrote a positive composition time offset for sample ' + i); 1004 'wrote a positive composition time offset for sample ' + i);
904 ok(sample.flags, 'wrote sample flags'); 1005 ok(sample.flags, 'wrote sample flags');
905 equal(sample.flags.isLeading, 0, 'the leading nature is unknown'); 1006 equal(sample.flags.isLeading, 0, 'the leading nature is unknown');
1007
906 notEqual(sample.flags.dependsOn, 0, 'sample dependency is not unknown'); 1008 notEqual(sample.flags.dependsOn, 0, 'sample dependency is not unknown');
907 notEqual(sample.flags.dependsOn, 4, 'sample dependency is valid'); 1009 notEqual(sample.flags.dependsOn, 4, 'sample dependency is valid');
1010 nalUnitType = segment[j + 4] & 0x1F;
1011 equal(nalUnitType, 9, 'samples begin with an access_unit_delimiter_rbsp');
1012
908 equal(sample.flags.isDependedOn, 0, 'dependency of other samples is unknown'); 1013 equal(sample.flags.isDependedOn, 0, 'dependency of other samples is unknown');
909 equal(sample.flags.hasRedundancy, 0, 'sample redundancy is unknown'); 1014 equal(sample.flags.hasRedundancy, 0, 'sample redundancy is unknown');
910 equal(sample.flags.degradationPriority, 0, 'sample degradation priority is zero'); 1015 equal(sample.flags.degradationPriority, 0, 'sample degradation priority is zero');
1016
1017 j += sample.size; // advance to the next sample in the mdat
911 } 1018 }
912 1019
913 sdtp = track.boxes[3]; 1020 sdtp = track.boxes[3];
@@ -970,12 +1077,13 @@ test('parses an example mp2t file and generates media segments', function() {
970 ok(mfhd.sequenceNumber < sequenceNumber, 'sequence numbers are increasing'); 1077 ok(mfhd.sequenceNumber < sequenceNumber, 'sequence numbers are increasing');
971 sequenceNumber = mfhd.sequenceNumber; 1078 sequenceNumber = mfhd.sequenceNumber;
972 1079
973 validateTrackFragment(boxes[i].boxes[1], { 1080 equal(boxes[i + 1].type, 'mdat', 'second box is an mdat');
1081 validateTrackFragment(boxes[i].boxes[1], segments[1].data, {
974 trackId: 256, 1082 trackId: 256,
975 width: 388, 1083 width: 388,
976 height: 300 1084 height: 300,
1085 mdatOffset: boxes[0].size
977 }); 1086 });
978 equal(boxes[i + 1].type, 'mdat', 'second box is an mdat');
979 } 1087 }
980 }); 1088 });
981 1089
...