Get the example page using html-hls by any means necessary
Hack in the changes necessary to use the MP2T->MP4 transmuxer and native Media Source Extensions. If you can find a video that is compatible with the current state of the transmuxer, this commit successfully plays back multiple video segments, though seeking seems a bit touchy.
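At a high level the commit swaps the Flash/FLV path for native Media Source Extensions fed by the transmuxer. A minimal sketch of that flow, outside the plugin, might look like the code below: the videojs.mp2t.Transmuxer calls (push, end, and the 'data' event with type-tagged segments) and the codec strings mirror what the diff does, while the video element lookup, the `bytes` placeholder, and the append queue are illustrative glue rather than the plugin's actual code.

// Rough sketch of the MSE + transmuxer flow this commit hacks in (assumes
// videojs-contrib-hls is loaded so videojs.mp2t.Transmuxer exists).
var video = document.querySelector('video');
var mediaSource = new MediaSource();
var bytes = new Uint8Array(0); // stand-in for an MPEG-2 TS segment fetched elsewhere

mediaSource.addEventListener('sourceopen', function() {
  // one SourceBuffer per elementary stream, using the codec strings from the diff
  var buffers = {
    audio: mediaSource.addSourceBuffer('audio/mp4;codecs=mp4a.40.2'),
    video: mediaSource.addSourceBuffer('video/mp4;codecs=avc1.4d400d')
  };
  var pending = [];

  // appendBuffer is asynchronous, so only append when the target buffer is idle
  // and let 'updateend' drive the next append
  var appendNext = function() {
    if (!pending.length || buffers[pending[0].type].updating) {
      return;
    }
    var segment = pending.shift();
    buffers[segment.type].appendBuffer(segment.data);
  };
  buffers.audio.addEventListener('updateend', appendNext);
  buffers.video.addEventListener('updateend', appendNext);

  // transmux TS into fMP4; segments arrive tagged 'audio' or 'video'
  var transmuxer = new videojs.mp2t.Transmuxer();
  transmuxer.on('data', function(segment) {
    pending.push(segment);
    appendNext();
  });
  transmuxer.push(bytes);
  transmuxer.end();
});

video.src = URL.createObjectURL(mediaSource);

The two SourceBuffers (one per elementary stream) and the updating/updateend handshake are the same constraints the drainBuffer changes in the diff work around.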
Showing 1 changed file with 140 additions and 82 deletions
@@ -34,7 +34,7 @@ videojs.Hls = videojs.extends(Component, {
   // backwards-compatibility
   if (tech.options_ && tech.options_.playerId) {
     _player = videojs(tech.options_.playerId);
-    if (!_player.tech.hls) {
+    if (!_player.hls) {
       Object.defineProperty(_player, 'hls', {
         get: function() {
           videojs.log.warn('player.hls is deprecated. Use player.tech.hls instead.');
@@ -74,7 +74,8 @@ videojs.Hls = videojs.extends(Component, {
 });

 // add HLS as a source handler
-videojs.getComponent('Flash').registerSourceHandler({
+//videojs.getComponent('Flash').registerSourceHandler({
+videojs.getComponent('Html5').registerSourceHandler({
   canHandleSource: function(srcObj) {
     var mpegurlRE = /^application\/(?:x-|vnd\.apple\.)mpegurl/i;
     return mpegurlRE.test(srcObj.type);
@@ -100,10 +101,12 @@ videojs.Hls.prototype.src = function(src) {
     return;
   }

-  mediaSource = new videojs.MediaSource();
+  // mediaSource = new videojs.MediaSource();
+  mediaSource = new MediaSource();
   source = {
-    src: videojs.URL.createObjectURL(mediaSource),
-    type: "video/flv"
+    src: URL.createObjectURL(mediaSource),
+    //src: videojs.URL.createObjectURL(mediaSource),
+    type: 'audio/mp4;codecs=mp4a.40.2' // "video/flv"
   };
   this.mediaSource = mediaSource;

@@ -228,7 +231,8 @@ videojs.Hls.prototype.src = function(src) {
   if (!this.tech_.el()) {
     return;
   }
-  this.tech_.el().vjs_src(source.src);
+  this.tech_.el().src = source.src;
+  //this.tech_.el().vjs_src(source.src);
 };

 /* Returns the media index for the live point in the current playlist, and updates
@@ -252,10 +256,8 @@ videojs.Hls.getMediaIndexForLive_ = function(selectedPlaylist) {
 };

 videojs.Hls.prototype.handleSourceOpen = function() {
-  // construct the video data buffer and set the appropriate MIME type
-  var sourceBuffer = this.mediaSource.addSourceBuffer('video/flv; codecs="vp6,aac"');
-
-  this.sourceBuffer = sourceBuffer;
+  this.audioSourceBuffer = this.mediaSource.addSourceBuffer('audio/mp4;codecs=mp4a.40.2');
+  this.videoSourceBuffer = this.mediaSource.addSourceBuffer('video/mp4;codecs=avc1.4d400d');

   // if autoplay is enabled, begin playback. This is duplicative of
   // code in video.js but is required because play() must be invoked
@@ -267,7 +269,7 @@ videojs.Hls.prototype.handleSourceOpen = function() {
     this.play();
   }

-  sourceBuffer.appendBuffer(this.segmentParser_.getFlvHeader());
+  //sourceBuffer.appendBuffer(this.segmentParser_.getFlvHeader());
 };

 // register event listeners to transform in-band metadata events into
@@ -369,7 +371,8 @@ videojs.Hls.prototype.setupFirstPlay = function() {
   // check that everything is ready to begin buffering
   if (this.duration() === Infinity &&
       this.tech_.played().length === 0 &&
-      this.sourceBuffer &&
+      this.audioSourceBuffer &&
+      this.videoSourceBuffer &&
       media) {

     // seek to the latest media position for live videos
@@ -420,8 +423,9 @@ videojs.Hls.prototype.setCurrentTime = function(currentTime) {
   this.mediaIndex = this.playlists.getMediaIndexForTime_(currentTime);

   // abort any segments still being decoded
-  if (this.sourceBuffer) {
-    this.sourceBuffer.abort();
+  if (this.audioSourceBuffer) {
+    this.audioSourceBuffer.abort();
+    this.videoSourceBuffer.abort();
   }

   // cancel outstanding requests and buffer appends
@@ -476,10 +480,11 @@ videojs.Hls.prototype.seekable = function() {
  * Update the player duration
  */
 videojs.Hls.prototype.updateDuration = function(playlist) {
-  var oldDuration = this.mediaSource.duration(),
+  var oldDuration = this.mediaSource.duration,
+      // oldDuration = this.mediaSource.duration(),
       newDuration = videojs.Hls.Playlist.duration(playlist),
       setDuration = function() {
-        this.mediaSource.duration(newDuration);
+        this.mediaSource.duration = newDuration;
         this.tech_.trigger('durationchange');
         this.mediaSource.removeEventListener('sourceopen', setDuration);
       }.bind(this);
@@ -487,7 +492,8 @@ videojs.Hls.prototype.updateDuration = function(playlist) {
   // if the duration has changed, invalidate the cached value
   if (oldDuration !== newDuration) {
     if (this.mediaSource.readyState === 'open') {
-      this.mediaSource.duration(newDuration);
+      // this.mediaSource.duration(newDuration);
+      this.mediaSource.duration = newDuration;
       this.tech_.trigger('durationchange');
     } else {
       this.mediaSource.addEventListener('sourceopen', setDuration);
@@ -504,8 +510,9 @@ videojs.Hls.prototype.resetSrc_ = function() {
   this.cancelSegmentXhr();
   this.cancelKeyXhr();

-  if (this.sourceBuffer) {
-    this.sourceBuffer.abort();
+  if (this.audioSourceBuffer) {
+    this.audioSourceBuffer.abort();
+    this.videoSourceBuffer.abort();
   }
 };

@@ -724,7 +731,7 @@ videojs.Hls.prototype.fillBuffer = function(offset) {
     return;
   }

-  if (buffered) {
+  if (buffered && buffered.length) {
     // assuming a single, contiguous buffer region
     bufferedTime = tech.buffered().end(0) - tech.currentTime();
   }
@@ -843,6 +850,8 @@ videojs.Hls.prototype.loadSegment = function(segmentUri, offset) {
   });
 };

+var initialized = false;
+
 videojs.Hls.prototype.drainBuffer = function(event) {
   var
     i = 0,
@@ -861,16 +870,23 @@ videojs.Hls.prototype.drainBuffer = function(event) {

   // if the buffer is empty or the source buffer hasn't been created
   // yet, do nothing
-  if (!segmentBuffer.length || !this.sourceBuffer) {
+  if (!segmentBuffer.length || !this.audioSourceBuffer) {
     return;
   }

   // we can't append more data if the source buffer is busy processing
   // what we've already sent
-  if (this.sourceBuffer.updating) {
+  if (this.audioSourceBuffer.updating || this.videoSourceBuffer.updating) {
     return;
   }

+  // transition the sourcebuffer to the ended state if we've hit the end of
+  // the playlist
+  if (this.duration() !== Infinity &&
+      this.mediaIndex === this.playlists.media().segments.length) {
+    this.mediaSource.endOfStream();
+  }
+
   segmentInfo = segmentBuffer[0];

   mediaIndex = segmentInfo.mediaIndex;
@@ -915,83 +931,125 @@ videojs.Hls.prototype.drainBuffer = function(event) {

   event = event || {};

-  // transmux the segment data from MP2T to FLV
-  this.segmentParser_.parseSegmentBinaryData(bytes);
-  this.segmentParser_.flushTags();
-
-  tags = [];
-
-  if (this.segmentParser_.tagsAvailable()) {
-    // record PTS information for the segment so we can calculate
-    // accurate durations and seek reliably
-    if (this.segmentParser_.stats.h264Tags()) {
-      segment.minVideoPts = this.segmentParser_.stats.minVideoPts();
-      segment.maxVideoPts = this.segmentParser_.stats.maxVideoPts();
-    }
-    if (this.segmentParser_.stats.aacTags()) {
-      segment.minAudioPts = this.segmentParser_.stats.minAudioPts();
-      segment.maxAudioPts = this.segmentParser_.stats.maxAudioPts();
-    }
-  }
-
-  while (this.segmentParser_.tagsAvailable()) {
-    tags.push(this.segmentParser_.getNextTag());
-  }
+  var transmuxer = new videojs.mp2t.Transmuxer();
+  var segments = [];
+  transmuxer.on('data', function(segment) {
+    segments.push(segment);
+  });
+  transmuxer.push(bytes);
+  transmuxer.end();
+
+  // // transmux the segment data from MP2T to FLV
+  // this.segmentParser_.parseSegmentBinaryData(bytes);
+  // this.segmentParser_.flushTags();
+
+  // tags = [];
+
+  // if (this.segmentParser_.tagsAvailable()) {
+  //   // record PTS information for the segment so we can calculate
+  //   // accurate durations and seek reliably
+  //   if (this.segmentParser_.stats.h264Tags()) {
+  //     segment.minVideoPts = this.segmentParser_.stats.minVideoPts();
+  //     segment.maxVideoPts = this.segmentParser_.stats.maxVideoPts();
+  //   }
+  //   if (this.segmentParser_.stats.aacTags()) {
+  //     segment.minAudioPts = this.segmentParser_.stats.minAudioPts();
+  //     segment.maxAudioPts = this.segmentParser_.stats.maxAudioPts();
+  //   }
+  // }
+
+  // while (this.segmentParser_.tagsAvailable()) {
+  //   tags.push(this.segmentParser_.getNextTag());
+  // }

   this.addCuesForMetadata_(segmentInfo);
   this.updateDuration(this.playlists.media());

-  // if we're refilling the buffer after a seek, scan through the muxed
-  // FLV tags until we find the one that is closest to the desired
-  // playback time
-  if (typeof offset === 'number') {
-    if (tags.length) {
-      // determine the offset within this segment we're seeking to
-      segmentOffset = this.playlists.expiredPostDiscontinuity_ + this.playlists.expiredPreDiscontinuity_;
-      segmentOffset += videojs.Hls.Playlist.duration(playlist,
-                                                     playlist.mediaSequence,
-                                                     playlist.mediaSequence + mediaIndex);
-      segmentOffset = offset - (segmentOffset * 1000);
-      ptsTime = segmentOffset + tags[0].pts;
+  // // if we're refilling the buffer after a seek, scan through the muxed
+  // // FLV tags until we find the one that is closest to the desired
+  // // playback time
+  // if (typeof offset === 'number') {
+  //   if (tags.length) {
+  //     // determine the offset within this segment we're seeking to
+  //     segmentOffset = this.playlists.expiredPostDiscontinuity_ + this.playlists.expiredPreDiscontinuity_;
+  //     segmentOffset += videojs.Hls.Playlist.duration(playlist,
+  //                                                    playlist.mediaSequence,
+  //                                                    playlist.mediaSequence + mediaIndex);
+  //     segmentOffset = offset - (segmentOffset * 1000);
+  //     ptsTime = segmentOffset + tags[0].pts;
+
+  //     while (tags[i + 1] && tags[i].pts < ptsTime) {
+  //       i++;
+  //     }
+
+  //     // tell the SWF the media position of the first tag we'll be delivering
+  //     this.tech_.el().vjs_setProperty('currentTime', ((tags[i].pts - ptsTime + offset) * 0.001));
+
+  //     tags = tags.slice(i);
+  //   }
+  // }
+
+  // // when we're crossing a discontinuity, inject metadata to indicate
+  // // that the decoder should be reset appropriately
+  // if (segment.discontinuity && tags.length) {
+  //   this.tech_.el().vjs_discontinuity();
+  // }
+
+  // (function() {
+  //   var segmentByteLength = 0, j, segment;
+  //   for (i = 0; i < tags.length; i++) {
+  //     segmentByteLength += tags[i].bytes.byteLength;
+  //   }
+  //   segment = new Uint8Array(segmentByteLength);
+  //   for (i = 0, j = 0; i < tags.length; i++) {
+  //     segment.set(tags[i].bytes, j);
+  //     j += tags[i].bytes.byteLength;
+  //   }
+  //   this.sourceBuffer.appendBuffer(segment);
+  // }).call(this);

-      while (tags[i + 1] && tags[i].pts < ptsTime) {
-        i++;
-      }
-
-      // tell the SWF the media position of the first tag we'll be delivering
-      this.tech_.el().vjs_setProperty('currentTime', ((tags[i].pts - ptsTime + offset) * 0.001));
-
-      tags = tags.slice(i);
-    }
-  }
-
-  // when we're crossing a discontinuity, inject metadata to indicate
-  // that the decoder should be reset appropriately
-  if (segment.discontinuity && tags.length) {
-    this.tech_.el().vjs_discontinuity();
-  }
-
-  (function() {
-    var segmentByteLength = 0, j, segment;
-    for (i = 0; i < tags.length; i++) {
-      segmentByteLength += tags[i].bytes.byteLength;
-    }
-    segment = new Uint8Array(segmentByteLength);
-    for (i = 0, j = 0; i < tags.length; i++) {
-      segment.set(tags[i].bytes, j);
-      j += tags[i].bytes.byteLength;
-    }
-    this.sourceBuffer.appendBuffer(segment);
+  (function() {
+    var audioByteLength = 0, videoByteLength = 0, j, audioSegment, videoSegment;
+    if (initialized) {
+      segments = segments.slice(2);
+    }
+    for (i = 0; i < segments.length; i++) {
+      if (segments[i].type === 'audio') {
+        audioByteLength += segments[i].data.byteLength;
+      } else {
+        videoByteLength += segments[i].data.byteLength;
+      }
+    }
+    audioSegment = new Uint8Array(audioByteLength);
+    for (i = 0, j = 0; i < segments.length; i++) {
+      if (segments[i].type === 'audio') {
+        audioSegment.set(segments[i].data, j);
+        j += segments[i].data.byteLength;
+      }
+    }
+    if (this.audioSourceBuffer.buffered.length) {
+      this.audioSourceBuffer.timestampOffset = this.audioSourceBuffer.buffered.end(0);
+    }
+    this.audioSourceBuffer.appendBuffer(audioSegment);
+
+    videoSegment = new Uint8Array(videoByteLength);
+    for (i = 0, j = 0; i < segments.length; i++) {
+      if (segments[i].type === 'video') {
+        videoSegment.set(segments[i].data, j);
+        j += segments[i].data.byteLength;
+      }
+    }
+    if (this.videoSourceBuffer.buffered.length) {
+      this.videoSourceBuffer.timestampOffset = this.videoSourceBuffer.buffered.end(0);
+    }
+    this.videoSourceBuffer.appendBuffer(videoSegment);
+
+    initialized = true;
   }).call(this);

   // we're done processing this segment
   segmentBuffer.shift();

-  // transition the sourcebuffer to the ended state if we've hit the end of
-  // the playlist
-  if (this.duration() !== Infinity && mediaIndex + 1 === playlist.segments.length) {
-    this.mediaSource.endOfStream();
-  }
 };

 /**