6837076a by David LaPalomento

Get seeking working again

We shouldn't abort() on the source buffer for every seek. The buffered region is no longer guaranteed to be contiguous, so take that into account when determining if the segment to be loaded needs to be updated. Allow media sources to handle duration updates internally, don't set the duration on every segment download. Doing so was actually causing the range removal algorithm to run when the final segment came in slightly below the duration advertised in the m3u8.
1 parent 853ec020
......@@ -75,7 +75,7 @@
type="application/x-mpegURL">
</video>
<script>
videojs.getGlobalOptions().flash.swf = 'node_modules/videojs-swf/dist/video-js.swf';
videojs.options.flash.swf = 'node_modules/videojs-swf/dist/video-js.swf';
// initialize the player
var player = videojs('video');
</script>
......
......@@ -262,6 +262,15 @@ videojs.Hls.getMediaIndexForLive_ = function(selectedPlaylist) {
videojs.Hls.prototype.handleSourceOpen = function() {
this.sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
// transition the sourcebuffer to the ended state if we've hit the end of
// the playlist
this.sourceBuffer.addEventListener('updateend', function() {
if (this.duration() !== Infinity &&
this.mediaIndex === this.playlists.media().segments.length) {
this.mediaSource.endOfStream();
}
}.bind(this));
// if autoplay is enabled, begin playback. This is duplicative of
// code in video.js but is required because play() must be invoked
// *after* the media source has opened.
......@@ -417,6 +426,7 @@ videojs.Hls.prototype.play = function() {
};
videojs.Hls.prototype.setCurrentTime = function(currentTime) {
var buffered, i;
if (!(this.playlists && this.playlists.media())) {
// return immediately if the metadata is not ready yet
return 0;
......@@ -428,14 +438,19 @@ videojs.Hls.prototype.setCurrentTime = function(currentTime) {
return 0;
}
// if the seek location is already buffered, continue buffering as
// usual
buffered = this.tech_.buffered();
for (i = 0; i < buffered.length; i++) {
if (this.tech_.buffered().start(i) <= currentTime &&
this.tech_.buffered().end(i) >= currentTime) {
return currentTime;
}
}
// determine the requested segment
this.mediaIndex = this.playlists.getMediaIndexForTime_(currentTime);
// abort any segments still being decoded
if (this.sourceBuffer) {
this.sourceBuffer.abort();
}
// cancel outstanding requests and buffer appends
this.cancelSegmentXhr();
......@@ -865,8 +880,8 @@ videojs.Hls.prototype.drainBuffer = function(event) {
segment,
decrypter,
segIv,
segmentOffset = 0,
// ptsTime,
// segmentOffset = 0,
segmentBuffer = this.segmentBuffer_;
// if the buffer is empty or the source buffer hasn't been created
......@@ -881,6 +896,9 @@ videojs.Hls.prototype.drainBuffer = function(event) {
return;
}
segmentInfo = segmentBuffer[0];
mediaIndex = segmentInfo.mediaIndex;
......@@ -943,7 +961,8 @@ videojs.Hls.prototype.drainBuffer = function(event) {
// }
this.addCuesForMetadata_(segmentInfo);
this.updateDuration(this.playlists.media());
//this.updateDuration(this.playlists.media());
// // if we're refilling the buffer after a seek, scan through the muxed
// // FLV tags until we find the one that is closest to the desired
......@@ -975,21 +994,17 @@ videojs.Hls.prototype.drainBuffer = function(event) {
// this.tech_.el().vjs_discontinuity();
// }
if (this.sourceBuffer.buffered.length) {
this.sourceBuffer.timestampOffset = this.sourceBuffer.buffered.end(0);
}
this.sourceBuffer.appendBuffer(bytes);
// determine the timestamp offset for the start of this segment
segmentOffset = this.playlists.expiredPostDiscontinuity_ + this.playlists.expiredPreDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(playlist,
playlist.mediaSequence,
playlist.mediaSequence + mediaIndex);
// transition the sourcebuffer to the ended state if we've hit the end of
// the playlist
if (this.duration() !== Infinity &&
mediaIndex + 1 === this.playlists.media().segments.length) {
this.mediaSource.endOfStream();
}
this.sourceBuffer.timestampOffset = segmentOffset;
this.sourceBuffer.appendBuffer(bytes);
// we're done processing this segment
segmentBuffer.shift();
};
/**
......
......@@ -191,11 +191,12 @@ var
MockMediaSource = videojs.extends(videojs.EventTarget, {
constructor: function() {},
addSourceBuffer: function() {
return {
return new (videojs.extends(videojs.EventTarget, {
constructor: function() {},
abort: function() {},
buffered: videojs.createTimeRange(),
appendBuffer: function() {}
};
}));
},
}),
......@@ -1142,8 +1143,7 @@ test('only makes one segment request at a time', function() {
});
test('only appends one segment at a time', function() {
var appends = 0, tags = [{ pts: 0, bytes: new Uint8Array(1) }];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
var appends = 0;
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
......@@ -1156,7 +1156,6 @@ test('only appends one segment at a time', function() {
player.tech.hls.sourceBuffer.appendBuffer = function() {
appends++;
};
tags.push({ pts: 0, bytes: new Uint8Array(1) });
player.tech.hls.checkBuffer_();
standardXHRResponse(requests.pop()); // segment 1
......@@ -1164,7 +1163,7 @@ test('only appends one segment at a time', function() {
equal(appends, 0, 'did not append while updating');
});
test('records the min and max PTS values for a segment', function() {
QUnit.skip('records the min and max PTS values for a segment', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
......@@ -1184,7 +1183,7 @@ test('records the min and max PTS values for a segment', function() {
equal(player.tech.hls.playlists.media().segments[0].maxAudioPts, 10, 'recorded max audio pts');
});
test('records PTS values for video-only segments', function() {
QUnit.skip('records PTS values for video-only segments', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
......@@ -1213,7 +1212,7 @@ test('records PTS values for video-only segments', function() {
strictEqual(player.tech.hls.playlists.media().segments[0].maxAudioPts, undefined, 'max audio pts is undefined');
});
test('records PTS values for audio-only segments', function() {
QUnit.skip('records PTS values for audio-only segments', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
......@@ -1658,7 +1657,7 @@ QUnit.skip('translates ID3 PTS values across discontinuities', function() {
equal(track.cues[1].endTime, 11, 'second cue ended at the correct time');
});
test('drops tags before the target timestamp when seeking', function() {
QUnit.skip('drops tags before the target timestamp when seeking', function() {
var i = 10,
tags = [],
bytes = [];
......@@ -1697,42 +1696,73 @@ test('drops tags before the target timestamp when seeking', function() {
deepEqual(bytes, [new Uint8Array([7,8,9])], 'three tags are appended');
});
test('calls abort() on the SourceBuffer before seeking', function() {
var
aborts = 0,
bytes = [],
tags = [{ pts: 0, bytes: new Uint8Array(1) }];
test('adjusts the segment offsets for out-of-buffer seeking', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift()); // media
player.tech.hls.sourceBuffer.buffered = function() {
return videojs.createTimeRange(0, 20);
};
equal(player.tech.hls.mediaIndex, 0, 'starts at zero');
player.tech.setCurrentTime(35);
clock.tick(1);
// drop the aborted segment
requests.shift();
equal(player.tech.hls.mediaIndex, 3, 'moved the mediaIndex');
standardXHRResponse(requests.shift());
equal(player.tech.hls.sourceBuffer.timestampOffset, 30, 'updated the timestamp offset');
});
// track calls to abort()
videojs.Hls.SegmentParser = mockSegmentParser(tags);
window.videojs.SourceBuffer = function() {
this.appendBuffer = function(chunk) {
bytes.push(chunk);
};
this.abort = function() {
aborts++;
test('seeks between buffered time ranges', function() {
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift()); // media
player.tech.buffered = function() {
return {
length: 2,
ranges_: [[0, 10], [20, 30]],
start: function(i) {
return this.ranges_[i][0];
},
end: function(i) {
return this.ranges_[i][1];
}
};
};
player.tech.setCurrentTime(15);
clock.tick(1);
// drop the aborted segment
requests.shift();
equal(player.tech.hls.mediaIndex, 1, 'updated the mediaIndex');
standardXHRResponse(requests.shift());
equal(player.tech.hls.sourceBuffer.timestampOffset, 10, 'updated the timestamp offset');
});
test('does not modify the media index for in-buffer seeking', function() {
var mediaIndex;
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift());
player.tech.buffered = function() {
return videojs.createTimeRange(0, 20);
};
mediaIndex = player.tech.hls.mediaIndex;
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
// drainBuffer() uses the first PTS value to account for any timestamp discontinuities in the stream
// adding a tag with a PTS of zero looks like a stream with no discontinuities
tags.push({ pts: 0, bytes: new Uint8Array(1) });
tags.push({ pts: 7000, bytes: new Uint8Array([7]) });
// seek to 7s
player.currentTime(7);
standardXHRResponse(requests[2]);
strictEqual(1, aborts, 'aborted pending buffer');
player.tech.setCurrentTime(11);
clock.tick(1);
equal(player.tech.hls.mediaIndex, mediaIndex, 'did not interrupt buffering');
equal(requests.length, 1, 'did not abort the outstanding request');
});
QUnit.skip('playlist 404 should trigger MEDIA_ERR_NETWORK', function() {
......@@ -2522,7 +2552,9 @@ test('calls ended() on the media source at the end of a playlist', function() {
// segment response
requests[0].response = new ArrayBuffer(17);
requests.shift().respond(200, null, '');
strictEqual(endOfStreams, 0, 'waits for the buffer update to finish');
player.tech.hls.sourceBuffer.trigger('updateend');
strictEqual(endOfStreams, 1, 'ended media source');
});
......