3e508892 by David LaPalomento

recalculate currentTime whenever seeking

The more video is buffered, the more accurate our calculation of currentTime becomes. Make sure we don't artificially anchor our currentTime calculation and then get out of sync once more video has been downloaded.
1 parent f51b1039
......@@ -431,11 +431,12 @@
for (i = 0; i < this.media_.segments.length; i++) {
time -= Playlist.duration(this.media_,
this.media_.mediaSequence + i,
this.media_.mediaSequence + i + 1);
this.media_.mediaSequence + i + 1,
true);
// HLS version 3 and lower round segment durations to the
// nearest decimal integer. When the correct media index is
// ambiguous, prefer the lower one.
// ambiguous, prefer the higher one.
if (time <= 0) {
return i;
}
......
......@@ -17,10 +17,13 @@
* boundary for the playlist. Defaults to 0.
* @param endSequence {number} (optional) an exclusive upper boundary
* for the playlist. Defaults to playlist length.
* @param strict {boolean} (optional) if true, the interval between
* the final segment and the subsequent segment will not be included
* in the result
* @return {number} the duration between the start index and end
* index.
*/
segmentsDuration = function(playlist, startSequence, endSequence) {
segmentsDuration = function(playlist, startSequence, endSequence, strict) {
var targetDuration, i, j, segment, endSegment, expiredSegmentCount, result = 0;
startSequence = startSequence || 0;
......@@ -54,11 +57,26 @@
}
}
endSegment = playlist.segments[j - playlist.mediaSequence];
result += (Math.max(endSegment.maxVideoPts, endSegment.maxAudioPts) -
Math.min(segment.minVideoPts, segment.minAudioPts)) * 0.001;
i = j;
}
// attribute the gap between the latest PTS value in the end segment
// and the earliest PTS in the next one to the result
segment = playlist.segments[endSequence - 1];
endSegment = playlist.segments[endSequence];
if (!strict &&
endSegment &&
!endSegment.discontinuity &&
endSegment.minVideoPts &&
segment &&
segment.maxVideoPts) {
result += (Math.min(endSegment.minVideoPts, endSegment.minAudioPts) -
Math.max(segment.maxVideoPts, segment.maxAudioPts)) * 0.001;
}
return result;
};
......@@ -72,10 +90,13 @@
* boundary for the playlist. Defaults to 0.
* @param endSequence {number} (optional) an exclusive upper boundary
* for the playlist. Defaults to playlist length.
* @param strict {boolean} (optional) if true, the interval between
* the final segment and the subsequent segment will not be included
* in the result
* @return {number} the duration between the start index and end
* index.
*/
duration = function(playlist, startSequence, endSequence) {
duration = function(playlist, startSequence, endSequence, strict) {
if (!playlist) {
return 0;
}
......@@ -97,7 +118,8 @@
// calculate the total duration based on the segment durations
return segmentsDuration(playlist,
startSequence,
endSequence);
endSequence,
strict);
};
/**
......
......@@ -260,7 +260,7 @@ videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
// add a metadata cue whenever a metadata event is triggered during
// segment parsing
metadataStream.on('data', function(metadata) {
var i, cue, frame, time, media, segmentOffset, hexDigit;
var i, hexDigit;
// create the metadata track if this is the first ID3 tag we've
// seen
......@@ -276,19 +276,7 @@ videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
}
}
// calculate the start time for the segment that is currently being parsed
media = tech.playlists.media();
segmentOffset = tech.playlists.expiredPreDiscontinuity_ + tech.playlists.expiredPostDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(media, media.mediaSequence, media.mediaSequence + tech.mediaIndex);
// create cue points for all the ID3 frames in this metadata event
for (i = 0; i < metadata.frames.length; i++) {
frame = metadata.frames[i];
time = tech.segmentParser_.mediaTimelineOffset + ((metadata.pts - tech.segmentParser_.timestampOffset) * 0.001);
cue = new window.VTTCue(time, time, frame.value || frame.url || '');
cue.frame = frame;
textTrack.addCue(cue);
}
tech.addCuesForMetadata_(textTrack, metadata);
});
// when seeking, clear out all cues ahead of the earliest position
......@@ -312,6 +300,25 @@ videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
});
};
/**
 * Add cues to the specified text track for all the ID3 frames in a
 * metadata event. The cue time is derived by translating the event's
 * PTS value onto the media timeline, anchored at the start of the
 * segment currently being parsed.
 * @param textTrack {object} the text track to add cues to
 * @param metadata {object} a metadata event from the segment parser's
 * metadata stream, carrying a `pts` timestamp and an array of ID3
 * `frames`
 */
videojs.Hls.prototype.addCuesForMetadata_ = function(textTrack, metadata) {
  var i, cue, frame, minPts, segmentInfo, segmentOffset, time;

  // the media-timeline position of the segment currently being parsed
  segmentInfo = this.segmentBuffer_[0];
  segmentOffset = videojs.Hls.Playlist.duration(segmentInfo.playlist,
                                                segmentInfo.playlist.mediaSequence,
                                                segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex);
  // the earliest PTS value seen by the parser for this segment
  minPts = Math.min(this.segmentParser_.stats.minVideoPts(),
                    this.segmentParser_.stats.minAudioPts());

  // every frame in this event shares the same timestamp, so compute the
  // cue time once outside the loop: the segment's start time plus the
  // event's PTS offset from the segment start, converted from
  // milliseconds to seconds
  time = segmentOffset + ((metadata.pts - minPts) * 0.001);

  // create cue points for all the ID3 frames in this metadata event
  for (i = 0; i < metadata.frames.length; i++) {
    frame = metadata.frames[i];
    cue = new window.VTTCue(time, time, frame.value || frame.url || '');
    cue.frame = frame;
    textTrack.addCue(cue);
  }
};
/**
* Reset the mediaIndex if play() is called after the video has
* ended.
......@@ -812,8 +819,6 @@ videojs.Hls.prototype.drainBuffer = function(event) {
decrypter,
segIv,
ptsTime,
tagPts,
tagIndex,
segmentOffset = 0,
segmentBuffer = this.segmentBuffer_;
......@@ -872,19 +877,12 @@ videojs.Hls.prototype.drainBuffer = function(event) {
}
event = event || {};
segmentOffset = this.playlists.expiredPreDiscontinuity_;
segmentOffset += this.playlists.expiredPostDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(playlist, playlist.mediaSequence, playlist.mediaSequence + mediaIndex);
segmentOffset *= 1000;
// if this segment starts is the start of a new discontinuity
// sequence, the segment parser's timestamp offset must be
// re-calculated
if (segment.discontinuity) {
this.segmentParser_.mediaTimelineOffset = segmentOffset * 0.001;
this.segmentParser_.timestampOffset = null;
} else if (this.segmentParser_.mediaTimelineOffset === null) {
this.segmentParser_.mediaTimelineOffset = segmentOffset * 0.001;
}
// transmux the segment data from MP2T to FLV
......@@ -908,29 +906,66 @@ videojs.Hls.prototype.drainBuffer = function(event) {
this.updateDuration(this.playlists.media());
/*
Live In-Progress
0 s c m
. . . |~~~~~~~|--%-----^--|~~~~~%~~~~~|-----| . . .
p q AAJ
Live In-Progress 2
0 s c m
. . . |~~~~~~~~~~%~~|--^--|~~~~~%~~~~~|-----| . . .
q AAJ
0 400 450
. . . |-------X-----| . . .
Live Before Buffering
c
. . . |~~%~~~~~| . . .
??
p = earliest known pts
s = earliest playback position
q = earliest pts after the last discontinuity
c = current time
m = the latest buffered playback position
~ = only EXTINF available
- = PTS available
% = discontinuity
. = expired or unavailable
A = buffered in actionscript
J = buffered in javascript
Calculate current pts from current time
- subtract current time from buffered end to find out the interval between the latest buffered playback position and current time
- determine the current segment by subtracting segment durations from the latest buffered playback position
- determine current pts based on max segment pts
Determine the target segment by calculating the duration of intermediate segments
Add the difference between current time and the target time to find the target pts
Skip samples until the next sample is greater than or equal to the target pts
*/
// if we're refilling the buffer after a seek, scan through the muxed
// FLV tags until we find the one that is closest to the desired
// playback time
if (typeof offset === 'number') {
ptsTime = offset - segmentOffset + tags[0].pts;
tagPts = tags[i].pts;
tagIndex = i;
while (tagPts < ptsTime) {
// determine the offset within this segment we're seeking to
segmentOffset = this.playlists.expiredPostDiscontinuity_ + this.playlists.expiredPreDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(playlist,
playlist.mediaSequence,
playlist.mediaSequence + mediaIndex);
segmentOffset = offset - (segmentOffset * 1000);
ptsTime = segmentOffset + tags[0].pts;
while (tags[i + 1] && tags[i].pts < ptsTime) {
i++;
if (tags[i] !== undefined) {
tagPts = tags[i].pts;
tagIndex = i;
}
else {
break;
}
}
// tell the SWF where we will be seeking to
this.el().vjs_setProperty('currentTime', (tagPts - tags[0].pts + segmentOffset) * 0.001);
// tell the SWF the media position of the first tag we'll be delivering
this.el().vjs_setProperty('currentTime', ((tags[i].pts - ptsTime + offset) * 0.001));
tags = tags.slice(tagIndex);
tags = tags.slice(i);
this.lastSeekedTime_ = null;
}
......
......@@ -81,29 +81,85 @@
maxAudioPts: 1 * 10 * 1000 + 1,
uri: '0.ts'
}, {
duration: 10,
duration: 9,
uri: '1.ts'
}, {
duration: 10,
uri: '2.ts'
}, {
duration: 10,
minVideoPts: 2 * 10 * 1000 + 7,
minAudioPts: 2 * 10 * 1000 + 10,
maxVideoPts: 3 * 10 * 1000 + 1,
maxAudioPts: 3 * 10 * 1000 + 2,
uri: '2.ts'
uri: '3.ts'
}, {
duration: 10,
maxVideoPts: 4 * 10 * 1000 + 1,
maxAudioPts: 4 * 10 * 1000 + 2,
uri: '3.ts'
uri: '4.ts'
}]
}, 0, 4);
}, 0, 5);
firstInterval = (1 * 10 * 1000 + 1) - 1;
firstInterval *= 0.001;
secondInterval = (4 * 10 * 1000 + 2) - (2 * 10 * 1000 + 7);
secondInterval *= 0.001;
equal(duration, firstInterval + 10 + secondInterval, 'calculated with mixed intervals');
equal(duration,
firstInterval + 9 + 10 + secondInterval,
'calculated with mixed intervals');
});
// when the requested interval ends in segments that have no PTS
// information, the EXTINF durations from the playlist fill in the gaps
test('interval duration handles trailing segments without PTS information', function() {
  var playlist, result;
  playlist = {
    mediaSequence: 0,
    endList: true,
    segments: [{
      minVideoPts: 0,
      minAudioPts: 0,
      maxVideoPts: 10 * 1000,
      maxAudioPts: 10 * 1000,
      uri: '0.ts'
    }, {
      duration: 9,
      uri: '1.ts'
    }, {
      duration: 10,
      uri: '2.ts'
    }, {
      minVideoPts: 30 * 1000,
      minAudioPts: 30 * 1000,
      maxVideoPts: 40 * 1000,
      maxAudioPts: 40 * 1000,
      uri: '3.ts'
    }]
  };

  result = Playlist.duration(playlist, 0, 3);
  equal(result, 10 + 9 + 10, 'calculated duration');
});
// the gap between one segment's maximum PTS and the following
// segment's minimum PTS is attributed to the later segment
test('interval duration counts the time between segments as part of the later segment duration', function() {
  var playlist, result;
  playlist = {
    mediaSequence: 0,
    endList: true,
    segments: [{
      minVideoPts: 0,
      minAudioPts: 0,
      maxVideoPts: 1 * 10 * 1000,
      maxAudioPts: 1 * 10 * 1000,
      uri: '0.ts'
    }, {
      minVideoPts: 1 * 10 * 1000 + 100,
      minAudioPts: 1 * 10 * 1000 + 100,
      maxVideoPts: 2 * 10 * 1000 + 100,
      maxAudioPts: 2 * 10 * 1000 + 100,
      duration: 10,
      uri: '1.ts'
    }]
  };

  result = Playlist.duration(playlist, 0, 1);
  equal(result, (1 * 10 * 1000 + 100) * 0.001, 'included the segment gap');
});
test('interval duration accounts for discontinuities', function() {
......@@ -130,6 +186,53 @@
equal(duration, 10 + 10, 'handles discontinuities');
});
// when a discontinuity separates the interval's final segment from the
// next one, the trailing PTS gap must not be added to the result
test('interval duration does not count ending segment gaps across a discontinuity', function() {
  var playlist, result;
  playlist = {
    mediaSequence: 0,
    endList: true,
    segments: [{
      minVideoPts: 0,
      minAudioPts: 0,
      maxVideoPts: 1 * 10 * 1000,
      maxAudioPts: 1 * 10 * 1000,
      uri: '0.ts'
    }, {
      discontinuity: true,
      minVideoPts: 1 * 10 * 1000 + 100,
      minAudioPts: 1 * 10 * 1000 + 100,
      maxVideoPts: 2 * 10 * 1000 + 100,
      maxAudioPts: 2 * 10 * 1000 + 100,
      duration: 10,
      uri: '1.ts'
    }]
  };

  result = Playlist.duration(playlist, 0, 1);
  equal(result, (1 * 10 * 1000) * 0.001, 'did not include the segment gap');
});
// passing the `strict` flag excludes the gap between the interval's
// final segment and the subsequent segment from the result
test('strict interval duration does not count ending segment gaps', function() {
  var playlist, result;
  playlist = {
    mediaSequence: 0,
    endList: true,
    segments: [{
      minVideoPts: 0,
      minAudioPts: 0,
      maxVideoPts: 1 * 10 * 1000,
      maxAudioPts: 1 * 10 * 1000,
      uri: '0.ts'
    }, {
      minVideoPts: 1 * 10 * 1000 + 100,
      minAudioPts: 1 * 10 * 1000 + 100,
      maxVideoPts: 2 * 10 * 1000 + 100,
      maxAudioPts: 2 * 10 * 1000 + 100,
      duration: 10,
      uri: '1.ts'
    }]
  };

  result = Playlist.duration(playlist, 0, 1, true);
  equal(result, (1 * 10 * 1000) * 0.001, 'did not include the segment gap');
});
test('calculates seekable time ranges from the available segments', function() {
var playlist = {
mediaSequence: 0,
......
......@@ -97,7 +97,7 @@ var
var MockSegmentParser;
if (tags === undefined) {
tags = [];
tags = [{ pts: 0, bytes: new Uint8Array(1) }];
}
MockSegmentParser = function() {
this.getFlvHeader = function() {
......@@ -1287,30 +1287,32 @@ test('clears in-band cues ahead of current time on seek', function() {
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
if (events.length) {
while (events.length) {
player.hls.segmentParser_.metadataStream.trigger('data', events.shift());
}
};
standardXHRResponse(requests.shift()); // media
tags.push({ pts: 10 * 1000, bytes: new Uint8Array(1) });
tags.push({ pts: 0, bytes: new Uint8Array(1) },
{ pts: 10 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 20 * 1000,
pts: 9.9 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 3'
value: 'cue 1'
}]
});
events.push({
pts: 9.9 * 1000,
pts: 20 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 1'
value: 'cue 3'
}]
});
standardXHRResponse(requests.shift()); // segment 0
tags.push({ pts: 20 * 1000, bytes: new Uint8Array(1) });
tags.push({ pts: 10 * 1000 + 1, bytes: new Uint8Array(1) },
{ pts: 20 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 19.9 * 1000,
data: new Uint8Array([]),
......@@ -1323,12 +1325,12 @@ test('clears in-band cues ahead of current time on seek', function() {
standardXHRResponse(requests.shift()); // segment 1
track = player.textTracks()[0];
equal(track.cues.length, 2, 'added the cues');
equal(track.cues.length, 3, 'added the cues');
// seek into segment 1
player.currentTime(11);
player.trigger('seeking');
equal(track.cues.length, 1, 'removed a cue');
equal(track.cues.length, 1, 'removed later cues');
equal(track.cues[0].startTime, 9.9, 'retained the earlier cue');
});
......@@ -2010,6 +2012,48 @@ test('continues playing after seek to discontinuity', function() {
strictEqual(aborts, 1, 'cleared the segment buffer on a seek');
});
// regression test: a seek target can fall between the last tag of one
// segment and the first tag of the next. Playback should resume from
// the first tag at or after the requested time rather than failing to
// locate a tag.
test('seeking does not fail when targeted between segments', function() {
var tags = [], currentTime, segmentUrl;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
// mock out the currentTime callbacks
player.hls.el().vjs_setProperty = function(property, value) {
if (property === 'currentTime') {
currentTime = value;
}
};
player.hls.el().vjs_getProperty = function(property) {
if (property === 'currentTime') {
return currentTime;
}
};
standardXHRResponse(requests.shift()); // media
// segment 0 covers PTS 100 through 9100
tags.push({ pts: 100, bytes: new Uint8Array(1) },
{ pts: 9 * 1000 + 100, bytes: new Uint8Array(1) });
standardXHRResponse(requests.shift()); // segment 0
player.hls.checkBuffer_();
// segment 1 starts at PTS 9600 — there is a gap after segment 0's last tag
tags.push({ pts: 9.5 * 1000 + 100, bytes: new Uint8Array(1) },
{ pts: 20 * 1000 + 100, bytes: new Uint8Array(1) });
segmentUrl = requests[0].url;
standardXHRResponse(requests.shift()); // segment 1
// seek to a time that is greater than the last tag in segment 0 but
// less than the first in segment 1
player.currentTime(9.4);
// seeking should trigger a re-request of the segment containing the target
equal(requests[0].url, segmentUrl, 'requested the later segment');
tags.push({ pts: 9.5 * 1000 + 100, bytes: new Uint8Array(1) },
{ pts: 20 * 1000 + 100, bytes: new Uint8Array(1) });
standardXHRResponse(requests.shift()); // segment 1
// playback lands on the first tag at/after the target (PTS 9600 -> 9.5s)
equal(player.currentTime(), 9.5, 'seeked to the later time');
});
test('resets the switching algorithm if a request times out', function() {
player.src({
src: 'master.m3u8',
......