0227bf64 by David LaPalomento

improved video duration calculation. closes #321

2 parents 675d9423 af75b33b
......@@ -2,7 +2,7 @@ CHANGELOG
=========
## HEAD (Unreleased)
_(none)_
* @dmlap improved video duration calculation. ([view](https://github.com/videojs/videojs-contrib-hls/pull/321))
--------------------
......
......@@ -363,29 +363,4 @@ hls.FlvTag.frameTime = function(tag) {
return pts;
};
/**
* Calculate the media timeline duration represented by an array of
* tags. This function assumes the tags are already pre-sorted by
* presentation timestamp (PTS), in ascending order. Returns zero if
* there are fewer than two FLV tags to inspect.
* @param tags {array} the FlvTag objects to query
* @return the number of milliseconds between the display time of the
* first tag and the last tag.
*/
hls.FlvTag.durationFromTags = function(tags) {
if (tags.length < 2) {
return 0;
}
var
first = tags[0],
last = tags[tags.length - 1],
frameDuration;
// use the interval between the last two tags or assume 24 fps
frameDuration = last.pts - tags[tags.length - 2].pts || (1/24);
return (last.pts - first.pts) + frameDuration;
};
})(this);
......
......@@ -18,6 +18,7 @@
resolveUrl = videojs.Hls.resolveUrl,
xhr = videojs.Hls.xhr,
Playlist = videojs.Hls.Playlist,
mergeOptions = videojs.util.mergeOptions,
/**
* Returns a new master playlist that is the result of merging an
......@@ -33,7 +34,7 @@
updateMaster = function(master, media) {
var
changed = false,
result = videojs.util.mergeOptions(master, {}),
result = mergeOptions(master, {}),
i,
playlist;
......@@ -50,14 +51,47 @@
continue;
}
result.playlists[i] = videojs.util.mergeOptions(playlist, media);
result.playlists[i] = mergeOptions(playlist, media);
result.playlists[media.uri] = result.playlists[i];
// if the update could overlap existing segment information,
// merge the two lists
if (playlist.segments) {
result.playlists[i].segments = updateSegments(playlist.segments,
media.segments,
media.mediaSequence - playlist.mediaSequence);
}
changed = true;
}
}
return changed ? result : null;
},
/**
* Returns a new array of segments that is the result of merging
* properties from an older list of segments onto an updated
* list. No properties on the updated list will be overridden.
* @param original {array} the outdated list of segments
* @param update {array} the updated list of segments
* @param offset {number} (optional) the index of the first update
* segment in the original segment list. For non-live playlists,
* this should always be zero and does not need to be
* specified. For live playlists, it should be the difference
* between the media sequence numbers in the original and updated
* playlists.
* @return a list of merged segment objects
*/
updateSegments = function(original, update, offset) {
var result = update.slice(), length, i;
offset = offset || 0;
length = Math.min(original.length, update.length + offset);
for (i = offset; i < length; i++) {
result[i - offset] = mergeOptions(original[i], result[i - offset]);
}
return result;
},
PlaylistLoader = function(srcUrl, withCredentials) {
var
loader = this,
......
......@@ -21,7 +21,7 @@
* index.
*/
segmentsDuration = function(playlist, startSequence, endSequence) {
var targetDuration, i, segment, expiredSegmentCount, result = 0;
var targetDuration, i, j, segment, endSegment, expiredSegmentCount, result = 0;
startSequence = startSequence || 0;
i = startSequence;
......@@ -36,9 +36,27 @@
// accumulate the segment durations into the result
for (; i < endSequence; i++) {
segment = playlist.segments[i - playlist.mediaSequence];
result += segment.preciseDuration ||
segment.duration ||
targetDuration;
// when PTS values aren't available, use information from the playlist
if (segment.minVideoPts === undefined) {
result += segment.duration ||
targetDuration;
continue;
}
// find the last segment with PTS info and use that to calculate
// the interval duration
for(j = i; j < endSequence - 1; j++) {
endSegment = playlist.segments[j - playlist.mediaSequence + 1];
if (endSegment.maxVideoPts === undefined ||
endSegment.discontinuity) {
break;
}
}
endSegment = playlist.segments[j - playlist.mediaSequence];
result += (Math.max(endSegment.maxVideoPts, endSegment.maxAudioPts) -
Math.min(segment.minVideoPts, segment.minAudioPts)) * 0.001;
i = j;
}
return result;
......
......@@ -479,8 +479,20 @@
h264Tags: function() {
return h264Stream.tags.length;
},
minVideoPts: function() {
return h264Stream.tags[0].pts;
},
maxVideoPts: function() {
return h264Stream.tags[h264Stream.tags.length - 1].pts;
},
aacTags: function() {
return aacStream.tags.length;
},
minAudioPts: function() {
return aacStream.tags[0].pts;
},
maxAudioPts: function() {
return aacStream.tags[aacStream.tags.length - 1].pts;
}
};
};
......
......@@ -870,15 +870,17 @@ videojs.Hls.prototype.drainBuffer = function(event) {
tags = [];
while (this.segmentParser_.tagsAvailable()) {
tags.push(this.segmentParser_.getNextTag());
if (this.segmentParser_.tagsAvailable()) {
// record PTS information for the segment so we can calculate
// accurate durations and seek reliably
segment.minVideoPts = this.segmentParser_.stats.minVideoPts();
segment.maxVideoPts = this.segmentParser_.stats.maxVideoPts();
segment.minAudioPts = this.segmentParser_.stats.minAudioPts();
segment.maxAudioPts = this.segmentParser_.stats.maxAudioPts();
}
if (tags.length > 0) {
// Use the presentation timestamp of the ts segment to calculate its
// exact duration, since this may differ by fractions of a second
// from what is reported in the playlist
segment.preciseDuration = videojs.Hls.FlvTag.durationFromTags(tags) * 0.001;
while (this.segmentParser_.tagsAvailable()) {
tags.push(this.segmentParser_.getNextTag());
}
this.updateDuration(this.playlists.media());
......
......@@ -57,32 +57,4 @@ test('writeBytes grows the internal byte array dynamically', function() {
}
});
test('calculates the duration of a tag array from PTS values', function() {
var tags = [], count = 20, i;
for (i = 0; i < count; i++) {
tags[i] = new FlvTag(FlvTag.VIDEO_TAG);
tags[i].pts = i * 1000;
}
equal(FlvTag.durationFromTags(tags), count * 1000, 'calculated duration from PTS values');
});
test('durationFromTags() assumes 24fps if the last frame duration cannot be calculated', function() {
var tags = [
new FlvTag(FlvTag.VIDEO_TAG),
new FlvTag(FlvTag.VIDEO_TAG),
new FlvTag(FlvTag.VIDEO_TAG)
];
tags[0].pts = 0;
tags[1].pts = tags[2].pts = 1000;
equal(FlvTag.durationFromTags(tags), 1000 + (1/24) , 'assumes 24fps video');
});
test('durationFromTags() returns zero if there are less than two frames', function() {
equal(FlvTag.durationFromTags([]), 0, 'returns zero for empty input');
equal(FlvTag.durationFromTags([new FlvTag(FlvTag.VIDEO_TAG)]), 0, 'returns zero for a singleton input');
});
})(this);
......
......@@ -331,6 +331,35 @@
'requested the media playlist');
});
test('preserves segment metadata across playlist refreshes', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8'), segment;
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXTINF:10,\n' +
'2.ts\n');
// add PTS info to 1.ts
segment = loader.media().segments[1];
segment.minVideoPts = 14;
segment.maxAudioPts = 27;
segment.preciseDuration = 10.045;
clock.tick(10 * 1000); // trigger a refresh
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:1\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXTINF:10,\n' +
'2.ts\n');
deepEqual(loader.media().segments[0], segment, 'preserved segment attributes');
});
test('clears the update timeout when switching quality', function() {
var loader = new videojs.Hls.PlaylistLoader('live-master.m3u8'), refreshes = 0;
// track the number of playlist refreshes triggered
......
......@@ -38,6 +38,98 @@
equal(duration, 14 * 10, 'duration includes dropped segments');
});
test('interval duration uses PTS values when available', function() {
var duration = Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 1,
minAudioPts: 2,
uri: '0.ts'
}, {
duration: 10,
maxVideoPts: 2 * 10 * 1000 + 1,
maxAudioPts: 2 * 10 * 1000 + 2,
uri: '1.ts'
}, {
duration: 10,
maxVideoPts: 3 * 10 * 1000 + 1,
maxAudioPts: 3 * 10 * 1000 + 2,
uri: '2.ts'
}, {
duration: 10,
maxVideoPts: 4 * 10 * 1000 + 1,
maxAudioPts: 4 * 10 * 1000 + 2,
uri: '3.ts'
}]
}, 0, 4);
equal(duration, ((4 * 10 * 1000 + 2) - 1) * 0.001, 'used PTS values');
});
test('interval duration works when partial PTS information is available', function() {
var firstInterval, secondInterval, duration = Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 1,
minAudioPts: 2,
maxVideoPts: 1 * 10 * 1000 + 1,
// intentionally less duration than video
// the max stream duration should be used
maxAudioPts: 1 * 10 * 1000 + 1,
uri: '0.ts'
}, {
duration: 10,
uri: '1.ts'
}, {
duration: 10,
minVideoPts: 2 * 10 * 1000 + 7,
minAudioPts: 2 * 10 * 1000 + 10,
maxVideoPts: 3 * 10 * 1000 + 1,
maxAudioPts: 3 * 10 * 1000 + 2,
uri: '2.ts'
}, {
duration: 10,
maxVideoPts: 4 * 10 * 1000 + 1,
maxAudioPts: 4 * 10 * 1000 + 2,
uri: '3.ts'
}]
}, 0, 4);
firstInterval = (1 * 10 * 1000 + 1) - 1;
firstInterval *= 0.001;
secondInterval = (4 * 10 * 1000 + 2) - (2 * 10 * 1000 + 7);
secondInterval *= 0.001;
equal(duration, firstInterval + 10 + secondInterval, 'calculated with mixed intervals');
});
test('interval duration accounts for discontinuities', function() {
var duration = Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 0,
minAudioPts: 0,
maxVideoPts: 1 * 10 * 1000,
maxAudioPts: 1 * 10 * 1000,
uri: '0.ts'
}, {
discontinuity: true,
minVideoPts: 2 * 10 * 1000,
minAudioPts: 2 * 10 * 1000,
maxVideoPts: 3 * 10 * 1000,
maxAudioPts: 3 * 10 * 1000,
duration: 10,
uri: '1.ts'
}]
}, 0, 2);
equal(duration, 10 + 10, 'handles discontinuities');
});
test('calculates seekable time ranges from the available segments', function() {
var playlist = {
mediaSequence: 0,
......
......@@ -422,10 +422,19 @@
byte,
tag,
type,
minVideoPts,
maxVideoPts,
minAudioPts,
maxAudioPts,
currentPts = 0,
lastTime = 0;
parser.parseSegmentBinaryData(window.bcSegment);
minVideoPts = parser.stats.minVideoPts();
maxVideoPts = parser.stats.maxVideoPts();
minAudioPts = parser.stats.minAudioPts();
maxAudioPts = parser.stats.maxAudioPts();
while (parser.tagsAvailable()) {
tag = parser.getNextTag();
type = tag.bytes[0];
......@@ -435,11 +444,15 @@
// generic flv headers
switch (type) {
case 8: ok(true, 'the type is audio');
case 8: ok(true, 'the type is audio');
ok(minAudioPts <= currentPts, 'not less than minimum audio PTS');
ok(maxAudioPts >= currentPts, 'not greater than max audio PTS');
break;
case 9: ok(true, 'the type is video');
case 9: ok(true, 'the type is video');
ok(minVideoPts <= currentPts, 'not less than minimum video PTS');
ok(maxVideoPts >= currentPts, 'not greater than max video PTS');
break;
case 18: ok(true, 'the type is script');
case 18: ok(true, 'the type is script');
break;
default: ok(false, 'the type (' + type + ') is unrecognized');
}
......
......@@ -94,14 +94,18 @@ var
},
mockSegmentParser = function(tags) {
var MockSegmentParser;
if (tags === undefined) {
tags = [];
}
return function() {
MockSegmentParser = function() {
this.getFlvHeader = function() {
return 'flv';
};
this.parseSegmentBinaryData = function() {};
this.timestampOffset = 0;
this.mediaTimelineOffset = 0;
this.flushTags = function() {};
this.tagsAvailable = function() {
return tags.length;
......@@ -112,10 +116,31 @@ var
this.getNextTag = function() {
return tags.shift();
};
this.metadataStream = {
on: Function.prototype
this.metadataStream = new videojs.Hls.Stream();
this.metadataStream.init();
this.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
this.stats = {
minVideoPts: function() {
return tags[0].pts;
},
maxVideoPts: function() {
return tags[tags.length - 1].pts;
},
minAudioPts: function() {
return tags[0].pts;
},
maxAudioPts: function() {
return tags[tags.length - 1].pts;
},
};
};
MockSegmentParser.STREAM_TYPES = videojs.Hls.SegmentParser.STREAM_TYPES;
return MockSegmentParser;
},
// return an absolute version of a page-relative URL
......@@ -1001,6 +1026,26 @@ test('only appends one segment at a time', function() {
equal(appends, 0, 'did not append while updating');
});
test('records the min and max PTS values for a segment', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.pop()); // media.m3u8
tags.push({ pts: 0, bytes: new Uint8Array(1) });
tags.push({ pts: 10, bytes: new Uint8Array(1) });
standardXHRResponse(requests.pop()); // segment 0
equal(player.hls.playlists.media().segments[0].minVideoPts, 0, 'recorded min video pts');
equal(player.hls.playlists.media().segments[0].maxVideoPts, 10, 'recorded max video pts');
equal(player.hls.playlists.media().segments[0].minAudioPts, 0, 'recorded min audio pts');
equal(player.hls.playlists.media().segments[0].maxAudioPts, 10, 'recorded max audio pts');
});
test('waits to download new segments until the media playlist is stable', function() {
var media;
player.src({
......@@ -1140,58 +1185,9 @@ test('flushes the parser after each segment', function() {
strictEqual(flushes, 1, 'tags are flushed at the end of a segment');
});
test('calculates preciseDuration for a new segment', function() {
var tags = [
{ pts : 200 * 1000, bytes: new Uint8Array(1) },
{ pts : 300 * 1000, bytes: new Uint8Array(1) }
];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests[0]);
strictEqual(player.duration(), 40, 'player duration is read from playlist on load');
standardXHRResponse(requests[1]);
strictEqual(player.hls.playlists.media().segments[0].preciseDuration, 200, 'preciseDuration is calculated and stored');
strictEqual(player.duration(), 230, 'player duration is calculated using preciseDuration');
});
test('calculates preciseDuration correctly around discontinuities', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXT-X-ENDLIST\n');
tags.push({ pts: 10 * 1000, bytes: new Uint8Array(1) });
standardXHRResponse(requests.shift()); // segment 0
player.hls.checkBuffer_();
// the PTS value of the second segment is *earlier* than the first
tags.push({ pts: 0 * 1000, bytes: new Uint8Array(1) });
tags.push({ pts: 5 * 1000, bytes: new Uint8Array(1) });
standardXHRResponse(requests.shift()); // segment 1
equal(player.hls.playlists.media().segments[1].preciseDuration,
5 + 5, // duration includes the time to display the second tag
'duration is independent of previous segments');
});
test('exposes in-band metadata events as cues', function() {
var track;
videojs.Hls.SegmentParser = mockSegmentParser();
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
......@@ -1199,10 +1195,6 @@ test('exposes in-band metadata events as cues', function() {
openMediaSource(player);
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 2000,
......@@ -1251,23 +1243,14 @@ test('exposes in-band metadata events as cues', function() {
test('only adds in-band cues the first time they are encountered', function() {
var tags = [{ pts: 0, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 2000,
......@@ -1295,23 +1278,14 @@ test('clears in-band cues ahead of current time on seek', function() {
tags = [],
events = [],
track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
if (events.length) {
player.hls.segmentParser_.metadataStream.trigger('data', events.shift());
......@@ -1360,26 +1334,17 @@ test('clears in-band cues ahead of current time on seek', function() {
test('translates ID3 PTS values to cue media timeline positions', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// setup the timestamp offset
this.timestampOffset = tags[0].pts;
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
......@@ -1400,26 +1365,17 @@ test('translates ID3 PTS values to cue media timeline positions', function() {
test('translates ID3 PTS values across discontinuities', function() {
var tags = [], events = [], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'cues-and-discontinuities.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
if (this.timestampOffset === null) {
this.timestampOffset = tags[0].pts;
}
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
if (events.length) {
player.hls.segmentParser_.metadataStream.trigger('data', events.shift());
......@@ -1437,7 +1393,9 @@ test('translates ID3 PTS values across discontinuities', function() {
'1.ts\n');
// segment 0 starts at PTS 14000 and has a cue point at 15000
tags.push({ pts: 14 * 1000, bytes: new Uint8Array(1) });
player.hls.segmentParser_.timestampOffset = 14 * 1000;
tags.push({ pts: 14 * 1000, bytes: new Uint8Array(1) },
{ pts: 24 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 15 * 1000,
data: new Uint8Array([]),
......@@ -1449,14 +1407,14 @@ test('translates ID3 PTS values across discontinuities', function() {
standardXHRResponse(requests.shift()); // segment 0
// segment 1 is after a discontinuity, starts at PTS 22000
// and has a cue point at 15000
// and has a cue point at 23000
tags.push({ pts: 22 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 23 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 0'
value: 'cue 1'
}]
});
player.hls.checkBuffer_();
......