cc20895d by David LaPalomento

Record min and max PTS values for segments

Segment duration can have different interpretations in different contexts. To ensure we have all the information we need to seek accurately, record PTS values for the audio and video streams within a segment. For #314.
1 parent 675d9423
......@@ -479,8 +479,20 @@
h264Tags: function() {
return h264Stream.tags.length;
},
minVideoPts: function() {
return h264Stream.tags[0].pts;
},
maxVideoPts: function() {
return h264Stream.tags[h264Stream.tags.length - 1].pts;
},
aacTags: function() {
return aacStream.tags.length;
},
minAudioPts: function() {
return aacStream.tags[0].pts;
},
maxAudioPts: function() {
return aacStream.tags[aacStream.tags.length - 1].pts;
}
};
};
......
......@@ -870,6 +870,15 @@ videojs.Hls.prototype.drainBuffer = function(event) {
tags = [];
if (this.segmentParser_.tagsAvailable()) {
// record PTS information for the segment so we can calculate
// accurate durations and seek reliably
segment.minVideoPts = this.segmentParser_.stats.minVideoPts();
segment.maxVideoPts = this.segmentParser_.stats.maxVideoPts();
segment.minAudioPts = this.segmentParser_.stats.minAudioPts();
segment.maxAudioPts = this.segmentParser_.stats.maxAudioPts();
}
while (this.segmentParser_.tagsAvailable()) {
tags.push(this.segmentParser_.getNextTag());
}
......
......@@ -422,10 +422,19 @@
byte,
tag,
type,
minVideoPts,
maxVideoPts,
minAudioPts,
maxAudioPts,
currentPts = 0,
lastTime = 0;
parser.parseSegmentBinaryData(window.bcSegment);
minVideoPts = parser.stats.minVideoPts();
maxVideoPts = parser.stats.maxVideoPts();
minAudioPts = parser.stats.minAudioPts();
maxAudioPts = parser.stats.maxAudioPts();
while (parser.tagsAvailable()) {
tag = parser.getNextTag();
type = tag.bytes[0];
......@@ -436,8 +445,12 @@
// generic flv headers
switch (type) {
case 8: ok(true, 'the type is audio');
ok(minAudioPts <= currentPts, 'not less than minimum audio PTS');
ok(maxAudioPts >= currentPts, 'not greater than max audio PTS');
break;
case 9: ok(true, 'the type is video');
ok(minVideoPts <= currentPts, 'not less than minimum video PTS');
ok(maxVideoPts >= currentPts, 'not greater than max video PTS');
break;
case 18: ok(true, 'the type is script');
break;
......
......@@ -94,14 +94,18 @@ var
},
mockSegmentParser = function(tags) {
var MockSegmentParser;
if (tags === undefined) {
tags = [];
}
return function() {
MockSegmentParser = function() {
this.getFlvHeader = function() {
return 'flv';
};
this.parseSegmentBinaryData = function() {};
this.timestampOffset = 0;
this.mediaTimelineOffset = 0;
this.flushTags = function() {};
this.tagsAvailable = function() {
return tags.length;
......@@ -112,10 +116,31 @@ var
this.getNextTag = function() {
return tags.shift();
};
this.metadataStream = {
on: Function.prototype
this.metadataStream = new videojs.Hls.Stream();
this.metadataStream.init();
this.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
this.stats = {
minVideoPts: function() {
return tags[0].pts;
},
maxVideoPts: function() {
return tags[tags.length - 1].pts;
},
minAudioPts: function() {
return tags[0].pts;
},
maxAudioPts: function() {
return tags[tags.length - 1].pts;
},
};
};
MockSegmentParser.STREAM_TYPES = videojs.Hls.SegmentParser.STREAM_TYPES;
return MockSegmentParser;
},
// return an absolute version of a page-relative URL
......@@ -1001,6 +1026,26 @@ test('only appends one segment at a time', function() {
equal(appends, 0, 'did not append while updating');
});
// verify that the PTS extremes reported by the segment parser are
// copied onto the segment object in the media playlist
test('records the min and max PTS values for a segment', function() {
  var flvTags = [], segment;

  videojs.Hls.SegmentParser = mockSegmentParser(flvTags);
  player.src({
    src: 'manifest/media.m3u8',
    type: 'application/vnd.apple.mpegurl'
  });
  openMediaSource(player);
  standardXHRResponse(requests.pop()); // media.m3u8

  // queue up two tags whose PTS values bound the segment
  flvTags.push({ pts: 0, bytes: new Uint8Array(1) });
  flvTags.push({ pts: 10, bytes: new Uint8Array(1) });
  standardXHRResponse(requests.pop()); // segment 0

  segment = player.hls.playlists.media().segments[0];
  equal(segment.minVideoPts, 0, 'recorded min video pts');
  equal(segment.maxVideoPts, 10, 'recorded max video pts');
  equal(segment.minAudioPts, 0, 'recorded min audio pts');
  equal(segment.maxAudioPts, 10, 'recorded max audio pts');
});
test('waits to download new segments until the media playlist is stable', function() {
var media;
player.src({
......@@ -1192,6 +1237,7 @@ test('calculates preciseDuration correctly around discontinuities', function() {
test('exposes in-band metadata events as cues', function() {
var track;
videojs.Hls.SegmentParser = mockSegmentParser();
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
......@@ -1199,10 +1245,6 @@ test('exposes in-band metadata events as cues', function() {
openMediaSource(player);
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 2000,
......@@ -1251,23 +1293,14 @@ test('exposes in-band metadata events as cues', function() {
test('only adds in-band cues the first time they are encountered', function() {
var tags = [{ pts: 0, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 2000,
......@@ -1295,23 +1328,14 @@ test('clears in-band cues ahead of current time on seek', function() {
tags = [],
events = [],
track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
if (events.length) {
player.hls.segmentParser_.metadataStream.trigger('data', events.shift());
......@@ -1360,26 +1384,17 @@ test('clears in-band cues ahead of current time on seek', function() {
test('translates ID3 PTS values to cue media timeline positions', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// setup the timestamp offset
this.timestampOffset = tags[0].pts;
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
......@@ -1400,26 +1415,17 @@ test('translates ID3 PTS values to cue media timeline positions', function() {
test('translates ID3 PTS values across discontinuities', function() {
var tags = [], events = [], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'cues-and-discontinuities.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.getNextTag = function() {
return tags.shift();
};
player.hls.segmentParser_.tagsAvailable = function() {
return tags.length;
};
player.hls.segmentParser_.parseSegmentBinaryData = function() {
if (this.timestampOffset === null) {
this.timestampOffset = tags[0].pts;
}
// fake out a descriptor
player.hls.segmentParser_.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
// trigger a metadata event
if (events.length) {
player.hls.segmentParser_.metadataStream.trigger('data', events.shift());
......@@ -1437,6 +1443,7 @@ test('translates ID3 PTS values across discontinuities', function() {
'1.ts\n');
// segment 0 starts at PTS 14000 and has a cue point at 15000
player.hls.segmentParser_.timestampOffset = 14 * 1000;
tags.push({ pts: 14 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 15 * 1000,
......@@ -1449,14 +1456,14 @@ test('translates ID3 PTS values across discontinuities', function() {
standardXHRResponse(requests.shift()); // segment 0
// segment 1 is after a discontinuity, starts at PTS 22000
// and has a cue point at 15000
// and has a cue point at 23000
tags.push({ pts: 22 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 23 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 0'
value: 'cue 1'
}]
});
player.hls.checkBuffer_();
......