Calculate duration from PTS values when requested
Use the time range covered by the union of the audio and video track PTS values when calculating duration. For non-live content, this matches the NetStream's duration calculation to within a millisecond or so. Make sure that segment PTS information is preserved when live playlists are refreshed. Remove FlvTag.durationFromTags in favor of Playlist.duration(), since the latter handles durations across segments and discontinuities. For #314.
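The core calculation can be sketched as a small helper (illustrative only — the name ptsIntervalDuration is not part of this commit; the segment property names and the millisecond-to-second conversion are taken from the patch):

```javascript
// Minimal sketch of the PTS-union duration idea, not the committed code.
// Given the first and last segments of a run that all carry PTS metadata,
// the interval duration is the span from the earliest PTS on either track
// to the latest PTS on either track, converted from milliseconds to seconds.
var ptsIntervalDuration = function(firstSegment, lastSegment) {
  var start = Math.min(firstSegment.minVideoPts, firstSegment.minAudioPts),
      end = Math.max(lastSegment.maxVideoPts, lastSegment.maxAudioPts);
  return (end - start) * 0.001;
};

// For example, the first Playlist.duration() test added below expects
// ptsIntervalDuration({ minVideoPts: 1, minAudioPts: 2 },
//                     { maxVideoPts: 40001, maxAudioPts: 40002 })
// to contribute 40.001 seconds.
```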
Showing 8 changed files with 180 additions and 116 deletions
... | @@ -363,29 +363,4 @@ hls.FlvTag.frameTime = function(tag) { | ... | @@ -363,29 +363,4 @@ hls.FlvTag.frameTime = function(tag) { |
363 | return pts; | 363 | return pts; |
364 | }; | 364 | }; |
365 | 365 | ||
366 | /** | ||
367 | * Calculate the media timeline duration represented by an array of | ||
368 | * tags. This function assumes the tags are already pre-sorted by | ||
369 | * presentation timestamp (PTS), in ascending order. Returns zero if | ||
370 | * there are less than two FLV tags to inspect. | ||
371 | * @param tags {array} the FlvTag objects to query | ||
372 | * @return the number of milliseconds between the display time of the | ||
373 | * first tag and the last tag. | ||
374 | */ | ||
375 | hls.FlvTag.durationFromTags = function(tags) { | ||
376 | if (tags.length < 2) { | ||
377 | return 0; | ||
378 | } | ||
379 | |||
380 | var | ||
381 | first = tags[0], | ||
382 | last = tags[tags.length - 1], | ||
383 | frameDuration; | ||
384 | |||
385 | // use the interval between the last two tags or assume 24 fps | ||
386 | frameDuration = last.pts - tags[tags.length - 2].pts || (1/24); | ||
387 | |||
388 | return (last.pts - first.pts) + frameDuration; | ||
389 | }; | ||
390 | |||
391 | })(this); | 366 | })(this); | ... | ... |
... | @@ -18,6 +18,7 @@ | ... | @@ -18,6 +18,7 @@ |
18 | resolveUrl = videojs.Hls.resolveUrl, | 18 | resolveUrl = videojs.Hls.resolveUrl, |
19 | xhr = videojs.Hls.xhr, | 19 | xhr = videojs.Hls.xhr, |
20 | Playlist = videojs.Hls.Playlist, | 20 | Playlist = videojs.Hls.Playlist, |
21 | mergeOptions = videojs.util.mergeOptions, | ||
21 | 22 | ||
22 | /** | 23 | /** |
23 | * Returns a new master playlist that is the result of merging an | 24 | * Returns a new master playlist that is the result of merging an |
... | @@ -33,7 +34,7 @@ | ... | @@ -33,7 +34,7 @@ |
33 | updateMaster = function(master, media) { | 34 | updateMaster = function(master, media) { |
34 | var | 35 | var |
35 | changed = false, | 36 | changed = false, |
36 | result = videojs.util.mergeOptions(master, {}), | 37 | result = mergeOptions(master, {}), |
37 | i, | 38 | i, |
38 | playlist; | 39 | playlist; |
39 | 40 | ||
... | @@ -50,14 +51,47 @@ | ... | @@ -50,14 +51,47 @@ |
50 | continue; | 51 | continue; |
51 | } | 52 | } |
52 | 53 | ||
53 | result.playlists[i] = videojs.util.mergeOptions(playlist, media); | 54 | result.playlists[i] = mergeOptions(playlist, media); |
54 | result.playlists[media.uri] = result.playlists[i]; | 55 | result.playlists[media.uri] = result.playlists[i]; |
56 | |||
57 | // if the update could overlap existing segment information, | ||
58 | // merge the two lists | ||
59 | if (playlist.segments) { | ||
60 | result.playlists[i].segments = updateSegments(playlist.segments, | ||
61 | media.segments, | ||
62 | media.mediaSequence - playlist.mediaSequence); | ||
63 | } | ||
55 | changed = true; | 64 | changed = true; |
56 | } | 65 | } |
57 | } | 66 | } |
58 | return changed ? result : null; | 67 | return changed ? result : null; |
59 | }, | 68 | }, |
60 | 69 | ||
70 | /** | ||
71 | * Returns a new array of segments that is the result of merging | ||
72 | * properties from an older list of segments onto an updated | ||
73 | * list. No properties on the updated playlist will be overridden. | ||
74 | * @param original {array} the outdated list of segments | ||
75 | * @param update {array} the updated list of segments | ||
76 | * @param offset {number} (optional) the index of the first update | ||
77 | * segment in the original segment list. For non-live playlists, | ||
78 | * this should always be zero and does not need to be | ||
79 | * specified. For live playlists, it should be the difference | ||
80 | * between the media sequence numbers in the original and updated | ||
81 | * playlists. | ||
82 | * @return a list of merged segment objects | ||
83 | */ | ||
84 | updateSegments = function(original, update, offset) { | ||
85 | var result = update.slice(), length, i; | ||
86 | offset = offset || 0; | ||
87 | length = Math.min(original.length, update.length + offset); | ||
88 | |||
89 | for (i = offset; i < length; i++) { | ||
90 | result[i - offset] = mergeOptions(original[i], result[i - offset]); | ||
91 | } | ||
92 | return result; | ||
93 | }, | ||
94 | |||
61 | PlaylistLoader = function(srcUrl, withCredentials) { | 95 | PlaylistLoader = function(srcUrl, withCredentials) { |
62 | var | 96 | var |
63 | loader = this, | 97 | loader = this, | ... | ... |
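The merge above is what keeps PTS metadata alive across live refreshes. updateSegments lines the outdated segment list up with the refreshed one using offset = media.mediaSequence - playlist.mediaSequence: if the old playlist began at media sequence 0 and the refresh begins at 1, old segment 1 is merged onto new segment 0, so any minVideoPts or maxAudioPts recorded before the refresh carries over (exercised by the new 'preserves segment metadata across playlist refreshes' test).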
... | @@ -21,7 +21,7 @@ | ... | @@ -21,7 +21,7 @@ |
21 | * index. | 21 | * index. |
22 | */ | 22 | */ |
23 | segmentsDuration = function(playlist, startSequence, endSequence) { | 23 | segmentsDuration = function(playlist, startSequence, endSequence) { |
24 | var targetDuration, i, segment, expiredSegmentCount, result = 0; | 24 | var targetDuration, i, j, segment, endSegment, expiredSegmentCount, result = 0; |
25 | 25 | ||
26 | startSequence = startSequence || 0; | 26 | startSequence = startSequence || 0; |
27 | i = startSequence; | 27 | i = startSequence; |
... | @@ -36,9 +36,27 @@ | ... | @@ -36,9 +36,27 @@ |
36 | // accumulate the segment durations into the result | 36 | // accumulate the segment durations into the result |
37 | for (; i < endSequence; i++) { | 37 | for (; i < endSequence; i++) { |
38 | segment = playlist.segments[i - playlist.mediaSequence]; | 38 | segment = playlist.segments[i - playlist.mediaSequence]; |
39 | result += segment.preciseDuration || | 39 | |
40 | segment.duration || | 40 | // when PTS values aren't available, use information from the playlist |
41 | if (segment.minVideoPts === undefined) { | ||
42 | result += segment.duration || | ||
41 | targetDuration; | 43 | targetDuration; |
44 | continue; | ||
45 | } | ||
46 | |||
47 | // find the last segment with PTS info and use that to calculate | ||
48 | // the interval duration | ||
49 | for(j = i; j < endSequence - 1; j++) { | ||
50 | endSegment = playlist.segments[j - playlist.mediaSequence + 1]; | ||
51 | if (endSegment.maxVideoPts === undefined || | ||
52 | endSegment.discontinuity) { | ||
53 | break; | ||
54 | } | ||
55 | } | ||
56 | endSegment = playlist.segments[j - playlist.mediaSequence]; | ||
57 | result += (Math.max(endSegment.maxVideoPts, endSegment.maxAudioPts) - | ||
58 | Math.min(segment.minVideoPts, segment.minAudioPts)) * 0.001; | ||
59 | i = j; | ||
42 | } | 60 | } |
43 | 61 | ||
44 | return result; | 62 | return result; | ... | ... |
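When only some segments carry PTS metadata, the logic above sums PTS-based spans for the runs that do and falls back to EXTINF (or target) durations for those that don't. In the mixed-interval test added later in this commit, that works out to (10001 - 1) * 0.001 + 10 + (40002 - 20007) * 0.001 = 10 + 10 + 19.995 = 39.995 seconds. A discontinuity ends the current PTS run, so timestamps are never compared across a timeline reset.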
... | @@ -883,13 +883,6 @@ videojs.Hls.prototype.drainBuffer = function(event) { | ... | @@ -883,13 +883,6 @@ videojs.Hls.prototype.drainBuffer = function(event) { |
883 | tags.push(this.segmentParser_.getNextTag()); | 883 | tags.push(this.segmentParser_.getNextTag()); |
884 | } | 884 | } |
885 | 885 | ||
886 | if (tags.length > 0) { | ||
887 | // Use the presentation timestamp of the ts segment to calculate its | ||
888 | // exact duration, since this may differ by fractions of a second | ||
889 | // from what is reported in the playlist | ||
890 | segment.preciseDuration = videojs.Hls.FlvTag.durationFromTags(tags) * 0.001; | ||
891 | } | ||
892 | |||
893 | this.updateDuration(this.playlists.media()); | 886 | this.updateDuration(this.playlists.media()); |
894 | 887 | ||
895 | // if we're refilling the buffer after a seek, scan through the muxed | 888 | // if we're refilling the buffer after a seek, scan through the muxed | ... | ... |
... | @@ -57,32 +57,4 @@ test('writeBytes grows the internal byte array dynamically', function() { | ... | @@ -57,32 +57,4 @@ test('writeBytes grows the internal byte array dynamically', function() { |
57 | } | 57 | } |
58 | }); | 58 | }); |
59 | 59 | ||
60 | test('calculates the duration of a tag array from PTS values', function() { | ||
61 | var tags = [], count = 20, i; | ||
62 | |||
63 | for (i = 0; i < count; i++) { | ||
64 | tags[i] = new FlvTag(FlvTag.VIDEO_TAG); | ||
65 | tags[i].pts = i * 1000; | ||
66 | } | ||
67 | |||
68 | equal(FlvTag.durationFromTags(tags), count * 1000, 'calculated duration from PTS values'); | ||
69 | }); | ||
70 | |||
71 | test('durationFromTags() assumes 24fps if the last frame duration cannot be calculated', function() { | ||
72 | var tags = [ | ||
73 | new FlvTag(FlvTag.VIDEO_TAG), | ||
74 | new FlvTag(FlvTag.VIDEO_TAG), | ||
75 | new FlvTag(FlvTag.VIDEO_TAG) | ||
76 | ]; | ||
77 | tags[0].pts = 0; | ||
78 | tags[1].pts = tags[2].pts = 1000; | ||
79 | |||
80 | equal(FlvTag.durationFromTags(tags), 1000 + (1/24) , 'assumes 24fps video'); | ||
81 | }); | ||
82 | |||
83 | test('durationFromTags() returns zero if there are less than two frames', function() { | ||
84 | equal(FlvTag.durationFromTags([]), 0, 'returns zero for empty input'); | ||
85 | equal(FlvTag.durationFromTags([new FlvTag(FlvTag.VIDEO_TAG)]), 0, 'returns zero for a singleton input'); | ||
86 | }); | ||
87 | |||
88 | })(this); | 60 | })(this); | ... | ... |
... | @@ -331,6 +331,35 @@ | ... | @@ -331,6 +331,35 @@ |
331 | 'requested the media playlist'); | 331 | 'requested the media playlist'); |
332 | }); | 332 | }); |
333 | 333 | ||
334 | test('preserves segment metadata across playlist refreshes', function() { | ||
335 | var loader = new videojs.Hls.PlaylistLoader('live.m3u8'), segment; | ||
336 | requests.pop().respond(200, null, | ||
337 | '#EXTM3U\n' + | ||
338 | '#EXT-X-MEDIA-SEQUENCE:0\n' + | ||
339 | '#EXTINF:10,\n' + | ||
340 | '0.ts\n' + | ||
341 | '#EXTINF:10,\n' + | ||
342 | '1.ts\n' + | ||
343 | '#EXTINF:10,\n' + | ||
344 | '2.ts\n'); | ||
345 | // add PTS info to 1.ts | ||
346 | segment = loader.media().segments[1]; | ||
347 | segment.minVideoPts = 14; | ||
348 | segment.maxAudioPts = 27; | ||
349 | segment.preciseDuration = 10.045; | ||
350 | |||
351 | clock.tick(10 * 1000); // trigger a refresh | ||
352 | requests.pop().respond(200, null, | ||
353 | '#EXTM3U\n' + | ||
354 | '#EXT-X-MEDIA-SEQUENCE:1\n' + | ||
355 | '#EXTINF:10,\n' + | ||
356 | '1.ts\n' + | ||
357 | '#EXTINF:10,\n' + | ||
358 | '2.ts\n'); | ||
359 | |||
360 | deepEqual(loader.media().segments[0], segment, 'preserved segment attributes'); | ||
361 | }); | ||
362 | |||
334 | test('clears the update timeout when switching quality', function() { | 363 | test('clears the update timeout when switching quality', function() { |
335 | var loader = new videojs.Hls.PlaylistLoader('live-master.m3u8'), refreshes = 0; | 364 | var loader = new videojs.Hls.PlaylistLoader('live-master.m3u8'), refreshes = 0; |
336 | // track the number of playlist refreshes triggered | 365 | // track the number of playlist refreshes triggered | ... | ... |
... | @@ -38,6 +38,98 @@ | ... | @@ -38,6 +38,98 @@ |
38 | equal(duration, 14 * 10, 'duration includes dropped segments'); | 38 | equal(duration, 14 * 10, 'duration includes dropped segments'); |
39 | }); | 39 | }); |
40 | 40 | ||
41 | test('interval duration uses PTS values when available', function() { | ||
42 | var duration = Playlist.duration({ | ||
43 | mediaSequence: 0, | ||
44 | endList: true, | ||
45 | segments: [{ | ||
46 | minVideoPts: 1, | ||
47 | minAudioPts: 2, | ||
48 | uri: '0.ts' | ||
49 | }, { | ||
50 | duration: 10, | ||
51 | maxVideoPts: 2 * 10 * 1000 + 1, | ||
52 | maxAudioPts: 2 * 10 * 1000 + 2, | ||
53 | uri: '1.ts' | ||
54 | }, { | ||
55 | duration: 10, | ||
56 | maxVideoPts: 3 * 10 * 1000 + 1, | ||
57 | maxAudioPts: 3 * 10 * 1000 + 2, | ||
58 | uri: '2.ts' | ||
59 | }, { | ||
60 | duration: 10, | ||
61 | maxVideoPts: 4 * 10 * 1000 + 1, | ||
62 | maxAudioPts: 4 * 10 * 1000 + 2, | ||
63 | uri: '3.ts' | ||
64 | }] | ||
65 | }, 0, 4); | ||
66 | |||
67 | equal(duration, ((4 * 10 * 1000 + 2) - 1) * 0.001, 'used PTS values'); | ||
68 | }); | ||
69 | |||
70 | test('interval duration works when partial PTS information is available', function() { | ||
71 | var firstInterval, secondInterval, duration = Playlist.duration({ | ||
72 | mediaSequence: 0, | ||
73 | endList: true, | ||
74 | segments: [{ | ||
75 | minVideoPts: 1, | ||
76 | minAudioPts: 2, | ||
77 | maxVideoPts: 1 * 10 * 1000 + 1, | ||
78 | |||
79 | // intentionally less duration than video | ||
80 | // the max stream duration should be used | ||
81 | maxAudioPts: 1 * 10 * 1000 + 1, | ||
82 | uri: '0.ts' | ||
83 | }, { | ||
84 | duration: 10, | ||
85 | uri: '1.ts' | ||
86 | }, { | ||
87 | duration: 10, | ||
88 | minVideoPts: 2 * 10 * 1000 + 7, | ||
89 | minAudioPts: 2 * 10 * 1000 + 10, | ||
90 | maxVideoPts: 3 * 10 * 1000 + 1, | ||
91 | maxAudioPts: 3 * 10 * 1000 + 2, | ||
92 | uri: '2.ts' | ||
93 | }, { | ||
94 | duration: 10, | ||
95 | maxVideoPts: 4 * 10 * 1000 + 1, | ||
96 | maxAudioPts: 4 * 10 * 1000 + 2, | ||
97 | uri: '3.ts' | ||
98 | }] | ||
99 | }, 0, 4); | ||
100 | |||
101 | firstInterval = (1 * 10 * 1000 + 1) - 1; | ||
102 | firstInterval *= 0.001; | ||
103 | secondInterval = (4 * 10 * 1000 + 2) - (2 * 10 * 1000 + 7); | ||
104 | secondInterval *= 0.001; | ||
105 | |||
106 | equal(duration, firstInterval + 10 + secondInterval, 'calculated with mixed intervals'); | ||
107 | }); | ||
108 | |||
109 | test('interval duration accounts for discontinuities', function() { | ||
110 | var duration = Playlist.duration({ | ||
111 | mediaSequence: 0, | ||
112 | endList: true, | ||
113 | segments: [{ | ||
114 | minVideoPts: 0, | ||
115 | minAudioPts: 0, | ||
116 | maxVideoPts: 1 * 10 * 1000, | ||
117 | maxAudioPts: 1 * 10 * 1000, | ||
118 | uri: '0.ts' | ||
119 | }, { | ||
120 | discontinuity: true, | ||
121 | minVideoPts: 2 * 10 * 1000, | ||
122 | minAudioPts: 2 * 10 * 1000, | ||
123 | maxVideoPts: 3 * 10 * 1000, | ||
124 | maxAudioPts: 3 * 10 * 1000, | ||
125 | duration: 10, | ||
126 | uri: '1.ts' | ||
127 | }] | ||
128 | }, 0, 2); | ||
129 | |||
130 | equal(duration, 10 + 10, 'handles discontinuities'); | ||
131 | }); | ||
132 | |||
41 | test('calculates seekable time ranges from the available segments', function() { | 133 | test('calculates seekable time ranges from the available segments', function() { |
42 | var playlist = { | 134 | var playlist = { |
43 | mediaSequence: 0, | 135 | mediaSequence: 0, | ... | ... |
... | @@ -1185,56 +1185,6 @@ test('flushes the parser after each segment', function() { | ... | @@ -1185,56 +1185,6 @@ test('flushes the parser after each segment', function() { |
1185 | strictEqual(flushes, 1, 'tags are flushed at the end of a segment'); | 1185 | strictEqual(flushes, 1, 'tags are flushed at the end of a segment'); |
1186 | }); | 1186 | }); |
1187 | 1187 | ||
1188 | test('calculates preciseDuration for a new segment', function() { | ||
1189 | var tags = [ | ||
1190 | { pts : 200 * 1000, bytes: new Uint8Array(1) }, | ||
1191 | { pts : 300 * 1000, bytes: new Uint8Array(1) } | ||
1192 | ]; | ||
1193 | videojs.Hls.SegmentParser = mockSegmentParser(tags); | ||
1194 | |||
1195 | player.src({ | ||
1196 | src: 'manifest/media.m3u8', | ||
1197 | type: 'application/vnd.apple.mpegurl' | ||
1198 | }); | ||
1199 | openMediaSource(player); | ||
1200 | |||
1201 | standardXHRResponse(requests[0]); | ||
1202 | strictEqual(player.duration(), 40, 'player duration is read from playlist on load'); | ||
1203 | standardXHRResponse(requests[1]); | ||
1204 | strictEqual(player.hls.playlists.media().segments[0].preciseDuration, 200, 'preciseDuration is calculated and stored'); | ||
1205 | strictEqual(player.duration(), 230, 'player duration is calculated using preciseDuration'); | ||
1206 | }); | ||
1207 | |||
1208 | test('calculates preciseDuration correctly around discontinuities', function() { | ||
1209 | var tags = []; | ||
1210 | videojs.Hls.SegmentParser = mockSegmentParser(tags); | ||
1211 | player.src({ | ||
1212 | src: 'manifest/media.m3u8', | ||
1213 | type: 'application/vnd.apple.mpegurl' | ||
1214 | }); | ||
1215 | openMediaSource(player); | ||
1216 | requests.shift().respond(200, null, | ||
1217 | '#EXTM3U\n' + | ||
1218 | '#EXTINF:10,\n' + | ||
1219 | '0.ts\n' + | ||
1220 | '#EXT-X-DISCONTINUITY\n' + | ||
1221 | '#EXTINF:10,\n' + | ||
1222 | '1.ts\n' + | ||
1223 | '#EXT-X-ENDLIST\n'); | ||
1224 | tags.push({ pts: 10 * 1000, bytes: new Uint8Array(1) }); | ||
1225 | standardXHRResponse(requests.shift()); // segment 0 | ||
1226 | player.hls.checkBuffer_(); | ||
1227 | |||
1228 | // the PTS value of the second segment is *earlier* than the first | ||
1229 | tags.push({ pts: 0 * 1000, bytes: new Uint8Array(1) }); | ||
1230 | tags.push({ pts: 5 * 1000, bytes: new Uint8Array(1) }); | ||
1231 | standardXHRResponse(requests.shift()); // segment 1 | ||
1232 | |||
1233 | equal(player.hls.playlists.media().segments[1].preciseDuration, | ||
1234 | 5 + 5, // duration includes the time to display the second tag | ||
1235 | 'duration is independent of previous segments'); | ||
1236 | }); | ||
1237 | |||
1238 | test('exposes in-band metadata events as cues', function() { | 1188 | test('exposes in-band metadata events as cues', function() { |
1239 | var track; | 1189 | var track; |
1240 | videojs.Hls.SegmentParser = mockSegmentParser(); | 1190 | videojs.Hls.SegmentParser = mockSegmentParser(); |
... | @@ -1444,7 +1394,8 @@ test('translates ID3 PTS values across discontinuities', function() { | ... | @@ -1444,7 +1394,8 @@ test('translates ID3 PTS values across discontinuities', function() { |
1444 | 1394 | ||
1445 | // segment 0 starts at PTS 14000 and has a cue point at 15000 | 1395 | // segment 0 starts at PTS 14000 and has a cue point at 15000 |
1446 | player.hls.segmentParser_.timestampOffset = 14 * 1000; | 1396 | player.hls.segmentParser_.timestampOffset = 14 * 1000; |
1447 | tags.push({ pts: 14 * 1000, bytes: new Uint8Array(1) }); | 1397 | tags.push({ pts: 14 * 1000, bytes: new Uint8Array(1) }, |
1398 | { pts: 24 * 1000, bytes: new Uint8Array(1) }); | ||
1448 | events.push({ | 1399 | events.push({ |
1449 | pts: 15 * 1000, | 1400 | pts: 15 * 1000, |
1450 | data: new Uint8Array([]), | 1401 | data: new Uint8Array([]), | ... | ... |