a63f0154 by David LaPalomento

Merge pull request #397 from dmlap/live-variant-syncing

Live variant syncing
2 parents 83b4302b 27758be3
/**
* playlist-loader
*
* A state machine that manages the loading, caching, and updating of
* M3U8 playlists. When tracking a live playlist, loaders will keep
* track of the duration of content that expired since the loader was
* initialized and when the current discontinuity sequence was
* encountered. A complete media timeline for a live playlist with
- * expiring segments and discontinuities looks like this:
+ * expiring segments looks like this:
*
- * |-- expiredPreDiscontinuity --|-- expiredPostDiscontinuity --|-- segments --|
+ * |-- expired --|-- segments --|
*
* You can use these values to calculate how much time has elapsed
* since the stream began loading or how long it has been since the
* most recent discontinuity was encountered, for instance.
*/
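// A minimal sketch (not part of this change) of the timeline model described
// above: a segment's position on the player timeline is the expired duration
// plus the durations of the segments that precede it in the current playlist.
// `loader.media()` and the per-segment `duration` property are assumed to
// behave as they do elsewhere in this codebase.
var segmentStartTime = function(loader, i) {
  var playlist = loader.media(), time = loader.expired_, j;
  for (j = 0; j < i; j++) {
    time += playlist.segments[j].duration;
  }
  return time;
};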
(function(window, videojs) {
'use strict';
@@ -159,20 +158,13 @@
// initialize the loader state
loader.state = 'HAVE_NOTHING';
- // the total duration of all segments that expired and have been
- // removed from the current playlist after the last
- // #EXT-X-DISCONTINUITY. In a live playlist without
- // discontinuities, this is the total amount of time that has
- // been removed from the stream since the playlist loader began
+ // The total duration of all segments that expired and have been
+ // removed from the current playlist, in seconds. This property
+ // should always be zero for non-live playlists. In a live
+ // playlist, this is the total amount of time that has been
+ // removed from the stream since the playlist loader began
// tracking it.
- loader.expiredPostDiscontinuity_ = 0;
- // the total duration of all segments that expired and have been
- // removed from the current playlist before the last
- // #EXT-X-DISCONTINUITY. The total amount of time that has
- // expired is always the sum of expiredPreDiscontinuity_ and
- // expiredPostDiscontinuity_.
- loader.expiredPreDiscontinuity_ = 0;
+ loader.expired_ = 0;
// capture the prototype dispose function
dispose = this.dispose;
@@ -364,35 +356,14 @@
* @param update {object} the updated media playlist object
*/
PlaylistLoader.prototype.updateMediaPlaylist_ = function(update) {
- var lastDiscontinuity, expiredCount, i;
+ var expiredCount;
if (this.media_) {
expiredCount = update.mediaSequence - this.media_.mediaSequence;
- // setup the index for duration calculations so that the newly
- // expired time will be accumulated after the last
- // discontinuity, unless we discover otherwise
- lastDiscontinuity = this.media_.mediaSequence;
- if (this.media_.discontinuitySequence !== update.discontinuitySequence) {
-   i = expiredCount;
-   while (i--) {
-     if (this.media_.segments[i].discontinuity) {
-       // a segment that begins a new discontinuity sequence has expired
-       lastDiscontinuity = i + this.media_.mediaSequence;
-       this.expiredPreDiscontinuity_ += this.expiredPostDiscontinuity_;
-       this.expiredPostDiscontinuity_ = 0;
-       break;
-     }
-   }
- }
- // update the expirated durations
- this.expiredPreDiscontinuity_ += Playlist.duration(this.media_,
+ // update the expired time count
+ this.expired_ += Playlist.duration(this.media_,
this.media_.mediaSequence,
- lastDiscontinuity);
- this.expiredPostDiscontinuity_ += Playlist.duration(this.media_,
- lastDiscontinuity,
update.mediaSequence);
}
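// Worked example with hypothetical numbers: if the previous playlist began at
// media sequence 10, its first two segments lasted 10 and 9 seconds, and the
// refreshed playlist starts at media sequence 12, then
// Playlist.duration(this.media_, 10, 12) === 19 and expired_ grows by 19
// seconds, the amount of content that just slid out of the live window.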
@@ -400,6 +371,28 @@
};
/**
* When switching variant playlists in a live stream, the player may
* discover that the new set of available segments is shifted in
* time relative to the old playlist. If that is the case, you can
* call this method to synchronize the playlist loader so that
* subsequent calls to getMediaIndexForTime_() return values
* appropriate for the new playlist.
*
* @param mediaIndex {integer} the index of the segment that will be
* used to base timeline calculations on
* @param startingTime {number} the media timeline position of the
* first moment of video data for the specified segment. That is,
* data from the specified segment will first be displayed when
* `currentTime` is equal to `startingTime`.
*/
PlaylistLoader.prototype.updateTimelineOffset = function(mediaIndex, startingTime) {
var segmentOffset = Playlist.duration(this.media_,
this.media_.mediaSequence,
this.media_.mediaSequence + mediaIndex);
this.expired_ = startingTime - segmentOffset;
};
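// A minimal usage sketch with hypothetical numbers: if the current playlist's
// first three segments last 4, 5, and 6 seconds and a variant switch reveals
// that the segment at index 2 actually begins at currentTime 30, then
//
//   loader.updateTimelineOffset(2, 30);
//
// sets expired_ to 30 - (4 + 5) = 21, so a later getMediaIndexForTime_(32)
// lands back in segment index 2.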
/**
* Determine the index of the segment that contains a specified
* playback position in the current media playlist. Early versions
* of the HLS specification require segment durations to be rounded
@@ -426,7 +419,7 @@
// when the requested position is earlier than the current set of
// segments, return the earliest segment index
- time -= this.expiredPreDiscontinuity_ + this.expiredPostDiscontinuity_;
+ time -= this.expired_;
if (time < 0) {
return 0;
}
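// For example, with expired_ === 150 a request for time 45 yields 45 - 150 < 0,
// so the loader clamps to the earliest available segment and returns index 0,
// matching the loader tests that call getMediaIndexForTime_(45) after setting
// expired_ to 150.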
@@ -46,6 +46,8 @@ videojs.Hls = videojs.extend(Component, {
this.tech_ = tech;
this.source_ = options.source;
this.mode_ = options.mode;
this.pendingSegment_ = null;
this.bytesReceived = 0;
// loadingState_ tracks how far along the buffering process we
@@ -311,6 +313,8 @@ videojs.Hls.prototype.setupSourceBuffer_ = function() {
return;
}
// if the codecs were explicitly specified, pass them along to the
// source buffer
mimeType = 'video/mp2t';
if (media.attributes && media.attributes.CODECS) {
mimeType += '; codecs="' + media.attributes.CODECS + '"';
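// For example, a rendition whose playlist advertises
// CODECS="avc1.4d400d, mp4a.40.2" (a hypothetical value) would produce the
// source buffer MIME type 'video/mp2t; codecs="avc1.4d400d, mp4a.40.2"'.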
@@ -320,10 +324,33 @@ videojs.Hls.prototype.setupSourceBuffer_ = function() {
// transition the sourcebuffer to the ended state if we've hit the end of
// the playlist
this.sourceBuffer.addEventListener('updateend', function() {
var segmentInfo = this.pendingSegment_, i, currentBuffered;
this.pendingSegment_ = null;
if (this.duration() !== Infinity &&
this.mediaIndex === this.playlists.media().segments.length) {
this.mediaSource.endOfStream();
}
// When switching renditions or seeking, we may misjudge the media
// index to request to continue playback. Check after each append
// that a gap hasn't appeared in the buffered region and adjust
// the media index to fill it if necessary
if (this.tech_.buffered().length === 2 &&
segmentInfo.playlist === this.playlists.media()) {
i = this.tech_.buffered().length;
while (i--) {
if (this.tech_.currentTime() < this.tech_.buffered().start(i)) {
// found the misidentified segment's buffered time range
// adjust the media index to fill the gap
currentBuffered = this.findCurrentBuffered_();
this.playlists.updateTimelineOffset(segmentInfo.mediaIndex, this.tech_.buffered().start(i));
this.mediaIndex = this.playlists.getMediaIndexForTime_(currentBuffered.end(0) + 1);
break;
}
}
}
}.bind(this));
};
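// A standalone sketch (not part of this change) of the gap check performed in
// the 'updateend' handler above, using made-up values: if any buffered range
// starts ahead of the playhead, the last append landed later than expected and
// playback would eventually stall at the hole.
var findGapAhead = function(buffered, currentTime) {
  var i = buffered.length;
  while (i--) {
    if (currentTime < buffered.start(i)) {
      // e.g. returns 20 when buffered is [[0, 10], [20, 30]] and currentTime is 8
      return buffered.start(i);
    }
  }
  return null;
};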
@@ -369,8 +396,10 @@ videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
return;
}
media = this.playlists.media();
- startTime = this.tech_.playlists.expiredPreDiscontinuity_ + this.tech_.playlists.expiredPostDiscontinuity_;
- startTime += videojs.Hls.Playlist.duration(media, media.mediaSequence, media.mediaSequence + this.tech_.mediaIndex);
+ startTime = this.tech_.playlists.expired_;
+ startTime += videojs.Hls.Playlist.duration(media,
+   media.mediaSequence,
+   media.mediaSequence + this.tech_.mediaIndex);
i = textTrack.cues.length;
while (i--) {
@@ -383,8 +412,7 @@ videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
videojs.Hls.prototype.addCuesForMetadata_ = function(segmentInfo) {
var i, cue, frame, metadata, minPts, segment, segmentOffset, textTrack, time;
- segmentOffset = this.playlists.expiredPreDiscontinuity_;
- segmentOffset += this.playlists.expiredPostDiscontinuity_;
+ segmentOffset = this.playlists.expired_;
segmentOffset += videojs.Hls.Playlist.duration(segmentInfo.playlist,
segmentInfo.playlist.mediaSequence,
segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex);
@@ -531,7 +559,7 @@ videojs.Hls.prototype.seekable = function() {
return currentSeekable;
}
- startOffset = this.playlists.expiredPostDiscontinuity_ - this.playlists.expiredPreDiscontinuity_;
+ startOffset = this.playlists.expired_;
return videojs.createTimeRanges(startOffset,
startOffset + (currentSeekable.end(0) - currentSeekable.start(0)));
};
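// Worked example with hypothetical numbers: if 120 seconds of a live stream
// have expired and the underlying seekable range spans [0, 30], the value
// returned here is [120, 150], i.e. the live window shifted onto the player
// timeline by the expired duration.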
@@ -1068,10 +1096,9 @@ videojs.Hls.prototype.drainBuffer = function(event) {
this.sourceBuffer.timestampOffset = currentBuffered.end(0);
}
// the segment is asynchronously added to the current buffered data
this.sourceBuffer.appendBuffer(bytes);
- // we're done processing this segment
- segmentBuffer.shift();
+ this.pendingSegment_ = segmentBuffer.shift();
};
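// Sketch of the intended hand-off, as assumed from the changes above:
// drainBuffer stashes the in-flight segment as pendingSegment_ when it calls
// appendBuffer, and the 'updateend' listener added in setupSourceBuffer_
// consumes it to decide whether the append opened a gap, then clears it.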
/**
@@ -59,12 +59,7 @@
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n');
- equal(loader.expiredPreDiscontinuity_,
-   0,
-   'zero seconds expired pre-discontinuity');
- equal(loader.expiredPostDiscontinuity_,
-   0,
-   'zero seconds expired post-discontinuity');
+ equal(loader.expired_, 0, 'zero seconds expired');
});
test('requests the initial playlist immediately', function() {
@@ -202,7 +197,7 @@
'3.ts\n' +
'#EXTINF:10,\n' +
'4.ts\n');
- equal(loader.expiredPostDiscontinuity_, 10, 'expired one segment');
+ equal(loader.expired_, 10, 'expired one segment');
});
test('increments expired seconds after a discontinuity', function() {
@@ -226,8 +221,7 @@
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:4,\n' +
'2.ts\n');
- equal(loader.expiredPreDiscontinuity_, 0, 'identifies pre-discontinuity time');
- equal(loader.expiredPostDiscontinuity_, 10, 'expired one segment');
+ equal(loader.expired_, 10, 'expired one segment');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
@@ -236,8 +230,7 @@
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:4,\n' +
'2.ts\n');
- equal(loader.expiredPreDiscontinuity_, 0, 'tracked time across the discontinuity');
- equal(loader.expiredPostDiscontinuity_, 13, 'no expirations after the discontinuity yet');
+ equal(loader.expired_, 13, 'no expirations after the discontinuity yet');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
@@ -246,8 +239,7 @@
'#EXT-X-DISCONTINUITY-SEQUENCE:1\n' +
'#EXTINF:10,\n' +
'3.ts\n');
- equal(loader.expiredPreDiscontinuity_, 13, 'did not increment pre-discontinuity');
- equal(loader.expiredPostDiscontinuity_, 4, 'expired post-discontinuity');
+ equal(loader.expired_, 13 + 4, 'tracked expired prior to the discontinuity');
});
test('tracks expired seconds properly when two discontinuities expire at once', function() {
@@ -272,8 +264,7 @@
'#EXT-X-DISCONTINUITY-SEQUENCE:2\n' +
'#EXTINF:7,\n' +
'3.ts\n');
- equal(loader.expiredPreDiscontinuity_, 4 + 5, 'tracked pre-discontinuity time');
- equal(loader.expiredPostDiscontinuity_, 6, 'tracked post-discontinuity time');
+ equal(loader.expired_, 4 + 5 + 6, 'tracked both expired discontinuities');
});
test('emits an error when an initial playlist request fails', function() {
@@ -782,8 +773,7 @@
'1001.ts\n' +
'#EXTINF:5,\n' +
'1002.ts\n');
- loader.expiredPreDiscontinuity_ = 50;
- loader.expiredPostDiscontinuity_ = 100;
+ loader.expired_ = 150;
equal(loader.getMediaIndexForTime_(0), 0, 'the lowest returned value is zero');
equal(loader.getMediaIndexForTime_(45), 0, 'expired content returns zero');
@@ -795,6 +785,30 @@
equal(loader.getMediaIndexForTime_(50 + 100 + 6), 1, 'calculates within the second segment');
});
test('updating the timeline offset adjusts results from getMediaIndexForTime_', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:23\n' +
'#EXTINF:4,\n' +
'23.ts\n' +
'#EXTINF:5,\n' +
'24.ts\n' +
'#EXTINF:6,\n' +
'25.ts\n' +
'#EXTINF:7,\n' +
'26.ts\n');
loader.updateTimelineOffset(0, 150);
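// anchoring segment index 0 at 150 leaves no earlier segments, so expired_
// becomes 150 and the four segments cover [150, 154), [154, 159), [159, 165)
// and [165, 172) on the player timeline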
equal(loader.getMediaIndexForTime_(150), 0, 'translated the first segment');
equal(loader.getMediaIndexForTime_(130), 0, 'clamps the index to zero');
equal(loader.getMediaIndexForTime_(155), 1, 'translated the second segment');
loader.updateTimelineOffset(2, 30);
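// anchoring segment index 2 at 30 sets expired_ to 30 - (4 + 5) = 21, so the
// segments now cover [21, 25), [25, 30), [30, 36) and [36, 43)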
equal(loader.getMediaIndexForTime_(30 - 5 - 1), 0, 'translated the first segment');
equal(loader.getMediaIndexForTime_(30 + 7), 3, 'translated the last segment');
equal(loader.getMediaIndexForTime_(30 - 3), 1, 'translated an earlier segment');
});
test('does not misintrepret playlists missing newlines at the end', function() {
var loader = new videojs.Hls.PlaylistLoader('media.m3u8');
requests.shift().respond(200, null,
@@ -23,7 +23,9 @@
<!-- Media Sources plugin -->
<script src="../../node_modules/videojs-contrib-media-sources/src/videojs-media-sources.js"></script>
<script>
videojs.MediaSource.webWorkerURI = '../../node_modules/videojs-contrib-media-sources/src/transmuxer_worker.js';
</script>
<!-- HLS plugin -->
<script src="../../src/videojs-hls.js"></script>
@@ -77,12 +79,11 @@
</video>
<section class="stats">
<h2>Player Stats</h2>
<div class="segment-timeline"></div>
<dl>
<dt>Current Time:</dt>
<dd class="current-time-stat">0</dd>
<dt>Buffered:</dt>
<dd><span class="buffered-start-stat">-</span> - <span class="buffered-end-stat">-</span></dd>
<dd class="buffered-stat">-</dd>
<dt>Seekable:</dt>
<dd><span class="seekable-start-stat">-</span> - <span class="seekable-end-stat">-</span></dd>
<dt>Video Bitrate:</dt>
@@ -90,10 +91,13 @@
<dt>Measured Bitrate:</dt>
<dd class="measured-bitrate-stat">0 kbps</dd>
</dl>
<h3>Bitrate Switching</h3>
<div class="switching-stats">
Once the player begins loading, you'll see information about the
operation of the adaptive quality switching here.
</div>
<h3>Timed Metadata</h3>
<div class="segment-timeline"></div>
</section>
<script src="stats.js"></script>
@@ -107,8 +111,7 @@
// ------------
var currentTimeStat = document.querySelector('.current-time-stat');
- var bufferedStartStat = document.querySelector('.buffered-start-stat');
- var bufferedEndStat = document.querySelector('.buffered-end-stat');
+ var bufferedStat = document.querySelector('.buffered-stat');
var seekableStartStat = document.querySelector('.seekable-start-stat');
var seekableEndStat = document.querySelector('.seekable-end-stat');
var videoBitrateState = document.querySelector('.video-bitrate-stat');
@@ -119,20 +122,17 @@
});
player.on('progress', function() {
- var oldStart, oldEnd;
+ var bufferedText = '', oldStart, oldEnd, i;
// buffered
var buffered = player.buffered();
if (buffered.length) {
- oldStart = bufferedStartStat.textContent;
- if (buffered.start(0).toFixed(1) !== oldStart) {
-   bufferedStartStat.textContent = buffered.start(0).toFixed(1);
- }
- oldEnd = bufferedEndStat.textContent;
- if (buffered.end(0).toFixed(1) !== oldEnd) {
-   bufferedEndStat.textContent = buffered.end(0).toFixed(1);
- }
+ bufferedText += buffered.start(0) + ' - ' + buffered.end(0);
}
+ for (i = 1; i < buffered.length; i++) {
+   bufferedText += ', ' + buffered.start(i) + ' - ' + buffered.end(i);
+ }
+ bufferedStat.textContent = bufferedText;
// seekable
var seekable = player.seekable();
@@ -149,14 +149,14 @@
}
// bitrates
- var playlist = player.tech.hls.playlists.media();
+ var playlist = player.tech_.hls.playlists.media();
if (playlist && playlist.attributes && playlist.attributes.BANDWIDTH) {
videoBitrateState.textContent = (playlist.attributes.BANDWIDTH / 1024).toLocaleString(undefined, {
maximumFractionDigits: 1
}) + ' kbps';
}
- if (player.tech.hls.bandwidth) {
-   measuredBitrateStat.textContent = (player.tech.hls.bandwidth / 1024).toLocaleString(undefined, {
+ if (player.tech_.hls.bandwidth) {
+   measuredBitrateStat.textContent = (player.tech_.hls.bandwidth / 1024).toLocaleString(undefined, {
maximumFractionDigits: 1
}) + ' kbps';
}
@@ -4,10 +4,15 @@
}
.axis line,
- .axis path,
- .intersect {
+ .axis path {
fill: none;
stroke: #111;
}
.bitrates {
fill: none;
- stroke: #000;
stroke: steelblue;
stroke-width: 3px;
}
.cue {
@@ -23,6 +28,6 @@
.intersect {
fill: none;
- stroke: #000;
+ stroke: #111;
stroke-dasharray: 2,2;
}
@@ -7,9 +7,35 @@
var d3 = window.d3;
- var setupGraph = function(element) {
-   element.innerHTML = '';
var bitrateTickFormatter = d3.format(',.0f');
var updateBitrateAxes = function(svg, xScale, yScale) {
var xAxis = d3.svg.axis().scale(xScale).orient('bottom');
svg.select('.axis.x')
.transition().duration(500)
.call(xAxis);
var yAxis = d3.svg.axis().scale(yScale)
.tickFormat(function(value) {
return bitrateTickFormatter(value / 1024);
}).orient('left');
svg.select('.axis.y')
.transition().duration(500)
.call(yAxis);
};
var updateBitrates = function(svg, x, y, measuredBitrateKbps) {
var bitrates, line;
bitrates = svg.selectAll('.bitrates').datum(measuredBitrateKbps);
line = d3.svg.line()
.x(function(bitrate) { return x(bitrate.time); })
.y(function(bitrate) { return y(bitrate.value); });
bitrates.transition().duration(500).attr('d', line);
};
var setupGraph = function(element, player) {
// setup the display
var margin = {
top: 20,
@@ -30,15 +56,14 @@
var x = d3.time.scale().range([0, width]); // d3.scale.linear().range([0, width]);
var y = d3.scale.linear().range([height, 0]);
- x.domain([new Date(), new Date(Date.now() + (5 * 60 * 1000))]);
+ x.domain([new Date(), new Date(Date.now() + (1 * 60 * 1000))]);
y.domain([0, 5 * 1024 * 1024 * 8]);
var timeAxis = d3.svg.axis().scale(x).orient('bottom');
- var tickFormatter = d3.format(',.0f');
var bitrateAxis = d3.svg.axis()
.scale(y)
.tickFormat(function(value) {
- return tickFormatter(value / 1024);
+ return bitrateTickFormatter(value / 1024);
})
.orient('left');
@@ -60,6 +85,26 @@
.style('text-anchor', 'end')
.text('Bitrate (kb/s)');
svg.append('path')
.attr('class', 'bitrates');
var measuredBitrateKbps = [{
time: new Date(),
value: player.tech_.hls.bandwidth || 0
}];
player.on('progress', function() {
measuredBitrateKbps.push({
time: new Date(),
value: player.tech_.hls.bandwidth || 0
});
x.domain([x.domain()[0], new Date()]);
y.domain([0, d3.max(measuredBitrateKbps, function(bitrate) {
return bitrate.value;
})]);
updateBitrateAxes(svg, x, y);
updateBitrates(svg, x, y, measuredBitrateKbps);
});
};
// ---------------
@@ -86,8 +131,8 @@
var mediaDomain = function(media, player) {
var segments = media.segments;
- var end = player.tech.hls.playlists.expiredPreDiscontinuity_;
- end += player.tech.hls.playlists.expiredPostDiscontinuity_;
+ var end = player.tech_.hls.playlists.expiredPreDiscontinuity_;
+ end += player.tech_.hls.playlists.expiredPostDiscontinuity_;
end += Playlist.duration(media,
media.mediaSequence,
media.mediaSequence + segments.length);
@@ -160,7 +205,7 @@
.call(ptsAxis);
};
var svgRenderSegmentTimeline = function(container, player) {
- var media = player.tech.hls.playlists.media();
+ var media = player.tech_.hls.playlists.media();
var segments = media.segments; // media.segments.slice(0, count);
// setup the display
@@ -196,7 +241,7 @@
// update everything on progress
player.on('progress', function() {
- var updatedMedia = player.tech.hls.playlists.media();
+ var updatedMedia = player.tech_.hls.playlists.media();
var segments = updatedMedia.segments; // updatedMedia.segments.slice(currentIndex, currentIndex + count);
if (updatedMedia.mediaSequence !== media.mediaSequence) {
@@ -220,7 +265,7 @@
};
var displayCues = function(container, player) {
- var media = player.tech.hls.playlists.media();
+ var media = player.tech_.hls.playlists.media();
if (media && media.segments) {
svgRenderSegmentTimeline(container, player);
} else {
......@@ -42,7 +42,25 @@ var
// patch over some methods of the provided tech so it can be tested
// synchronously with sinon's fake timers
mockTech = function(tech) {
if (tech.isMocked_) {
// make this function idempotent because HTML and Flash based
// playback have very different lifecycles. For HTML, the tech
// is available on player creation. For Flash, the tech isn't
// ready until the source has been loaded and one tick has
// expired.
return;
}
tech.isMocked_ = true;
tech.paused_ = !tech.autoplay();
tech.paused = function() {
return tech.paused_;
};
if (!tech.currentTime_) {
tech.currentTime_ = tech.currentTime;
}
tech.currentTime = function() {
return tech.time_ === undefined ? tech.currentTime_() : tech.time_;
};
......@@ -61,6 +79,19 @@ var
return tech.src_ === undefined ? tech.currentSrc_() : tech.src_;
};
tech.play_ = tech.play;
tech.play = function() {
tech.play_();
tech.paused_ = false;
tech.trigger('play');
};
tech.pause_ = tech.pause;
tech.pause = function() {
tech.pause_();
tech.paused_ = true;
tech.trigger('pause');
};
tech.setCurrentTime = function(time) {
tech.time_ = time;
......@@ -95,6 +126,7 @@ var
// ensure the Flash tech is ready
player.tech_.triggerReady();
clock.tick(1);
mockTech(player.tech_);
// simulate the sourceopen event
player.tech_.hls.mediaSource.readyState = 'open';
......@@ -197,9 +229,11 @@ var
constructor: function() {},
abort: function() {},
buffered: videojs.createTimeRange(),
- appendBuffer: function() {}
+ appendBuffer: function() {},
+ remove: function() {}
}))();
},
endOfStream: function() {}
}),
// do a shallow copy of the properties of source onto the target object
......@@ -882,6 +916,57 @@ test('moves to the next segment if there is a network error', function() {
strictEqual(mediaIndex + 1, player.tech_.hls.mediaIndex, 'media index is incremented');
});
test('updates playlist timeline offsets if it detects a desynchronization', function() {
var buffered = [], currentTime = 0;
player.src({
src: 'manifest/master.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift()); // master
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:2\n' +
'#EXTINF:10,\n' +
'2.ts\n' +
'#EXTINF:10,\n' +
'3.ts\n'); // media
player.tech_.buffered = function() { return videojs.createTimeRange(buffered); };
player.tech_.currentTime = function() { return currentTime; };
player.tech_.paused = function() { return false; };
player.tech_.trigger('play');
clock.tick(1);
standardXHRResponse(requests.shift()); // segment 0
equal(player.tech_.hls.mediaIndex, 1, 'incremented mediaIndex');
player.tech_.hls.sourceBuffer.trigger('updateend');
buffered.push([0, 10]);
// force a playlist switch
player.tech_.hls.playlists.media('media1.m3u8');
requests = requests.filter(function(request) {
return !request.aborted;
});
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:9999\n' +
'#EXTINF:10,\n' +
'3.ts\n' +
'#EXTINF:10,\n' +
'4.ts\n' +
'#EXTINF:10,\n' +
'5.ts\n'); // media1
player.tech_.hls.checkBuffer_();
standardXHRResponse(requests.shift());
buffered.push([20, 30]);
currentTime = 8;
player.tech_.hls.sourceBuffer.trigger('updateend');
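// the appended segment landed at 20-30 while currentTime is still 8, so the
// 'updateend' handler should re-sync the playlist loader and re-target the
// next request at the segment that covers the end of the current buffer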
equal(player.tech_.hls.mediaIndex, 0, 'prepared to request the missing segment');
});
test('updates the duration after switching playlists', function() {
var selectedPlaylist = false;
player.src({
......@@ -1172,33 +1257,14 @@ test('buffers based on the correct TimeRange if multiple ranges exist', function
return 8;
};
- player.tech_.buffered = function() {
-   return {
-     start: function(num) {
-       switch (num) {
-       case 0:
-         return 0;
-       case 1:
-         return 50;
-       }
-     },
-     end: function(num) {
-       switch (num) {
-       case 0:
-         return 10;
-       case 1:
-         return 160;
-       }
-     },
-     length: 2
-   };
- };
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
+ player.tech_.buffered = function() {
+   return videojs.createTimeRange([[0, 10], [50, 160]]);
+ };
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);