5b501b00 by David LaPalomento

Remove expired time tracking

Media sources implicitly track expired time by retaining the mapping between presentation timestamp values and the media timeline in the buffer. That allows us to simplify a good deal of code by not tracking it ourselves. Finish updating tests to work against the new timeline start and end annotations on segments instead of the old PTS values. Remove metadata cue translation because that is now handled by contrib-media-sources. Update key fetching in HLSe to occur concurrently with the segment download. All tests are now passing.
1 parent 2633a46d
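For orientation, a minimal sketch of what this means in practice (illustrative only, not part of the diff; the playlist values are hypothetical): a segment's position on the media timeline used to be reconstructed from the loader's expired_ counter plus accumulated segment durations, whereas the start/end annotations written by contrib-media-sources now let that position be read, or estimated, straight from the playlist.

    // hypothetical live playlist after three 10-second segments have expired;
    // contrib-media-sources has annotated timeline info on the first remaining segment
    var media = {
      mediaSequence: 3,
      targetDuration: 10,
      segments: [
        { start: 30, end: 40, duration: 10, uri: '3.ts' },
        { duration: 10, uri: '4.ts' }
      ]
    };

    // previously: offset = loader.expired_ +
    //             Playlist.duration(media, media.mediaSequence, media.mediaSequence + 1);
    // now the annotations carry the offset themselves:
    videojs.Hls.Playlist.duration(media, media.mediaSequence + 1); // 40, segment 3's annotated end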
......@@ -2,13 +2,7 @@
* playlist-loader
*
* A state machine that manages the loading, caching, and updating of
* M3U8 playlists. When tracking a live playlist, loaders will keep
* track of the duration of content that expired since the loader was
* initialized and when the current discontinuity sequence was
* encountered. A complete media timeline for a live playlist with
* expiring segments looks like this:
*
* |-- expired --|-- segments --|
* M3U8 playlists.
*
*/
(function(window, videojs) {
......@@ -16,7 +10,6 @@
var
resolveUrl = videojs.Hls.resolveUrl,
xhr = videojs.Hls.xhr,
Playlist = videojs.Hls.Playlist,
mergeOptions = videojs.mergeOptions,
/**
......@@ -158,14 +151,6 @@
// initialize the loader state
loader.state = 'HAVE_NOTHING';
// The total duration of all segments that expired and have been
// removed from the current playlist, in seconds. This property
// should always be zero for non-live playlists. In a live
// playlist, this is the total amount of time that has been
// removed from the stream since the playlist loader began
// tracking it.
loader.expired_ = 0;
// capture the prototype dispose function
dispose = this.dispose;
......@@ -360,43 +345,10 @@
* @param update {object} the updated media playlist object
*/
PlaylistLoader.prototype.updateMediaPlaylist_ = function(update) {
var expiredCount;
if (this.media_) {
expiredCount = update.mediaSequence - this.media_.mediaSequence;
// update the expired time count
this.expired_ += Playlist.duration(this.media_,
this.media_.mediaSequence,
update.mediaSequence);
}
this.media_ = this.master.playlists[update.uri];
};
/**
* When switching variant playlists in a live stream, the player may
* discover that the new set of available segments is shifted in
* time relative to the old playlist. If that is the case, you can
* call this method to synchronize the playlist loader so that
* subsequent calls to getMediaIndexForTime_() return values
* appropriate for the new playlist.
*
* @param mediaIndex {integer} the index of the segment that will be
* the used to base timeline calculations on
* @param startTime {number} the media timeline position of the
* first moment of video data for the specified segment. That is,
* data from the specified segment will first be displayed when
* `currentTime` is equal to `startTime`.
*/
PlaylistLoader.prototype.updateTimelineOffset = function(mediaIndex, startingTime) {
var segmentOffset = Playlist.duration(this.media_,
this.media_.mediaSequence,
this.media_.mediaSequence + mediaIndex);
this.expired_ = startingTime - segmentOffset;
};
/**
* Determine the index of the segment that contains a specified
* playback position in the current media playlist. Early versions
* of the HLS specification require segment durations to be rounded
......@@ -423,7 +375,6 @@
// when the requested position is earlier than the current set of
// segments, return the earliest segment index
time -= this.expired_;
if (time < 0) {
return 0;
}
......@@ -447,6 +398,11 @@
time -= segment.start;
time -= segment.duration || targetDuration;
if (time < 0) {
// the segment with start information is also our best guess
// for the moment
return i;
}
break;
}
}
......
......@@ -26,47 +26,46 @@
/**
* Calculate the media duration from the segments associated with a
* playlist. The duration of a subinterval of the available segments
* may be calculated by specifying a start and end index.
* may be calculated by specifying an end index.
*
* @param playlist {object} a media playlist object
* @param startSequence {number} (optional) an inclusive lower
* boundary for the playlist. Defaults to 0.
* @param endSequence {number} (optional) an exclusive upper boundary
* for the playlist. Defaults to playlist length.
* @return {number} the duration between the start index and end
* index.
*/
intervalDuration = function(playlist, startSequence, endSequence) {
var result = 0, targetDuration, i, start, end, expiredSegmentCount;
intervalDuration = function(playlist, endSequence) {
var result = 0, segment, targetDuration, i;
if (startSequence === undefined) {
startSequence = playlist.mediaSequence || 0;
}
if (endSequence === undefined) {
endSequence = startSequence + (playlist.segments || []).length;
endSequence = playlist.mediaSequence + (playlist.segments || []).length;
}
if (endSequence < 0) {
return 0;
}
targetDuration = playlist.targetDuration || DEFAULT_TARGET_DURATION;
// accumulate while looking for the latest known segment-timeline mapping
expiredSegmentCount = optionalMax(playlist.mediaSequence - startSequence, 0);
start = startSequence + expiredSegmentCount - playlist.mediaSequence;
end = endSequence - playlist.mediaSequence;
for (i = end - 1; i >= start; i--) {
if (playlist.segments[i].end !== undefined) {
result += playlist.segments[i].end;
return result;
i = endSequence - playlist.mediaSequence;
// if a start time is available for the segment immediately
// following the interval, use it
segment = playlist.segments[i];
if (segment && segment.start !== undefined) {
return segment.start;
}
// otherwise, walk backward until we find the latest segment with
// timeline information that is earlier than endSequence
while (i--) {
segment = playlist.segments[i];
if (segment.end !== undefined) {
return result + segment.end;
}
result += playlist.segments[i].duration || targetDuration;
result += (segment.duration || targetDuration);
if (playlist.segments[i].start !== undefined) {
result += playlist.segments[i].start;
return result;
if (segment.start !== undefined) {
return result + segment.start;
}
}
// neither a start or end time was found in the interval so we
// have to estimate the expired duration
result += expiredSegmentCount * targetDuration;
return result;
};
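An illustrative walk-through of the backward search above (not part of the diff); the values mirror the updated 'handles trailing segments without timeline information' test further down:

    var playlist = {
      mediaSequence: 0,
      endList: true,
      segments: [
        { start: 0, end: 10.5, uri: '0.ts' },
        { duration: 9,  uri: '1.ts' },           // no timeline info yet
        { duration: 10, uri: '2.ts' },           // no timeline info yet
        { start: 29.45, end: 39.5, uri: '3.ts' }
      ]
    };

    // the segment immediately following the interval has a start time, so it is used directly
    videojs.Hls.Playlist.duration(playlist, 3); // 29.45
    // otherwise, durations are accumulated back to the nearest annotated end
    videojs.Hls.Playlist.duration(playlist, 2); // 19.5 == 9 + 10.5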
......@@ -76,17 +75,16 @@
* timeline between those two indices. The total duration for live
* playlists is always Infinity.
* @param playlist {object} a media playlist object
* @param startSequence {number} (optional) an inclusive lower
* boundary for the playlist. Defaults to 0.
* @param endSequence {number} (optional) an exclusive upper boundary
* for the playlist. Defaults to playlist length.
* @param includeTrailingTime {boolean} (optional) if false, the interval between
* the final segment and the subsequent segment will not be included
* in the result
* @param endSequence {number} (optional) an exclusive upper
* boundary for the playlist. Defaults to the playlist media
* sequence number plus its length.
* @param includeTrailingTime {boolean} (optional) if false, the
* interval between the final segment and the subsequent segment
* will not be included in the result
* @return {number} the duration of the playlist up to the specified
* end sequence.
*/
duration = function(playlist, startSequence, endSequence, includeTrailingTime) {
duration = function(playlist, endSequence, includeTrailingTime) {
if (!playlist) {
return 0;
}
......@@ -97,7 +95,7 @@
// if a slice of the total duration is not requested, use
// playlist-level duration indicators when they're present
if (startSequence === undefined && endSequence === undefined) {
if (endSequence === undefined) {
// if present, use the duration specified in the playlist
if (playlist.totalDuration) {
return playlist.totalDuration;
......@@ -111,7 +109,6 @@
// calculate the total duration based on the segment durations
return intervalDuration(playlist,
startSequence,
endSequence,
includeTrailingTime);
};
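For completeness, a hedged sketch of the path where no endSequence is given (illustrative; the playlist values are hypothetical): playlist-level indicators still win, and, per the doc comment above, a live playlist reports Infinity.

    // a VOD playlist with an explicit total duration
    videojs.Hls.Playlist.duration({
      totalDuration: 300,
      endList: true,
      mediaSequence: 0,
      segments: [{ duration: 10, uri: '0.ts' }]
    }); // 300

    // a live playlist (no EXT-X-ENDLIST) is reported as Infinity
    videojs.Hls.Playlist.duration({
      mediaSequence: 0,
      segments: [{ duration: 10, uri: '0.ts' }]
    }); // Infinity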
......@@ -128,7 +125,7 @@
* for seeking
*/
seekable = function(playlist) {
var start, end, liveBuffer, targetDuration, segment, pending, i;
var start, end;
// without segments, there are no seekable ranges
if (!playlist.segments) {
......@@ -139,33 +136,14 @@
return videojs.createTimeRange(0, duration(playlist));
}
start = 0;
end = intervalDuration(playlist,
playlist.mediaSequence,
playlist.mediaSequence + playlist.segments.length);
targetDuration = playlist.targetDuration || DEFAULT_TARGET_DURATION;
// live playlists should not expose three segment durations worth
// of content from the end of the playlist
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-16#section-6.3.3
if (!playlist.endList) {
liveBuffer = targetDuration * 3;
// walk backward from the last available segment and track how
// much media time has elapsed until three target durations have
// been traversed. if a segment is part of the interval being
// reported, subtract the overlapping portion of its duration
// from the result.
for (i = playlist.segments.length - 1; i >= 0 && liveBuffer > 0; i--) {
segment = playlist.segments[i];
pending = optionalMin(duration(playlist,
playlist.mediaSequence + i,
playlist.mediaSequence + i + 1),
liveBuffer);
liveBuffer -= pending;
end -= pending;
}
}
start = intervalDuration(playlist, playlist.mediaSequence);
end = intervalDuration(playlist,
playlist.mediaSequence + playlist.segments.length);
end -= (playlist.targetDuration || DEFAULT_TARGET_DURATION) * 3;
end = Math.max(0, end);
return videojs.createTimeRange(start, end);
};
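A sketch of the simplified live seekable computation above (illustrative; the playlist values are hypothetical): the start of the window comes from the earliest available segment's annotated start, and three target durations are held back from the estimated end.

    var live = {
      mediaSequence: 3,
      targetDuration: 10,
      segments: [
        { start: 30, end: 40, duration: 10, uri: '3.ts' },
        { duration: 10, uri: '4.ts' },
        { duration: 10, uri: '5.ts' },
        { duration: 10, uri: '6.ts' },
        { duration: 10, uri: '7.ts' }
      ]
    };

    // start = 30 (segment 3's annotated start)
    // end   = 80 (estimated end of segment 7) - 3 * 10 = 50
    videojs.Hls.Playlist.seekable(live); // a TimeRanges equivalent to [30, 50]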
......
......@@ -15,7 +15,6 @@ var
// the amount of time to wait between checking the state of the buffer
bufferCheckInterval = 500,
keyXhr,
keyFailed,
resolveUrl;
......@@ -46,6 +45,8 @@ videojs.Hls = videojs.extend(Component, {
this.tech_ = tech;
this.source_ = options.source;
this.mode_ = options.mode;
// the segment info object for a segment that is in the process of
// being downloaded or processed
this.pendingSegment_ = null;
this.bytesReceived = 0;
......@@ -61,9 +62,6 @@ videojs.Hls = videojs.extend(Component, {
this.loadingState_ = 'meta';
}
// a queue of segments that need to be transmuxed and processed,
// and then fed to the source buffer
this.segmentBuffer_ = [];
// periodically check if new data needs to be downloaded or
// buffered data should be appended to the source buffer
this.startCheckingBuffer_();
......@@ -140,11 +138,6 @@ videojs.Hls.prototype.src = function(src) {
}
this.mediaSource = new videojs.MediaSource({ mode: this.mode_ });
this.segmentBuffer_ = [];
// if the stream contains ID3 metadata, expose that as a metadata
// text track
//this.setupMetadataCueTranslation_();
// load the MediaSource into the player
this.mediaSource.addEventListener('sourceopen', this.handleSourceOpen.bind(this));
......@@ -197,17 +190,13 @@ videojs.Hls.prototype.src = function(src) {
this.updateDuration(this.playlists.media());
oldMediaPlaylist = updatedPlaylist;
this.fetchKeys_();
}.bind(this));
this.playlists.on('mediachange', function() {
// abort outstanding key requests and check if new keys need to be retrieved
if (keyXhr) {
this.cancelKeyXhr();
}
this.tech_.trigger({ type: 'mediachange', bubbles: true });
this.tech_.trigger({
type: 'mediachange',
bubbles: true
});
}.bind(this));
// do nothing if the tech has been disposed already
......@@ -219,26 +208,6 @@ videojs.Hls.prototype.src = function(src) {
this.tech_.src(videojs.URL.createObjectURL(this.mediaSource));
};
/* Returns the media index for the live point in the current playlist, and updates
the current time to go along with it.
*/
videojs.Hls.getMediaIndexForLive_ = function(selectedPlaylist) {
if (!selectedPlaylist.segments) {
return 0;
}
var tailIterator = selectedPlaylist.segments.length,
tailDuration = 0,
targetTail = (selectedPlaylist.targetDuration || 10) * 3;
while (tailDuration < targetTail && tailIterator > 0) {
tailDuration += selectedPlaylist.segments[tailIterator - 1].duration;
tailIterator--;
}
return tailIterator;
};
videojs.Hls.prototype.handleSourceOpen = function() {
this.setupSourceBuffer_();
......@@ -324,7 +293,7 @@ videojs.Hls.prototype.setupSourceBuffer_ = function() {
// transition the sourcebuffer to the ended state if we've hit the end of
// the playlist
this.sourceBuffer.addEventListener('updateend', function() {
var segmentInfo = this.pendingSegment_, segment, i, currentBuffered, timelineUpdates;
var segmentInfo = this.pendingSegment_, segment, currentBuffered, timelineUpdates;
this.pendingSegment_ = null;
......@@ -334,24 +303,7 @@ videojs.Hls.prototype.setupSourceBuffer_ = function() {
this.mediaSource.endOfStream();
}
// When switching renditions or seeking, we may misjudge the media
// index to request to continue playback. Check after each append
// that a gap hasn't appeared in the buffered region and adjust
// the media index to fill it if necessary
if (this.tech_.buffered().length === 2 &&
segmentInfo.playlist === this.playlists.media()) {
i = this.tech_.buffered().length;
while (i--) {
if (this.tech_.currentTime() < this.tech_.buffered().start(i)) {
// found the misidentified segment's buffered time range
// adjust the media index to fill the gap
this.playlists.updateTimelineOffset(segmentInfo.mediaIndex,
this.tech_.buffered().start(i));
break;
}
}
}
// stop here if the update errored or was aborted
if (!segmentInfo) {
return;
}
......@@ -370,90 +322,13 @@ videojs.Hls.prototype.setupSourceBuffer_ = function() {
}
});
}.bind(this));
};
// register event listeners to transform in-band metadata events into
// VTTCues on a text track
videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
var
metadataStream = this.segmentParser_.metadataStream,
textTrack;
// add a metadata cue whenever a metadata event is triggered during
// segment parsing
metadataStream.on('data', function(metadata) {
var i, hexDigit;
// create the metadata track if this is the first ID3 tag we've
// seen
if (!textTrack) {
textTrack = this.tech_.addTextTrack('metadata', 'Timed Metadata');
// build the dispatch type from the stream descriptor
// https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
textTrack.inBandMetadataTrackDispatchType = videojs.Hls.SegmentParser.STREAM_TYPES.metadata.toString(16).toUpperCase();
for (i = 0; i < metadataStream.descriptor.length; i++) {
hexDigit = ('00' + metadataStream.descriptor[i].toString(16).toUpperCase()).slice(-2);
textTrack.inBandMetadataTrackDispatchType += hexDigit;
}
if (timelineUpdates.length) {
this.updateDuration(segmentInfo.playlist);
}
// store this event for processing once the muxing has finished
this.tech_.segmentBuffer_[0].pendingMetadata.push({
textTrack: textTrack,
metadata: metadata
});
// check if it's time to download the next segment
this.checkBuffer_();
}.bind(this));
// when seeking, clear out all cues ahead of the earliest position
// in the new segment. keep earlier cues around so they can still be
// programmatically inspected even though they've already fired
this.on(this.tech_, 'seeking', function() {
var media, startTime, i;
if (!textTrack) {
return;
}
media = this.playlists.media();
startTime = this.tech_.playlists.expired_;
startTime += videojs.Hls.Playlist.duration(media,
media.mediaSequence,
media.mediaSequence + this.tech_.mediaIndex);
i = textTrack.cues.length;
while (i--) {
if (textTrack.cues[i].startTime >= startTime) {
textTrack.removeCue(textTrack.cues[i]);
}
}
});
};
videojs.Hls.prototype.addCuesForMetadata_ = function(segmentInfo) {
var i, cue, frame, metadata, minPts, segment, segmentOffset, textTrack, time;
segmentOffset = this.playlists.expired_;
segmentOffset += videojs.Hls.Playlist.duration(segmentInfo.playlist,
segmentInfo.playlist.mediaSequence,
segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex);
segment = segmentInfo.playlist.segments[segmentInfo.mediaIndex];
minPts = Math.min(isFinite(segment.minVideoPts) ? segment.minVideoPts : Infinity,
isFinite(segment.minAudioPts) ? segment.minAudioPts : Infinity);
while (segmentInfo.pendingMetadata.length) {
metadata = segmentInfo.pendingMetadata[0].metadata;
textTrack = segmentInfo.pendingMetadata[0].textTrack;
// create cue points for all the ID3 frames in this metadata event
for (i = 0; i < metadata.frames.length; i++) {
frame = metadata.frames[i];
time = segmentOffset + ((metadata.pts - minPts) * 0.001);
cue = new window.VTTCue(time, time, frame.value || frame.url || '');
cue.frame = frame;
cue.pts_ = metadata.pts;
textTrack.addCue(cue);
}
segmentInfo.pendingMetadata.shift();
}
};
/**
......@@ -535,13 +410,13 @@ videojs.Hls.prototype.setCurrentTime = function(currentTime) {
this.cancelSegmentXhr();
// abort outstanding key requests, if necessary
if (keyXhr) {
keyXhr.aborted = true;
if (this.keyXhr_) {
this.keyXhr_.aborted = true;
this.cancelKeyXhr();
}
// clear out any buffered segments
this.segmentBuffer_ = [];
// clear out the segment being processed
this.pendingSegment_ = null;
// begin filling the buffer at the new position
this.fillBuffer(currentTime);
......@@ -556,7 +431,7 @@ videojs.Hls.prototype.duration = function() {
};
videojs.Hls.prototype.seekable = function() {
var currentSeekable, startOffset, media;
var media;
if (!this.playlists) {
return videojs.createTimeRanges();
......@@ -566,17 +441,7 @@ videojs.Hls.prototype.seekable = function() {
return videojs.createTimeRanges();
}
// report the seekable range relative to the earliest possible
// position when the stream was first loaded
currentSeekable = videojs.Hls.Playlist.seekable(media);
if (!currentSeekable.length) {
return currentSeekable;
}
startOffset = this.playlists.expired_;
return videojs.createTimeRanges(startOffset,
startOffset + (currentSeekable.end(0) - currentSeekable.start(0)));
return videojs.Hls.Playlist.seekable(media);
};
/**
......@@ -617,10 +482,10 @@ videojs.Hls.prototype.resetSrc_ = function() {
};
videojs.Hls.prototype.cancelKeyXhr = function() {
if (keyXhr) {
keyXhr.onreadystatechange = null;
keyXhr.abort();
keyXhr = null;
if (this.keyXhr_) {
this.keyXhr_.onreadystatechange = null;
this.keyXhr_.abort();
this.keyXhr_ = null;
}
};
......@@ -829,7 +694,7 @@ videojs.Hls.prototype.fillBuffer = function(seekToTime) {
bufferedTime = 0,
mediaIndex = 0,
segment,
segmentUri;
segmentInfo;
// if preload is set to "none", do not download segments until playback is requested
if (this.loadingState_ !== 'segments') {
......@@ -847,7 +712,7 @@ videojs.Hls.prototype.fillBuffer = function(seekToTime) {
}
// wait until the buffer is up to date
if (this.segmentBuffer_.length || this.pendingSegment_) {
if (this.pendingSegment_) {
return;
}
......@@ -886,10 +751,29 @@ videojs.Hls.prototype.fillBuffer = function(seekToTime) {
return;
}
// resolve the segment URL relative to the playlist
segmentUri = this.playlistUriToUrl(segment.uri);
// package up all the work to append the segment
segmentInfo = {
// resolve the segment URL relative to the playlist
uri: this.playlistUriToUrl(segment.uri),
// the segment's mediaIndex at the time it was received
mediaIndex: mediaIndex,
// the segment's playlist
playlist: this.playlists.media(),
// optionally, a time offset to seek to within the segment
offset: seekToTime,
// unencrypted bytes of the segment
bytes: null,
// when a key is defined for this segment, the encrypted bytes
encryptedBytes: null,
// optionally, the decrypter that is unencrypting the segment
decrypter: null,
// the state of the buffer before a segment is appended will be
// stored here so that the actual segment duration can be
// determined after it has been appended
buffered: null
};
this.loadSegment(segmentUri, mediaIndex, seekToTime);
this.loadSegment(segmentInfo);
};
videojs.Hls.prototype.playlistUriToUrl = function(segmentRelativeUrl) {
......@@ -920,17 +804,22 @@ videojs.Hls.prototype.setBandwidth = function(xhr) {
this.tech_.trigger('bandwidthupdate');
};
videojs.Hls.prototype.loadSegment = function(segmentUri, mediaIndex, seekToTime) {
var self = this;
videojs.Hls.prototype.loadSegment = function(segmentInfo) {
var
self = this,
segment = segmentInfo.playlist.segments[segmentInfo.mediaIndex];
// if the segment is encrypted, request the key
if (segment.key) {
this.fetchKey_(segment);
}
// request the next segment
this.segmentXhr_ = videojs.Hls.xhr({
uri: segmentUri,
uri: segmentInfo.uri,
responseType: 'arraybuffer',
withCredentials: this.source_.withCredentials
}, function(error, request) {
var segmentInfo;
// the segment request is no longer outstanding
self.segmentXhr_ = null;
......@@ -944,7 +833,7 @@ videojs.Hls.prototype.loadSegment = function(segmentUri, mediaIndex, seekToTime)
if (!request.aborted && error) {
self.error = {
status: request.status,
message: 'HLS segment request error at URL: ' + segmentUri,
message: 'HLS segment request error at URL: ' + segmentInfo.uri,
code: (request.status >= 500) ? 4 : 2
};
......@@ -958,34 +847,12 @@ videojs.Hls.prototype.loadSegment = function(segmentUri, mediaIndex, seekToTime)
self.setBandwidth(request);
// package up all the work to append the segment
segmentInfo = {
// the segment's mediaIndex at the time it was received
mediaIndex: mediaIndex,
// the segment's playlist
playlist: self.playlists.media(),
// optionally, a time offset to seek to within the segment
offset: seekToTime,
// unencrypted bytes of the segment
bytes: null,
// when a key is defined for this segment, the encrypted bytes
encryptedBytes: null,
// optionally, the decrypter that is unencrypting the segment
decrypter: null,
// metadata events discovered during muxing that need to be
// translated into cue points
pendingMetadata: [],
// the state of the buffer before a segment is appended will be
// stored here so that the actual segment duration can be
// determined after it has been appended
buffered: null
};
if (segmentInfo.playlist.segments[mediaIndex].key) {
if (segment.key) {
segmentInfo.encryptedBytes = new Uint8Array(request.response);
} else {
segmentInfo.bytes = new Uint8Array(request.response);
}
self.segmentBuffer_.push(segmentInfo);
self.pendingSegment_ = segmentInfo;
self.tech_.trigger('progress');
self.drainBuffer();
......@@ -1008,13 +875,11 @@ videojs.Hls.prototype.drainBuffer = function(event) {
segmentTimestampOffset = 0,
hasBufferedContent = (this.tech_.buffered().length !== 0),
currentBuffered = this.findCurrentBuffered_(),
outsideBufferedRanges = !(currentBuffered && currentBuffered.length),
// ptsTime,
segmentBuffer = this.segmentBuffer_;
outsideBufferedRanges = !(currentBuffered && currentBuffered.length);
// if the buffer is empty or the source buffer hasn't been created
// yet, do nothing
if (!segmentBuffer.length || !this.sourceBuffer) {
if (!this.pendingSegment_ || !this.sourceBuffer) {
return;
}
......@@ -1024,7 +889,7 @@ videojs.Hls.prototype.drainBuffer = function(event) {
return;
}
segmentInfo = segmentBuffer[0];
segmentInfo = this.pendingSegment_;
mediaIndex = segmentInfo.mediaIndex;
playlist = segmentInfo.playlist;
offset = segmentInfo.offset;
......@@ -1037,18 +902,19 @@ videojs.Hls.prototype.drainBuffer = function(event) {
// if the key download failed, end the stream with a network error
// but if the key hasn't downloaded yet, we want to try again later
if (keyFailed(segment.key)) {
return segmentBuffer.shift();
videojs.log.warn('Network error retrieving key from "' +
segment.key.uri + '"');
return this.mediaSource.endOfStream('network');
} else if (!segment.key.bytes) {
// trigger a key request if one is not already in-flight
return this.fetchKeys_();
// waiting for the key bytes, try again later
return;
} else if (segmentInfo.decrypter) {
// decryption is in progress, try again later
return;
} else {
// if the media sequence is greater than 2^32, the IV will be incorrect
// assuming 10s segments, that would be about 1300 years
segIv = segment.key.iv || new Uint32Array([0, 0, 0, mediaIndex + playlist.mediaSequence]);
......@@ -1067,32 +933,6 @@ videojs.Hls.prototype.drainBuffer = function(event) {
event = event || {};
// if (this.segmentParser_.tagsAvailable()) {
// // record PTS information for the segment so we can calculate
// // accurate durations and seek reliably
// if (this.segmentParser_.stats.h264Tags()) {
// segment.minVideoPts = this.segmentParser_.stats.minVideoPts();
// segment.maxVideoPts = this.segmentParser_.stats.maxVideoPts();
// }
// if (this.segmentParser_.stats.aacTags()) {
// segment.minAudioPts = this.segmentParser_.stats.minAudioPts();
// segment.maxAudioPts = this.segmentParser_.stats.maxAudioPts();
// }
// }
// while (this.segmentParser_.tagsAvailable()) {
// tags.push(this.segmentParser_.getNextTag());
// }
this.addCuesForMetadata_(segmentInfo);
//this.updateDuration(this.playlists.media());
// // when we're crossing a discontinuity, inject metadata to indicate
// // that the decoder should be reset appropriately
// if (segment.discontinuity && tags.length) {
// this.tech_.el().vjs_discontinuity();
// }
// If we have seeked into a non-buffered time-range, remove all buffered
// time-ranges because they could have been incorrectly placed originally
if (this.tech_.seeking() && outsideBufferedRanges) {
......@@ -1108,7 +948,7 @@ videojs.Hls.prototype.drainBuffer = function(event) {
// anew on every seek
if (segmentInfo.playlist.discontinuityStarts.length) {
if (segmentInfo.mediaIndex > 0) {
segmentTimestampOffset = videojs.Hls.Playlist.duration(segmentInfo.playlist, 0, segmentInfo.mediaIndex);
segmentTimestampOffset = videojs.Hls.Playlist.duration(segmentInfo.playlist, segmentInfo.mediaIndex);
}
// Now that the forward buffer is clear, we have to set timestamp offset to
......@@ -1128,7 +968,6 @@ videojs.Hls.prototype.drainBuffer = function(event) {
} else {
this.sourceBuffer.appendWindowStart = 0;
}
this.pendingSegment_ = segmentBuffer.shift();
this.pendingSegment_.buffered = this.tech_.buffered();
// the segment is asynchronously added to the current buffered data
......@@ -1136,38 +975,33 @@ videojs.Hls.prototype.drainBuffer = function(event) {
};
/**
* Attempt to retrieve keys starting at a particular media
* segment. This method has no effect if segments are not yet
* available or a key request is already in progress.
*
* @param playlist {object} the media playlist to fetch keys for
* @param index {number} the media segment index to start from
* Attempt to retrieve the key for a particular media segment.
* @param segment {object} the media segment whose key should be fetched
*/
videojs.Hls.prototype.fetchKeys_ = function() {
var i, key, tech, player, settings, segment, view, receiveKey;
videojs.Hls.prototype.fetchKey_ = function(segment) {
var key, self, settings, receiveKey;
// if there is already a key request in flight, don't do anything
if (keyXhr || !this.segmentBuffer_.length) {
if (this.keyXhr_) {
return;
}
tech = this;
player = this.player();
self = this;
settings = this.options_;
/**
* Handle a key XHR response. This function needs to lookup the
* Handle a key XHR response.
*/
receiveKey = function(key) {
return function(error, request) {
keyXhr = null;
var view;
self.keyXhr_ = null;
if (error || !request.response || request.response.byteLength !== 16) {
key.retries = key.retries || 0;
key.retries++;
if (!request.aborted) {
// try fetching again
tech.fetchKeys_();
self.fetchKey_(segment);
}
return;
}
......@@ -1181,28 +1015,25 @@ videojs.Hls.prototype.fetchKeys_ = function() {
]);
// check to see if this allows us to make progress buffering now
tech.checkBuffer_();
self.checkBuffer_();
};
};
for (i = 0; i < tech.segmentBuffer_.length; i++) {
segment = tech.segmentBuffer_[i].playlist.segments[tech.segmentBuffer_[i].mediaIndex];
key = segment.key;
key = segment.key;
// continue looking if this segment is unencrypted
if (!key) {
continue;
}
// nothing to do if this segment is unencrypted
if (!key) {
return;
}
// request the key if the retry limit hasn't been reached
if (!key.bytes && !keyFailed(key)) {
keyXhr = videojs.Hls.xhr({
uri: this.playlistUriToUrl(key.uri),
responseType: 'arraybuffer',
withCredentials: settings.withCredentials
}, receiveKey(key));
break;
}
// request the key if the retry limit hasn't been reached
if (!key.bytes && !keyFailed(key)) {
this.keyXhr_ = videojs.Hls.xhr({
uri: this.playlistUriToUrl(key.uri),
responseType: 'arraybuffer',
withCredentials: settings.withCredentials
}, receiveKey(key));
return;
}
};
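To make the new concurrency explicit, an illustrative sketch (not part of the diff; the URIs are hypothetical): loadSegment() kicks off fetchKey_() and the segment request back to back, and drainBuffer() simply returns until both the key bytes and the segment bytes have arrived.

    // both requests are in flight at the same time
    var keyResponse, segmentResponse;

    videojs.Hls.xhr({
      uri: 'https://example.com/keys/key.php',   // hypothetical key URI
      responseType: 'arraybuffer'
    }, function(error, request) {
      // the real handler converts the 16-byte response into a Uint32Array
      keyResponse = error ? null : request.response;
    });

    videojs.Hls.xhr({
      uri: 'https://example.com/segments/0.ts',  // hypothetical segment URI
      responseType: 'arraybuffer'
    }, function(error, request) {
      segmentResponse = error ? null : request.response;
    });

    // a later checkBuffer_()/drainBuffer() pass only begins decryption once
    // both responses are available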
......@@ -1234,44 +1065,6 @@ videojs.Hls.isSupported = function() {
};
/**
* Calculate the duration of a playlist from a given start index to a given
* end index.
* @param playlist {object} a media playlist object
* @param startIndex {number} an inclusive lower boundary for the playlist.
* Defaults to 0.
* @param endIndex {number} an exclusive upper boundary for the playlist.
* Defaults to playlist length.
* @return {number} the duration between the start index and end index.
*/
videojs.Hls.getPlaylistDuration = function(playlist, startIndex, endIndex) {
videojs.log.warn('videojs.Hls.getPlaylistDuration is deprecated. ' +
'Use videojs.Hls.Playlist.duration instead');
return videojs.Hls.Playlist.duration(playlist, startIndex, endIndex);
};
/**
* Calculate the total duration for a playlist based on segment metadata.
* @param playlist {object} a media playlist object
* @return {number} the currently known duration, in seconds
*/
videojs.Hls.getPlaylistTotalDuration = function(playlist) {
videojs.log.warn('videojs.Hls.getPlaylistTotalDuration is deprecated. ' +
'Use videojs.Hls.Playlist.duration instead');
return videojs.Hls.Playlist.duration(playlist);
};
/**
* Deprecated.
*
* @deprecated use player.hls.playlists.getMediaIndexForTime_() instead
*/
videojs.Hls.getMediaIndexByTime = function() {
videojs.log.warn('getMediaIndexByTime is deprecated. ' +
'Use PlaylistLoader.getMediaIndexForTime_ instead.');
return 0;
};
/**
* A comparator function to sort two playlist object by bandwidth.
* @param left {object} a media playlist object
* @param right {object} a media playlist object
......
......@@ -53,15 +53,6 @@
strictEqual(loader.state, 'HAVE_NOTHING', 'no metadata has loaded yet');
});
test('starts with no expired time', function() {
var loader = new videojs.Hls.PlaylistLoader('media.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n');
equal(loader.expired_, 0, 'zero seconds expired');
});
test('requests the initial playlist immediately', function() {
new videojs.Hls.PlaylistLoader('master.m3u8');
strictEqual(requests.length, 1, 'made a request');
......@@ -175,101 +166,6 @@
strictEqual(loader.state, 'HAVE_METADATA', 'the state is correct');
});
test('increments expired seconds after a segment is removed', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXTINF:10,\n' +
'2.ts\n' +
'#EXTINF:10,\n' +
'3.ts\n');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:1\n' +
'#EXTINF:10,\n' +
'1.ts\n' +
'#EXTINF:10,\n' +
'2.ts\n' +
'#EXTINF:10,\n' +
'3.ts\n' +
'#EXTINF:10,\n' +
'4.ts\n');
equal(loader.expired_, 10, 'expired one segment');
});
test('increments expired seconds after a discontinuity', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXTINF:3,\n' +
'1.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:4,\n' +
'2.ts\n');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:1\n' +
'#EXTINF:3,\n' +
'1.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:4,\n' +
'2.ts\n');
equal(loader.expired_, 10, 'expired one segment');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:2\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:4,\n' +
'2.ts\n');
equal(loader.expired_, 13, 'no expirations after the discontinuity yet');
clock.tick(10 * 1000); // 10s, one target duration
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:3\n' +
'#EXT-X-DISCONTINUITY-SEQUENCE:1\n' +
'#EXTINF:10,\n' +
'3.ts\n');
equal(loader.expired_, 13 + 4, 'tracked expired prior to the discontinuity');
});
test('tracks expired seconds properly when two discontinuities expire at once', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:0\n' +
'#EXTINF:4,\n' +
'0.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:5,\n' +
'1.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:6,\n' +
'2.ts\n' +
'#EXTINF:7,\n' +
'3.ts\n');
clock.tick(10 * 1000);
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:3\n' +
'#EXT-X-DISCONTINUITY-SEQUENCE:2\n' +
'#EXTINF:7,\n' +
'3.ts\n');
equal(loader.expired_, 4 + 5 + 6, 'tracked both expired discontinuities');
});
test('emits an error when an initial playlist request fails', function() {
var
errors = [],
......@@ -757,8 +653,8 @@
equal(loader.getMediaIndexForTime_(3), 0, 'time three is index zero');
equal(loader.getMediaIndexForTime_(10), 2, 'time 10 is index 2');
equal(loader.getMediaIndexForTime_(22),
2,
'the index is never greater than the length');
3,
'time greater than the length is index 3');
});
test('returns the lower index when calculating for a segment boundary', function() {
......@@ -785,7 +681,7 @@
'1001.ts\n' +
'#EXTINF:5,\n' +
'1002.ts\n');
loader.expired_ = 150;
loader.media().segments[0].start = 150;
equal(loader.getMediaIndexForTime_(0), 0, 'the lowest returned value is zero');
equal(loader.getMediaIndexForTime_(45), 0, 'expired content returns zero');
......@@ -797,30 +693,6 @@
equal(loader.getMediaIndexForTime_(50 + 100 + 6), 1, 'calculates within the second segment');
});
test('updating the timeline offset adjusts results from getMediaIndexForTime_', function() {
var loader = new videojs.Hls.PlaylistLoader('live.m3u8');
requests.pop().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:23\n' +
'#EXTINF:4,\n' +
'23.ts\n' +
'#EXTINF:5,\n' +
'24.ts\n' +
'#EXTINF:6,\n' +
'25.ts\n' +
'#EXTINF:7,\n' +
'26.ts\n');
loader.updateTimelineOffset(0, 150);
equal(loader.getMediaIndexForTime_(150), 0, 'translated the first segment');
equal(loader.getMediaIndexForTime_(130), 0, 'clamps the index to zero');
equal(loader.getMediaIndexForTime_(155), 1, 'translated the second segment');
loader.updateTimelineOffset(2, 30);
equal(loader.getMediaIndexForTime_(30 - 5 - 1), 0, 'translated the first segment');
equal(loader.getMediaIndexForTime_(30 + 7), 3, 'translated the last segment');
equal(loader.getMediaIndexForTime_(30 - 3), 1, 'translated an earlier segment');
});
test('does not misinterpret playlists missing newlines at the end', function() {
var loader = new videojs.Hls.PlaylistLoader('media.m3u8');
requests.shift().respond(200, null,
......
......@@ -18,27 +18,6 @@
module('Playlist Interval Duration');
test('accounts expired duration for live playlists', function() {
var duration = Playlist.duration({
mediaSequence: 10,
segments: [{
duration: 10,
uri: '10.ts'
}, {
duration: 10,
uri: '11.ts'
}, {
duration: 10,
uri: '12.ts'
}, {
duration: 10,
uri: '13.ts'
}]
}, 0, 14);
equal(duration, 14 * 10, 'duration includes dropped segments');
});
test('accounts for non-zero starting VOD media sequences', function() {
var duration = Playlist.duration({
mediaSequence: 10,
......@@ -61,47 +40,37 @@
equal(duration, 4 * 10, 'includes only listed segments');
});
test('uses PTS values when available', function() {
test('uses timeline values when available', function() {
var duration = Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 1,
minAudioPts: 2,
start: 0,
uri: '0.ts'
}, {
duration: 10,
maxVideoPts: 2 * 10 * 1000 + 1,
maxAudioPts: 2 * 10 * 1000 + 2,
end: 2 * 10 + 2,
uri: '1.ts'
}, {
duration: 10,
maxVideoPts: 3 * 10 * 1000 + 1,
maxAudioPts: 3 * 10 * 1000 + 2,
end: 3 * 10 + 2,
uri: '2.ts'
}, {
duration: 10,
maxVideoPts: 4 * 10 * 1000 + 1,
maxAudioPts: 4 * 10 * 1000 + 2,
end: 4 * 10 + 2,
uri: '3.ts'
}]
}, 0, 4);
}, 4);
equal(duration, ((4 * 10 * 1000 + 2) - 1) * 0.001, 'used PTS values');
equal(duration, 4 * 10 + 2, 'used timeline values');
});
test('works when partial PTS information is available', function() {
test('works when partial timeline information is available', function() {
var duration = Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 1,
minAudioPts: 2,
maxVideoPts: 10 * 1000 + 1,
// intentionally less duration than video
// the max stream duration should be used
maxAudioPts: 10 * 1000 + 1,
start: 0,
uri: '0.ts'
}, {
duration: 9,
......@@ -111,67 +80,17 @@
uri: '2.ts'
}, {
duration: 10,
minVideoPts: 30 * 1000 + 7,
minAudioPts: 30 * 1000 + 10,
maxVideoPts: 40 * 1000 + 1,
maxAudioPts: 40 * 1000 + 2,
start: 30.007,
end: 40.002,
uri: '3.ts'
}, {
duration: 10,
maxVideoPts: 50 * 1000 + 1,
maxAudioPts: 50 * 1000 + 2,
end: 50.0002,
uri: '4.ts'
}]
}, 0, 5);
}, 5);
equal(duration,
((50 * 1000 + 2) - 1) * 0.001,
'calculated with mixed intervals');
});
test('ignores segments before the start', function() {
var duration = Playlist.duration({
mediaSequence: 0,
segments: [{
duration: 10,
uri: '0.ts'
}, {
duration: 10,
uri: '1.ts'
}, {
duration: 10,
uri: '2.ts'
}]
}, 1, 3);
equal(duration, 10 + 10, 'ignored the first segment');
});
test('ignores discontinuity sequences earlier than the start', function() {
var duration = Playlist.duration({
mediaSequence: 0,
discontinuityStarts: [1, 3],
segments: [{
minVideoPts: 0,
minAudioPts: 0,
maxVideoPts: 10 * 1000,
maxAudioPts: 10 * 1000,
uri: '0.ts'
}, {
discontinuity: true,
duration: 9,
uri: '1.ts'
}, {
duration: 10,
uri: '2.ts'
}, {
discontinuity: true,
duration: 10,
uri: '3.ts'
}]
}, 2, 4);
equal(duration, 10 + 10, 'excluded the earlier segments');
equal(duration, 50.0002, 'calculated with mixed intervals');
});
test('ignores discontinuity sequences later than the end', function() {
......@@ -196,20 +115,19 @@
duration: 10,
uri: '3.ts'
}]
}, 0, 2);
}, 2);
equal(duration, 19, 'excluded the later segments');
});
test('handles trailing segments without PTS information', function() {
var duration = Playlist.duration({
test('handles trailing segments without timeline information', function() {
var playlist, duration;
playlist = {
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 0,
minAudioPts: 0,
maxVideoPts: 10 * 1000,
maxAudioPts: 10 * 1000,
start: 0,
end: 10.5,
uri: '0.ts'
}, {
duration: 9,
......@@ -218,107 +136,43 @@
duration: 10,
uri: '2.ts'
}, {
minVideoPts: 29.5 * 1000,
minAudioPts: 29.5 * 1000,
maxVideoPts: 39.5 * 1000,
maxAudioPts: 39.5 * 1000,
start: 29.45,
end: 39.5,
uri: '3.ts'
}]
}, 0, 3);
};
duration = Playlist.duration(playlist, 3);
equal(duration, 29.45, 'calculated duration');
equal(duration, 29.5, 'calculated duration');
duration = Playlist.duration(playlist, 2);
equal(duration, 19.5, 'calculated duration');
});
test('uses PTS intervals when the start and end segment have them', function() {
test('uses timeline intervals when segments have them', function() {
var playlist, duration;
playlist = {
mediaSequence: 0,
segments: [{
minVideoPts: 0,
minAudioPts: 0,
maxVideoPts: 10 * 1000,
maxAudioPts: 10 * 1000,
start: 0,
end: 10,
uri: '0.ts'
}, {
duration: 9,
uri: '1.ts'
},{
minVideoPts: 20 * 1000 + 100,
minAudioPts: 20 * 1000 + 100,
maxVideoPts: 30 * 1000 + 100,
maxAudioPts: 30 * 1000 + 100,
start: 20.1,
end: 30.1,
duration: 10,
uri: '2.ts'
}]
};
duration = Playlist.duration(playlist, 0, 2);
duration = Playlist.duration(playlist, 2);
equal(duration, 20.1, 'used the PTS-based interval');
equal(duration, 20.1, 'used the timeline-based interval');
duration = Playlist.duration(playlist, 0, 3);
equal(duration, 30.1, 'used the PTS-based interval');
});
test('works for media without audio', function() {
equal(Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 0,
maxVideoPts: 9 * 1000,
uri: 'no-audio.ts'
}]
}), 9, 'used video PTS values');
});
test('works for media without video', function() {
equal(Playlist.duration({
mediaSequence: 0,
endList: true,
segments: [{
minAudioPts: 0,
maxAudioPts: 9 * 1000,
uri: 'no-video.ts'
}]
}), 9, 'used video PTS values');
});
test('uses the largest continuous available PTS ranges', function() {
var playlist = {
mediaSequence: 0,
segments: [{
minVideoPts: 0,
minAudioPts: 0,
maxVideoPts: 10 * 1000,
maxAudioPts: 10 * 1000,
uri: '0.ts'
}, {
duration: 10,
uri: '1.ts'
}, {
// starts 0.5s earlier than the previous segment indicates
minVideoPts: 19.5 * 1000,
minAudioPts: 19.5 * 1000,
maxVideoPts: 29.5 * 1000,
maxAudioPts: 29.5 * 1000,
uri: '2.ts'
}, {
duration: 10,
uri: '3.ts'
}, {
// ... but by the last segment, there is actual 0.5s more
// content than duration indicates
minVideoPts: 40.5 * 1000,
minAudioPts: 40.5 * 1000,
maxVideoPts: 50.5 * 1000,
maxAudioPts: 50.5 * 1000,
uri: '4.ts'
}]
};
equal(Playlist.duration(playlist, 0, 5),
50.5,
'calculated across the larger PTS interval');
duration = Playlist.duration(playlist, 3);
equal(duration, 30.1, 'used the timeline-based interval');
});
test('counts the time between segments as part of the earlier segment\'s duration', function() {
......@@ -326,22 +180,18 @@
mediaSequence: 0,
endList: true,
segments: [{
minVideoPts: 0,
minAudioPts: 0,
maxVideoPts: 1 * 10 * 1000,
maxAudioPts: 1 * 10 * 1000,
start: 0,
end: 10,
uri: '0.ts'
}, {
minVideoPts: 1 * 10 * 1000 + 100,
minAudioPts: 1 * 10 * 1000 + 100,
maxVideoPts: 2 * 10 * 1000 + 100,
maxAudioPts: 2 * 10 * 1000 + 100,
start: 10.1,
end: 20.1,
duration: 10,
uri: '1.ts'
}]
}, 0, 1);
}, 1);
equal(duration, (1 * 10 * 1000 + 100) * 0.001, 'included the segment gap');
equal(duration, 10.1, 'included the segment gap');
});
test('accounts for discontinuities', function() {
......@@ -364,7 +214,7 @@
duration: 10,
uri: '1.ts'
}]
}, 0, 2);
}, 2);
equal(duration, 10 + 10, 'handles discontinuities');
});
......@@ -389,7 +239,7 @@
duration: 10,
uri: '1.ts'
}]
}, 0, 1);
}, 1);
equal(duration, (1 * 10 * 1000) * 0.001, 'did not include the segment gap');
});
......@@ -412,7 +262,7 @@
duration: 10,
uri: '1.ts'
}]
}, 0, 1, false);
}, 1, false);
equal(duration, (1 * 10 * 1000) * 0.001, 'did not include the segment gap');
});
......@@ -431,10 +281,9 @@
}]
};
equal(Playlist.duration(playlist, 0, 0), 0, 'zero-length duration is zero');
equal(Playlist.duration(playlist, 0, 0, false), 0, 'zero-length duration is zero');
equal(Playlist.duration(playlist, 0, -1), 0, 'negative length duration is zero');
equal(Playlist.duration(playlist, 2, 1, false), 0, 'negative length duration is zero');
equal(Playlist.duration(playlist, 0), 0, 'zero-length duration is zero');
equal(Playlist.duration(playlist, 0, false), 0, 'zero-length duration is zero');
equal(Playlist.duration(playlist, -1), 0, 'negative length duration is zero');
});
module('Playlist Seekable');
......
......@@ -167,60 +167,6 @@ var
window.manifests[manifestName]);
},
mockSegmentParser = function(tags) {
var MockSegmentParser;
if (tags === undefined) {
tags = [{ pts: 0, bytes: new Uint8Array(1) }];
}
MockSegmentParser = function() {
this.getFlvHeader = function() {
return 'flv';
};
this.parseSegmentBinaryData = function() {};
this.flushTags = function() {};
this.tagsAvailable = function() {
return tags.length;
};
this.getTags = function() {
return tags;
};
this.getNextTag = function() {
return tags.shift();
};
this.metadataStream = new videojs.Hls.Stream();
this.metadataStream.init();
this.metadataStream.descriptor = new Uint8Array([
1, 2, 3, 0xbb
]);
this.stats = {
h264Tags: function() {
return tags.length;
},
minVideoPts: function() {
return tags[0].pts;
},
maxVideoPts: function() {
return tags[tags.length - 1].pts;
},
aacTags: function() {
return tags.length;
},
minAudioPts: function() {
return tags[0].pts;
},
maxAudioPts: function() {
return tags[tags.length - 1].pts;
},
};
};
MockSegmentParser.STREAM_TYPES = videojs.Hls.SegmentParser.STREAM_TYPES;
return MockSegmentParser;
},
// a no-op MediaSource implementation to allow synchronous testing
MockMediaSource = videojs.extend(videojs.EventTarget, {
constructor: function() {},
......@@ -524,7 +470,7 @@ test('sets the duration if one is available on the playlist', function() {
equal(events, 1, 'durationchange is fired');
});
QUnit.skip('calculates the duration if needed', function() {
test('estimates individual segment durations if needed', function() {
var changes = 0;
player.src({
src: 'http://example.com/manifest/missingExtinf.m3u8',
......@@ -532,7 +478,7 @@ QUnit.skip('calculates the duration if needed', function() {
});
openMediaSource(player);
player.tech_.hls.mediaSource.duration = NaN;
player.on('durationchange', function() {
player.tech_.on('durationchange', function() {
changes++;
});
......@@ -889,10 +835,10 @@ test('reports an error if a segment is unreachable', function() {
openMediaSource(player);
player.tech_.hls.bandwidth = 20000;
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
standardXHRResponse(requests[0]); // master
standardXHRResponse(requests[1]); // media
requests[2].respond(400);
requests[2].respond(400); // segment
strictEqual(player.tech_.hls.mediaSource.error_, 'network', 'network error is triggered');
});
......@@ -1172,18 +1118,18 @@ test('downloads the next segment if the buffer is getting low', function() {
test('buffers based on the correct TimeRange if multiple ranges exist', function() {
var currentTime, buffered;
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.currentTime = function() {
return currentTime;
};
player.tech_.buffered = function() {
return videojs.createTimeRange(buffered);
};
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
currentTime = 8;
buffered = [[0, 10], [20, 40]];
......@@ -1238,7 +1184,6 @@ test('only makes one segment request at a time', function() {
});
test('only appends one segment at a time', function() {
var appends = 0;
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
......@@ -1251,84 +1196,6 @@ test('only appends one segment at a time', function() {
equal(requests.length, 0, 'did not request while updating');
});
QUnit.skip('records the min and max PTS values for a segment', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.pop()); // media.m3u8
tags.push({ pts: 0, bytes: new Uint8Array(1) });
tags.push({ pts: 10, bytes: new Uint8Array(1) });
standardXHRResponse(requests.pop()); // segment 0
equal(player.tech_.hls.playlists.media().segments[0].minVideoPts, 0, 'recorded min video pts');
equal(player.tech_.hls.playlists.media().segments[0].maxVideoPts, 10, 'recorded max video pts');
equal(player.tech_.hls.playlists.media().segments[0].minAudioPts, 0, 'recorded min audio pts');
equal(player.tech_.hls.playlists.media().segments[0].maxAudioPts, 10, 'recorded max audio pts');
});
QUnit.skip('records PTS values for video-only segments', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.pop()); // media.m3u8
player.tech_.hls.segmentParser_.stats.aacTags = function() {
return 0;
};
player.tech_.hls.segmentParser_.stats.minAudioPts = function() {
throw new Error('No audio tags');
};
player.tech_.hls.segmentParser_.stats.maxAudioPts = function() {
throw new Error('No audio tags');
};
tags.push({ pts: 0, bytes: new Uint8Array(1) });
tags.push({ pts: 10, bytes: new Uint8Array(1) });
standardXHRResponse(requests.pop()); // segment 0
equal(player.tech_.hls.playlists.media().segments[0].minVideoPts, 0, 'recorded min video pts');
equal(player.tech_.hls.playlists.media().segments[0].maxVideoPts, 10, 'recorded max video pts');
strictEqual(player.tech_.hls.playlists.media().segments[0].minAudioPts, undefined, 'min audio pts is undefined');
strictEqual(player.tech_.hls.playlists.media().segments[0].maxAudioPts, undefined, 'max audio pts is undefined');
});
QUnit.skip('records PTS values for audio-only segments', function() {
var tags = [];
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.pop()); // media.m3u8
player.tech_.hls.segmentParser_.stats.h264Tags = function() {
return 0;
};
player.tech_.hls.segmentParser_.stats.minVideoPts = function() {
throw new Error('No video tags');
};
player.tech_.hls.segmentParser_.stats.maxVideoPts = function() {
throw new Error('No video tags');
};
tags.push({ pts: 0, bytes: new Uint8Array(1) });
tags.push({ pts: 10, bytes: new Uint8Array(1) });
standardXHRResponse(requests.pop()); // segment 0
equal(player.tech_.hls.playlists.media().segments[0].minAudioPts, 0, 'recorded min audio pts');
equal(player.tech_.hls.playlists.media().segments[0].maxAudioPts, 10, 'recorded max audio pts');
strictEqual(player.tech_.hls.playlists.media().segments[0].minVideoPts, undefined, 'min video pts is undefined');
strictEqual(player.tech_.hls.playlists.media().segments[0].maxVideoPts, undefined, 'max video pts is undefined');
});
test('waits to download new segments until the media playlist is stable', function() {
player.src({
src: 'manifest/master.m3u8',
......@@ -1438,331 +1305,6 @@ test('segmentXhr is properly nulled out when dispose is called', function() {
Flash.prototype.dispose = oldDispose;
});
QUnit.skip('exposes in-band metadata events as cues', function() {
var track;
videojs.Hls.SegmentParser = mockSegmentParser();
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.tech_.hls.segmentParser_.metadataStream.trigger('data', {
pts: 2000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}, {
id: 'WXXX',
url: 'http://example.com'
}, {
id: 'PRIV',
owner: 'owner@example.com',
privateData: new Uint8Array([1, 2, 3])
}]
});
};
standardXHRResponse(requests[0]);
standardXHRResponse(requests[1]);
equal(player.textTracks().length, 1, 'created a text track');
track = player.textTracks()[0];
equal(track.kind, 'metadata', 'kind is metadata');
equal(track.inBandMetadataTrackDispatchType, '15010203BB', 'set the dispatch type');
equal(track.cues.length, 3, 'created three cues');
equal(track.cues[0].startTime, 2, 'cue starts at 2 seconds');
equal(track.cues[0].endTime, 2, 'cue ends at 2 seconds');
equal(track.cues[0].pauseOnExit, false, 'cue does not pause on exit');
equal(track.cues[0].text, 'cue text', 'set cue text');
equal(track.cues[1].startTime, 2, 'cue starts at 2 seconds');
equal(track.cues[1].endTime, 2, 'cue ends at 2 seconds');
equal(track.cues[1].pauseOnExit, false, 'cue does not pause on exit');
equal(track.cues[1].text, 'http://example.com', 'set cue text');
equal(track.cues[2].startTime, 2, 'cue starts at 2 seconds');
equal(track.cues[2].endTime, 2, 'cue ends at 2 seconds');
equal(track.cues[2].pauseOnExit, false, 'cue does not pause on exit');
equal(track.cues[2].text, '', 'did not set cue text');
equal(track.cues[2].frame.owner, 'owner@example.com', 'set the owner');
deepEqual(track.cues[2].frame.privateData,
new Uint8Array([1, 2, 3]),
'set the private data');
});
QUnit.skip('only adds in-band cues the first time they are encountered', function() {
var tags = [{ pts: 0, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.tech_.hls.segmentParser_.metadataStream.trigger('data', {
pts: 2000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}]
});
};
standardXHRResponse(requests.shift());
standardXHRResponse(requests.shift());
// seek back to the first segment
player.currentTime(0);
player.tech_.hls.trigger('seeking');
tags.push({ pts: 0, bytes: new Uint8Array(1) });
standardXHRResponse(requests.shift());
track = player.textTracks()[0];
equal(track.cues.length, 1, 'only added the cue once');
});
QUnit.skip('clears in-band cues ahead of current time on seek', function() {
var
tags = [],
events = [],
track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
while (events.length) {
player.tech_.hls.segmentParser_.metadataStream.trigger('data', events.shift());
}
};
standardXHRResponse(requests.shift()); // media
tags.push({ pts: 0, bytes: new Uint8Array(1) },
{ pts: 10 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 9.9 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 1'
}]
});
events.push({
pts: 20 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 3'
}]
});
standardXHRResponse(requests.shift()); // segment 0
tags.push({ pts: 10 * 1000 + 1, bytes: new Uint8Array(1) },
{ pts: 20 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 19.9 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 2'
}]
});
player.tech_.hls.checkBuffer_();
standardXHRResponse(requests.shift()); // segment 1
track = player.textTracks()[0];
equal(track.cues.length, 3, 'added the cues');
// seek into segment 1
player.currentTime(11);
player.trigger('seeking');
equal(track.cues.length, 1, 'removed later cues');
equal(track.cues[0].startTime, 9.9, 'retained the earlier cue');
});
QUnit.skip('translates ID3 PTS values to cue media timeline positions', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.tech_.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}]
});
};
standardXHRResponse(requests.shift()); // media
standardXHRResponse(requests.shift()); // segment 0
track = player.textTracks()[0];
equal(track.cues[0].startTime, 1, 'translated startTime');
equal(track.cues[0].endTime, 1, 'translated startTime');
});
QUnit.skip('translates ID3 PTS values with expired segments', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'live.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.play();
// 20.9 seconds of content have expired
player.hls.playlists.expiredPostDiscontinuity_ = 20.9;
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}]
});
};
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:2\n' +
'#EXTINF:10,\n' +
'2.ts\n' +
'#EXTINF:10,\n' +
'3.ts\n'); // media
standardXHRResponse(requests.shift()); // segment 0
track = player.textTracks()[0];
equal(track.cues[0].startTime, 20.9 + 1, 'translated startTime');
equal(track.cues[0].endTime, 20.9 + 1, 'translated startTime');
});
QUnit.skip('translates id3 PTS values for audio-only media', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}]
});
};
player.hls.segmentParser_.stats.h264Tags = function() { return 0; };
player.hls.segmentParser_.stats.minVideoPts = null;
standardXHRResponse(requests.shift()); // media
standardXHRResponse(requests.shift()); // segment 0
track = player.textTracks()[0];
equal(track.cues[0].startTime, 1, 'translated startTime');
});
QUnit.skip('translates ID3 PTS values across discontinuities', function() {
var tags = [], events = [], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'cues-and-discontinuities.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
if (events.length) {
player.tech_.hls.segmentParser_.metadataStream.trigger('data', events.shift());
}
};
// media playlist
player.trigger('play');
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:10,\n' +
'1.ts\n');
// segment 0 starts at PTS 14000 and has a cue point at 15000
tags.push({ pts: 14 * 1000, bytes: new Uint8Array(1) },
{ pts: 24 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 15 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 0'
}]
});
standardXHRResponse(requests.shift()); // segment 0
// segment 1 is after a discontinuity, starts at PTS 22000
// and has a cue point at 23000
tags.push({ pts: 22 * 1000, bytes: new Uint8Array(1) });
events.push({
pts: 23 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue 1'
}]
});
player.tech_.hls.checkBuffer_();
standardXHRResponse(requests.shift());
track = player.textTracks()[0];
equal(track.cues.length, 2, 'created cues');
equal(track.cues[0].startTime, 1, 'first cue started at the correct time');
equal(track.cues[0].endTime, 1, 'first cue ended at the correct time');
equal(track.cues[1].startTime, 11, 'second cue started at the correct time');
equal(track.cues[1].endTime, 11, 'second cue ended at the correct time');
});
test('seeks between buffered time ranges', function() {
player.src({
src: 'media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift()); // media
player.tech_.buffered = function() {
return videojs.createTimeRange([[0, 10], [20, 30]]);
};
player.tech_.setCurrentTime(15);
clock.tick(1);
// drop the aborted segment
requests.shift();
equal(requests[0].url,
absoluteUrl('media-00002.ts'),
'requested the correct segment');
});
test('does not modify the media index for in-buffer seeking', function() {
var mediaIndex;
player.src({
......@@ -2036,26 +1578,21 @@ test('does not break if the playlist has no segments', function() {
strictEqual(requests.length, 1, 'no requests for non-existent segments were queued');
});
test('clears the segment buffer on seek', function() {
var currentTime, oldCurrentTime;
test('aborts segment processing on seek', function() {
var currentTime = 0;
player.src({
src: 'discontinuity.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
oldCurrentTime = player.currentTime;
player.currentTime = function(time) {
if (time !== undefined) {
return oldCurrentTime.call(player, time);
}
player.tech_.currentTime = function() {
return currentTime;
};
player.tech_.buffered = function() {
return videojs.createTimeRange();
};
requests.pop().respond(200, null,
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="keys/key.php"\n' +
'#EXTINF:10,0\n' +
......@@ -2063,23 +1600,19 @@ test('clears the segment buffer on seek', function() {
'#EXT-X-DISCONTINUITY\n' +
'#EXTINF:10,0\n' +
'2.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.pop()); // 1.ts
// play to 6s to trigger the next segment request
currentTime = 6;
clock.tick(6000);
standardXHRResponse(requests.pop()); // 2.ts
equal(player.tech_.hls.segmentBuffer_.length, 2, 'started fetching segments');
'#EXT-X-ENDLIST\n'); // media
standardXHRResponse(requests.shift()); // 1.ts
standardXHRResponse(requests.shift()); // key.php
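// both the segment and its key have now arrived, so decryption starts and the segment is held as pending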
ok(player.tech_.hls.pendingSegment_, 'decrypting the segment');
// seek back to the beginning
player.currentTime(0);
clock.tick(1);
equal(player.tech_.hls.segmentBuffer_.length, 0, 'cleared the segment buffer');
ok(!player.tech_.hls.pendingSegment_, 'aborted processing');
});
test('calls mediaSource\'s timestampOffset on discontinuity', function() {
var buffered = [[]];
player.src({
src: 'discontinuity.m3u8',
type: 'application/vnd.apple.mpegurl'
......@@ -2087,10 +1620,10 @@ test('calls mediaSource\'s timestampOffset on discontinuity', function() {
openMediaSource(player);
player.play();
player.tech_.buffered = function() {
return videojs.createTimeRange(0, 10);
return videojs.createTimeRange(buffered);
};
requests.pop().respond(200, null,
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,0\n' +
'1.ts\n' +
......@@ -2099,14 +1632,14 @@ test('calls mediaSource\'s timestampOffset on discontinuity', function() {
'2.ts\n' +
'#EXT-X-ENDLIST\n');
player.tech_.hls.sourceBuffer.timestampOffset = 0;
standardXHRResponse(requests.pop()); // 1.ts
equal(player.tech_.hls.sourceBuffer.timestampOffset, 0, 'timestampOffset starts at zero');
// play to 6s to trigger the next segment request
clock.tick(6000);
standardXHRResponse(requests.shift()); // 1.ts
equal(player.tech_.hls.sourceBuffer.timestampOffset,
0,
'timestampOffset starts at zero');
standardXHRResponse(requests.pop()); // 2.ts
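// report the first segment as buffered and finish its append before the post-discontinuity segment is fetched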
buffered = [[0, 10]];
player.tech_.hls.sourceBuffer.trigger('updateend');
standardXHRResponse(requests.shift()); // 2.ts
equal(player.tech_.hls.sourceBuffer.timestampOffset, 10, 'timestampOffset set after discontinuity');
});
......@@ -2198,7 +1731,7 @@ QUnit.skip('sets the timestampOffset after seeking to discontinuity', function()
'set the timestamp offset');
});
QUnit.skip('tracks segment end times as they are buffered', function() {
test('tracks segment end times as they are buffered', function() {
var bufferEnd = 0;
player.src({
src: 'media.m3u8',
......@@ -2225,8 +1758,7 @@ QUnit.skip('tracks segment end times as they are buffered', function() {
bufferEnd = 9.5;
player.tech_.hls.sourceBuffer.trigger('update');
player.tech_.hls.sourceBuffer.trigger('updateend');
equal(player.tech_.duration(), 10 + 9.5, 'updated duration');
equal(player.tech_.hls.appendingSegmentInfo_, null, 'cleared the appending segment');
equal(player.tech_.hls.mediaSource.duration, 10 + 9.5, 'updated duration');
});
QUnit.skip('seeking does not fail when targeted between segments', function() {
......@@ -2274,7 +1806,7 @@ test('resets the switching algorithm if a request times out', function() {
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.hls.bandwidth = 20000;
player.tech_.hls.bandwidth = 1e20;
standardXHRResponse(requests.shift()); // master
standardXHRResponse(requests.shift()); // media.m3u8
......@@ -2455,6 +1987,7 @@ test('tracks the bytes downloaded', function() {
// transmit some segment bytes
requests[0].response = new ArrayBuffer(17);
requests.shift().respond(200, null, '');
player.tech_.hls.sourceBuffer.trigger('updateend');
strictEqual(player.tech_.hls.bytesReceived, 17, 'tracked bytes received');
......@@ -2509,12 +2042,15 @@ test('can be disposed before finishing initialization', function() {
});
test('calls ended() on the media source at the end of a playlist', function() {
var endOfStreams = 0;
var endOfStreams = 0, buffered = [[]];
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.buffered = function() {
return videojs.createTimeRanges(buffered);
};
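// stub buffered so the test controls when appended data is reported as buffered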
player.tech_.hls.mediaSource.endOfStream = function() {
endOfStreams++;
};
......@@ -2529,70 +2065,61 @@ test('calls ended() on the media source at the end of a playlist', function() {
requests.shift().respond(200, null, '');
strictEqual(endOfStreams, 0, 'waits for the buffer update to finish');
buffered = [[0, 10]];
player.tech_.hls.sourceBuffer.trigger('updateend');
strictEqual(endOfStreams, 1, 'ended media source');
});
test('calling play() at the end of a video resets the media index', function() {
test('calling play() at the end of a video replays', function() {
var seekTime = -1;
player.src({
src: 'http://example.com/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.tech_.setCurrentTime = function(time) {
if (time !== undefined) {
seekTime = time;
}
return 0;
};
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXTINF:10,\n' +
'0.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift());
strictEqual(player.tech_.hls.mediaIndex, 1, 'index is 1 after the first segment');
player.tech_.ended = function() {
return true;
};
player.tech_.trigger('play');
strictEqual(player.tech_.hls.mediaIndex, 0, 'index is 0 after the first segment');
equal(seekTime, 0, 'seeked to the beginning');
});
test('drainBuffer will not proceed with empty source buffer', function() {
var oldMedia, newMedia, compareBuffer;
test('segments remain pending without a source buffer', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
oldMedia = player.tech_.hls.playlists.media;
newMedia = {segments: [{
key: {
'retries': 5
},
uri: 'http://media.example.com/fileSequence52-A.ts'
}, {
key: {
'method': 'AES-128',
'uri': 'https://priv.example.com/key.php?r=53'
},
uri: 'http://media.example.com/fileSequence53-B.ts'
}]};
player.tech_.hls.playlists.media = function() {
return newMedia;
};
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="keys/key.php?r=52"\n' +
'#EXTINF:10,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="keys/key.php?r=53"\n' +
'#EXTINF:10,\n' +
'http://media.example.com/fileSequence53-B.ts\n' +
'#EXT-X-ENDLIST\n');
player.tech_.hls.sourceBuffer = undefined;
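// simulate the source buffer not having been created yet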
compareBuffer = [{mediaIndex: 0, playlist: newMedia, offset: 0, bytes: new Uint8Array(3)}];
player.tech_.hls.segmentBuffer_ = [{mediaIndex: 0, playlist: newMedia, offset: 0, bytes: new Uint8Array(3)}];
player.tech_.hls.drainBuffer();
/* Normally, drainBuffer() calls segmentBuffer.shift(), removing a segment from the stack.
* Comparing two buffers to ensure no segment was popped verifies that we returned early
* from drainBuffer() because sourceBuffer was empty.
*/
deepEqual(player.tech_.hls.segmentBuffer_, compareBuffer, 'playlist remains unchanged');
player.tech_.hls.playlists.media = oldMedia;
standardXHRResponse(requests.shift()); // key
standardXHRResponse(requests.shift()); // segment
player.tech_.hls.checkBuffer_();
ok(player.tech_.hls.pendingSegment_, 'waiting for the source buffer');
});
test('keys are requested when an encrypted segment is loaded', function() {
......@@ -2603,12 +2130,14 @@ test('keys are requested when an encrypted segment is loaded', function() {
openMediaSource(player);
player.tech_.trigger('play');
standardXHRResponse(requests.shift()); // playlist
standardXHRResponse(requests.shift()); // first segment
strictEqual(requests.length, 1, 'a key XHR is created');
strictEqual(requests.length, 2, 'a key XHR is created');
strictEqual(requests[0].url,
player.tech_.hls.playlists.media().segments[0].key.uri,
'a key XHR is created with correct uri');
'key XHR is created with correct uri');
strictEqual(requests[1].url,
player.tech_.hls.playlists.media().segments[0].uri,
'segment XHR is created with correct uri');
});
test('keys are resolved relative to the master playlist', function() {
......@@ -2629,11 +2158,10 @@ test('keys are resolved relative to the master playlist', function() {
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence1.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift());
equal(requests.length, 1, 'requested the key');
ok((/video\/playlist\/keys\/key\.php$/).test(requests[0].url),
'resolves multiple relative paths');
equal(requests.length, 2, 'requested the key');
equal(requests[0].url,
absoluteUrl('video/playlist/keys/key.php'),
'resolves multiple relative paths');
});
test('keys are resolved relative to their containing playlist', function() {
......@@ -2649,13 +2177,13 @@ test('keys are resolved relative to their containing playlist', function() {
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence1.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift());
equal(requests.length, 1, 'requested a key');
ok((/video\/keys\/key\.php$/).test(requests[0].url),
'resolves multiple relative paths');
equal(requests.length, 2, 'requested a key');
equal(requests[0].url,
absoluteUrl('video/keys/key.php'),
'resolves multiple relative paths');
});
test('a new key XHR is created when a the segment is received', function() {
test('a new key XHR is created when the segment is requested', function() {
player.src({
src: 'https://example.com/encrypted-media.m3u8',
type: 'application/vnd.apple.mpegurl'
......@@ -2672,15 +2200,17 @@ test('a new key XHR is created when a the segment is received', function() {
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence2.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift()); // segment 1
standardXHRResponse(requests.shift()); // key 1
standardXHRResponse(requests.shift()); // segment 1
// "finish" decrypting segment 1
player.tech_.hls.segmentBuffer_[0].bytes = new Uint8Array(16);
player.tech_.hls.pendingSegment_.bytes = new Uint8Array(16);
player.tech_.hls.checkBuffer_();
player.tech_.buffered = function() {
return videojs.createTimeRange(0, 2.833);
};
player.tech_.hls.sourceBuffer.trigger('updateend');
standardXHRResponse(requests.shift()); // segment 2
strictEqual(requests.length, 1, 'a key XHR is created');
strictEqual(requests.length, 2, 'a key XHR is created');
strictEqual(requests[0].url,
'https://example.com/' +
player.tech_.hls.playlists.media().segments[1].key.uri,
......@@ -2704,16 +2234,14 @@ test('seeking should abort an outstanding key request and create a new one', fun
'#EXTINF:9,\n' +
'http://media.example.com/fileSequence2.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift()); // segment 1
standardXHRResponse(requests.pop()); // segment 1
player.currentTime(11);
clock.tick(1);
ok(requests[0].aborted, 'the key XHR should be aborted');
requests.shift(); // aborted key 1
equal(requests.length, 1, 'requested the new segment');
standardXHRResponse(requests.shift()); // segment 2
equal(requests.length, 1, 'requested the new key');
equal(requests.length, 2, 'requested the new key');
equal(requests[0].url,
'https://example.com/' +
player.tech_.hls.playlists.media().segments[1].key.uri,
......@@ -2736,7 +2264,7 @@ test('retries key requests once upon failure', function() {
'#EXT-X-KEY:METHOD=AES-128,URI="htts://priv.example.com/key.php?r=53"\n' +
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence53-A.ts\n');
standardXHRResponse(requests.shift()); // segment
standardXHRResponse(requests.pop()); // segment
requests[0].respond(404);
equal(requests.length, 2, 'create a new XHR for the same key');
equal(requests[1].url, requests[0].url, 'should be the same key');
......@@ -2745,7 +2273,7 @@ test('retries key requests once upon failure', function() {
equal(requests.length, 2, 'gives up after one retry');
});
test('skip segments if key requests fail more than once', function() {
test('errors if key requests fail more than once', function() {
var bytes = [];
player.src({
......@@ -2766,23 +2294,14 @@ test('skip segments if key requests fail more than once', function() {
player.tech_.hls.sourceBuffer.appendBuffer = function(chunk) {
bytes.push(chunk);
};
standardXHRResponse(requests.shift()); // segment 1
standardXHRResponse(requests.pop()); // segment 1
requests.shift().respond(404); // fail key
requests.shift().respond(404); // fail key, again
player.tech_.hls.checkBuffer_();
standardXHRResponse(requests.shift()); // segment 2
equal(bytes.length, 0, 'did not append encrypted bytes');
// key for second segment
requests[0].response = new Uint32Array([0,0,0,0]).buffer;
requests.shift().respond(200, null, '');
// "finish" decryption
player.tech_.hls.segmentBuffer_[0].bytes = new Uint8Array(16);
player.tech_.hls.checkBuffer_();
equal(bytes.length, 1, 'appended cleartext bytes from the second segment');
deepEqual(bytes[0], new Uint8Array(16), 'appended bytes from the second segment, not the first');
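// once the retry also fails, the failure should surface as a network error on the media source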
equal(player.tech_.hls.mediaSource.error_,
'network',
'triggered a network error');
});
test('the key is supplied to the decrypter in the correct format', function() {
......@@ -2804,12 +2323,11 @@ test('the key is supplied to the decrypter in the correct format', function() {
'#EXTINF:15.0,\n' +
'http://media.example.com/fileSequence52-B.ts\n');
videojs.Hls.Decrypter = function(encrypted, key) {
keys.push(key);
};
standardXHRResponse(requests.shift()); // segment
standardXHRResponse(requests.pop()); // segment
requests[0].response = new Uint32Array([0,1,2,3]).buffer;
requests[0].respond(200, null, '');
requests.shift(); // key
......@@ -2856,6 +2374,7 @@ test('supplies the media sequence of current segment as the IV by default, if no
});
test('switching playlists with an outstanding key request does not stall playback', function() {
var buffered = [];
var media = '#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:5\n' +
'#EXT-X-KEY:METHOD=AES-128,URI="https://priv.example.com/key.php?r=52"\n' +
......@@ -2870,31 +2389,37 @@ test('switching playlists with an outstanding key request does not stall playbac
openMediaSource(player);
player.tech_.trigger('play');
player.tech_.hls.bandwidth = 1;
player.tech_.buffered = function() {
return videojs.createTimeRange(buffered);
};
// master playlist
standardXHRResponse(requests.shift());
// media playlist
requests.shift().respond(200, null, media);
// mock out media switching from this point on
player.tech_.hls.playlists.media = function() {
return player.tech_.hls.playlists.master.playlists[0];
return player.tech_.hls.playlists.master.playlists[1];
};
// first segment of the original media playlist
standardXHRResponse(requests.shift());
// don't respond to the initial key request
requests.shift();
standardXHRResponse(requests.pop());
// "switch" media
player.tech_.hls.playlists.trigger('mediachange');
ok(!requests[0].aborted, 'did not abort the key request');
// "finish" decrypting segment 1
standardXHRResponse(requests.shift()); // key
player.tech_.hls.pendingSegment_.bytes = new Uint8Array(16);
player.tech_.hls.checkBuffer_();
buffered = [[0, 2.833]];
player.tech_.hls.sourceBuffer.trigger('updateend');
player.tech_.hls.checkBuffer_();
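// with the first segment buffered, the loader should move on to the next segment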
ok(requests.length, 'made a request');
equal(requests.length, 1, 'made a request');
equal(requests[0].url,
'http://media.example.com/fileSequence52-B.ts',
'requested the segment');
equal(requests[1].url,
'https://priv.example.com/key.php?r=52',
'requested the key');
});
test('resolves relative key URLs against the playlist', function() {
......@@ -2911,8 +2436,6 @@ test('resolves relative key URLs against the playlist', function() {
'#EXTINF:2.833,\n' +
'http://media.example.com/fileSequence52-A.ts\n' +
'#EXT-X-ENDLIST\n');
standardXHRResponse(requests.shift()); // segment
equal(requests[0].url, 'https://example.com/key.php?r=52', 'resolves the key URL');
});
......@@ -2937,7 +2460,7 @@ test('treats invalid keys as a key request failure', function() {
bytes.push(chunk);
};
// segment request
standardXHRResponse(requests.shift());
standardXHRResponse(requests.pop());
// keys should be 16 bytes long
requests[0].response = new Uint8Array(1).buffer;
requests.shift().respond(200, null, '');
......@@ -2947,17 +2470,12 @@ test('treats invalid keys as a key request failure', function() {
// the retried response is invalid, too
requests[0].response = new Uint8Array(1);
requests.shift().respond(200, null, '');
// the first segment should be dropped and playback moves on
player.tech_.hls.checkBuffer_();
equal(bytes.length, 0, 'did not append bytes');
// second segment request
requests[0].response = new Uint8Array([1, 2]);
requests.shift().respond(200, null, '');
equal(bytes.length, 1, 'appended bytes');
deepEqual(bytes[0], new Uint8Array([1, 2]), 'skipped to the second segment');
// two failed attempts is a network error
equal(player.tech_.hls.mediaSource.error_,
'network',
'triggered a network error');
});
test('live stream should not call endOfStream', function(){
......