f9633470 by David LaPalomento

autoplay at the live point. fix live id3 cue insertion. Closes #353

2 parents 7aabe429 9dce22d1
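
For context, a minimal usage sketch of the autoplay behavior this commit adds (the element id and stream URL are placeholders, not taken from the diff): when autoplay is enabled and the playlist is live, the plugin now seeks to the live point once buffering can begin.

videojs.options.flash.swf = 'node_modules/videojs-swf/dist/video-js.swf';
// create a player with autoplay enabled
var player = videojs('video', { autoplay: true });
// point it at a live (non-endlist) HLS playlist; the URL is a placeholder
player.src({
  src: 'http://example.com/live/index.m3u8',
  type: 'application/x-mpegURL'
});
// once the media source opens and the playlist loads, setupFirstPlay()
// seeks to seekable().end(0) so playback starts at the live point
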
@@ -2,7 +2,7 @@ CHANGELOG
=========
## HEAD (Unreleased)
_(none)_
* autoplay at the live point. fix live id3 cue insertion. ([view](https://github.com/videojs/videojs-contrib-hls/pull/353))
--------------------
......
@@ -73,6 +73,7 @@
</video>
<script>
videojs.options.flash.swf = 'node_modules/videojs-swf/dist/video-js.swf';
// initialize the player
var player = videojs('video');
</script>
......
@@ -389,8 +389,8 @@
this.media_.mediaSequence,
lastDiscontinuity);
this.expiredPostDiscontinuity_ += Playlist.duration(this.media_,
lastDiscontinuity,
this.media_.mediaSequence + expiredCount);
lastDiscontinuity,
update.mediaSequence);
}
this.media_ = this.master.playlists[update.uri];
......
@@ -35,6 +35,14 @@ videojs.Hls = videojs.Flash.extend({
options.source = source;
this.bytesReceived = 0;
this.hasPlayed_ = false;
this.on(player, 'loadstart', function() {
this.hasPlayed_ = false;
this.one(this.mediaSource, 'sourceopen', this.setupFirstPlay);
});
this.on(player, ['play', 'loadedmetadata'], this.setupFirstPlay);
// TODO: After video.js#1347 is pulled in remove these lines
this.currentTime = videojs.Hls.prototype.currentTime;
this.setCurrentTime = videojs.Hls.prototype.setCurrentTime;
@@ -109,12 +117,7 @@ videojs.Hls.prototype.src = function(src) {
this.playlists.on('loadedmetadata', videojs.bind(this, function() {
var selectedPlaylist, loaderHandler, oldBitrate, newBitrate, segmentDuration,
segmentDlTime, setupEvents, threshold;
setupEvents = function() {
this.fillBuffer();
player.trigger('loadedmetadata');
};
segmentDlTime, threshold;
oldMediaPlaylist = this.playlists.media();
@@ -155,12 +158,16 @@ videojs.Hls.prototype.src = function(src) {
if (newBitrate > oldBitrate && segmentDlTime <= threshold) {
this.playlists.media(selectedPlaylist);
loaderHandler = videojs.bind(this, function() {
setupEvents.call(this);
this.setupFirstPlay();
this.fillBuffer();
player.trigger('loadedmetadata');
this.playlists.off('loadedplaylist', loaderHandler);
});
this.playlists.on('loadedplaylist', loaderHandler);
} else {
setupEvents.call(this);
this.setupFirstPlay();
this.fillBuffer();
player.trigger('loadedmetadata');
}
}));
@@ -306,11 +313,14 @@ videojs.Hls.prototype.setupMetadataCueTranslation_ = function() {
videojs.Hls.prototype.addCuesForMetadata_ = function(segmentInfo) {
var i, cue, frame, metadata, minPts, segment, segmentOffset, textTrack, time;
segmentOffset = videojs.Hls.Playlist.duration(segmentInfo.playlist,
segmentInfo.playlist.mediaSequence,
segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex);
segmentOffset = this.playlists.expiredPreDiscontinuity_;
segmentOffset += this.playlists.expiredPostDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(segmentInfo.playlist,
segmentInfo.playlist.mediaSequence,
segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex);
segment = segmentInfo.playlist.segments[segmentInfo.mediaIndex];
minPts = Math.min(segment.minVideoPts, segment.minAudioPts);
minPts = Math.min(isFinite(segment.minVideoPts) ? segment.minVideoPts : Infinity,
isFinite(segment.minAudioPts) ? segment.minAudioPts : Infinity);
while (segmentInfo.pendingMetadata.length) {
metadata = segmentInfo.pendingMetadata[0].metadata;
@@ -322,6 +332,7 @@ videojs.Hls.prototype.addCuesForMetadata_ = function(segmentInfo) {
time = segmentOffset + ((metadata.pts - minPts) * 0.001);
cue = new window.VTTCue(time, time, frame.value || frame.url || '');
cue.frame = frame;
cue.pts_ = metadata.pts;
textTrack.addCue(cue);
}
segmentInfo.pendingMetadata.shift();
@@ -329,6 +340,33 @@ videojs.Hls.prototype.addCuesForMetadata_ = function(segmentInfo) {
};
/**
* Seek to the latest media position if this is a live video and the
* player and video are loaded and initialized.
*/
videojs.Hls.prototype.setupFirstPlay = function() {
var seekable, media;
media = this.playlists.media();
// check that everything is ready to begin buffering
if (!this.hasPlayed_ &&
this.sourceBuffer &&
media &&
this.paused() === false) {
// only run this block once per video
this.hasPlayed_ = true;
if (this.duration() === Infinity) {
// seek to the latest media position for live videos
seekable = this.seekable();
if (seekable.length) {
this.setCurrentTime(seekable.end(0));
}
}
}
};
/**
* Reset the mediaIndex if play() is called after the video has
* ended.
*/
@@ -337,25 +375,20 @@ videojs.Hls.prototype.play = function() {
this.mediaIndex = 0;
}
// we may need to seek to begin playing safely for live playlists
if (this.duration() === Infinity) {
// if this is the first time we're playing the stream or we're
// ahead of the latest safe playback position, seek to the live
// point
if (!this.player().hasClass('vjs-has-started') ||
this.currentTime() > this.seekable().end(0)) {
this.setCurrentTime(this.seekable().end(0));
if (!this.hasPlayed_) {
videojs.Flash.prototype.play.apply(this, arguments);
return this.setupFirstPlay();
}
} else if (this.currentTime() < this.seekable().start(0)) {
// if the viewer has paused and we fell out of the live window,
// seek forward to the earliest available position
this.setCurrentTime(this.seekable().start(0));
}
// if the viewer has paused and we fell out of the live window,
// seek forward to the earliest available position
if (this.duration() === Infinity &&
this.currentTime() < this.seekable().start(0)) {
this.setCurrentTime(this.seekable().start(0));
}
// delegate back to the Flash implementation
return videojs.Flash.prototype.play.apply(this, arguments);
videojs.Flash.prototype.play.apply(this, arguments);
};
videojs.Hls.prototype.currentTime = function() {
@@ -396,7 +429,9 @@ videojs.Hls.prototype.setCurrentTime = function(currentTime) {
this.mediaIndex = this.playlists.getMediaIndexForTime_(currentTime);
// abort any segments still being decoded
this.sourceBuffer.abort();
if (this.sourceBuffer) {
this.sourceBuffer.abort();
}
// cancel outstanding requests and buffer appends
this.cancelSegmentXhr();
@@ -436,6 +471,10 @@ videojs.Hls.prototype.seekable = function() {
// report the seekable range relative to the earliest possible
// position when the stream was first loaded
currentSeekable = videojs.Hls.Playlist.seekable(media);
if (!currentSeekable.length) {
return currentSeekable;
}
startOffset = this.playlists.expiredPostDiscontinuity_ - this.playlists.expiredPreDiscontinuity_;
return videojs.createTimeRange(startOffset,
startOffset + (currentSeekable.end(0) - currentSeekable.start(0)));
@@ -679,7 +718,7 @@ videojs.Hls.prototype.fillBuffer = function(offset) {
// begin buffering so we don't preload data that will never be
// played
if (!this.playlists.media().endList &&
!this.player().hasClass('vjs-has-started') &&
!player.hasClass('vjs-has-started') &&
offset === undefined) {
return;
}
@@ -920,22 +959,24 @@ videojs.Hls.prototype.drainBuffer = function(event) {
// FLV tags until we find the one that is closest to the desired
// playback time
if (typeof offset === 'number') {
// determine the offset within this segment we're seeking to
segmentOffset = this.playlists.expiredPostDiscontinuity_ + this.playlists.expiredPreDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(playlist,
playlist.mediaSequence,
playlist.mediaSequence + mediaIndex);
segmentOffset = offset - (segmentOffset * 1000);
ptsTime = segmentOffset + tags[0].pts;
while (tags[i + 1] && tags[i].pts < ptsTime) {
i++;
}
if (tags.length) {
// determine the offset within this segment we're seeking to
segmentOffset = this.playlists.expiredPostDiscontinuity_ + this.playlists.expiredPreDiscontinuity_;
segmentOffset += videojs.Hls.Playlist.duration(playlist,
playlist.mediaSequence,
playlist.mediaSequence + mediaIndex);
segmentOffset = offset - (segmentOffset * 1000);
ptsTime = segmentOffset + tags[0].pts;
while (tags[i + 1] && tags[i].pts < ptsTime) {
i++;
}
// tell the SWF the media position of the first tag we'll be delivering
this.el().vjs_setProperty('currentTime', ((tags[i].pts - ptsTime + offset) * 0.001));
// tell the SWF the media position of the first tag we'll be delivering
this.el().vjs_setProperty('currentTime', ((tags[i].pts - ptsTime + offset) * 0.001));
tags = tags.slice(i);
tags = tags.slice(i);
}
this.lastSeekedTime_ = null;
}
......
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>video.js HLS Stats</title>
<link href="../../node_modules/video.js/dist/video-js/video-js.css" rel="stylesheet">
<!-- video.js -->
<script src="../../node_modules/video.js/dist/video-js/video.dev.js"></script>
<!-- Media Sources plugin -->
<script src="../../node_modules/videojs-contrib-media-sources/src/videojs-media-sources.js"></script>
<!-- HLS plugin -->
<script src="../../src/videojs-hls.js"></script>
<!-- segment handling -->
<script src="../../src/xhr.js"></script>
<script src="../../src/flv-tag.js"></script>
<script src="../../src/stream.js"></script>
<script src="../../src/exp-golomb.js"></script>
<script src="../../src/h264-extradata.js"></script>
<script src="../../src/h264-stream.js"></script>
<script src="../../src/aac-stream.js"></script>
<script src="../../src/metadata-stream.js"></script>
<script src="../../src/segment-parser.js"></script>
<!-- m3u8 handling -->
<script src="../../src/m3u8/m3u8-parser.js"></script>
<script src="../../src/playlist.js"></script>
<script src="../../src/playlist-loader.js"></script>
<script src="../../node_modules/pkcs7/dist/pkcs7.unpad.js"></script>
<script src="../../src/decrypter.js"></script>
<!-- player stats visualization -->
<link href="stats.css" rel="stylesheet">
<script src="../switcher/js/vendor/d3.min.js"></script>
<!-- debugging -->
<script src="../../src/bin-utils.js"></script>
<style>
body {
font-family: Arial, sans-serif;
margin: 20px;
}
.info {
background-color: #eee;
border: thin solid #333;
border-radius: 3px;
padding: 0 5px;
margin: 20px 0;
}
</style>
</head>
<body>
<div class="info">
<p>The video below is an <a href="https://developer.apple.com/library/ios/documentation/networkinginternet/conceptual/streamingmediaguide/Introduction/Introduction.html#//apple_ref/doc/uid/TP40008332-CH1-SW1">HTTP Live Stream</a>. On desktop browsers other than Safari, the HLS plugin will polyfill support for the format on top of the video.js Flash tech.</p>
<p>Due to security restrictions in Flash, you will have to load this page over HTTP(S) to see the example in action.</p>
</div>
<video id="video"
class="video-js vjs-default-skin"
height="300"
width="600"
controls>
<source
src="http://s3.amazonaws.com/_bc_dml/example-content/bipbop-id3/index.m3u8"
type="application/x-mpegURL">
</video>
<section class="stats">
<h2>Player Stats</h2>
<div class="segment-timeline"></div>
<dl>
<dt>Current Time:</dt>
<dd class="current-time-stat">0</dd>
<dt>Buffered:</dt>
<dd><span class="buffered-start-stat">-</span> - <span class="buffered-end-stat">-</span></dd>
<dt>Seekable:</dt>
<dd><span class="seekable-start-stat">-</span> - <span class="seekable-end-stat">-</span></dd>
<dt>Video Bitrate:</dt>
<dd class="video-bitrate-stat">0 kbps</dd>
<dt>Measured Bitrate:</dt>
<dd class="measured-bitrate-stat">0 kbps</dd>
</dl>
<div class="switching-stats">
Once the player begins loading, you'll see information about the
adaptive quality switching behavior here.
</div>
</section>
<script src="stats.js"></script>
<script>
videojs.options.flash.swf = '../../node_modules/videojs-swf/dist/video-js.swf';
// initialize the player
var player = videojs('video');
// ------------
// Player Stats
// ------------
var currentTimeStat = document.querySelector('.current-time-stat');
var bufferedStartStat = document.querySelector('.buffered-start-stat');
var bufferedEndStat = document.querySelector('.buffered-end-stat');
var seekableStartStat = document.querySelector('.seekable-start-stat');
var seekableEndStat = document.querySelector('.seekable-end-stat');
var videoBitrateState = document.querySelector('.video-bitrate-stat');
var measuredBitrateStat = document.querySelector('.measured-bitrate-stat');
player.on('timeupdate', function() {
currentTimeStat.textContent = player.currentTime().toFixed(1);
});
player.on('progress', function() {
var oldStart, oldEnd;
// buffered
var buffered = player.buffered();
if (buffered.length) {
oldStart = bufferedStartStat.textContent;
if (buffered.start(0).toFixed(1) !== oldStart) {
bufferedStartStat.textContent = buffered.start(0).toFixed(1);
}
oldEnd = bufferedEndStat.textContent;
if (buffered.end(0).toFixed(1) !== oldEnd) {
bufferedEndStat.textContent = buffered.end(0).toFixed(1);
}
}
// seekable
var seekable = player.seekable();
if (seekable && seekable.length) {
oldStart = seekableStartStat.textContent;
if (seekable.start(0).toFixed(1) !== oldStart) {
seekableStartStat.textContent = seekable.start(0).toFixed(1);
}
oldEnd = seekableEndStat.textContent;
if (seekable.end(0).toFixed(1) !== oldEnd) {
seekableEndStat.textContent = seekable.end(0).toFixed(1);
}
}
// bitrates
var playlist = player.hls.playlists.media();
if (playlist && playlist.attributes && playlist.attributes.BANDWIDTH) {
videoBitrateState.textContent = (playlist.attributes.BANDWIDTH / 1024).toLocaleString(undefined, {
maximumFractionDigits: 1
}) + ' kbps';
}
if (player.hls.bandwidth) {
measuredBitrateStat.textContent = (player.hls.bandwidth / 1024).toLocaleString(undefined, {
maximumFractionDigits: 1
}) + ' kbps';
}
});
videojs.Hls.displayStats(document.querySelector('.switching-stats'), player);
videojs.Hls.displayCues(document.querySelector('.segment-timeline'), player);
</script>
</body>
</html>
.axis text,
.cue text {
font: 12px sans-serif;
}
.axis line,
.axis path,
.intersect {
fill: none;
stroke: #000;
}
.cue {
width: 20px;
height: 20px;
}
.cue text {
display: none;
}
.cue:hover text {
display: block;
}
.intersect {
fill: none;
stroke: #000;
stroke-dasharray: 2,2;
}
(function(window, videojs, undefined) {
'use strict';
// -------------
// Initial Setup
// -------------
var d3 = window.d3;
var setupGraph = function(element) {
element.innerHTML = '';
// setup the display
var margin = {
top: 20,
right: 80,
bottom: 30,
left: 50
};
var width = 600 - margin.left - margin.right;
var height = 300 - margin.top - margin.bottom;
var svg = d3.select(element)
.append('svg')
.attr('width', width + margin.left + margin.right)
.attr('height', height + margin.top + margin.bottom)
.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
// setup the timeline
var x = d3.time.scale().range([0, width]); // d3.scale.linear().range([0, width]);
var y = d3.scale.linear().range([height, 0]);
x.domain([new Date(), new Date(Date.now() + (5 * 60 * 1000))]);
y.domain([0, 5 * 1024 * 1024 * 8]);
var timeAxis = d3.svg.axis().scale(x).orient('bottom');
var tickFormatter = d3.format(',.0f');
var bitrateAxis = d3.svg.axis()
.scale(y)
.tickFormat(function(value) {
return tickFormatter(value / 1024);
})
.orient('left');
// time axis
svg.selectAll('.axis').remove();
svg.append('g')
.attr('class', 'x axis')
.attr('transform', 'translate(0,' + height + ')')
.call(timeAxis);
// bitrate axis
svg.append('g')
.attr('class', 'y axis')
.call(bitrateAxis)
.append('text')
.attr('transform', 'rotate(-90)')
.attr('y', 6)
.attr('dy', '.71em')
.style('text-anchor', 'end')
.text('Bitrate (kb/s)');
};
// ---------------
// Dynamic Updates
// ---------------
var displayStats = function(element, player) {
setupGraph(element, player);
};
// -----------------
// Cue Visualization
// -----------------
var Playlist = videojs.Hls.Playlist;
var margin = {
top: 8,
right: 8,
bottom: 20,
left: 80
};
var width = 600 - margin.left - margin.right;
var height = 600 - margin.top - margin.bottom;
var mediaDomain = function(media, player) {
var segments = media.segments;
var end = player.hls.playlists.expiredPreDiscontinuity_;
end += player.hls.playlists.expiredPostDiscontinuity_;
end += Playlist.duration(media,
media.mediaSequence,
media.mediaSequence + segments.length);
return [0, end];
};
var ptsDomain = function(segments, mediaScale, mediaOffset) {
mediaOffset = mediaOffset * 1000 || 0;
var start = mediaScale.domain()[0] * 1000;
var segment = segments[0];
if (segment &&
    (segment.minAudioPts !== undefined ||
     segment.minVideoPts !== undefined)) {
start = Math.min(segment.minAudioPts || Infinity,
segment.minVideoPts || Infinity);
}
start -= mediaOffset;
return [
start,
(mediaScale.domain()[1] - mediaScale.domain()[0]) * 1000 + start
];
};
var svgUpdateCues = function(svg, mediaScale, ptsScale, y, cues) {
cues = Array.prototype.slice.call(cues).filter(function(cue) {
return cue.startTime > mediaScale.domain()[0] &&
cue.startTime < mediaScale.domain()[1];
});
var points = svg.selectAll('.cue').data(cues, function(cue) {
return cue.pts_ + ' -> ' + cue.startTime;
});
points.attr('transform', function(cue) {
return 'translate(' + mediaScale(cue.startTime) + ',' + ptsScale(cue.pts_) + ')';
});
var enter = points.enter().append('g')
.attr('class', 'cue');
enter.append('circle')
.attr('r', 5)
.attr('data-time', function(cue) {
return cue.startTime;
})
.attr('data-pts', function(cue) {
return cue.pts_;
});
enter.append('text')
.attr('transform', 'translate(8,0)')
.text(function(cue) {
return 'time: ' + videojs.formatTime(cue.startTime);
});
enter.append('text')
.attr('transform', 'translate(8,16)')
.text(function(cue) {
return 'pts: ' + cue.pts_;
});
points.exit().remove();
};
var svgUpdateAxes = function(svg, mediaScale, ptsScale) {
// media timeline axis
var mediaAxis = d3.svg.axis().scale(mediaScale).orient('bottom');
svg.select('.axis.media')
.transition().duration(500)
.call(mediaAxis);
// presentation timeline axis
if (!isFinite(ptsScale.domain()[0]) || !isFinite(ptsScale.domain()[1])) {
return;
}
var ptsAxis = d3.svg.axis().scale(ptsScale).orient('left');
svg.select('.axis.presentation')
.transition().duration(500)
.call(ptsAxis);
};
var svgRenderSegmentTimeline = function(container, player) {
var media = player.hls.playlists.media();
var segments = media.segments; // media.segments.slice(0, count);
// setup the display
var svg = d3.select(container)
.append('svg')
.attr('width', width + margin.left + margin.right)
.attr('height', height + margin.top + margin.bottom)
.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
// setup the scales
var mediaScale = d3.scale.linear().range([0, width]);
mediaScale.domain(mediaDomain(media, player));
var ptsScale = d3.scale.linear().range([height, 0]);
ptsScale.domain(ptsDomain(segments, mediaScale));
// render
var mediaAxis = d3.svg.axis().scale(mediaScale).orient('bottom');
svg.append('g')
.attr('class', 'x axis media')
.attr('transform', 'translate(0,' + height + ')')
.call(mediaAxis);
var ptsAxis = d3.svg.axis().scale(ptsScale).orient('left');
svg.append('g')
.attr('class', 'y axis presentation')
.call(ptsAxis);
svg.append('path')
.attr('class', 'intersect')
.attr('d', 'M0,' + height + 'L' + width +',0');
var mediaOffset = 0;
// update everything on progress
player.on('progress', function() {
var updatedMedia = player.hls.playlists.media();
var segments = updatedMedia.segments; // updatedMedia.segments.slice(currentIndex, currentIndex + count);
if (updatedMedia.mediaSequence !== media.mediaSequence) {
mediaOffset += Playlist.duration(media,
media.mediaSequence,
updatedMedia.mediaSequence);
media = updatedMedia;
}
mediaScale.domain(mediaDomain(updatedMedia, player));
ptsScale.domain(ptsDomain(segments, mediaScale, mediaOffset));
svgUpdateAxes(svg, mediaScale, ptsScale, updatedMedia, segments);
if (!isFinite(ptsScale.domain()[0]) || !isFinite(ptsScale.domain()[1])) {
return;
}
for (var i = 0; i < player.textTracks().length; i++) {
var track = player.textTracks()[i];
svgUpdateCues(svg, mediaScale, ptsScale, ptsScale, track.cues);
}
});
};
var displayCues = function(container, player) {
var media = player.hls.playlists.media();
if (media && media.segments) {
svgRenderSegmentTimeline(container, player);
} else {
player.one('loadedmetadata', function() {
svgRenderSegmentTimeline(container, player);
});
}
};
// export
videojs.Hls.displayStats = displayStats;
videojs.Hls.displayCues = displayCues;
})(window, window.videojs);
@@ -50,10 +50,19 @@ var
};
tech = player.el().querySelector('.vjs-tech');
tech.vjs_getProperty = function() {};
tech.vjs_getProperty = function(name) {
if (name === 'paused') {
return this.paused_;
}
};
tech.vjs_setProperty = function() {};
tech.vjs_src = function() {};
tech.vjs_play = function() {};
tech.vjs_play = function() {
this.paused_ = false;
};
tech.vjs_pause = function() {
this.paused_ = true;
};
tech.vjs_discontinuity = function() {};
videojs.Flash.onReady(tech.id);
@@ -226,6 +235,46 @@ test('starts playing if autoplay is specified', function() {
strictEqual(1, plays, 'play was called');
});
test('autoplay seeks to the live point after playlist load', function() {
var currentTime = 0;
player.options().autoplay = true;
player.hls.setCurrentTime = function(time) {
currentTime = time;
return currentTime;
};
player.hls.currentTime = function() {
return currentTime;
};
player.src({
src: 'liveStart30sBefore.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
standardXHRResponse(requests.shift());
notEqual(currentTime, 0, 'seeked on autoplay');
});
test('autoplay seeks to the live point after media source open', function() {
var currentTime = 0;
player.options().autoplay = true;
player.hls.setCurrentTime = function(time) {
currentTime = time;
return currentTime;
};
player.hls.currentTime = function() {
return currentTime;
};
player.src({
src: 'liveStart30sBefore.m3u8',
type: 'application/vnd.apple.mpegurl'
});
standardXHRResponse(requests.shift());
openMediaSource(player);
notEqual(currentTime, 0, 'seeked on autoplay');
});
test('creates a PlaylistLoader on init', function() {
var loadedmetadata = false;
player.on('loadedmetadata', function() {
@@ -1424,6 +1473,73 @@ test('translates ID3 PTS values to cue media timeline positions', function() {
equal(track.cues[0].endTime, 1, 'translated endTime');
});
test('translates ID3 PTS values with expired segments', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'live.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.play();
// 20.9 seconds of content have expired
player.hls.playlists.expiredPostDiscontinuity_ = 20.9;
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}]
});
};
requests.shift().respond(200, null,
'#EXTM3U\n' +
'#EXT-X-MEDIA-SEQUENCE:2\n' +
'#EXTINF:10,\n' +
'2.ts\n' +
'#EXTINF:10,\n' +
'3.ts\n'); // media
standardXHRResponse(requests.shift()); // segment 0
track = player.textTracks()[0];
equal(track.cues[0].startTime, 20.9 + 1, 'translated startTime');
equal(track.cues[0].endTime, 20.9 + 1, 'translated endTime');
});
test('translates id3 PTS values for audio-only media', function() {
var tags = [{ pts: 4 * 1000, bytes: new Uint8Array(1) }], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
player.src({
src: 'manifest/media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
openMediaSource(player);
player.hls.segmentParser_.parseSegmentBinaryData = function() {
// trigger a metadata event
player.hls.segmentParser_.metadataStream.trigger('data', {
pts: 5 * 1000,
data: new Uint8Array([]),
frames: [{
id: 'TXXX',
value: 'cue text'
}]
});
};
player.hls.segmentParser_.stats.h264Tags = function() { return 0; };
player.hls.segmentParser_.stats.minVideoPts = null;
standardXHRResponse(requests.shift()); // media
standardXHRResponse(requests.shift()); // segment 0
track = player.textTracks()[0];
equal(track.cues[0].startTime, 1, 'translated startTime');
});
test('translates ID3 PTS values across discontinuities', function() {
var tags = [], events = [], track;
videojs.Hls.SegmentParser = mockSegmentParser(tags);
@@ -1699,6 +1815,7 @@ test('live playlist starts three target durations before live', function() {
equal(player.hls.mediaIndex, 0, 'waits for the first play to start buffering');
equal(requests.length, 0, 'no outstanding segment request');
player.hls.paused = function() { return false; };
player.play();
mediaPlaylist = player.hls.playlists.media();
equal(player.hls.mediaIndex, 1, 'mediaIndex is updated at play');
@@ -1758,7 +1875,7 @@ test('resets the time to a seekable position when resuming a live stream ' +
'16.ts\n');
// mock out the player to simulate a live stream that has been
// playing for a while
player.addClass('vjs-has-started');
player.hls.hasPlayed_ = true;
player.hls.seekable = function() {
return {
start: function() {
@@ -1766,7 +1883,8 @@
},
end: function() {
return 170;
}
},
length: 1
};
};
player.hls.currentTime = function() {
@@ -1780,12 +1898,6 @@ test('resets the time to a seekable position when resuming a live stream ' +
player.play();
equal(seekTarget, player.seekable().start(0), 'seeked to the start of seekable');
player.hls.currentTime = function() {
return 180;
};
player.play();
equal(seekTarget, player.seekable().end(0), 'seeked to the end of seekable');
});
test('clamps seeks to the seekable window', function() {
@@ -2015,6 +2127,18 @@ test('clears the segment buffer on seek', function() {
strictEqual(aborts, 1, 'cleared the segment buffer on a seek');
});
test('can seek before the source buffer opens', function() {
player.src({
src: 'media.m3u8',
type: 'application/vnd.apple.mpegurl'
});
standardXHRResponse(requests.shift());
player.triggerReady();
player.currentTime(1);
equal(player.currentTime(), 1, 'seeked');
});
test('continues playing after seek to discontinuity', function() {
var aborts = 0, tags = [], currentTime, bufferEnd, oldCurrentTime;
......