update collapsible

Luke Pulverenti 2016-07-02 14:05:40 -04:00
parent f351643d29
commit 891a865464
42 changed files with 1185 additions and 536 deletions

@@ -452,7 +452,7 @@ var AbrController = function (_EventHandler) {
ewmaFast = config.abrEwmaFastVoD;
ewmaSlow = config.abrEwmaSlowVoD;
}
this.bwEstimator = new _ewmaBandwidthEstimator2.default(hls, ewmaSlow, ewmaFast);
this.bwEstimator = new _ewmaBandwidthEstimator2.default(hls, ewmaSlow, ewmaFast, config.abrEwmaDefaultEstimate);
}
var frag = data.frag;
@@ -603,7 +603,7 @@ var AbrController = function (_EventHandler) {
return Math.min(this._nextAutoLevel, maxAutoLevel);
}
var avgbw = this.bwEstimator.getEstimate(),
var avgbw = this.bwEstimator ? this.bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate,
adjustedbw = void 0;
// follow algorithm captured from stagefright :
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
@@ -1251,11 +1251,11 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var EwmaBandWidthEstimator = function () {
function EwmaBandWidthEstimator(hls, slow, fast) {
function EwmaBandWidthEstimator(hls, slow, fast, defaultEstimate) {
_classCallCheck(this, EwmaBandWidthEstimator);
this.hls = hls;
this.defaultEstimate_ = 5e5; // 500kbps
this.defaultEstimate_ = defaultEstimate;
this.minWeight_ = 0.001;
this.minDelayMs_ = 50;
this.slow_ = new _ewma2.default(slow);
@@ -1277,7 +1277,7 @@ var EwmaBandWidthEstimator = function () {
}, {
key: 'getEstimate',
value: function getEstimate() {
if (!this.fast_ || this.fast_.getTotalWeight() < this.minWeight_) {
if (!this.fast_ || !this.slow_ || this.fast_.getTotalWeight() < this.minWeight_) {
return this.defaultEstimate_;
}
//console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
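The two hunks above thread a configurable default bandwidth through to the EWMA estimator: the constructor now receives config.abrEwmaDefaultEstimate instead of a hard-coded 500 kbps, and getEstimate() falls back to that default while either moving average is missing or still below minWeight_. A standalone sketch of that fallback path (simplified, not the bundled code) could look like this:

// Simplified sketch, not the bundled hls.js code: return the configured
// default bitrate until the fast EWMA has accumulated enough sample weight.
function BandwidthEstimatorSketch(defaultEstimate, minWeight) {
  this.defaultEstimate_ = defaultEstimate; // e.g. config.abrEwmaDefaultEstimate
  this.minWeight_ = minWeight;             // e.g. 0.001
  this.fast_ = null;                       // created lazily on the first sample
  this.slow_ = null;
}
BandwidthEstimatorSketch.prototype.getEstimate = function () {
  if (!this.fast_ || !this.slow_ || this.fast_.getTotalWeight() < this.minWeight_) {
    return this.defaultEstimate_;          // no usable samples yet
  }
  // once warmed up, take the more conservative of the two moving averages
  return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
};

// new BandwidthEstimatorSketch(5e5, 0.001).getEstimate() === 500000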
@@ -1739,9 +1739,7 @@ var StreamController = function (_EventHandler) {
}
}, {
key: 'startLoad',
value: function startLoad() {
var startPosition = arguments.length <= 0 || arguments[0] === undefined ? 0 : arguments[0];
value: function startLoad(startPosition) {
if (this.levels) {
var media = this.media,
lastCurrentTime = this.lastCurrentTime;
@@ -1752,7 +1750,7 @@ var StreamController = function (_EventHandler) {
}
this.level = -1;
this.fragLoadError = 0;
if (media && lastCurrentTime) {
if (media && lastCurrentTime > 0) {
_logger.logger.log('configure startPosition @' + lastCurrentTime);
if (!this.lastPaused) {
_logger.logger.log('resuming video');
@@ -1978,10 +1976,10 @@ var StreamController = function (_EventHandler) {
var deltaPTS = fragPrevious.deltaPTS,
curSNIdx = frag.sn - levelDetails.startSN;
// if there is a significant delta between audio and video, larger than max allowed hole,
// it might be because video fragment does not start with a keyframe.
// and if previous remuxed fragment did not start with a keyframe. (fragPrevious.dropped)
// let's try to load previous fragment again to get last keyframe
// then we will reload again current fragment (that way we should be able to fill the buffer hole ...)
if (deltaPTS && deltaPTS > config.maxBufferHole) {
if (deltaPTS && deltaPTS > config.maxBufferHole && fragPrevious.dropped) {
frag = fragments[curSNIdx - 1];
_logger.logger.warn('SN just loaded, with large PTS gap between audio and video, maybe frag is not starting with a keyframe ? load previous one to try to overcome this');
// decrement previous frag load counter to avoid frag loop loading error when next fragment will get reloaded
@@ -1990,6 +1988,10 @@ var StreamController = function (_EventHandler) {
frag = fragments[curSNIdx + 1];
_logger.logger.log('SN just loaded, load next one: ' + frag.sn);
}
// ensure frag is not undefined
if (!frag) {
break;
}
} else {
// have we reached end of VOD playlist ?
if (!levelDetails.live) {
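The keyframe-recovery hunks above tighten when the previous fragment gets re-fetched: a large audio/video PTS gap alone is no longer enough, the previously remuxed fragment must also have dropped samples before its first keyframe (fragPrevious.dropped), and the walk through the fragment list is now guarded against an undefined frag. A condensed restatement of that decision (standalone sketch, helper name is hypothetical) is:

// Standalone sketch, hypothetical helper name: only step back one fragment when
// the PTS gap exceeds maxBufferHole AND the previous remuxed fragment actually
// dropped frames before its first keyframe.
function shouldReloadPreviousFragment(fragPrevious, config) {
  return !!fragPrevious &&
         fragPrevious.deltaPTS > config.maxBufferHole &&
         fragPrevious.dropped > 0;
}

// shouldReloadPreviousFragment({deltaPTS: 0.8, dropped: 3}, {maxBufferHole: 0.5}) === true
// shouldReloadPreviousFragment({deltaPTS: 0.8, dropped: 0}, {maxBufferHole: 0.5}) === false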
@@ -2158,8 +2160,16 @@ var StreamController = function (_EventHandler) {
_logger.logger.log('immediateLevelSwitch');
if (!this.immediateSwitch) {
this.immediateSwitch = true;
this.previouslyPaused = this.media.paused;
this.media.pause();
var media = this.media,
previouslyPaused = void 0;
if (media) {
previouslyPaused = media.paused;
media.pause();
} else {
// don't restart playback after instant level switch in case media not attached
previouslyPaused = true;
}
this.previouslyPaused = previouslyPaused;
}
var fragCurrent = this.fragCurrent;
if (fragCurrent && fragCurrent.loader) {
@@ -2257,8 +2267,9 @@ var StreamController = function (_EventHandler) {
media.addEventListener('seeking', this.onvseeking);
media.addEventListener('seeked', this.onvseeked);
media.addEventListener('ended', this.onvended);
if (this.levels && this.config.autoStartLoad) {
this.hls.startLoad();
var config = this.config;
if (this.levels && config.autoStartLoad) {
this.hls.startLoad(config.startPosition);
}
}
}, {
@@ -2373,8 +2384,9 @@ var StreamController = function (_EventHandler) {
this.levels = data.levels;
this.startLevelLoaded = false;
this.startFragRequested = false;
if (this.config.autoStartLoad) {
this.hls.startLoad();
var config = this.config;
if (config.autoStartLoad) {
this.hls.startLoad(config.startPosition);
}
}
}, {
@@ -2411,12 +2423,23 @@ var StreamController = function (_EventHandler) {
curLevel.details = newDetails;
this.hls.trigger(_events2.default.LEVEL_UPDATED, { details: newDetails, level: newLevelId });
// compute start position
if (this.startFragRequested === false) {
// if live playlist, set start position to be fragment N-this.config.liveSyncDurationCount (usually 3)
if (newDetails.live) {
var targetLatency = this.config.liveSyncDuration !== undefined ? this.config.liveSyncDuration : this.config.liveSyncDurationCount * newDetails.targetduration;
this.startPosition = Math.max(0, sliding + duration - targetLatency);
// compute start position if set to -1. use it straight away if value is defined
if (this.startPosition === -1) {
// first, check if start time offset has been set in playlist, if yes, use this value
var startTimeOffset = newDetails.startTimeOffset;
if (!isNaN(startTimeOffset)) {
_logger.logger.log('start time offset found in playlist, adjust startPosition to ' + startTimeOffset);
this.startPosition = startTimeOffset;
} else {
// if live playlist, set start position to be fragment N-this.config.liveSyncDurationCount (usually 3)
if (newDetails.live) {
var targetLatency = this.config.liveSyncDuration !== undefined ? this.config.liveSyncDuration : this.config.liveSyncDurationCount * newDetails.targetduration;
this.startPosition = Math.max(0, sliding + duration - targetLatency);
} else {
this.startPosition = 0;
}
}
}
this.nextLoadPosition = this.startPosition;
}
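Taken together, the onLevelLoaded hunk above establishes a clear precedence for the start position: an explicit config.startPosition (or startLoad() argument) wins, otherwise the playlist's #EXT-X-START time offset, otherwise the live sync point, otherwise 0. A condensed standalone sketch of that order (not the bundled code) is:

// Condensed sketch of the resolution order above (standalone, not hls.js itself).
function resolveStartPosition(startPosition, details, config, sliding, duration) {
  if (startPosition !== -1) {
    return startPosition;                   // explicit value from config / startLoad()
  }
  if (!isNaN(details.startTimeOffset)) {
    return details.startTimeOffset;         // #EXT-X-START:TIME-OFFSET=... from the playlist
  }
  if (details.live) {
    var targetLatency = config.liveSyncDuration !== undefined ?
      config.liveSyncDuration :
      config.liveSyncDurationCount * details.targetduration;
    return Math.max(0, sliding + duration - targetLatency); // N fragments back from the live edge
  }
  return 0;                                 // VoD with no hint: start at the beginning
}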
@@ -2474,7 +2497,7 @@ var StreamController = function (_EventHandler) {
}
}
this.pendingAppending = 0;
_logger.logger.log('Demuxing ' + sn + ' of [' + details.startSN + ' ,' + details.endSN + '],level ' + level);
_logger.logger.log('Demuxing ' + sn + ' of [' + details.startSN + ' ,' + details.endSN + '],level ' + level + ', cc ' + fragCurrent.cc);
var demuxer = this.demuxer;
if (demuxer) {
demuxer.push(data.payload, audioCodec, currentLevel.videoCodec, start, fragCurrent.cc, level, sn, duration, fragCurrent.decryptdata);
@@ -2576,12 +2599,17 @@ var StreamController = function (_EventHandler) {
var level = this.levels[this.level],
frag = this.fragCurrent;
_logger.logger.log('parsed ' + data.type + ',PTS:[' + data.startPTS.toFixed(3) + ',' + data.endPTS.toFixed(3) + '],DTS:[' + data.startDTS.toFixed(3) + '/' + data.endDTS.toFixed(3) + '],nb:' + data.nb);
_logger.logger.log('parsed ' + data.type + ',PTS:[' + data.startPTS.toFixed(3) + ',' + data.endPTS.toFixed(3) + '],DTS:[' + data.startDTS.toFixed(3) + '/' + data.endDTS.toFixed(3) + '],nb:' + data.nb + ',dropped:' + (data.dropped || 0));
var drift = _levelHelper2.default.updateFragPTSDTS(level.details, frag.sn, data.startPTS, data.endPTS, data.startDTS, data.endDTS),
hls = this.hls;
hls.trigger(_events2.default.LEVEL_PTS_UPDATED, { details: level.details, level: this.level, drift: drift });
// has remuxer dropped video frames located before first keyframe ?
if (data.type === 'video') {
frag.dropped = data.dropped;
}
[data.data1, data.data2].forEach(function (buffer) {
if (buffer) {
_this2.pendingAppending++;
@@ -3905,7 +3933,7 @@ var DemuxerWorker = function DemuxerWorker(self) {
});
observer.on(_events2.default.FRAG_PARSING_DATA, function (ev, data) {
var objData = { event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, data1: data.data1.buffer, data2: data.data2.buffer, nb: data.nb };
var objData = { event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, data1: data.data1.buffer, data2: data.data2.buffer, nb: data.nb, dropped: data.dropped };
// pass data1/data2 as transferable object (no copy)
self.postMessage(objData, [objData.data1, objData.data2]);
});
@@ -4055,7 +4083,8 @@ var Demuxer = function () {
startDTS: data.startDTS,
endDTS: data.endDTS,
type: data.type,
nb: data.nb
nb: data.nb,
dropped: data.dropped
});
break;
case _events2.default.FRAG_PARSING_METADATA:
@@ -4653,7 +4682,7 @@ var TSDemuxer = function () {
value: function switchLevel() {
this.pmtParsed = false;
this._pmtId = -1;
this._avcTrack = { container: 'video/mp2t', type: 'video', id: -1, sequenceNumber: 0, samples: [], len: 0, nbNalu: 0 };
this._avcTrack = { container: 'video/mp2t', type: 'video', id: -1, sequenceNumber: 0, samples: [], len: 0, nbNalu: 0, dropped: 0 };
this._aacTrack = { container: 'video/mp2t', type: 'audio', id: -1, sequenceNumber: 0, samples: [], len: 0 };
this._id3Track = { type: 'id3', id: -1, sequenceNumber: 0, samples: [], len: 0 };
this._txtTrack = { type: 'text', id: -1, sequenceNumber: 0, samples: [], len: 0 };
@@ -4999,6 +5028,9 @@ var TSDemuxer = function () {
samples.push(avcSample);
track.len += length;
track.nbNalu += units2.length;
} else {
// dropped samples, track it
track.dropped++;
}
units2 = [];
length = 0;
@@ -5966,6 +5998,7 @@ var Hls = function () {
if (!Hls.defaultConfig) {
Hls.defaultConfig = {
autoStartLoad: true,
startPosition: -1,
debug: false,
capLevelToPlayerSize: false,
maxBufferLength: 30,
@@ -6010,6 +6043,7 @@ var Hls = function () {
abrEwmaSlowLive: 9,
abrEwmaFastVoD: 4,
abrEwmaSlowVoD: 15,
abrEwmaDefaultEstimate: 5e5, // 500 kbps
abrBandWidthFactor: 0.8,
abrBandWidthUpFactor: 0.7
};
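With startPosition and abrEwmaDefaultEstimate now part of Hls.defaultConfig, an embedder can override both up front. A hypothetical usage example (the element id and manifest URL are placeholders) might be:

// Hypothetical embedder usage; element id and URL are placeholders.
var hls = new Hls({
  autoStartLoad: false,         // start loading manually
  startPosition: -1,            // -1 = derive from playlist / live sync point
  abrEwmaDefaultEstimate: 1e6   // 1 Mbps bandwidth guess before any real samples
});
hls.loadSource('https://example.com/stream.m3u8');
hls.attachMedia(document.getElementById('video'));
hls.on(Hls.Events.MANIFEST_PARSED, function () {
  hls.startLoad(-1);            // the default start position is now -1 rather than 0
});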
@@ -6125,7 +6159,7 @@ var Hls = function () {
}, {
key: 'startLoad',
value: function startLoad() {
var startPosition = arguments.length <= 0 || arguments[0] === undefined ? 0 : arguments[0];
var startPosition = arguments.length <= 0 || arguments[0] === undefined ? -1 : arguments[0];
_logger.logger.log('startLoad');
this.levelController.startLoad();
@@ -6685,7 +6719,7 @@ var PlaylistLoader = function (_EventHandler) {
byteRangeEndOffset,
byteRangeStartOffset;
regexp = /(?:#EXT-X-(MEDIA-SEQUENCE):(\d+))|(?:#EXT-X-(TARGETDURATION):(\d+))|(?:#EXT-X-(KEY):(.*))|(?:#EXT(INF):([\d\.]+)[^\r\n]*([\r\n]+[^#|\r\n]+)?)|(?:#EXT-X-(BYTERANGE):([\d]+[@[\d]*)]*[\r\n]+([^#|\r\n]+)?|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DIS)CONTINUITY))|(?:#EXT-X-(PROGRAM-DATE-TIME):(.*)[\r\n]+([^#|\r\n]+)?)/g;
regexp = /(?:#EXT-X-(MEDIA-SEQUENCE):(\d+))|(?:#EXT-X-(TARGETDURATION):(\d+))|(?:#EXT-X-(KEY):(.*))|(?:#EXT-X-(START):(.*))|(?:#EXT(INF):([\d\.]+)[^\r\n]*([\r\n]+[^#|\r\n]+)?)|(?:#EXT-X-(BYTERANGE):([\d]+[@[\d]*)]*[\r\n]+([^#|\r\n]+)?|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DIS)CONTINUITY))|(?:#EXT-X-(PROGRAM-DATE-TIME):(.*)[\r\n]+([^#|\r\n]+)?)/g;
while ((result = regexp.exec(string)) !== null) {
result.shift();
result = result.filter(function (n) {
@@ -6760,6 +6794,14 @@ var PlaylistLoader = function (_EventHandler) {
}
}
break;
case 'START':
var startParams = result[1];
var startAttrs = new _attrList2.default(startParams);
var startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET');
if (startTimeOffset) {
level.startTimeOffset = startTimeOffset;
}
break;
case 'PROGRAM-DATE-TIME':
programDateTime = new Date(Date.parse(result[1]));
if (frag && !frag.url && result.length >= 3) {
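The playlist loader now matches #EXT-X-START and stores its TIME-OFFSET attribute on level.startTimeOffset, which is exactly the value the onLevelLoaded change consumes when startPosition is still -1. Stripped of the loader's regexp and AttrList machinery, the extraction amounts to this standalone illustration:

// Standalone illustration, not the loader itself: pull TIME-OFFSET out of a
// playlist line such as  #EXT-X-START:TIME-OFFSET=25.0
var line = '#EXT-X-START:TIME-OFFSET=25.0';
var attrs = line.split(':')[1];                                   // 'TIME-OFFSET=25.0'
var startTimeOffset = parseFloat(attrs.split('TIME-OFFSET=')[1]); // 25
// the loader stores this on level.startTimeOffset for onLevelLoaded to pick up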
@@ -7656,8 +7698,10 @@ var MP4Remuxer = function () {
}
// next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + lastSampleDuration * pes2mp4ScaleFactor;
var dropped = track.dropped;
track.len = 0;
track.nbNalu = 0;
track.dropped = 0;
if (samples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
flags = samples[0].flags;
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
@@ -7676,7 +7720,8 @@ var MP4Remuxer = function () {
startDTS: firstDTS / pesTimeScale,
endDTS: this.nextAvcDts / pesTimeScale,
type: 'video',
nb: samples.length
nb: samples.length,
dropped: dropped
});
}
}, {
@ -7977,7 +8022,8 @@ var PassThroughRemuxer = function () {
startPTS: timeOffset,
startDTS: timeOffset,
type: 'audiovideo',
nb: 1
nb: 1,
dropped: 0
});
}
}, {
