update components
Parent: 2d86f49653
Commit: ab58f98cc1
27 changed files with 276 additions and 129 deletions
@@ -21,7 +21,6 @@ class AbrController extends EventHandler {
     this._autoLevelCapping = -1;
     this._nextAutoLevel = -1;
     this.hls = hls;
-    this.bwEstimator = new EwmaBandWidthEstimator(hls);
     this.onCheck = this.abandonRulesCheck.bind(this);
   }

@@ -34,6 +33,26 @@ class AbrController extends EventHandler {
     if (!this.timer) {
       this.timer = setInterval(this.onCheck, 100);
     }
+
+    // lazy init of bw Estimator, rationale is that we use different params for Live/VoD
+    // so we need to wait for stream manifest / playlist type to instantiate it.
+    if (!this.bwEstimator) {
+      let hls = this.hls,
+          level = data.frag.level,
+          isLive = hls.levels[level].details.live,
+          config = hls.config,
+          ewmaFast, ewmaSlow;
+
+      if (isLive) {
+        ewmaFast = config.abrEwmaFastLive;
+        ewmaSlow = config.abrEwmaSlowLive;
+      } else {
+        ewmaFast = config.abrEwmaFastVoD;
+        ewmaSlow = config.abrEwmaSlowVoD;
+      }
+      this.bwEstimator = new EwmaBandWidthEstimator(hls,ewmaSlow,ewmaFast);
+    }
+
     let frag = data.frag;
     frag.trequest = performance.now();
     this.fragCurrent = frag;

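Because the estimator is now created lazily on the first fragment-loading event (once the playlist type is known), any code that reads the bandwidth estimate has to tolerate it not existing yet. A minimal sketch of such a guarded read, reusing the `bwEstimator`/`getEstimate()` names from this diff; the `defaultEstimate` fallback and the helper itself are hypothetical:

```js
// Sketch only: read the ABR bandwidth estimate (bits per second), falling back
// to an assumed default while the lazily created estimator does not exist yet.
function currentBandwidthEstimate(abrController, defaultEstimate = 5e5) {
  const estimator = abrController.bwEstimator;   // undefined before the first fragment load
  return estimator ? estimator.getEstimate() : defaultEstimate;
}
```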
@@ -338,9 +338,10 @@ class BufferController extends EventHandler {
       } else {
         // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
         // let's stop appending any segments, and report BUFFER_FULL_ERROR error
-        segments = [];
+        this.segments = [];
         event.details = ErrorDetails.BUFFER_FULL_ERROR;
         hls.trigger(Event.ERROR,event);
+        return;
       }
     }
   }

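For context, BUFFER_FULL_ERROR is surfaced through the regular hls.js error event, so an embedding application can observe it while the library performs its own recovery (see the stream-controller hunk further down). A hypothetical listener, assuming an already constructed `hls` instance:

```js
// Hypothetical consumer-side logging only; hls.js handles the recovery itself.
hls.on(Hls.Events.ERROR, function (event, data) {
  if (data.details === Hls.ErrorDetails.BUFFER_FULL_ERROR) {
    console.warn('SourceBuffer quota exceeded; hls.js will flush and lower its buffer target');
  }
});
```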
@@ -11,29 +11,23 @@ import EWMA from '../utils/ewma';

 class EwmaBandWidthEstimator {

-  constructor(hls) {
+  constructor(hls,slow,fast) {
     this.hls = hls;
     this.defaultEstimate_ = 5e5; // 500kbps
     this.minWeight_ = 0.001;
     this.minDelayMs_ = 50;
+    this.slow_ = new EWMA(slow);
+    this.fast_ = new EWMA(fast);
   }

   sample(durationMs,numBytes) {
     durationMs = Math.max(durationMs, this.minDelayMs_);
-    var bandwidth = 8000* numBytes / durationMs;
+    var bandwidth = 8000* numBytes / durationMs,
     //console.log('instant bw:'+ Math.round(bandwidth));
     // we weight sample using loading duration....
-    var weigth = durationMs / 1000;
-
-    // lazy initialization. this allows to take into account config param changes that could happen after Hls instantiation,
-    // but before first fragment loading. this is useful to A/B tests those params
-    if(!this.fast_) {
-      let config = this.hls.config;
-      this.fast_ = new EWMA(config.abrEwmaFast);
-      this.slow_ = new EWMA(config.abrEwmaSlow);
-    }
-    this.fast_.sample(weigth,bandwidth);
-    this.slow_.sample(weigth,bandwidth);
+        weight = durationMs / 1000;
+    this.fast_.sample(weight,bandwidth);
+    this.slow_.sample(weight,bandwidth);
   }

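The `slow` and `fast` arguments passed to `EWMA` act as half-lives: roughly half of the estimate's weight comes from the most recent `halfLife` units of sample weight (here, seconds of loading time). Keeping one fast and one slow average lets the ABR logic react quickly to bandwidth drops while typically combining the two conservatively (for example, taking the lower of the two estimates). The `../utils/ewma` module itself is not part of this diff; a minimal EWMA with these assumed semantics, shown purely for reference, could look like:

```js
// Reference sketch (assumed semantics, not necessarily identical to ../utils/ewma):
// an exponentially weighted moving average with a configurable half-life.
class EWMA {
  constructor(halfLife) {
    // alpha chosen so that a sample of weight `halfLife` decays the old estimate by 50%
    this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
    this.estimate_ = 0;
    this.totalWeight_ = 0;
  }
  sample(weight, value) {
    const adjAlpha = Math.pow(this.alpha_, weight);
    this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
    this.totalWeight_ += weight;
  }
  getEstimate() {
    // zero-bias correction so early estimates are not dragged toward 0
    const zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
    return zeroFactor ? this.estimate_ / zeroFactor : 0;
  }
}
```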
@@ -285,8 +285,21 @@ class StreamController extends EventHandler {
         //logger.log('find SN matching with pos:' + bufferEnd + ':' + frag.sn);
         if (fragPrevious && frag.level === fragPrevious.level && frag.sn === fragPrevious.sn) {
           if (frag.sn < levelDetails.endSN) {
-            frag = fragments[frag.sn + 1 - levelDetails.startSN];
-            logger.log(`SN just loaded, load next one: ${frag.sn}`);
+            let deltaPTS = fragPrevious.deltaPTS,
+                curSNIdx = frag.sn - levelDetails.startSN;
+            // if there is a significant delta between audio and video, larger than max allowed hole,
+            // it might be because video fragment does not start with a keyframe.
+            // let's try to load previous fragment again to get last keyframe
+            // then we will reload again current fragment (that way we should be able to fill the buffer hole ...)
+            if (deltaPTS && deltaPTS > config.maxBufferHole) {
+              frag = fragments[curSNIdx-1];
+              logger.warn(`SN just loaded, with large PTS gap between audio and video, maybe frag is not starting with a keyframe ? load previous one to try to overcome this`);
+              // decrement previous frag load counter to avoid frag loop loading error when next fragment will get reloaded
+              fragPrevious.loadCounter--;
+            } else {
+              frag = fragments[curSNIdx+1];
+              logger.log(`SN just loaded, load next one: ${frag.sn}`);
+            }
           } else {
             // have we reached end of VOD playlist ?
             if (!levelDetails.live) {

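Condensed, the new selection rule is: if the previously loaded fragment reported an audio/video PTS gap larger than the configured `maxBufferHole`, re-load that fragment (undoing its load counter so the loop-load guard is not tripped); otherwise advance to the next one. A standalone sketch of that rule, using the same names as the diff (the helper function itself is not part of the library):

```js
// Sketch of the fragment-selection rule above; `fragments` is the level's fragment
// array and `curSNIdx` the index of the fragment that was just loaded.
function pickNextFragment(fragPrevious, curSNIdx, fragments, maxBufferHole) {
  const deltaPTS = fragPrevious.deltaPTS;
  if (deltaPTS && deltaPTS > maxBufferHole) {
    fragPrevious.loadCounter--;        // allow the upcoming re-load without tripping the guard
    return fragments[curSNIdx - 1];    // go back to fetch the last keyframe
  }
  return fragments[curSNIdx + 1];      // normal case: load the next fragment
}
```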
@@ -299,7 +312,7 @@ class StreamController extends EventHandler {
                 this.state = State.ENDED;
               }
             }
-            return;
+            break;
           }
         }
         //logger.log(' loading frag ' + i +',pos/bufEnd:' + pos.toFixed(3) + '/' + bufferEnd.toFixed(3));

@@ -974,11 +987,15 @@ class StreamController extends EventHandler {
         }
         break;
       case ErrorDetails.BUFFER_FULL_ERROR:
-        // trigger a smooth level switch to empty buffers
-        // also reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
-        this.config.maxMaxBufferLength/=2;
-        logger.warn(`reduce max buffer length to ${this.config.maxMaxBufferLength}s and trigger a nextLevelSwitch to flush old buffer and fix QuotaExceededError`);
-        this.nextLevelSwitch();
+        // only reduce max buf len if in appending state
+        if (this.state === State.PARSING || this.state === State.PARSED) {
+          // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
+          this.config.maxMaxBufferLength/=2;
+          logger.warn(`reduce max buffer length to ${this.config.maxMaxBufferLength}s and switch to IDLE state`);
+          // increase fragment load Index to avoid frag loop loading error after buffer flush
+          this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
+          this.state = State.IDLE;
+        }
        break;
       default:
        break;

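The recovery is a simple exponential back-off on the buffer ceiling: each BUFFER_FULL_ERROR raised while parsing or appending halves `maxMaxBufferLength`. With a ceiling of 600 s (an assumed starting value; check the config actually in use), repeated errors would shrink it like this:

```js
// Standalone illustration of the halving above (values in seconds).
let maxMaxBufferLength = 600;          // assumed starting ceiling
for (let i = 0; i < 3; i++) {
  maxMaxBufferLength /= 2;
  console.log(maxMaxBufferLength);     // 300, 150, 75
}
```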
@@ -71,6 +71,13 @@ class LevelHelper {
     fragments = details.fragments;
     frag = fragments[fragIdx];
     if(!isNaN(frag.startPTS)) {
+      // delta PTS between audio and video
+      let deltaPTS = Math.abs(frag.startPTS-startPTS);
+      if (isNaN(frag.deltaPTS)) {
+        frag.deltaPTS = deltaPTS;
+      } else {
+        frag.deltaPTS = Math.max(deltaPTS,frag.deltaPTS);
+      }
       startPTS = Math.min(startPTS,frag.startPTS);
       endPTS = Math.max(endPTS, frag.endPTS);
       startDTS = Math.min(startDTS,frag.startDTS);

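A quick worked example of what `frag.deltaPTS` records (PTS values are assumed to be in seconds at this layer, based on how fragment timing is reported after remuxing): if the audio side of a fragment starts at 10.00 s and the video side at 10.45 s, the delta is 0.45; the stream-controller change above only falls back to re-loading the previous fragment when that delta exceeds `config.maxBufferHole`.

```js
// Worked example with made-up timestamps (seconds) and an illustrative hole tolerance.
const audioStartPTS = 10.00, videoStartPTS = 10.45, maxBufferHole = 0.5;
const deltaPTS = Math.abs(videoStartPTS - audioStartPTS);   // 0.45
console.log(deltaPTS > maxBufferHole
  ? 'reload previous fragment (possible missing keyframe)'
  : 'delta within tolerance, load next fragment');          // within tolerance here
```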
@@ -83,8 +83,10 @@ class Hls {
       timelineController: TimelineController,
       enableCEA708Captions: true,
       enableMP2TPassThrough : false,
-      abrEwmaFast: 0,
-      abrEwmaSlow: 0,
+      abrEwmaFastLive: 5,
+      abrEwmaSlowLive: 9,
+      abrEwmaFastVoD: 4,
+      abrEwmaSlowVoD: 15,
      abrBandWidthFactor : 0.8,
      abrBandWidthUpFactor : 0.7
     };

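As with any other entry in this defaults object, the new keys can be overridden when constructing the player. A hypothetical tuning example follows; the manifest URL and `video` element are placeholders, and the numeric values are illustrations rather than recommendations:

```js
// Override the per-type EWMA half-lives added in this commit.
var hls = new Hls({
  abrEwmaFastLive: 3,   // shorter half-life: react faster to bandwidth changes on live streams
  abrEwmaSlowLive: 9,
  abrEwmaFastVoD: 4,
  abrEwmaSlowVoD: 15
});
hls.loadSource('https://example.com/master.m3u8');   // placeholder manifest URL
hls.attachMedia(document.querySelector('video'));
```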
@@ -145,6 +145,12 @@ class MP4Remuxer {
         pts, dts, ptsnorm, dtsnorm,
         flags,
         samples = [];
+
+    // handle broken streams with PTS < DTS, tolerance up 200ms (18000 in 90kHz timescale)
+    let PTSDTSshift = track.samples.reduce( (prev, curr) => Math.max(Math.min(prev,curr.pts-curr.dts),-18000),0);
+    if (PTSDTSshift < 0) {
+      logger.warn(`PTS < DTS detected in video samples, shifting DTS by ${Math.round(PTSDTSshift/90)} ms to overcome this issue`);
+    }
     /* concatenate the video data and construct the mdat in place
       (need 8 more bytes to fill length and mpdat type) */
     mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);

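The reduce walks every AVC sample and keeps the most negative `pts - dts` gap, clamped at -18000 ticks (200 ms at the 90 kHz MPEG-TS timescale). Run in isolation with made-up sample values it behaves like this:

```js
// Standalone illustration of the PTSDTSshift computation (90 kHz ticks, fake samples).
const samples = [
  { pts: 900000, dts: 900000 },   // well-formed: PTS === DTS
  { pts: 903000, dts: 906000 },   // broken: PTS < DTS by 3000 ticks (~33 ms)
  { pts: 906000, dts: 909000 }
];
const PTSDTSshift = samples.reduce(
  (prev, curr) => Math.max(Math.min(prev, curr.pts - curr.dts), -18000),
  0
);
console.log(PTSDTSshift);          // -3000, so every DTS will be shifted back by ~33 ms
```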
@@ -164,10 +170,11 @@ class MP4Remuxer {
           mp4SampleLength += 4 + unit.data.byteLength;
         }
         pts = avcSample.pts - this._initDTS;
-        dts = avcSample.dts - this._initDTS;
-        // ensure DTS is not bigger than PTS
+        // shift dts by PTSDTSshift, to ensure that PTS >= DTS
+        dts = avcSample.dts - this._initDTS + PTSDTSshift;
+        // ensure DTS is not bigger than PTS // strap belt !!!
         dts = Math.min(pts,dts);
-        //logger.log(`Video/PTS/DTS:${Math.round(pts/90)}/${Math.round(dts/90)}`);
+        //logger.log(`Video/PTS/DTS/ptsnorm/DTSnorm:${Math.round(avcSample.pts/90)}/${Math.round(avcSample.dts/90)}/${Math.round(pts/90)}/${Math.round(dts/90)}`);
         // if not first AVC sample of video track, normalize PTS/DTS with previous sample value
         // and ensure that sample duration is positive
         if (lastDTS !== undefined) {

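Continuing the fake numbers from the earlier sketch, the per-sample effect of the shift plus the final clamp is:

```js
// Per-sample effect of the DTS shift, using the same made-up values as above.
const _initDTS = 0, PTSDTSshift = -3000;       // assumed init offset and computed shift
const avcSample = { pts: 903000, dts: 906000 };
const pts = avcSample.pts - _initDTS;                 // 903000
let dts = avcSample.dts - _initDTS + PTSDTSshift;     // 903000
dts = Math.min(pts, dts);                             // clamp keeps PTS >= DTS regardless
console.log(pts, dts);                                // 903000 903000
```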
@@ -209,7 +216,7 @@ class MP4Remuxer {
           firstPTS = Math.max(0, ptsnorm);
           firstDTS = Math.max(0, dtsnorm);
         }
-        //console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)}');
+        //console.log(`PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)}`);
         mp4Sample = {
           size: mp4SampleLength,
           duration: 0,