mirror of https://github.com/jellyfin/jellyfin-web

update hls playback

Luke Pulverenti 2016-04-20 14:51:47 -04:00
parent 7919706532
commit 396b125d66
27 changed files with 438 additions and 728 deletions


@@ -67,7 +67,7 @@ class BufferController extends EventHandler {
this.mediaSource = null;
this.media = null;
this.pendingTracks = null;
this.sourceBuffer = {};
this.sourceBuffer = null;
}
this.onmso = this.onmse = this.onmsc = null;
this.hls.trigger(Event.MEDIA_DETACHED);
@@ -122,16 +122,18 @@ class BufferController extends EventHandler {
onBufferReset() {
var sourceBuffer = this.sourceBuffer;
for(var type in sourceBuffer) {
var sb = sourceBuffer[type];
try {
this.mediaSource.removeSourceBuffer(sb);
sb.removeEventListener('updateend', this.onsbue);
sb.removeEventListener('error', this.onsbe);
} catch(err) {
if (sourceBuffer) {
for(var type in sourceBuffer) {
var sb = sourceBuffer[type];
try {
this.mediaSource.removeSourceBuffer(sb);
sb.removeEventListener('updateend', this.onsbue);
sb.removeEventListener('error', this.onsbe);
} catch(err) {
}
}
this.sourceBuffer = null;
}
this.sourceBuffer = {};
this.flushRange = [];
this.appended = 0;
}
@@ -144,24 +146,19 @@ class BufferController extends EventHandler {
return;
}
var sourceBuffer = this.sourceBuffer,mediaSource = this.mediaSource;
for (trackName in tracks) {
if(!sourceBuffer[trackName]) {
if (!this.sourceBuffer) {
var sourceBuffer = {}, mediaSource = this.mediaSource;
for (trackName in tracks) {
track = tracks[trackName];
// use levelCodec as first priority
codec = track.levelCodec || track.codec;
mimeType = `${track.container};codecs=${codec}`;
logger.log(`creating sourceBuffer with mimeType:${mimeType}`);
try {
sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
sb.addEventListener('updateend', this.onsbue);
sb.addEventListener('error', this.onsbe);
} catch(err) {
logger.error(`error while trying to add sourceBuffer:${err.message}`);
this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_ADD_CODEC_ERROR, fatal: false, err: err, mimeType : mimeType});
}
sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
sb.addEventListener('updateend', this.onsbue);
sb.addEventListener('error', this.onsbe);
}
this.sourceBuffer = sourceBuffer;
}
}
@@ -226,8 +223,10 @@ class BufferController extends EventHandler {
// let's recompute this.appended, which is used to avoid flush looping
var appended = 0;
var sourceBuffer = this.sourceBuffer;
for (var type in sourceBuffer) {
appended += sourceBuffer[type].buffered.length;
if (sourceBuffer) {
for (var type in sourceBuffer) {
appended += sourceBuffer[type].buffered.length;
}
}
this.appended = appended;
this.hls.trigger(Event.BUFFER_FLUSHED);
@@ -252,16 +251,9 @@ class BufferController extends EventHandler {
var segment = segments.shift();
try {
//logger.log(`appending ${segment.type} SB, size:${segment.data.length});
if(sourceBuffer[segment.type]) {
sourceBuffer[segment.type].appendBuffer(segment.data);
this.appendError = 0;
this.appended++;
} else {
// in case we don't have any source buffer matching with this segment type,
// it means that Mediasource fails to create sourcebuffer
// discard this segment, and trigger update end
this.onSBUpdateEnd();
}
sourceBuffer[segment.type].appendBuffer(segment.data);
this.appendError = 0;
this.appended++;
} catch(err) {
// in case any error occured while appending, put back segment in segments table
logger.error(`error while trying to append buffer:${err.message}`);
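
For reference, a minimal MediaSource/SourceBuffer setup sketch (not code from this commit) showing the pattern the buffer-controller hunks above revolve around; videoElement and the tracks map ({container, codec, levelCodec} per track) are assumed for illustration:

var mediaSource = new MediaSource();
videoElement.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function () {
    var sourceBuffer = {};
    for (var trackName in tracks) {
        var track = tracks[trackName];
        // prefer the codec signalled by the playlist level over the one parsed from the segment
        var mimeType = track.container + ';codecs=' + (track.levelCodec || track.codec);
        sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
    }
});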


@@ -8,30 +8,20 @@ import EventHandler from '../event-handler';
class CapLevelController extends EventHandler {
constructor(hls) {
super(hls,
Event.FPS_DROP_LEVEL_CAPPING,
Event.MEDIA_ATTACHING,
Event.MANIFEST_PARSED);
}
destroy() {
if (this.hls.config.capLevelToPlayerSize) {
this.media = this.restrictedLevels = null;
this.media = null;
this.autoLevelCapping = Number.POSITIVE_INFINITY;
if (this.timer) {
this.timer = clearInterval(this.timer);
}
}
}
onFpsDropLevelCapping(data) {
if (!this.restrictedLevels) {
this.restrictedLevels = [];
}
if (!this.isLevelRestricted(data.droppedLevel)) {
this.restrictedLevels.push(data.droppedLevel);
}
}
onMediaAttaching(data) {
this.media = data.media instanceof HTMLVideoElement ? data.media : null;
}
@@ -66,7 +56,7 @@ class CapLevelController extends EventHandler {
* returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled)
*/
getMaxLevel(capLevelIndex) {
let result = 0,
let result,
i,
level,
mWidth = this.mediaWidth,
@@ -76,9 +66,6 @@ class CapLevelController extends EventHandler {
for (i = 0; i <= capLevelIndex; i++) {
level = this.levels[i];
if (this.isLevelRestricted(i)) {
break;
}
result = i;
lWidth = level.width;
lHeight = level.height;
@@ -89,10 +76,6 @@ class CapLevelController extends EventHandler {
return result;
}
isLevelRestricted(level) {
return (this.restrictedLevels && this.restrictedLevels.indexOf(level) !== -1) ? true : false;
}
get contentScaleFactor() {
let pixelRatio = 1;
try {


@@ -3,72 +3,46 @@
*/
import Event from '../events';
import EventHandler from '../event-handler';
import {logger} from '../utils/logger';
class FPSController extends EventHandler{
class FPSController {
constructor(hls) {
super(hls, Event.MEDIA_ATTACHING);
this.hls = hls;
this.timer = setInterval(this.checkFPS, hls.config.fpsDroppedMonitoringPeriod);
}
destroy() {
if (this.timer) {
clearInterval(this.timer);
clearInterval(this.timer);
}
this.isVideoPlaybackQualityAvailable = false;
}
onMediaAttaching(data) {
if (this.hls.config.capLevelOnFPSDrop) {
this.video = data.media instanceof HTMLVideoElement ? data.media : null;
if (typeof this.video.getVideoPlaybackQuality === 'function') {
this.isVideoPlaybackQualityAvailable = true;
}
clearInterval(this.timer);
this.timer = setInterval(this.checkFPSInterval.bind(this), this.hls.config.fpsDroppedMonitoringPeriod);
}
}
checkFPS(video, decodedFrames, droppedFrames) {
let currentTime = performance.now();
if (decodedFrames) {
if (this.lastTime) {
let currentPeriod = currentTime - this.lastTime,
currentDropped = droppedFrames - this.lastDroppedFrames,
currentDecoded = decodedFrames - this.lastDecodedFrames,
droppedFPS = 1000 * currentDropped / currentPeriod;
this.hls.trigger(Event.FPS_DROP, {currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames});
if (droppedFPS > 0) {
//logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
if (currentDropped > this.hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
let currentLevel = this.hls.currentLevel;
logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
if (currentLevel > 0 && (this.hls.autoLevelCapping === -1 || this.hls.autoLevelCapping >= currentLevel)) {
currentLevel = currentLevel - 1;
this.hls.trigger(Event.FPS_DROP_LEVEL_CAPPING, {level: currentLevel, droppedLevel: this.hls.currentLevel});
this.hls.autoLevelCapping = currentLevel;
this.hls.streamController.nextLevelSwitch();
checkFPS() {
var v = this.hls.video;
if (v) {
var decodedFrames = v.webkitDecodedFrameCount, droppedFrames = v.webkitDroppedFrameCount, currentTime = new Date();
if (decodedFrames) {
if (this.lastTime) {
var currentPeriod = currentTime - this.lastTime;
var currentDropped = droppedFrames - this.lastDroppedFrames;
var currentDecoded = decodedFrames - this.lastDecodedFrames;
var decodedFPS = 1000 * currentDecoded / currentPeriod;
var droppedFPS = 1000 * currentDropped / currentPeriod;
if (droppedFPS > 0) {
logger.log(`checkFPS : droppedFPS/decodedFPS:${droppedFPS.toFixed(1)}/${decodedFPS.toFixed(1)}`);
if (currentDropped > this.hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
logger.warn('drop FPS ratio greater than max allowed value');
this.hls.trigger(Event.FPS_DROP, {currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames});
}
}
}
this.lastTime = currentTime;
this.lastDroppedFrames = droppedFrames;
this.lastDecodedFrames = decodedFrames;
}
this.lastTime = currentTime;
this.lastDroppedFrames = droppedFrames;
this.lastDecodedFrames = decodedFrames;
}
}
checkFPSInterval() {
if (this.video) {
if (this.isVideoPlaybackQualityAvailable) {
let videoPlaybackQuality = this.video.getVideoPlaybackQuality();
this.checkFPS(this.video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
} else {
this.checkFPS(this.video, this.video.webkitDecodedFrameCount, this.video.webkitDroppedFrameCount);
}
}
}
}
export default FPSController;
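
For reference, a hedged sketch of how a dropped-frame rate can be sampled the way checkFPS above does it, using the standard getVideoPlaybackQuality() API where available and the webkit-prefixed frame counters as a fallback (sampleDroppedFPS and its return shape are made up for illustration):

function sampleDroppedFPS(video, last) {
    var quality = typeof video.getVideoPlaybackQuality === 'function' ?
        video.getVideoPlaybackQuality() :
        {totalVideoFrames: video.webkitDecodedFrameCount, droppedVideoFrames: video.webkitDroppedFrameCount};
    var now = performance.now();
    // dropped frames per second since the previous sample
    var droppedFPS = last ? 1000 * (quality.droppedVideoFrames - last.droppedVideoFrames) / (now - last.time) : 0;
    return {time: now, droppedVideoFrames: quality.droppedVideoFrames, totalVideoFrames: quality.totalVideoFrames, droppedFPS: droppedFPS};
}

Polling this once per fpsDroppedMonitoringPeriod and comparing the dropped delta against fpsDroppedMonitoringThreshold times the decoded delta reproduces the warning condition in the hunk above.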


@@ -234,15 +234,10 @@ class LevelController extends EventHandler {
onLevelLoaded(data) {
// check if current playlist is a live playlist
if (data.details.live) {
if (data.details.live && !this.timer) {
// if live playlist we will have to reload it periodically
// set reload period to average of the frag duration, if average not set then use playlist target duration
let timerInterval = data.details.averagetargetduration ? data.details.averagetargetduration : data.details.targetduration;
if (!this.timer || timerInterval !== this.timerInterval) {
clearInterval(this.timer);
this.timer = setInterval(this.ontick, 1000 * timerInterval);
this.timerInterval = timerInterval;
}
// set reload period to playlist target duration
this.timer = setInterval(this.ontick, 1000 * data.details.targetduration);
}
if (!data.details.live && this.timer) {
// playlist is not live and timer is armed : stopping it
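
For reference, a sketch of the live reload cadence the level-controller hunk above reverts to: while the playlist is live, re-fetch it roughly once per target duration (EXT-X-TARGETDURATION), and stop the timer once it is no longer live; details and ontick are assumed to be the playlist details object and the reload callback:

if (details.live && !this.timer) {
    // live playlist: reload it once per target duration (seconds, so convert to ms)
    this.timer = setInterval(this.ontick, 1000 * details.targetduration);
} else if (!details.live && this.timer) {
    // playlist turned into VOD: stop reloading
    clearInterval(this.timer);
    this.timer = null;
}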


@@ -218,7 +218,7 @@ class StreamController extends EventHandler {
// level 1 loaded [182580162,182580168] <============= here we should have bufferEnd > end. in that case break to avoid reloading 182580168
// level 1 loaded [182580164,182580171]
//
if (bufferEnd > end) {
if (levelDetails.PTSKnown && bufferEnd > end) {
break;
}
@@ -486,13 +486,11 @@ class StreamController extends EventHandler {
fragCurrent.loader.abort();
}
this.fragCurrent = null;
// flush everything
this.hls.trigger(Event.BUFFER_FLUSHING, {startOffset: 0, endOffset: Number.POSITIVE_INFINITY});
this.state = State.PAUSED;
// increase fragment load Index to avoid frag loop loading error after buffer flush
this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
// speed up switching, trigger timer function
this.tick();
this.state = State.PAUSED;
// flush everything
this.hls.trigger(Event.BUFFER_FLUSHING, {startOffset: 0, endOffset: Number.POSITIVE_INFINITY});
}
/*
@@ -515,12 +513,14 @@ class StreamController extends EventHandler {
we should take into account new segment fetch time
*/
var fetchdelay, currentRange, nextRange;
// increase fragment load Index to avoid frag loop loading error after buffer flush
this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
currentRange = this.getBufferRange(this.media.currentTime);
if (currentRange && currentRange.start > 1) {
// flush buffer preceding current fragment (flush until current fragment start offset)
// minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
this.hls.trigger(Event.BUFFER_FLUSHING, {startOffset: 0, endOffset: currentRange.start - 1});
this.state = State.PAUSED;
this.hls.trigger(Event.BUFFER_FLUSHING, {startOffset: 0, endOffset: currentRange.start - 1});
}
if (!this.media.paused) {
// add a safety delay of 1s
@@ -540,17 +540,15 @@ class StreamController extends EventHandler {
// we can flush buffer range following this one without stalling playback
nextRange = this.followingBufferRange(nextRange);
if (nextRange) {
// flush position is the start position of this new buffer
this.hls.trigger(Event.BUFFER_FLUSHING, {startOffset: nextRange.start, endOffset: Number.POSITIVE_INFINITY});
this.state = State.PAUSED;
// if we are here, we can also cancel any loading/demuxing in progress, as they are useless
var fragCurrent = this.fragCurrent;
if (fragCurrent && fragCurrent.loader) {
fragCurrent.loader.abort();
}
this.fragCurrent = null;
// increase fragment load Index to avoid frag loop loading error after buffer flush
this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
// flush position is the start position of this new buffer
this.state = State.PAUSED;
this.hls.trigger(Event.BUFFER_FLUSHING, {startOffset: nextRange.start, endOffset: Number.POSITIVE_INFINITY});
}
}
}
@@ -1047,8 +1045,9 @@ _checkBuffer() {
// next buffer is close ! adjust currentTime to nextBufferStart
// this will ensure effective video decoding
logger.log(`adjust currentTime from ${media.currentTime} to next buffered @ ${nextBufferStart} + nudge ${this.seekHoleNudgeDuration}`);
let hole = nextBufferStart + this.seekHoleNudgeDuration - media.currentTime;
media.currentTime = nextBufferStart + this.seekHoleNudgeDuration;
this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_SEEK_OVER_HOLE, fatal: false});
this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_SEEK_OVER_HOLE, fatal: false, hole : hole});
}
}
} else {


@@ -36,8 +36,6 @@ export const ErrorDetails = {
KEY_LOAD_ERROR: 'keyLoadError',
// Identifier for decrypt key load timeout error - data: { frag : fragment object}
KEY_LOAD_TIMEOUT: 'keyLoadTimeOut',
// Triggered when an exception occurs while adding a sourceBuffer to MediaSource - data : { err : exception , mimeType : mimeType }
BUFFER_ADD_CODEC_ERROR: 'bufferAddCodecError',
// Identifier for a buffer append error - data: append error description
BUFFER_APPEND_ERROR: 'bufferAppendError',
// Identifier for a buffer appending error event - data: appending error description


@@ -59,10 +59,8 @@ module.exports = {
FRAG_BUFFERED: 'hlsFragBuffered',
// fired when fragment matching with current media position is changing - data : { frag : fragment object }
FRAG_CHANGED: 'hlsFragChanged',
// Identifier for a FPS drop event - data: {curentDropped, currentDecoded, totalDroppedFrames}
// Identifier for a FPS drop event - data: {curentDropped, currentDecoded, totalDroppedFrames}
FPS_DROP: 'hlsFpsDrop',
//triggered when FPS drop triggers auto level capping - data: {level, droppedlevel}
FPS_DROP_LEVEL_CAPPING: 'hlsFpsDropLevelCapping',
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover,other error specific data}
ERROR: 'hlsError',
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example


@@ -13,7 +13,7 @@ import CapLevelController from './controller/cap-level-controller';
import StreamController from './controller/stream-controller';
import LevelController from './controller/level-controller';
import TimelineController from './controller/timeline-controller';
import FPSController from './controller/fps-controller';
//import FPSController from './controller/fps-controller';
import {logger, enableLogs} from './utils/logger';
import XhrLoader from './utils/xhr-loader';
import EventEmitter from 'events';
@@ -21,11 +21,6 @@ import KeyLoader from './loader/key-loader';
class Hls {
static get version() {
// replaced with browserify-versionify transform
return '__VERSION__';
}
static isSupported() {
return (window.MediaSource && window.MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'));
}
@@ -47,7 +42,6 @@ class Hls {
Hls.defaultConfig = {
autoStartLoad: true,
debug: false,
capLevelOnFPSDrop: false,
capLevelToPlayerSize: false,
maxBufferLength: 30,
maxBufferSize: 60 * 1000 * 1000,
@@ -73,8 +67,8 @@ class Hls {
fragLoadingRetryDelay: 1000,
fragLoadingLoopThreshold: 3,
startFragPrefetch : false,
fpsDroppedMonitoringPeriod: 5000,
fpsDroppedMonitoringThreshold: 0.2,
// fpsDroppedMonitoringPeriod: 5000,
// fpsDroppedMonitoringThreshold: 0.2,
appendErrorMaxRetry: 3,
loader: XhrLoader,
fLoader: undefined,
@@ -82,7 +76,6 @@ class Hls {
abrController : AbrController,
bufferController : BufferController,
capLevelController : CapLevelController,
fpsController: FPSController,
streamController: StreamController,
timelineController: TimelineController,
enableCEA708Captions: true,
@@ -136,10 +129,10 @@ class Hls {
this.abrController = new config.abrController(this);
this.bufferController = new config.bufferController(this);
this.capLevelController = new config.capLevelController(this);
this.fpsController = new config.fpsController(this);
this.streamController = new config.streamController(this);
this.timelineController = new config.timelineController(this);
this.keyLoader = new KeyLoader(this);
//this.fpsController = new FPSController(this);
}
destroy() {
@@ -152,10 +145,10 @@ class Hls {
this.abrController.destroy();
this.bufferController.destroy();
this.capLevelController.destroy();
this.fpsController.destroy();
this.streamController.destroy();
this.timelineController.destroy();
this.keyLoader.destroy();
this.keyLoader.destroy();
//this.fpsController.destroy();
this.url = null;
this.observer.removeAllListeners();
}
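
For reference, a sketch of typical consumer-side usage of the Hls class configured above (the element id and stream URL are made up; the config keys are taken from the defaults listed in the hunk):

if (Hls.isSupported()) {
    var video = document.getElementById('player');
    var hls = new Hls({capLevelToPlayerSize: true, maxBufferLength: 30});
    hls.loadSource('https://example.com/live/stream.m3u8');
    hls.attachMedia(video);
    hls.on(Hls.Events.MANIFEST_PARSED, function () {
        video.play();
    });
}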


@@ -51,7 +51,7 @@ class FragmentLoader extends EventHandler {
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_TIMEOUT, fatal: false, frag: this.frag});
}
loadprogress(stats) {
loadprogress(event, stats) {
this.frag.loaded = stats.loaded;
this.hls.trigger(Event.FRAG_LOAD_PROGRESS, {frag: this.frag, stats: stats});
}


@@ -211,7 +211,6 @@ class PlaylistLoader extends EventHandler {
totalduration-=frag.duration;
}
level.totalduration = totalduration;
level.averagetargetduration = totalduration / level.fragments.length;
level.endSN = currentSN - 1;
return level;
}
@@ -225,8 +224,7 @@ class PlaylistLoader extends EventHandler {
hls = this.hls,
levels;
// responseURL not supported on some browsers (it is used to detect URL redirection)
// data-uri mode also not supported (but no need to detect redirection)
if (url === undefined || url.indexOf('data:') === 0) {
if (url === undefined) {
// fallback to initial URL
url = this.url;
}


@@ -132,164 +132,135 @@ class MP4Remuxer {
}
remuxVideo(track, timeOffset, contiguous) {
var offset = 8,
var view,
offset = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
mp4SampleDuration,
avcSample,
mp4Sample,
mp4SampleLength,
unit,
mdat, moof,
firstPTS, firstDTS,
nextDTS,
lastPTS, lastDTS,
inputSamples = track.samples,
outputSamples = [];
// PTS is coded on 33bits, and can loop from -2^32 to 2^32
// PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
let nextAvcDts;
if (contiguous) {
// if parsed fragment is contiguous with last one, let's use last DTS value as reference
nextAvcDts = this.nextAvcDts;
} else {
// if not contiguous, let's use target timeOffset
nextAvcDts = timeOffset*pesTimeScale;
}
// compute first DTS and last DTS, normalize them against reference value
let sample = inputSamples[0];
firstDTS = Math.max(this._PTSNormalize(sample.dts,nextAvcDts) - this._initDTS,0);
firstPTS = Math.max(this._PTSNormalize(sample.pts,nextAvcDts) - this._initDTS,0);
// check timestamp continuity accross consecutive fragments (this is to remove inter-fragment gap/hole)
let delta = Math.round((firstDTS - nextAvcDts) / 90);
// if fragment are contiguous, or if there is a huge delta (more than 10s) between expected PTS and sample PTS
if (contiguous || Math.abs(delta) > 10000) {
if (delta) {
if (delta > 1) {
logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
} else if (delta < -1) {
logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
}
// remove hole/gap : set DTS to next expected DTS
firstDTS = nextAvcDts;
inputSamples[0].dts = firstDTS + this._initDTS;
// offset PTS as well, ensure that PTS is smaller or equal than new DTS
firstPTS = Math.max(firstPTS - delta, nextAvcDts);
inputSamples[0].pts = firstPTS + this._initDTS;
logger.log(`Video/PTS/DTS adjusted: ${firstPTS}/${firstDTS},delta:${delta}`);
}
}
nextDTS = firstDTS;
// compute lastPTS/lastDTS
sample = inputSamples[inputSamples.length-1];
lastDTS = Math.max(this._PTSNormalize(sample.dts,nextAvcDts) - this._initDTS,0);
lastPTS = Math.max(this._PTSNormalize(sample.pts,nextAvcDts) - this._initDTS,0);
lastPTS = Math.max(lastPTS, lastDTS);
let vendor = navigator.vendor, userAgent = navigator.userAgent,
isSafari = vendor && vendor.indexOf('Apple') > -1 && userAgent && !userAgent.match('CriOS');
// on Safari let's signal the same sample duration for all samples
// sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
// set this constant duration as being the avg delta between consecutive DTS.
if (isSafari) {
mp4SampleDuration = Math.round((lastDTS-firstDTS)/(pes2mp4ScaleFactor*(inputSamples.length-1)));
}
// normalize all PTS/DTS now ...
for (let i = 0; i < inputSamples.length; i++) {
let sample = inputSamples[i];
if (isSafari) {
// sample DTS is computed using a constant decoding offset (mp4SampleDuration) between samples
sample.dts = firstDTS + i*pes2mp4ScaleFactor*mp4SampleDuration;
} else {
// ensure sample monotonic DTS
sample.dts = Math.max(this._PTSNormalize(sample.dts, nextAvcDts) - this._initDTS,firstDTS);
// ensure dts is a multiple of scale factor to avoid rounding issues
sample.dts = Math.round(sample.dts/pes2mp4ScaleFactor)*pes2mp4ScaleFactor;
}
// we normalize PTS against nextAvcDts, we also substract initDTS (some streams don't start @ PTS O)
// and we ensure that computed value is greater or equal than sample DTS
sample.pts = Math.max(this._PTSNormalize(sample.pts,nextAvcDts) - this._initDTS, sample.dts);
// ensure pts is a multiple of scale factor to avoid rounding issues
sample.pts = Math.round(sample.pts/pes2mp4ScaleFactor)*pes2mp4ScaleFactor;
}
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
flags,
samples = [];
/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mpdat type) */
mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
let view = new DataView(mdat.buffer);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
for (let i = 0; i < inputSamples.length; i++) {
let avcSample = inputSamples[i],
mp4SampleLength = 0,
compositionTimeOffset;
while (track.samples.length) {
avcSample = track.samples.shift();
mp4SampleLength = 0;
// convert NALU bitstream to MP4 format (prepend NALU with size field)
while (avcSample.units.units.length) {
let unit = avcSample.units.units.shift();
unit = avcSample.units.units.shift();
view.setUint32(offset, unit.data.byteLength);
offset += 4;
mdat.set(unit.data, offset);
offset += unit.data.byteLength;
mp4SampleLength += 4 + unit.data.byteLength;
}
if(!isSafari) {
// expected sample duration is the Decoding Timestamp diff of consecutive samples
if (i < inputSamples.length - 1) {
mp4SampleDuration = inputSamples[i+1].dts - avcSample.dts;
} else {
// last sample duration is same than previous one
mp4SampleDuration = avcSample.dts - inputSamples[i-1].dts;
pts = avcSample.pts - this._initDTS;
dts = avcSample.dts - this._initDTS;
// ensure DTS is not bigger than PTS
dts = Math.min(pts,dts);
//logger.log(`Video/PTS/DTS:${Math.round(pts/90)}/${Math.round(dts/90)}`);
// if not first AVC sample of video track, normalize PTS/DTS with previous sample value
// and ensure that sample duration is positive
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
var sampleDuration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (sampleDuration <= 0) {
logger.log(`invalid sample duration at PTS/DTS: ${avcSample.pts}/${avcSample.dts}:${sampleDuration}`);
sampleDuration = 1;
}
mp4SampleDuration /= pes2mp4ScaleFactor;
compositionTimeOffset = Math.round((avcSample.pts - avcSample.dts) / pes2mp4ScaleFactor);
mp4Sample.duration = sampleDuration;
} else {
compositionTimeOffset = Math.max(0,mp4SampleDuration*Math.round((avcSample.pts - avcSample.dts)/(pes2mp4ScaleFactor*mp4SampleDuration)));
let nextAvcDts, delta;
if (contiguous) {
nextAvcDts = this.nextAvcDts;
} else {
nextAvcDts = timeOffset*pesTimeScale;
}
// first AVC sample of video track, normalize PTS/DTS
ptsnorm = this._PTSNormalize(pts, nextAvcDts);
dtsnorm = this._PTSNormalize(dts, nextAvcDts);
delta = Math.round((dtsnorm - nextAvcDts) / 90);
// if fragment are contiguous, detect hole/overlapping between fragments
if (contiguous) {
if (delta) {
if (delta > 1) {
logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
} else if (delta < -1) {
logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
}
// set DTS to next DTS
dtsnorm = nextAvcDts;
// offset PTS as well, ensure that PTS is smaller or equal than new DTS
ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
logger.log(`Video/PTS/DTS adjusted: ${ptsnorm}/${dtsnorm},delta:${delta}`);
}
}
// remember first PTS of our avcSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
}
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)}');
outputSamples.push({
mp4Sample = {
size: mp4SampleLength,
// constant duration
duration: mp4SampleDuration,
cts: compositionTimeOffset,
duration: 0,
cts: (ptsnorm - dtsnorm) / pes2mp4ScaleFactor,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0,
dependsOn : avcSample.key ? 2 : 1,
isNonSync : avcSample.key ? 0 : 1
degradPrio: 0
}
});
};
flags = mp4Sample.flags;
if (avcSample.key === true) {
// the current sample is a key frame
flags.dependsOn = 2;
flags.isNonSync = 0;
} else {
flags.dependsOn = 1;
flags.isNonSync = 1;
}
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
// next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
this.nextAvcDts = lastDTS + mp4SampleDuration*pes2mp4ScaleFactor;
var lastSampleDuration = 0;
if (samples.length >= 2) {
lastSampleDuration = samples[samples.length - 2].duration;
mp4Sample.duration = lastSampleDuration;
}
// next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + lastSampleDuration * pes2mp4ScaleFactor;
track.len = 0;
track.nbNalu = 0;
if(outputSamples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
let flags = outputSamples[0].flags;
if(samples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
flags = samples[0].flags;
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412
flags.dependsOn = 2;
flags.isNonSync = 0;
}
track.samples = outputSamples;
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
data1: moof,
data2: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: (lastPTS + pes2mp4ScaleFactor * mp4SampleDuration) / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: this.nextAvcDts / pesTimeScale,
type: 'video',
nb: outputSamples.length
nb: samples.length
});
}
@@ -328,7 +299,7 @@ class MP4Remuxer {
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if(Math.abs(mp4Sample.duration - expectedSampleDuration) > expectedSampleDuration/10) {
// more than 10% diff between sample duration and expectedSampleDuration .... lets log that
logger.trace(`invalid AAC sample duration at PTS ${Math.round(pts/90)},should be 1024,found :${Math.round(mp4Sample.duration*track.audiosamplerate/track.timescale)}`);
logger.log(`invalid AAC sample duration at PTS ${Math.round(pts/90)},should be 1024,found :${Math.round(mp4Sample.duration*track.audiosamplerate/track.timescale)}`);
}
// always adjust sample duration to avoid av sync issue
mp4Sample.duration = expectedSampleDuration;
@@ -343,20 +314,20 @@ class MP4Remuxer {
ptsnorm = this._PTSNormalize(pts, nextAacPts);
dtsnorm = this._PTSNormalize(dts, nextAacPts);
delta = Math.round(1000 * (ptsnorm - nextAacPts) / pesTimeScale);
// if fragment are contiguous, or if there is a huge delta (more than 10s) between expected PTS and sample PTS
if (contiguous || Math.abs(delta) > 10000) {
// if fragment are contiguous, detect hole/overlapping between fragments
if (contiguous) {
// log delta
if (delta) {
if (delta > 0) {
logger.log(`${delta} ms hole between AAC samples detected,filling it`);
// if we have frame overlap, overlapping for more than half a frame duraion
// if we have frame overlap, overlapping for more than half a frame duration
} else if (delta < -12) {
// drop overlapping audio frames... browser will deal with it
logger.log(`${(-delta)} ms overlapping between AAC samples detected, drop frame`);
track.len -= unit.byteLength;
continue;
}
// set PTS/DTS to expected PTS/DTS
// set PTS/DTS to next PTS/DTS
ptsnorm = dtsnorm = nextAacPts;
}
}
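
For reference, a sketch of the 33-bit PTS normalization the remuxer comments above refer to: MPEG-TS timestamps wrap every 2^33 ticks of the 90 kHz clock, so a value is shifted by whole wrap periods until it lands within half a period of a reference timestamp (this mirrors the idea behind _PTSNormalize, not its exact code):

function ptsNormalize(value, reference) {
    if (reference === undefined) {
        return value;
    }
    // shift by +/- 2^33 until the value is within 2^32 of the reference
    var offset = reference < value ? -8589934592 : 8589934592;
    while (Math.abs(value - reference) > 4294967296) {
        value += offset;
    }
    return value;
}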


@@ -112,11 +112,8 @@ class XhrLoader {
stats.tfirst = performance.now();
}
stats.loaded = event.loaded;
if (event.lengthComputable) {
stats.total = event.total;
}
if (this.onProgress) {
this.onProgress(stats);
this.onProgress(event, stats);
}
}
}