
update components

This commit is contained in:
Luke Pulverenti 2016-04-16 12:51:35 -04:00
parent 3e61abaf71
commit 73b495f48c
62 changed files with 875 additions and 512 deletions

View file

@@ -67,7 +67,7 @@ class BufferController extends EventHandler {
this.mediaSource = null;
this.media = null;
this.pendingTracks = null;
-    this.sourceBuffer = null;
+    this.sourceBuffer = {};
}
this.onmso = this.onmse = this.onmsc = null;
this.hls.trigger(Event.MEDIA_DETACHED);
@@ -122,18 +122,16 @@ class BufferController extends EventHandler {
onBufferReset() {
var sourceBuffer = this.sourceBuffer;
-    if (sourceBuffer) {
-      for(var type in sourceBuffer) {
-        var sb = sourceBuffer[type];
-        try {
-          this.mediaSource.removeSourceBuffer(sb);
-          sb.removeEventListener('updateend', this.onsbue);
-          sb.removeEventListener('error', this.onsbe);
-        } catch(err) {
-        }
+    for(var type in sourceBuffer) {
+      var sb = sourceBuffer[type];
+      try {
+        this.mediaSource.removeSourceBuffer(sb);
+        sb.removeEventListener('updateend', this.onsbue);
+        sb.removeEventListener('error', this.onsbe);
+      } catch(err) {
+      }
-      this.sourceBuffer = null;
    }
+    this.sourceBuffer = {};
this.flushRange = [];
this.appended = 0;
}
@@ -146,19 +144,24 @@ class BufferController extends EventHandler {
return;
}
-    if (!this.sourceBuffer) {
-      var sourceBuffer = {}, mediaSource = this.mediaSource;
-      for (trackName in tracks) {
+    var sourceBuffer = this.sourceBuffer,mediaSource = this.mediaSource;
+    for (trackName in tracks) {
+      if(!sourceBuffer[trackName]) {
track = tracks[trackName];
// use levelCodec as first priority
codec = track.levelCodec || track.codec;
mimeType = `${track.container};codecs=${codec}`;
logger.log(`creating sourceBuffer with mimeType:${mimeType}`);
-        sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
-        sb.addEventListener('updateend', this.onsbue);
-        sb.addEventListener('error', this.onsbe);
+        try {
+          sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
+          sb.addEventListener('updateend', this.onsbue);
+          sb.addEventListener('error', this.onsbe);
+        } catch(err) {
+          logger.error(`error while trying to add sourceBuffer:${err.message}`);
+          this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_ADD_CODEC_ERROR, fatal: false, err: err, mimeType : mimeType});
+        }
}
-      this.sourceBuffer = sourceBuffer;
}
}
@@ -223,10 +226,8 @@ class BufferController extends EventHandler {
// let's recompute this.appended, which is used to avoid flush looping
var appended = 0;
var sourceBuffer = this.sourceBuffer;
-    if (sourceBuffer) {
-      for (var type in sourceBuffer) {
-        appended += sourceBuffer[type].buffered.length;
-      }
+    for (var type in sourceBuffer) {
+      appended += sourceBuffer[type].buffered.length;
}
this.appended = appended;
this.hls.trigger(Event.BUFFER_FLUSHED);
@@ -251,9 +252,16 @@ class BufferController extends EventHandler {
var segment = segments.shift();
try {
//logger.log(`appending ${segment.type} SB, size:${segment.data.length});
-      sourceBuffer[segment.type].appendBuffer(segment.data);
-      this.appendError = 0;
-      this.appended++;
+      if(sourceBuffer[segment.type]) {
+        sourceBuffer[segment.type].appendBuffer(segment.data);
+        this.appendError = 0;
+        this.appended++;
+      } else {
+        // in case we don't have any source buffer matching with this segment type,
+        // it means that Mediasource fails to create sourcebuffer
+        // discard this segment, and trigger update end
+        this.onSBUpdateEnd();
+      }
} catch(err) {
// in case any error occured while appending, put back segment in segments table
logger.error(`error while trying to append buffer:${err.message}`);
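Note: with this change an addSourceBuffer() failure no longer escapes the buffer controller; it surfaces as a non-fatal hls.js ERROR event, and segments for the missing buffer are discarded via onSBUpdateEnd(). A minimal sketch of observing it from application code, assuming the public Hls.Events/Hls.ErrorDetails statics; the element selector and URL are illustrative:

var video = document.querySelector('video'); // illustrative element
var hls = new Hls();
hls.attachMedia(video);
hls.loadSource('https://example.com/stream.m3u8'); // placeholder URL
hls.on(Hls.Events.ERROR, function (event, data) {
  // raised when MediaSource.addSourceBuffer() throws for a track's mimeType
  if (data.details === Hls.ErrorDetails.BUFFER_ADD_CODEC_ERROR) {
    console.warn('SourceBuffer creation failed for', data.mimeType, data.err);
  }
});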

View file

@@ -8,20 +8,30 @@ import EventHandler from '../event-handler';
class CapLevelController extends EventHandler {
constructor(hls) {
super(hls,
+      Event.FPS_DROP_LEVEL_CAPPING,
Event.MEDIA_ATTACHING,
Event.MANIFEST_PARSED);
}
destroy() {
if (this.hls.config.capLevelToPlayerSize) {
-      this.media = null;
+      this.media = this.restrictedLevels = null;
this.autoLevelCapping = Number.POSITIVE_INFINITY;
if (this.timer) {
this.timer = clearInterval(this.timer);
}
}
}
+  onFpsDropLevelCapping(data) {
+    if (!this.restrictedLevels) {
+      this.restrictedLevels = [];
+    }
+    if (!this.isLevelRestricted(data.droppedLevel)) {
+      this.restrictedLevels.push(data.droppedLevel);
+    }
+  }
onMediaAttaching(data) {
this.media = data.media instanceof HTMLVideoElement ? data.media : null;
}
@@ -56,7 +66,7 @@ class CapLevelController extends EventHandler {
* returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled)
*/
getMaxLevel(capLevelIndex) {
-    let result,
+    let result = 0,
i,
level,
mWidth = this.mediaWidth,
@@ -66,6 +76,9 @@ class CapLevelController extends EventHandler {
for (i = 0; i <= capLevelIndex; i++) {
level = this.levels[i];
+      if (this.isLevelRestricted(i)) {
+        break;
+      }
result = i;
lWidth = level.width;
lHeight = level.height;
@@ -76,6 +89,10 @@ class CapLevelController extends EventHandler {
return result;
}
+  isLevelRestricted(level) {
+    return (this.restrictedLevels && this.restrictedLevels.indexOf(level) !== -1) ? true : false;
+  }
get contentScaleFactor() {
let pixelRatio = 1;
try {
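The restricted-levels bookkeeping above feeds getMaxLevel(): the loop now stops climbing at the first restricted index, so a level that already caused FPS drops is never handed out again. A standalone re-statement of just that rule (the dimension checks of the real method are omitted; inputs are illustrative):

// sketch: highest usable level at or below capLevelIndex, skipping restricted ones
function maxUnrestrictedLevel(restrictedLevels, capLevelIndex) {
  var result = 0;
  for (var i = 0; i <= capLevelIndex; i++) {
    if (restrictedLevels.indexOf(i) !== -1) {
      break; // stop before the first level that previously dropped frames
    }
    result = i;
  }
  return result;
}
// e.g. maxUnrestrictedLevel([3], 4) === 2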

View file

@@ -3,46 +3,72 @@
*/
import Event from '../events';
+import EventHandler from '../event-handler';
import {logger} from '../utils/logger';
-class FPSController {
+class FPSController extends EventHandler{
constructor(hls) {
-    this.hls = hls;
-    this.timer = setInterval(this.checkFPS, hls.config.fpsDroppedMonitoringPeriod);
+    super(hls, Event.MEDIA_ATTACHING);
}
destroy() {
if (this.timer) {
-     clearInterval(this.timer);
+      clearInterval(this.timer);
}
+    this.isVideoPlaybackQualityAvailable = false;
}
-  checkFPS() {
-    var v = this.hls.video;
-    if (v) {
-      var decodedFrames = v.webkitDecodedFrameCount, droppedFrames = v.webkitDroppedFrameCount, currentTime = new Date();
-      if (decodedFrames) {
-        if (this.lastTime) {
-          var currentPeriod = currentTime - this.lastTime;
-          var currentDropped = droppedFrames - this.lastDroppedFrames;
-          var currentDecoded = decodedFrames - this.lastDecodedFrames;
-          var decodedFPS = 1000 * currentDecoded / currentPeriod;
-          var droppedFPS = 1000 * currentDropped / currentPeriod;
-          if (droppedFPS > 0) {
-            logger.log(`checkFPS : droppedFPS/decodedFPS:${droppedFPS.toFixed(1)}/${decodedFPS.toFixed(1)}`);
-            if (currentDropped > this.hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
-              logger.warn('drop FPS ratio greater than max allowed value');
-              this.hls.trigger(Event.FPS_DROP, {currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames});
+  onMediaAttaching(data) {
+    if (this.hls.config.capLevelOnFPSDrop) {
+      this.video = data.media instanceof HTMLVideoElement ? data.media : null;
+      if (typeof this.video.getVideoPlaybackQuality === 'function') {
+        this.isVideoPlaybackQualityAvailable = true;
+      }
+      clearInterval(this.timer);
+      this.timer = setInterval(this.checkFPSInterval.bind(this), this.hls.config.fpsDroppedMonitoringPeriod);
+    }
+  }
+  checkFPS(video, decodedFrames, droppedFrames) {
+    let currentTime = performance.now();
+    if (decodedFrames) {
+      if (this.lastTime) {
+        let currentPeriod = currentTime - this.lastTime,
+            currentDropped = droppedFrames - this.lastDroppedFrames,
+            currentDecoded = decodedFrames - this.lastDecodedFrames,
+            droppedFPS = 1000 * currentDropped / currentPeriod;
+        this.hls.trigger(Event.FPS_DROP, {currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames});
+        if (droppedFPS > 0) {
+          //logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
+          if (currentDropped > this.hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
+            let currentLevel = this.hls.currentLevel;
+            logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
+            if (currentLevel > 0 && (this.hls.autoLevelCapping === -1 || this.hls.autoLevelCapping >= currentLevel)) {
+              currentLevel = currentLevel - 1;
+              this.hls.trigger(Event.FPS_DROP_LEVEL_CAPPING, {level: currentLevel, droppedLevel: this.hls.currentLevel});
+              this.hls.autoLevelCapping = currentLevel;
+              this.hls.streamController.nextLevelSwitch();
+            }
+          }
+        }
-        this.lastTime = currentTime;
-        this.lastDroppedFrames = droppedFrames;
-        this.lastDecodedFrames = decodedFrames;
      }
+      this.lastTime = currentTime;
+      this.lastDroppedFrames = droppedFrames;
+      this.lastDecodedFrames = decodedFrames;
}
}
+  checkFPSInterval() {
+    if (this.video) {
+      if (this.isVideoPlaybackQualityAvailable) {
+        let videoPlaybackQuality = this.video.getVideoPlaybackQuality();
+        this.checkFPS(this.video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
+      } else {
+        this.checkFPS(this.video, this.video.webkitDecodedFrameCount, this.video.webkitDroppedFrameCount);
+      }
+    }
+  }
}
export default FPSController;
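The rewritten controller prefers the standard getVideoPlaybackQuality() API and keeps the prefixed webkit counters only as a fallback, mirroring checkFPSInterval() above. The same probe as a standalone sketch (assumes a playing HTMLVideoElement):

function readFrameStats(video) {
  if (typeof video.getVideoPlaybackQuality === 'function') {
    var quality = video.getVideoPlaybackQuality();
    return {decoded: quality.totalVideoFrames, dropped: quality.droppedVideoFrames};
  }
  // legacy WebKit-only counters, undefined on other engines
  return {decoded: video.webkitDecodedFrameCount, dropped: video.webkitDroppedFrameCount};
}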

View file

@@ -234,10 +234,15 @@ class LevelController extends EventHandler {
onLevelLoaded(data) {
// check if current playlist is a live playlist
-    if (data.details.live && !this.timer) {
+    if (data.details.live) {
      // if live playlist we will have to reload it periodically
-      // set reload period to playlist target duration
-      this.timer = setInterval(this.ontick, 1000 * data.details.targetduration);
+      // set reload period to average of the frag duration, if average not set then use playlist target duration
+      let timerInterval = data.details.averagetargetduration ? data.details.averagetargetduration : data.details.targetduration;
+      if (!this.timer || timerInterval !== this.timerInterval) {
+        clearInterval(this.timer);
+        this.timer = setInterval(this.ontick, 1000 * timerInterval);
+        this.timerInterval = timerInterval;
+      }
}
if (!data.details.live && this.timer) {
// playlist is not live and timer is armed : stopping it
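Live playlists are now reloaded on the average fragment duration when the parser provides one, which avoids over-polling streams whose fragments run shorter than EXT-X-TARGETDURATION; the timer is also re-armed whenever that interval changes. The fallback rule as a sketch (details mirrors the level details object used above):

// sketch: reload interval in seconds for a live level playlist
function reloadInterval(details) {
  return details.averagetargetduration ? details.averagetargetduration
                                       : details.targetduration;
}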

View file

@@ -36,6 +36,8 @@ export const ErrorDetails = {
KEY_LOAD_ERROR: 'keyLoadError',
// Identifier for decrypt key load timeout error - data: { frag : fragment object}
KEY_LOAD_TIMEOUT: 'keyLoadTimeOut',
+  // Triggered when an exception occurs while adding a sourceBuffer to MediaSource - data : { err : exception , mimeType : mimeType }
+  BUFFER_ADD_CODEC_ERROR: 'bufferAddCodecError',
// Identifier for a buffer append error - data: append error description
BUFFER_APPEND_ERROR: 'bufferAppendError',
// Identifier for a buffer appending error event - data: appending error description

View file

@@ -59,8 +59,10 @@ module.exports = {
FRAG_BUFFERED: 'hlsFragBuffered',
// fired when fragment matching with current media position is changing - data : { frag : fragment object }
FRAG_CHANGED: 'hlsFragChanged',
  // Identifier for a FPS drop event - data: {curentDropped, currentDecoded, totalDroppedFrames}
  FPS_DROP: 'hlsFpsDrop',
+  //triggered when FPS drop triggers auto level capping - data: {level, droppedlevel}
+  FPS_DROP_LEVEL_CAPPING: 'hlsFpsDropLevelCapping',
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover,other error specific data}
ERROR: 'hlsError',
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example
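FPS_DROP now fires on every monitored drop period, and the new FPS_DROP_LEVEL_CAPPING reports both the capped level and the level that dropped frames. A sketch of subscribing to both, assuming an hls instance as in the earlier sketch (the logging is illustrative):

hls.on(Hls.Events.FPS_DROP, function (event, data) {
  console.log('dropped ' + data.currentDropped + '/' + data.currentDecoded +
              ' frames this period');
});
hls.on(Hls.Events.FPS_DROP_LEVEL_CAPPING, function (event, data) {
  console.log('auto level capped to ' + data.level +
              ' after drops at level ' + data.droppedLevel);
});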

View file

@@ -45,8 +45,9 @@ class LevelHelper {
LevelHelper.updateFragPTS(newDetails,PTSFrag.sn,PTSFrag.startPTS,PTSFrag.endPTS);
} else {
// ensure that delta is within oldfragments range
-      // no need to offset start if delta === 0
-      if (delta > 0 && delta < oldfragments.length) {
+      // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
+      // in that case we also need to adjust start offset of all fragments
+      if (delta >= 0 && delta < oldfragments.length) {
// adjust start by sliding offset
var sliding = oldfragments[delta].start;
for(i = 0 ; i < newfragments.length ; i++) {
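Allowing delta === 0 matters for live refreshes where the new playlist still starts at the same sequence number, e.g. old covers fragments 50-60 and new covers 50-61: the new fragments must still inherit the old playlist's sliding start offset. The re-basing, sketched standalone (fragment objects reduced to a start field):

// sketch: shift new fragment start times by the old playlist's sliding offset
function adjustSliding(oldfragments, newfragments, delta) {
  if (delta >= 0 && delta < oldfragments.length) {
    var sliding = oldfragments[delta].start;
    for (var i = 0; i < newfragments.length; i++) {
      newfragments[i].start += sliding;
    }
  }
}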

View file

@@ -13,7 +13,7 @@ import CapLevelController from './controller/cap-level-controller';
import StreamController from './controller/stream-controller';
import LevelController from './controller/level-controller';
import TimelineController from './controller/timeline-controller';
-//import FPSController from './controller/fps-controller';
+import FPSController from './controller/fps-controller';
import {logger, enableLogs} from './utils/logger';
import XhrLoader from './utils/xhr-loader';
import EventEmitter from 'events';
@@ -21,6 +21,11 @@ import KeyLoader from './loader/key-loader';
class Hls {
+  static get version() {
+    // replaced with browserify-versionify transform
+    return '__VERSION__';
+  }
static isSupported() {
return (window.MediaSource && window.MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'));
}
@@ -42,6 +47,7 @@
Hls.defaultConfig = {
autoStartLoad: true,
debug: false,
+    capLevelOnFPSDrop: false,
capLevelToPlayerSize: false,
maxBufferLength: 30,
maxBufferSize: 60 * 1000 * 1000,
@@ -67,8 +73,8 @@
fragLoadingRetryDelay: 1000,
fragLoadingLoopThreshold: 3,
startFragPrefetch : false,
-    // fpsDroppedMonitoringPeriod: 5000,
-    // fpsDroppedMonitoringThreshold: 0.2,
+    fpsDroppedMonitoringPeriod: 5000,
+    fpsDroppedMonitoringThreshold: 0.2,
appendErrorMaxRetry: 3,
loader: XhrLoader,
fLoader: undefined,
@@ -76,6 +82,7 @@
abrController : AbrController,
bufferController : BufferController,
capLevelController : CapLevelController,
+    fpsController: FPSController,
streamController: StreamController,
timelineController: TimelineController,
enableCEA708Captions: true,
@@ -129,10 +136,10 @@
this.abrController = new config.abrController(this);
this.bufferController = new config.bufferController(this);
this.capLevelController = new config.capLevelController(this);
+    this.fpsController = new config.fpsController(this);
this.streamController = new config.streamController(this);
this.timelineController = new config.timelineController(this);
this.keyLoader = new KeyLoader(this);
-    //this.fpsController = new FPSController(this);
}
destroy() {
@@ -145,10 +152,10 @@
this.abrController.destroy();
this.bufferController.destroy();
this.capLevelController.destroy();
+    this.fpsController.destroy();
this.streamController.destroy();
this.timelineController.destroy();
-    this.keyLoader.destroy();
-    //this.fpsController.destroy();
+    this.keyLoader.destroy();
this.url = null;
this.observer.removeAllListeners();
}

View file

@@ -51,7 +51,7 @@ class FragmentLoader extends EventHandler {
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_TIMEOUT, fatal: false, frag: this.frag});
}
-  loadprogress(event, stats) {
+  loadprogress(stats) {
this.frag.loaded = stats.loaded;
this.hls.trigger(Event.FRAG_LOAD_PROGRESS, {frag: this.frag, stats: stats});
}

View file

@@ -211,6 +211,7 @@ class PlaylistLoader extends EventHandler {
totalduration-=frag.duration;
}
level.totalduration = totalduration;
+    level.averagetargetduration = totalduration / level.fragments.length;
level.endSN = currentSN - 1;
return level;
}
@@ -224,7 +225,8 @@
hls = this.hls,
levels;
// responseURL not supported on some browsers (it is used to detect URL redirection)
-    if (url === undefined) {
+    // data-uri mode also not supported (but no need to detect redirection)
+    if (url === undefined || url.indexOf('data:') === 0) {
// fallback to initial URL
url = this.url;
}

View file

@@ -132,135 +132,164 @@ class MP4Remuxer {
}
remuxVideo(track, timeOffset, contiguous) {
-    var view,
-        offset = 8,
+    var offset = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
-        avcSample,
-        mp4Sample,
-        mp4SampleLength,
-        unit,
+        mp4SampleDuration,
mdat, moof,
-        firstPTS, firstDTS, lastDTS,
-        pts, dts, ptsnorm, dtsnorm,
-        flags,
-        samples = [];
+        firstPTS, firstDTS,
+        nextDTS,
+        lastPTS, lastDTS,
+        inputSamples = track.samples,
+        outputSamples = [];
+    // PTS is coded on 33bits, and can loop from -2^32 to 2^32
+    // PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
+    let nextAvcDts;
+    if (contiguous) {
+      // if parsed fragment is contiguous with last one, let's use last DTS value as reference
+      nextAvcDts = this.nextAvcDts;
+    } else {
+      // if not contiguous, let's use target timeOffset
+      nextAvcDts = timeOffset*pesTimeScale;
+    }
+    // compute first DTS and last DTS, normalize them against reference value
+    let sample = inputSamples[0];
+    firstDTS = Math.max(this._PTSNormalize(sample.dts,nextAvcDts) - this._initDTS,0);
+    firstPTS = Math.max(this._PTSNormalize(sample.pts,nextAvcDts) - this._initDTS,0);
+    // check timestamp continuity accross consecutive fragments (this is to remove inter-fragment gap/hole)
+    let delta = Math.round((firstDTS - nextAvcDts) / 90);
+    // if fragment are contiguous, or if there is a huge delta (more than 10s) between expected PTS and sample PTS
+    if (contiguous || Math.abs(delta) > 10000) {
+      if (delta) {
+        if (delta > 1) {
+          logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
+        } else if (delta < -1) {
+          logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
+        }
+        // remove hole/gap : set DTS to next expected DTS
+        firstDTS = nextAvcDts;
+        inputSamples[0].dts = firstDTS + this._initDTS;
+        // offset PTS as well, ensure that PTS is smaller or equal than new DTS
+        firstPTS = Math.max(firstPTS - delta, nextAvcDts);
+        inputSamples[0].pts = firstPTS + this._initDTS;
+        logger.log(`Video/PTS/DTS adjusted: ${firstPTS}/${firstDTS},delta:${delta}`);
+      }
+    }
+    nextDTS = firstDTS;
+    // compute lastPTS/lastDTS
+    sample = inputSamples[inputSamples.length-1];
+    lastDTS = Math.max(this._PTSNormalize(sample.dts,nextAvcDts) - this._initDTS,0);
+    lastPTS = Math.max(this._PTSNormalize(sample.pts,nextAvcDts) - this._initDTS,0);
+    lastPTS = Math.max(lastPTS, lastDTS);
+    let vendor = navigator.vendor, userAgent = navigator.userAgent,
+        isSafari = vendor && vendor.indexOf('Apple') > -1 && userAgent && !userAgent.match('CriOS');
+    // on Safari let's signal the same sample duration for all samples
+    // sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
+    // set this constant duration as being the avg delta between consecutive DTS.
+    if (isSafari) {
+      mp4SampleDuration = Math.round((lastDTS-firstDTS)/(pes2mp4ScaleFactor*(inputSamples.length-1)));
+    }
+    // normalize all PTS/DTS now ...
+    for (let i = 0; i < inputSamples.length; i++) {
+      let sample = inputSamples[i];
+      if (isSafari) {
+        // sample DTS is computed using a constant decoding offset (mp4SampleDuration) between samples
+        sample.dts = firstDTS + i*pes2mp4ScaleFactor*mp4SampleDuration;
+      } else {
+        // ensure sample monotonic DTS
+        sample.dts = Math.max(this._PTSNormalize(sample.dts, nextAvcDts) - this._initDTS,firstDTS);
+        // ensure dts is a multiple of scale factor to avoid rounding issues
+        sample.dts = Math.round(sample.dts/pes2mp4ScaleFactor)*pes2mp4ScaleFactor;
+      }
+      // we normalize PTS against nextAvcDts, we also substract initDTS (some streams don't start @ PTS O)
+      // and we ensure that computed value is greater or equal than sample DTS
+      sample.pts = Math.max(this._PTSNormalize(sample.pts,nextAvcDts) - this._initDTS, sample.dts);
+      // ensure pts is a multiple of scale factor to avoid rounding issues
+      sample.pts = Math.round(sample.pts/pes2mp4ScaleFactor)*pes2mp4ScaleFactor;
+    }
/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mpdat type) */
mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
-    view = new DataView(mdat.buffer);
+    let view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
-    while (track.samples.length) {
-      avcSample = track.samples.shift();
-      mp4SampleLength = 0;
+    for (let i = 0; i < inputSamples.length; i++) {
+      let avcSample = inputSamples[i],
+          mp4SampleLength = 0,
+          compositionTimeOffset;
// convert NALU bitstream to MP4 format (prepend NALU with size field)
while (avcSample.units.units.length) {
-        unit = avcSample.units.units.shift();
+        let unit = avcSample.units.units.shift();
view.setUint32(offset, unit.data.byteLength);
offset += 4;
mdat.set(unit.data, offset);
offset += unit.data.byteLength;
mp4SampleLength += 4 + unit.data.byteLength;
}
-      pts = avcSample.pts - this._initDTS;
-      dts = avcSample.dts - this._initDTS;
-      // ensure DTS is not bigger than PTS
-      dts = Math.min(pts,dts);
-      //logger.log(`Video/PTS/DTS:${Math.round(pts/90)}/${Math.round(dts/90)}`);
-      // if not first AVC sample of video track, normalize PTS/DTS with previous sample value
-      // and ensure that sample duration is positive
-      if (lastDTS !== undefined) {
-        ptsnorm = this._PTSNormalize(pts, lastDTS);
-        dtsnorm = this._PTSNormalize(dts, lastDTS);
-        var sampleDuration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
-        if (sampleDuration <= 0) {
-          logger.log(`invalid sample duration at PTS/DTS: ${avcSample.pts}/${avcSample.dts}:${sampleDuration}`);
-          sampleDuration = 1;
-        }
-        mp4Sample.duration = sampleDuration;
-      } else {
-        let nextAvcDts, delta;
-        if (contiguous) {
-          nextAvcDts = this.nextAvcDts;
+      if(!isSafari) {
+        // expected sample duration is the Decoding Timestamp diff of consecutive samples
+        if (i < inputSamples.length - 1) {
+          mp4SampleDuration = inputSamples[i+1].dts - avcSample.dts;
} else {
-          nextAvcDts = timeOffset*pesTimeScale;
+          // last sample duration is same than previous one
+          mp4SampleDuration = avcSample.dts - inputSamples[i-1].dts;
}
-        // first AVC sample of video track, normalize PTS/DTS
-        ptsnorm = this._PTSNormalize(pts, nextAvcDts);
-        dtsnorm = this._PTSNormalize(dts, nextAvcDts);
-        delta = Math.round((dtsnorm - nextAvcDts) / 90);
-        // if fragment are contiguous, or if there is a huge delta (more than 10s) between expected PTS and sample PTS
-        if (contiguous || Math.abs(delta) > 10000) {
-          if (delta) {
-            if (delta > 1) {
-              logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
-            } else if (delta < -1) {
-              logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
-            }
-            // set DTS to next DTS
-            dtsnorm = nextAvcDts;
-            // offset PTS as well, ensure that PTS is smaller or equal than new DTS
-            ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
-            logger.log(`Video/PTS/DTS adjusted: ${ptsnorm}/${dtsnorm},delta:${delta}`);
-          }
-        }
-        // remember first PTS of our avcSamples, ensure value is positive
-        firstPTS = Math.max(0, ptsnorm);
-        firstDTS = Math.max(0, dtsnorm);
+        mp4SampleDuration /= pes2mp4ScaleFactor;
+        compositionTimeOffset = Math.round((avcSample.pts - avcSample.dts) / pes2mp4ScaleFactor);
+      } else {
+        compositionTimeOffset = Math.max(0,mp4SampleDuration*Math.round((avcSample.pts - avcSample.dts)/(pes2mp4ScaleFactor*mp4SampleDuration)));
+      }
-      //console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)}');
-      mp4Sample = {
+      outputSamples.push({
size: mp4SampleLength,
-        duration: 0,
-        cts: (ptsnorm - dtsnorm) / pes2mp4ScaleFactor,
+        // constant duration
+        duration: mp4SampleDuration,
+        cts: compositionTimeOffset,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
-          degradPrio: 0
+          degradPrio: 0,
+          dependsOn : avcSample.key ? 2 : 1,
+          isNonSync : avcSample.key ? 0 : 1
}
-      };
-      flags = mp4Sample.flags;
-      if (avcSample.key === true) {
-        // the current sample is a key frame
-        flags.dependsOn = 2;
-        flags.isNonSync = 0;
-      } else {
-        flags.dependsOn = 1;
-        flags.isNonSync = 1;
-      }
-      samples.push(mp4Sample);
-      lastDTS = dtsnorm;
+      });
}
-    var lastSampleDuration = 0;
-    if (samples.length >= 2) {
-      lastSampleDuration = samples[samples.length - 2].duration;
-      mp4Sample.duration = lastSampleDuration;
-    }
-    // next AVC sample DTS should be equal to last sample DTS + last sample duration
-    this.nextAvcDts = dtsnorm + lastSampleDuration * pes2mp4ScaleFactor;
+    // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
+    this.nextAvcDts = lastDTS + mp4SampleDuration*pes2mp4ScaleFactor;
track.len = 0;
track.nbNalu = 0;
-    if(samples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
-      flags = samples[0].flags;
+    if(outputSamples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
+      let flags = outputSamples[0].flags;
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412
flags.dependsOn = 2;
flags.isNonSync = 0;
}
-    track.samples = samples;
+    track.samples = outputSamples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
data1: moof,
data2: mdat,
startPTS: firstPTS / pesTimeScale,
-      endPTS: (ptsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
+      endPTS: (lastPTS + pes2mp4ScaleFactor * mp4SampleDuration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: this.nextAvcDts / pesTimeScale,
type: 'video',
-      nb: samples.length
+      nb: outputSamples.length
});
}
@@ -299,7 +328,7 @@ class MP4Remuxer {
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if(Math.abs(mp4Sample.duration - expectedSampleDuration) > expectedSampleDuration/10) {
// more than 10% diff between sample duration and expectedSampleDuration .... lets log that
-      logger.log(`invalid AAC sample duration at PTS ${Math.round(pts/90)},should be 1024,found :${Math.round(mp4Sample.duration*track.audiosamplerate/track.timescale)}`);
+      logger.trace(`invalid AAC sample duration at PTS ${Math.round(pts/90)},should be 1024,found :${Math.round(mp4Sample.duration*track.audiosamplerate/track.timescale)}`);
}
// always adjust sample duration to avoid av sync issue
mp4Sample.duration = expectedSampleDuration;
@@ -327,7 +356,7 @@ class MP4Remuxer {
track.len -= unit.byteLength;
continue;
}
-        // set PTS/DTS to next PTS/DTS
+        // set PTS/DTS to expected PTS/DTS
ptsnorm = dtsnorm = nextAacPts;
}
}
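The remuxer rewrite hinges on normalizing every PTS/DTS in a first pass: MPEG-TS timestamps are 33-bit counters that wrap, so each raw value is pulled onto the same timeline as the expected next DTS before durations are computed. A sketch in the spirit of the existing _PTSNormalize helper (not copied from this diff):

// sketch: undo 33-bit PTS wraparound relative to a reference timestamp
function ptsNormalize(value, reference) {
  if (reference === undefined) {
    return value;
  }
  // 2^33 = 8589934592; a jump bigger than 2^32 means the counter wrapped
  var offset = reference < value ? -8589934592 : 8589934592;
  while (Math.abs(value - reference) > 4294967296) {
    value += offset;
  }
  return value;
}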

View file

@@ -112,8 +112,11 @@ class XhrLoader {
stats.tfirst = performance.now();
}
stats.loaded = event.loaded;
+      if (event.lengthComputable) {
+        stats.total = event.total;
+      }
if (this.onProgress) {
-        this.onProgress(event, stats);
+        this.onProgress(stats);
}
}
}
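stats.total is now only populated when the browser flags the progress event as lengthComputable, and onProgress callbacks (see the fragment-loader change above) receive stats alone. A sketch of a consumer that tolerates the missing total (the logging is illustrative):

function onProgress(stats) {
  if (stats.total) {
    console.log('loaded ' + stats.loaded + ' of ' + stats.total + ' bytes');
  } else {
    console.log('loaded ' + stats.loaded + ' bytes, total unknown');
  }
}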