1
0
Fork 0
mirror of https://github.com/jellyfin/jellyfin-web synced 2025-03-30 19:56:21 +00:00

update login

This commit is contained in:
Luke Pulverenti 2016-02-24 22:15:07 -05:00
parent 3c9e6e0374
commit 08122a5e93
36 changed files with 2845 additions and 2228 deletions

View file

@ -20,7 +20,10 @@ class AbrController extends EventHandler {
onFragLoadProgress(data) {
var stats = data.stats;
if (stats.aborted === undefined) {
// only update stats on the first load of a fragment:
// if the same frag is loaded multiple times, it might be served from the browser cache
// and load quickly, leading to a wrong bandwidth estimation
if (stats.aborted === undefined && data.frag.loadCounter === 1) {
this.lastfetchduration = (performance.now() - stats.trequest) / 1000;
this.lastfetchlevel = data.frag.level;
this.lastbw = (stats.loaded * 8) / this.lastfetchduration;

View file

@ -0,0 +1,346 @@
/*
* Buffer Controller
*/
import Event from '../events';
import EventHandler from '../event-handler';
import {logger} from '../utils/logger';
import {ErrorTypes, ErrorDetails} from '../errors';
class BufferController extends EventHandler {
  /*
   * Bridges hls.js events to the Media Source Extensions API: owns the
   * MediaSource, creates one SourceBuffer per elementary track, appends
   * demuxed segments and flushes buffered ranges on demand.
   */
  constructor(hls) {
    super(hls,
      Event.MEDIA_ATTACHING,
      Event.MEDIA_DETACHING,
      Event.BUFFER_RESET,
      Event.BUFFER_APPENDING,
      Event.BUFFER_CODECS,
      Event.BUFFER_EOS,
      Event.BUFFER_FLUSHING);
    // Source Buffer listeners, bound once so they can be removed later
    this.onsbue = this.onSBUpdateEnd.bind(this);
    this.onsbe = this.onSBUpdateError.bind(this);
  }

  destroy() {
    EventHandler.prototype.destroy.call(this);
  }

  // create the MediaSource and link it to the media element via an object URL
  onMediaAttaching(data) {
    var media = this.media = data.media;
    // setup the media source
    var ms = this.mediaSource = new MediaSource();
    // Media Source listeners
    this.onmso = this.onMediaSourceOpen.bind(this);
    this.onmse = this.onMediaSourceEnded.bind(this);
    this.onmsc = this.onMediaSourceClose.bind(this);
    ms.addEventListener('sourceopen', this.onmso);
    ms.addEventListener('sourceended', this.onmse);
    ms.addEventListener('sourceclose', this.onmsc);
    // link video and media Source
    media.src = URL.createObjectURL(ms);
  }

  // tear down the MediaSource and unlink it from the media element
  onMediaDetaching() {
    var ms = this.mediaSource;
    if (ms) {
      if (ms.readyState === 'open') {
        try {
          // endOfStream could trigger an exception if any sourcebuffer is in updating state
          // we don't really care about checking sourcebuffer state here,
          // as we are anyway detaching the MediaSource
          // let's just avoid this exception to propagate
          ms.endOfStream();
        } catch(err) {
          logger.warn(`onMediaDetaching:${err.message} while calling endOfStream`);
        }
      }
      ms.removeEventListener('sourceopen', this.onmso);
      ms.removeEventListener('sourceended', this.onmse);
      ms.removeEventListener('sourceclose', this.onmsc);
      // unlink MediaSource from video tag
      this.media.src = '';
      this.mediaSource = null;
      this.media = null;
      this.pendingTracks = null;
    }
    this.onmso = this.onmse = this.onmsc = null;
    this.hls.trigger(Event.MEDIA_DETACHED);
  }

  onMediaSourceOpen() {
    logger.log('media source opened');
    this.hls.trigger(Event.MEDIA_ATTACHED, { media : this.media });
    // once received, don't listen anymore to sourceopen event
    this.mediaSource.removeEventListener('sourceopen', this.onmso);
    // if any buffer codecs arrived before the media was attached, handle them now
    var pendingTracks = this.pendingTracks;
    if (pendingTracks) {
      this.onBufferCodecs(pendingTracks);
      this.pendingTracks = null;
      this.doAppending();
    }
  }

  onMediaSourceClose() {
    logger.log('media source closed');
  }

  onMediaSourceEnded() {
    logger.log('media source ended');
  }

  // SourceBuffer 'updateend': resume any deferred flush/EOS, then keep appending
  onSBUpdateEnd() {
    if (this._needsFlush) {
      this.doFlush();
    }
    if (this._needsEos) {
      this.onBufferEos();
    }
    this.hls.trigger(Event.BUFFER_APPENDED);
    this.doAppending();
  }

  onSBUpdateError(event) {
    logger.error(`sourceBuffer error:${event}`);
    // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
    // this error might not always be fatal (it is fatal if decode error is set, in that case
    // it will be followed by a mediaElement error ...)
    this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPENDING_ERROR, fatal: false});
    // we don't need to do more than that, as according to the spec, updateend will be fired just after
  }

  // remove all source buffers and reset append/flush bookkeeping
  onBufferReset() {
    var sourceBuffer = this.sourceBuffer;
    if (sourceBuffer) {
      for(var type in sourceBuffer) {
        var sb = sourceBuffer[type];
        try {
          this.mediaSource.removeSourceBuffer(sb);
          sb.removeEventListener('updateend', this.onsbue);
          sb.removeEventListener('error', this.onsbe);
        } catch(err) {
          // sourcebuffer may already be removed or mediaSource closed: best-effort cleanup
        }
      }
      this.sourceBuffer = null;
    }
    this.flushRange = [];
    this.appended = 0;
  }

  // create one SourceBuffer per track; deferred if the media is not attached yet
  onBufferCodecs(tracks) {
    var sb,trackName,track, codec, mimeType;
    if (!this.media) {
      // media not attached yet: keep tracks aside, onMediaSourceOpen will replay them
      this.pendingTracks = tracks;
      return;
    }
    if (!this.sourceBuffer) {
      var sourceBuffer = {}, mediaSource = this.mediaSource;
      for (trackName in tracks) {
        track = tracks[trackName];
        // use levelCodec as first priority
        codec = track.levelCodec || track.codec;
        mimeType = `${track.container};codecs=${codec}`;
        logger.log(`creating sourceBuffer with mimeType:${mimeType}`);
        sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
        sb.addEventListener('updateend', this.onsbue);
        sb.addEventListener('error', this.onsbe);
      }
      this.sourceBuffer = sourceBuffer;
    }
  }

  // queue a segment ({type, data}) and try to append it right away
  onBufferAppending(data) {
    if (!this.segments) {
      this.segments = [ data ];
    } else {
      this.segments.push(data);
    }
    this.doAppending();
  }

  onBufferAppendFail(data) {
    logger.error(`sourceBuffer error:${data.event}`);
    // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
    // this error might not always be fatal (it is fatal if decode error is set, in that case
    // it will be followed by a mediaElement error ...)
    this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPENDING_ERROR, fatal: false, frag: this.fragCurrent});
  }

  // signal endOfStream() once no source buffer is updating; otherwise retry on updateend
  onBufferEos() {
    var sb = this.sourceBuffer, mediaSource = this.mediaSource;
    if (!mediaSource || mediaSource.readyState !== 'open') {
      return;
    }
    if (!((sb.audio && sb.audio.updating) || (sb.video && sb.video.updating))) {
      logger.log('all media data available, signal endOfStream() to MediaSource and stop loading fragment');
      // Notify the media element that it now has all of the media data
      mediaSource.endOfStream();
      this._needsEos = false;
    } else {
      this._needsEos = true;
    }
  }

  onBufferFlushing(data) {
    this.flushRange.push({start: data.startOffset, end: data.endOffset});
    // attempt flush immediately
    this.flushBufferCounter = 0;
    this.doFlush();
  }

  doFlush() {
    // loop through all buffer ranges to flush
    while(this.flushRange.length) {
      var range = this.flushRange[0];
      // flushBuffer will abort any buffer append in progress and flush Audio/Video Buffer
      if (this.flushBuffer(range.start, range.end)) {
        // range flushed, remove from flush array
        this.flushRange.shift();
        this.flushBufferCounter = 0;
      } else {
        this._needsFlush = true;
        // avoid looping, wait for SB update end to retrigger a flush
        return;
      }
    }
    if (this.flushRange.length === 0) {
      // everything flushed
      this._needsFlush = false;
      // let's recompute this.appended, which is used to avoid flush looping
      var appended = 0;
      var sourceBuffer = this.sourceBuffer;
      if (sourceBuffer) {
        for (var type in sourceBuffer) {
          appended += sourceBuffer[type].buffered.length;
        }
      }
      this.appended = appended;
      this.hls.trigger(Event.BUFFER_FLUSHED);
    }
  }

  // append the next pending segment; handles append errors and QuotaExceededError
  doAppending() {
    var hls = this.hls, sourceBuffer = this.sourceBuffer, segments = this.segments;
    if (sourceBuffer) {
      if (this.media.error) {
        // clear the instance queue (not just the local alias) and abort
        this.segments = [];
        logger.error('trying to append although a media error occurred, flush segment and abort');
        return;
      }
      for (var type in sourceBuffer) {
        if (sourceBuffer[type].updating) {
          // an append is already in progress; updateend will re-trigger doAppending
          return;
        }
      }
      // guard: this.segments may still be undefined when triggered from
      // onMediaSourceOpen / onSBUpdateEnd before any BUFFER_APPENDING event
      if (segments && segments.length) {
        var segment = segments.shift();
        try {
          sourceBuffer[segment.type].appendBuffer(segment.data);
          this.appendError = 0;
          this.appended++;
        } catch(err) {
          // in case any error occurred while appending, put back segment in segments table
          logger.error(`error while trying to append buffer:${err.message}`);
          segments.unshift(segment);
          var event = {type: ErrorTypes.MEDIA_ERROR};
          if(err.code !== 22) {
            this.appendError = this.appendError ? this.appendError + 1 : 1;
            event.details = ErrorDetails.BUFFER_APPEND_ERROR;
            event.frag = this.fragCurrent;
            /* with UHD content, we could get a loop of quota exceeded errors until
               the browser is able to evict some data from the sourcebuffer.
               retrying helps recovering from this
            */
            if (this.appendError > hls.config.appendErrorMaxRetry) {
              logger.log(`fail ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`);
              // give up: drop all pending segments and raise a fatal error
              this.segments = [];
              event.fatal = true;
              hls.trigger(Event.ERROR, event);
              return;
            } else {
              event.fatal = false;
              hls.trigger(Event.ERROR, event);
            }
          } else {
            // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
            // let's stop appending any segments, and report a BUFFER_FULL_ERROR error
            // (ErrorDetails declares BUFFER_FULL_ERROR, not BUFFER_FULL)
            this.segments = [];
            event.details = ErrorDetails.BUFFER_FULL_ERROR;
            hls.trigger(Event.ERROR,event);
          }
        }
      }
    }
  }

  /*
    flush specified buffered range,
    return true once range has been flushed.
    as sourceBuffer.remove() is asynchronous, flushBuffer will be retriggered on sourceBuffer update end
  */
  flushBuffer(startOffset, endOffset) {
    var sb, i, bufStart, bufEnd, flushStart, flushEnd;
    // safeguard to avoid infinite looping : don't try to flush more than the nb of appended segments
    if (this.flushBufferCounter < this.appended && this.sourceBuffer) {
      for (var type in this.sourceBuffer) {
        sb = this.sourceBuffer[type];
        if (!sb.updating) {
          for (i = 0; i < sb.buffered.length; i++) {
            bufStart = sb.buffered.start(i);
            bufEnd = sb.buffered.end(i);
            // workaround firefox not able to properly flush multiple buffered range.
            if (navigator.userAgent.toLowerCase().indexOf('firefox') !== -1 && endOffset === Number.POSITIVE_INFINITY) {
              flushStart = startOffset;
              flushEnd = endOffset;
            } else {
              flushStart = Math.max(bufStart, startOffset);
              flushEnd = Math.min(bufEnd, endOffset);
            }
            /* sometimes sourcebuffer.remove() does not flush
               the exact expected time range.
               to avoid rounding issues/infinite loop,
               only flush buffer range of length greater than 500ms.
            */
            if (Math.min(flushEnd,bufEnd) - flushStart > 0.5 ) {
              this.flushBufferCounter++;
              logger.log(`flush ${type} [${flushStart},${flushEnd}], of [${bufStart},${bufEnd}], pos:${this.media.currentTime}`);
              sb.remove(flushStart, flushEnd);
              return false;
            }
          }
        } else {
          // this will abort any appending in progress
          //sb.abort();
          logger.warn('cannot flush, sb updating in progress');
          return false;
        }
      }
    } else {
      logger.warn('abort flushing too many retries');
    }
    logger.log('buffer flushed');
    // everything flushed !
    return true;
  }
}
export default BufferController;

View file

@ -11,7 +11,7 @@ import ID3 from '../demux/id3';
this.observer = observer;
this.remuxerClass = remuxerClass;
this.remuxer = new this.remuxerClass(observer);
this._aacTrack = {type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._aacTrack = {container : 'audio/adts', type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
}
static probe(data) {
@ -83,7 +83,7 @@ import ID3 from '../demux/id3';
break;
}
}
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : [ { pts: pts, dts : pts, unit : id3.payload} ]}, timeOffset);
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : [ { pts: pts, dts : pts, unit : id3.payload} ]}, { samples: [] }, timeOffset);
}
destroy() {

View file

@ -6,12 +6,14 @@ import Event from '../events';
import {ErrorTypes, ErrorDetails} from '../errors';
import AACDemuxer from '../demux/aacdemuxer';
import TSDemuxer from '../demux/tsdemuxer';
import MP4Remuxer from '../remux/mp4-remuxer';
import PassThroughRemuxer from '../remux/passthrough-remuxer';
class DemuxerInline {
constructor(hls,remuxer) {
constructor(hls,typeSupported) {
this.hls = hls;
this.remuxer = remuxer;
this.typeSupported = typeSupported;
}
destroy() {
@ -24,15 +26,21 @@ class DemuxerInline {
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
var demuxer = this.demuxer;
if (!demuxer) {
var hls = this.hls;
// probe for content type
if (TSDemuxer.probe(data)) {
demuxer = this.demuxer = new TSDemuxer(this.hls,this.remuxer);
if (this.typeSupported.mp2t === true) {
demuxer = new TSDemuxer(hls,PassThroughRemuxer);
} else {
demuxer = new TSDemuxer(hls,MP4Remuxer);
}
} else if(AACDemuxer.probe(data)) {
demuxer = this.demuxer = new AACDemuxer(this.hls,this.remuxer);
demuxer = new AACDemuxer(hls,MP4Remuxer);
} else {
this.hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: 'no demux matching with content found'});
hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: 'no demux matching with content found'});
return;
}
this.demuxer = demuxer;
}
demuxer.push(data,audioCodec,videoCodec,timeOffset,cc,level,sn,duration);
}

View file

@ -6,7 +6,6 @@
import DemuxerInline from '../demux/demuxer-inline';
import Event from '../events';
import EventEmitter from 'events';
import MP4Remuxer from '../remux/mp4-remuxer';
var DemuxerWorker = function (self) {
// observer setup
@ -19,13 +18,13 @@ var DemuxerWorker = function (self) {
observer.removeListener(event, ...data);
};
self.addEventListener('message', function (ev) {
//console.log('demuxer cmd:' + ev.data.cmd);
switch (ev.data.cmd) {
var data = ev.data;
//console.log('demuxer cmd:' + data.cmd);
switch (data.cmd) {
case 'init':
self.demuxer = new DemuxerInline(observer,MP4Remuxer);
self.demuxer = new DemuxerInline(observer, data.typeSupported);
break;
case 'demux':
var data = ev.data;
self.demuxer.push(new Uint8Array(data.data), data.audioCodec, data.videoCodec, data.timeOffset, data.cc, data.level, data.sn, data.duration);
break;
default:
@ -33,31 +32,15 @@ var DemuxerWorker = function (self) {
}
});
// listen to events triggered by TS Demuxer
// listen to events triggered by Demuxer
observer.on(Event.FRAG_PARSING_INIT_SEGMENT, function(ev, data) {
var objData = {event: ev};
var objTransferable = [];
if (data.audioCodec) {
objData.audioCodec = data.audioCodec;
objData.audioMoov = data.audioMoov.buffer;
objData.audioChannelCount = data.audioChannelCount;
objTransferable.push(objData.audioMoov);
}
if (data.videoCodec) {
objData.videoCodec = data.videoCodec;
objData.videoMoov = data.videoMoov.buffer;
objData.videoWidth = data.videoWidth;
objData.videoHeight = data.videoHeight;
objTransferable.push(objData.videoMoov);
}
// pass moov as transferable object (no copy)
self.postMessage(objData,objTransferable);
self.postMessage({event: ev, tracks : data.tracks, unique : data.unique });
});
observer.on(Event.FRAG_PARSING_DATA, function(ev, data) {
var objData = {event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, moof: data.moof.buffer, mdat: data.mdat.buffer, nb: data.nb};
// pass moof/mdat data as transferable object (no copy)
self.postMessage(objData, [objData.moof, objData.mdat]);
var objData = {event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, data1: data.data1.buffer, data2: data.data2.buffer, nb: data.nb};
// pass data1/data2 as transferable object (no copy)
self.postMessage(objData, [objData.data1, objData.data2]);
});
observer.on(Event.FRAG_PARSED, function(event) {

View file

@ -2,13 +2,16 @@ import Event from '../events';
import DemuxerInline from '../demux/demuxer-inline';
import DemuxerWorker from '../demux/demuxer-worker';
import {logger} from '../utils/logger';
import MP4Remuxer from '../remux/mp4-remuxer';
import Decrypter from '../crypt/decrypter';
class Demuxer {
constructor(hls) {
this.hls = hls;
var typeSupported = {
mp4 : MediaSource.isTypeSupported('video/mp4'),
mp2t : hls.config.enableMP2TPassThrough && MediaSource.isTypeSupported('video/mp2t')
};
if (hls.config.enableWorker && (typeof(Worker) !== 'undefined')) {
logger.log('demuxing in webworker');
try {
@ -16,13 +19,13 @@ class Demuxer {
this.w = work(DemuxerWorker);
this.onwmsg = this.onWorkerMessage.bind(this);
this.w.addEventListener('message', this.onwmsg);
this.w.postMessage({cmd: 'init'});
this.w.postMessage({cmd: 'init', typeSupported : typeSupported});
} catch(err) {
logger.error('error while initializing DemuxerWorker, fallback on DemuxerInline');
this.demuxer = new DemuxerInline(hls,MP4Remuxer);
this.demuxer = new DemuxerInline(hls,typeSupported);
}
} else {
this.demuxer = new DemuxerInline(hls,MP4Remuxer);
this.demuxer = new DemuxerInline(hls,typeSupported);
}
this.demuxInitialized = true;
}
@ -56,7 +59,7 @@ class Demuxer {
if (this.decrypter == null) {
this.decrypter = new Decrypter(this.hls);
}
var localthis = this;
this.decrypter.decrypt(data, decryptdata.key, decryptdata.iv, function(decryptedData){
localthis.pushDecrypted(decryptedData, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
@ -67,47 +70,39 @@ class Demuxer {
}
onWorkerMessage(ev) {
//console.log('onWorkerMessage:' + ev.data.event);
switch(ev.data.event) {
var data = ev.data;
//console.log('onWorkerMessage:' + data.event);
switch(data.event) {
case Event.FRAG_PARSING_INIT_SEGMENT:
var obj = {};
if (ev.data.audioMoov) {
obj.audioMoov = new Uint8Array(ev.data.audioMoov);
obj.audioCodec = ev.data.audioCodec;
obj.audioChannelCount = ev.data.audioChannelCount;
}
if (ev.data.videoMoov) {
obj.videoMoov = new Uint8Array(ev.data.videoMoov);
obj.videoCodec = ev.data.videoCodec;
obj.videoWidth = ev.data.videoWidth;
obj.videoHeight = ev.data.videoHeight;
}
obj.tracks = data.tracks;
obj.unique = data.unique;
this.hls.trigger(Event.FRAG_PARSING_INIT_SEGMENT, obj);
break;
case Event.FRAG_PARSING_DATA:
this.hls.trigger(Event.FRAG_PARSING_DATA,{
moof: new Uint8Array(ev.data.moof),
mdat: new Uint8Array(ev.data.mdat),
startPTS: ev.data.startPTS,
endPTS: ev.data.endPTS,
startDTS: ev.data.startDTS,
endDTS: ev.data.endDTS,
type: ev.data.type,
nb: ev.data.nb
data1: new Uint8Array(data.data1),
data2: new Uint8Array(data.data2),
startPTS: data.startPTS,
endPTS: data.endPTS,
startDTS: data.startDTS,
endDTS: data.endDTS,
type: data.type,
nb: data.nb
});
break;
case Event.FRAG_PARSING_METADATA:
this.hls.trigger(Event.FRAG_PARSING_METADATA, {
samples: ev.data.samples
samples: data.samples
});
break;
case Event.FRAG_PARSING_USERDATA:
this.hls.trigger(Event.FRAG_PARSING_USERDATA, {
samples: ev.data.samples
samples: data.samples
});
break;
default:
this.hls.trigger(ev.data.event, ev.data.data);
this.hls.trigger(data.event, data.data);
break;
}
}

View file

@ -249,7 +249,7 @@ class ExpGolomb {
let sarRatio;
const aspectRatioIdc = this.readUByte();
switch (aspectRatioIdc) {
//case 1: sarRatio = [1,1]; break;
case 1: sarRatio = [1,1]; break;
case 2: sarRatio = [12,11]; break;
case 3: sarRatio = [10,11]; break;
case 4: sarRatio = [16,11]; break;
@ -276,7 +276,7 @@ class ExpGolomb {
}
}
return {
width: (((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale,
width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - ((frameMbsOnlyFlag? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset))
};
}

View file

@ -23,7 +23,6 @@
this.remuxerClass = remuxerClass;
this.lastCC = 0;
this.remuxer = new this.remuxerClass(observer);
this._userData = [];
}
static probe(data) {
@ -40,8 +39,8 @@
this._pmtId = -1;
this.lastAacPTS = null;
this.aacOverFlow = null;
this._avcTrack = {type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0};
this._aacTrack = {type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._avcTrack = {container : 'video/mp2t', type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0};
this._aacTrack = {container : 'video/mp2t', type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._id3Track = {type: 'id3', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._txtTrack = {type: 'text', id: -1, sequenceNumber: 0, samples: [], len: 0};
this.remuxer.switchLevel();
@ -55,7 +54,9 @@
// feed incoming data to the front of the parsing pipeline
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
var avcData, aacData, id3Data,
start, len = data.length, stt, pid, atf, offset;
start, len = data.length, stt, pid, atf, offset,
codecsOnly = this.remuxer.passthrough;
this.audioCodec = audioCodec;
this.videoCodec = videoCodec;
this.timeOffset = timeOffset;
@ -108,6 +109,15 @@
if (stt) {
if (avcData) {
this._parseAVCPES(this._parsePES(avcData));
if (codecsOnly) {
// if we have video codec info AND
// if audio PID is undefined OR if we have audio codec info,
// we have all codec info !
if (this._avcTrack.codec && (aacId === -1 || this._aacTrack.codec)) {
this.remux(data);
return;
}
}
}
avcData = {data: [], size: 0};
}
@ -119,6 +129,15 @@
if (stt) {
if (aacData) {
this._parseAACPES(this._parsePES(aacData));
if (codecsOnly) {
// here we know that we have audio codec info
// if video PID is undefined OR if we have video codec info,
// we have all codec infos !
if (this._aacTrack.codec && (avcId === -1 || this._avcTrack.codec)) {
this.remux(data);
return;
}
}
}
aacData = {data: [], size: 0};
}
@ -166,11 +185,11 @@
if (id3Data) {
this._parseID3PES(this._parsePES(id3Data));
}
this.remux();
this.remux(null);
}
remux() {
this.remuxer.remux(this._aacTrack, this._avcTrack, this._id3Track, this._txtTrack, this.timeOffset, this.contiguous);
remux(data) {
this.remuxer.remux(this._aacTrack, this._avcTrack, this._id3Track, this._txtTrack, this.timeOffset, this.contiguous, data);
}
destroy() {
@ -223,9 +242,9 @@
}
_parsePES(stream) {
var i = 0, frag, pesFlags, pesPrefix, pesLen, pesHdrLen, pesData, pesPts, pesDts, payloadStartOffset;
var i = 0, frag, pesFlags, pesPrefix, pesLen, pesHdrLen, pesData, pesPts, pesDts, payloadStartOffset, data = stream.data;
//retrieve PTS/DTS from first fragment
frag = stream.data[0];
frag = data[0];
pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2];
if (pesPrefix === 1) {
pesLen = (frag[4] << 8) + frag[5];
@ -261,16 +280,27 @@
}
pesHdrLen = frag[8];
payloadStartOffset = pesHdrLen + 9;
// trim PES header
stream.data[0] = stream.data[0].subarray(payloadStartOffset);
stream.size -= payloadStartOffset;
//reassemble PES packet
pesData = new Uint8Array(stream.size);
// reassemble the packet
while (stream.data.length) {
frag = stream.data.shift();
while (data.length) {
frag = data.shift();
var len = frag.byteLength;
if (payloadStartOffset) {
if (payloadStartOffset > len) {
// trim full frag if PES header bigger than frag
payloadStartOffset-=len;
continue;
} else {
// trim partial frag if PES header smaller than frag
frag = frag.subarray(payloadStartOffset);
len-=payloadStartOffset;
payloadStartOffset = 0;
}
}
pesData.set(frag, i);
i += frag.byteLength;
i+=len;
}
return {data: pesData, pts: pesPts, dts: pesDts, len: pesLen};
} else {

View file

@ -39,5 +39,7 @@ export const ErrorDetails = {
// Identifier for a buffer appending error event - data: appending error description
BUFFER_APPENDING_ERROR: 'bufferAppendingError',
// Identifier for a buffer stalled error event
BUFFER_STALLED_ERROR: 'bufferStalledError'
BUFFER_STALLED_ERROR: 'bufferStalledError',
// Identifier for a buffer full error event
BUFFER_FULL_ERROR: 'bufferFullError'
};

View file

@ -7,6 +7,20 @@ module.exports = {
MEDIA_DETACHING: 'hlsMediaDetaching',
// fired when MediaSource has been detached from media element - data: { }
MEDIA_DETACHED: 'hlsMediaDetached',
// fired when the buffer is going to be reset
BUFFER_RESET: 'hlsBufferReset',
// fired when we know about the codecs that we need buffers for to push into - data: {tracks : { container, codec, levelCodec, initSegment, metadata }}
BUFFER_CODECS: 'hlsBufferCodecs',
// fired when we append a segment to the buffer - data: { segment: segment object }
BUFFER_APPENDING: 'hlsBufferAppending',
// fired when we are done with appending a media segment to the buffer
BUFFER_APPENDED: 'hlsBufferAppended',
// fired when the stream is finished and we want to notify the media buffer that there will be no more data
BUFFER_EOS: 'hlsBufferEos',
// fired when the media buffer should be flushed - data {startOffset, endOffset}
BUFFER_FLUSHING: 'hlsBufferFlushing',
// fired when the media has been flushed
BUFFER_FLUSHED: 'hlsBufferFlushed',
// fired to signal that a manifest loading starts - data: { url : manifestURL}
MANIFEST_LOADING: 'hlsManifestLoading',
// fired after manifest has been loaded - data: { levels : [available quality levels] , url : manifestURL, stats : { trequest, tfirst, tload, mtime}}
@ -37,7 +51,7 @@ module.exports = {
FRAG_PARSING_USERDATA: 'hlsFragParsingUserdata',
// fired when parsing id3 is completed - data: { samples : [ id3 samples pes ] }
FRAG_PARSING_METADATA: 'hlsFragParsingMetadata',
// fired when moof/mdat have been extracted from fragment - data: { moof : moof MP4 box, mdat : mdat MP4 box}
// fired when data have been extracted from fragment - data: { data1 : moof MP4 box or TS fragments, data2 : mdat MP4 box or null}
FRAG_PARSING_DATA: 'hlsFragParsingData',
// fired when fragment parsing is completed - data: undefined
FRAG_PARSED: 'hlsFragParsed',

View file

@ -8,7 +8,8 @@ import {ErrorTypes, ErrorDetails} from './errors';
import PlaylistLoader from './loader/playlist-loader';
import FragmentLoader from './loader/fragment-loader';
import AbrController from './controller/abr-controller';
import MSEMediaController from './controller/mse-media-controller';
import BufferController from './controller/buffer-controller';
import StreamController from './controller/stream-controller';
import LevelController from './controller/level-controller';
import TimelineController from './controller/timeline-controller';
//import FPSController from './controller/fps-controller';
@ -59,6 +60,7 @@ class Hls {
fragLoadingMaxRetry: 6,
fragLoadingRetryDelay: 1000,
fragLoadingLoopThreshold: 3,
startFragPrefetch : false,
// fpsDroppedMonitoringPeriod: 5000,
// fpsDroppedMonitoringThreshold: 0.2,
appendErrorMaxRetry: 3,
@ -66,9 +68,11 @@ class Hls {
fLoader: undefined,
pLoader: undefined,
abrController : AbrController,
mediaController: MSEMediaController,
bufferController : BufferController,
streamController: StreamController,
timelineController: TimelineController,
enableCEA708Captions: true
enableCEA708Captions: true,
enableMP2TPassThrough : false
};
}
return Hls.defaultConfig;
@ -107,7 +111,8 @@ class Hls {
this.fragmentLoader = new FragmentLoader(this);
this.levelController = new LevelController(this);
this.abrController = new config.abrController(this);
this.mediaController = new config.mediaController(this);
this.bufferController = new config.bufferController(this);
this.streamController = new config.streamController(this);
this.timelineController = new config.timelineController(this);
this.keyLoader = new KeyLoader(this);
//this.fpsController = new FPSController(this);
@ -120,7 +125,8 @@ class Hls {
this.playlistLoader.destroy();
this.fragmentLoader.destroy();
this.levelController.destroy();
this.mediaController.destroy();
this.bufferController.destroy();
this.streamController.destroy();
this.timelineController.destroy();
this.keyLoader.destroy();
//this.fpsController.destroy();
@ -149,12 +155,12 @@ class Hls {
startLoad() {
logger.log('startLoad');
this.mediaController.startLoad();
this.streamController.startLoad();
}
swapAudioCodec() {
logger.log('swapAudioCodec');
this.mediaController.swapAudioCodec();
this.streamController.swapAudioCodec();
}
recoverMediaError() {
@ -171,26 +177,26 @@ class Hls {
/** Return current playback quality level **/
get currentLevel() {
return this.mediaController.currentLevel;
return this.streamController.currentLevel;
}
/* set quality level immediately (-1 for automatic level selection) */
set currentLevel(newLevel) {
logger.log(`set currentLevel:${newLevel}`);
this.loadLevel = newLevel;
this.mediaController.immediateLevelSwitch();
this.streamController.immediateLevelSwitch();
}
/** Return next playback quality level (quality level of next fragment) **/
get nextLevel() {
return this.mediaController.nextLevel;
return this.streamController.nextLevel;
}
/* set quality level for next fragment (-1 for automatic level selection) */
set nextLevel(newLevel) {
logger.log(`set nextLevel:${newLevel}`);
this.levelController.manualLevel = newLevel;
this.mediaController.nextLevelSwitch();
this.streamController.nextLevelSwitch();
}
/** Return the quality level of current/last loaded fragment **/

View file

@ -0,0 +1,4 @@
// Entry-point shim: Babel compiles the ES6 `export default` syntax into an
// `exports.default` property instead of the plain function export we are
// used to in node/commonjs, so re-export the default here for consumers.
module.exports = require('./hls.js').default;

View file

@ -8,6 +8,10 @@ class DummyRemuxer {
this.observer = observer;
}
get passthrough() {
return false;
}
get timescale() {
return this.PES_TIMESCALE;
}

View file

@ -17,6 +17,10 @@ class MP4Remuxer {
this.MP4_TIMESCALE = this.PES_TIMESCALE / this.PES2MP4SCALEFACTOR;
}
get passthrough() {
return false;
}
get timescale() {
return this.MP4_TIMESCALE;
}
@ -61,62 +65,55 @@ class MP4Remuxer {
var observer = this.observer,
audioSamples = audioTrack.samples,
videoSamples = videoTrack.samples,
nbAudio = audioSamples.length,
nbVideo = videoSamples.length,
pesTimeScale = this.PES_TIMESCALE;
pesTimeScale = this.PES_TIMESCALE,
tracks = {},
data = { tracks : tracks, unique : false },
computePTSDTS = (this._initPTS === undefined),
initPTS, initDTS;
if(nbAudio === 0 && nbVideo === 0) {
if (computePTSDTS) {
initPTS = initDTS = Infinity;
}
if (audioTrack.config && audioSamples.length) {
tracks.audio = {
container : 'audio/mp4',
codec : audioTrack.codec,
initSegment : MP4.initSegment([audioTrack]),
metadata : {
channelCount : audioTrack.channelCount
}
};
if (computePTSDTS) {
// remember first PTS of this demuxing context. for audio, PTS + DTS ...
initPTS = initDTS = audioSamples[0].pts - pesTimeScale * timeOffset;
}
}
if (videoTrack.sps && videoTrack.pps && videoSamples.length) {
tracks.video = {
container : 'video/mp4',
codec : videoTrack.codec,
initSegment : MP4.initSegment([videoTrack]),
metadata : {
width : videoTrack.width,
height : videoTrack.height
}
};
if (computePTSDTS) {
initPTS = Math.min(initPTS,videoSamples[0].pts - pesTimeScale * timeOffset);
initDTS = Math.min(initDTS,videoSamples[0].dts - pesTimeScale * timeOffset);
}
}
if(!Object.keys(tracks)) {
observer.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'no audio/video samples found'});
} else if (nbVideo === 0) {
//audio only
if (audioTrack.config) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
audioMoov: MP4.initSegment([audioTrack]),
audioCodec : audioTrack.codec,
audioChannelCount : audioTrack.channelCount
});
this.ISGenerated = true;
}
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = audioSamples[0].pts - pesTimeScale * timeOffset;
this._initDTS = audioSamples[0].dts - pesTimeScale * timeOffset;
}
} else
if (nbAudio === 0) {
//video only
if (videoTrack.sps && videoTrack.pps) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
videoMoov: MP4.initSegment([videoTrack]),
videoCodec: videoTrack.codec,
videoWidth: videoTrack.width,
videoHeight: videoTrack.height
});
this.ISGenerated = true;
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = videoSamples[0].pts - pesTimeScale * timeOffset;
this._initDTS = videoSamples[0].dts - pesTimeScale * timeOffset;
}
}
} else {
//audio and video
if (audioTrack.config && videoTrack.sps && videoTrack.pps) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
audioMoov: MP4.initSegment([audioTrack]),
audioCodec: audioTrack.codec,
audioChannelCount: audioTrack.channelCount,
videoMoov: MP4.initSegment([videoTrack]),
videoCodec: videoTrack.codec,
videoWidth: videoTrack.width,
videoHeight: videoTrack.height
});
this.ISGenerated = true;
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = Math.min(videoSamples[0].pts, audioSamples[0].pts) - pesTimeScale * timeOffset;
this._initDTS = Math.min(videoSamples[0].dts, audioSamples[0].dts) - pesTimeScale * timeOffset;
}
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT,data);
this.ISGenerated = true;
if (computePTSDTS) {
this._initPTS = initPTS;
this._initDTS = initDTS;
}
}
}
@ -238,8 +235,8 @@ class MP4Remuxer {
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
data1: moof,
data2: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
@ -358,8 +355,8 @@ class MP4Remuxer {
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
data1: moof,
data2: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: this.nextAacPts / pesTimeScale,
startDTS: firstDTS / pesTimeScale,

View file

@ -0,0 +1,75 @@
/**
* passthrough remuxer
*/
import Event from '../events';
/**
 * Forwards already-muxed fragment payloads to the buffer untouched, instead
 * of transmuxing them. Fires FRAG_PARSING_INIT_SEGMENT once per level with a
 * description of the available audio/video tracks, then emits the raw payload
 * as a single FRAG_PARSING_DATA chunk.
 */
class PassThroughRemuxer {
  constructor(observer) {
    // event bus used to notify the controllers
    this.observer = observer;
    // true once the init-segment event has been fired for the current level
    this.ISGenerated = false;
  }

  get passthrough() {
    return true;
  }

  get timescale() {
    // nothing is remuxed, so there is no output timescale
    return 0;
  }

  destroy() {
  }

  insertDiscontinuity() {
  }

  switchLevel() {
    // a level switch invalidates the previously announced init segment
    this.ISGenerated = false;
  }

  /**
   * Pass the raw fragment data straight through.
   * @param {Object} audioTrack  track info (codec/container/channelCount)
   * @param {Object} videoTrack  track info (codec/container/width/height)
   * @param {Object} id3Track    unused by this remuxer
   * @param {Object} textTrack   unused by this remuxer
   * @param {number} timeOffset  fragment start time, used as PTS/DTS
   * @param {*}      rawData     untouched fragment payload
   */
  remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, rawData) {
    const observer = this.observer;
    // announce the tracks once per level before any data is forwarded
    if (!this.ISGenerated) {
      const tracks = {};
      const data = { tracks: tracks, unique: true };

      const videoCodec = videoTrack.codec;
      if (videoCodec) {
        tracks.video = {
          container: videoTrack.container,
          codec: videoCodec,
          metadata: {
            width: videoTrack.width,
            height: videoTrack.height
          }
        };
      }

      const audioCodec = audioTrack.codec;
      if (audioCodec) {
        tracks.audio = {
          container: audioTrack.container,
          codec: audioCodec,
          metadata: {
            channelCount: audioTrack.channelCount
          }
        };
      }

      this.ISGenerated = true;
      observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, data);
    }
    // hand the payload through unchanged, stamped with the fragment offset
    observer.trigger(Event.FRAG_PARSING_DATA, {
      data1: rawData,
      startPTS: timeOffset,
      startDTS: timeOffset,
      type: 'audiovideo',
      nb: 1
    });
  }
}

export default PassThroughRemuxer;

View file

@ -25,27 +25,28 @@ class CEA708Interpreter {
{
var VTTCue = window.VTTCue || window.TextTrackCue;
this.cue = new VTTCue(-1, -1, '');
this.cue.text = '';
this.cue.pauseOnExit = false;
var cue = this.cue = new VTTCue(-1, -1, '');
cue.text = '';
cue.pauseOnExit = false;
// make sure it doesn't show up before it's ready
this.startTime = Number.MAX_VALUE;
cue.startTime = Number.MAX_VALUE;
// show it 'forever' once we do show it
// (we'll set the end time once we know it later)
this.cue.endTime = Number.MAX_VALUE;
cue.endTime = Number.MAX_VALUE;
this.memory.push(this.cue);
this.memory.push(cue);
}
clear()
{
if (this._textTrack && this._textTrack.cues)
var textTrack = this._textTrack;
if (textTrack && textTrack.cues)
{
while (this._textTrack.cues.length > 0)
while (textTrack.cues.length > 0)
{
this._textTrack.removeCue(this._textTrack.cues[0]);
textTrack.removeCue(textTrack.cues[0]);
}
}
}
@ -59,15 +60,15 @@ class CEA708Interpreter {
var count = bytes[0] & 31;
var position = 2;
var byte, ccbyte1, ccbyte2, ccValid, ccType;
var tmpByte, ccbyte1, ccbyte2, ccValid, ccType;
for (var j=0; j<count; j++)
{
byte = bytes[position++];
tmpByte = bytes[position++];
ccbyte1 = 0x7F & bytes[position++];
ccbyte2 = 0x7F & bytes[position++];
ccValid = ((4 & byte) === 0 ? false : true);
ccType = (3 & byte);
ccValid = ((4 & tmpByte) === 0 ? false : true);
ccType = (3 & tmpByte);
if (ccbyte1 === 0 && ccbyte2 === 0)
{
@ -287,9 +288,9 @@ class CEA708Interpreter {
}
}
_fromCharCode(byte)
_fromCharCode(tmpByte)
{
switch (byte)
switch (tmpByte)
{
case 42:
return 'á';
@ -325,7 +326,7 @@ class CEA708Interpreter {
return '█';
default:
return String.fromCharCode(byte);
return String.fromCharCode(tmpByte);
}
}
@ -343,11 +344,11 @@ class CEA708Interpreter {
this._has708 = true;
}
for (var i=0; i<this.memory.length; i++)
for(let memoryItem of this.memory)
{
this.memory[i].startTime = timestamp;
this._textTrack.addCue(this.memory[i]);
this.display.push(this.memory[i]);
memoryItem.startTime = timestamp;
this._textTrack.addCue(memoryItem);
this.display.push(memoryItem);
}
this.memory = [];
@ -356,9 +357,9 @@ class CEA708Interpreter {
_clearActiveCues(timestamp)
{
for (var i=0; i<this.display.length; i++)
for (let displayItem of this.display)
{
this.display[i].endTime = timestamp;
displayItem.endTime = timestamp;
}
this.display = [];