mirror of https://github.com/jellyfin/jellyfin-web

update web components

Luke Pulverenti 2015-12-23 12:46:01 -05:00
parent 909402537a
commit fcdd2e4f4b
114 changed files with 1611 additions and 1238 deletions

View file

@@ -32,7 +32,7 @@ class LevelController {
}
onManifestLoaded(event, data) {
var levels0 = [], levels = [], bitrateStart, i, bitrateSet = {}, videoCodecFound = false, audioCodecFound = false;
var levels0 = [], levels = [], bitrateStart, i, bitrateSet = {}, videoCodecFound = false, audioCodecFound = false, hls = this.hls;
// regroup redundant level together
data.levels.forEach(level => {
@@ -65,31 +65,34 @@
}
// only keep level with supported audio/video codecs
levels0 = levels0.filter(function(level) {
levels = levels.filter(function(level) {
var checkSupported = function(codec) { return MediaSource.isTypeSupported(`video/mp4;codecs=${codec}`);};
var audioCodec = level.audioCodec, videoCodec = level.videoCodec;
return ((audioCodec && checkSupported(audioCodec)) || !audioCodec) &&
((videoCodec && checkSupported(videoCodec)) || !videoCodec);
return (!audioCodec || checkSupported(audioCodec)) &&
(!videoCodec || checkSupported(videoCodec));
});
// start bitrate is the first bitrate of the manifest
bitrateStart = levels[0].bitrate;
// sort level on bitrate
levels.sort(function (a, b) {
return a.bitrate - b.bitrate;
});
this._levels = levels;
// find index of first level in sorted levels
for (i = 0; i < levels.length; i++) {
if (levels[i].bitrate === bitrateStart) {
this._firstLevel = i;
logger.log(`manifest loaded,${levels.length} level(s) found, first bitrate:${bitrateStart}`);
break;
if(levels.length) {
// start bitrate is the first bitrate of the manifest
bitrateStart = levels[0].bitrate;
// sort level on bitrate
levels.sort(function (a, b) {
return a.bitrate - b.bitrate;
});
this._levels = levels;
// find index of first level in sorted levels
for (i = 0; i < levels.length; i++) {
if (levels[i].bitrate === bitrateStart) {
this._firstLevel = i;
logger.log(`manifest loaded,${levels.length} level(s) found, first bitrate:${bitrateStart}`);
break;
}
}
hls.trigger(Event.MANIFEST_PARSED, {levels: this._levels, firstLevel: this._firstLevel, stats: data.stats});
} else {
hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.MANIFEST_PARSING_ERROR, fatal: true, url: hls.url, reason: 'no compatible level found in manifest'});
}
this.hls.trigger(Event.MANIFEST_PARSED, {levels: this._levels, firstLevel: this._firstLevel, stats: data.stats});
return;
}
@@ -187,7 +190,8 @@ class LevelController {
}
/* try to switch to a redundant stream if any available.
* if no redundant stream available, emergency switch down (if in auto mode and current level not 0)
* otherwise, we cannot recover this network error ....
* otherwise, we cannot recover this network error ...
* don't raise FRAG_LOAD_ERROR and FRAG_LOAD_TIMEOUT as fatal, as it is handled by mediaController
*/
if (levelId !== undefined) {
level = this._levels[levelId];
@@ -203,7 +207,8 @@ class LevelController {
hls.abrController.nextAutoLevel = 0;
} else if(level && level.details && level.details.live) {
logger.warn(`level controller,${details} on live stream, discard`);
} else {
// FRAG_LOAD_ERROR and FRAG_LOAD_TIMEOUT are handled by mediaController
} else if (details !== ErrorDetails.FRAG_LOAD_ERROR && details !== ErrorDetails.FRAG_LOAD_TIMEOUT) {
logger.error(`cannot recover ${details} error`);
this._level = undefined;
// stopping live reloading timer if any
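With this change onManifestLoaded only proceeds when at least one level survives the codec filter; an empty result now raises a fatal MANIFEST_PARSING_ERROR instead of later throwing on levels[0].bitrate. A minimal sketch of how an embedding player might react to that error, assuming the usual hls.js error event API and an already attached Hls instance named hls:

    // sketch only: `hls` is assumed to be an Hls instance already attached to a media element
    hls.on(Hls.Events.ERROR, function (event, data) {
        if (data.fatal &&
            data.type === Hls.ErrorTypes.NETWORK_ERROR &&
            data.details === Hls.ErrorDetails.MANIFEST_PARSING_ERROR) {
            // raised by the new else branch above: no compatible level found in manifest
            hls.destroy(); // give up on this source, or switch to a fallback URL
        }
    });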

View file

@@ -391,26 +391,30 @@ class MSEMediaController {
this.appendError = 0;
} catch(err) {
// in case any error occured while appending, put back segment in mp4segments table
//logger.error(`error while trying to append buffer:${err.message},try appending later`);
logger.error(`error while trying to append buffer:${err.message},try appending later`);
this.mp4segments.unshift(segment);
if (this.appendError) {
this.appendError++;
} else {
this.appendError = 1;
}
var event = {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPEND_ERROR, frag: this.fragCurrent};
/* with UHD content, we could get loop of quota exceeded error until
browser is able to evict some data from sourcebuffer. retrying help recovering this
*/
if (this.appendError > this.config.appendErrorMaxRetry) {
logger.log(`fail ${this.config.appendErrorMaxRetry} times to append segment in sourceBuffer`);
event.fatal = true;
hls.trigger(Event.ERROR, event);
this.state = State.ERROR;
return;
} else {
event.fatal = false;
hls.trigger(Event.ERROR, event);
// just discard QuotaExceededError for now, and wait for the natural browser buffer eviction
//http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
if(err.code !== 22) {
if (this.appendError) {
this.appendError++;
} else {
this.appendError = 1;
}
var event = {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPEND_ERROR, frag: this.fragCurrent};
/* with UHD content, we could get loop of quota exceeded error until
browser is able to evict some data from sourcebuffer. retrying help recovering this
*/
if (this.appendError > this.config.appendErrorMaxRetry) {
logger.log(`fail ${this.config.appendErrorMaxRetry} times to append segment in sourceBuffer`);
event.fatal = true;
hls.trigger(Event.ERROR, event);
this.state = State.ERROR;
return;
} else {
event.fatal = false;
hls.trigger(Event.ERROR, event);
}
}
}
this.state = State.APPENDING;
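The reworked catch block treats DOMException code 22 (QuotaExceededError) as a transient condition: the error counter and BUFFER_APPEND_ERROR events only apply to other failures, while a full SourceBuffer is simply retried after the browser evicts data. A stripped-down sketch of that distinction, outside the controller (sourceBuffer and segmentData are placeholders):

    // illustrative only: separating QuotaExceededError from other append failures
    try {
        sourceBuffer.appendBuffer(segmentData);
    } catch (err) {
        if (err.code === 22) {
            // QuotaExceededError (DOMException code 22): buffer is full,
            // wait for natural eviction and try the same segment again later
        } else {
            // any other failure feeds appendError and may become a fatal BUFFER_APPEND_ERROR
        }
    }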
@@ -1000,6 +1004,7 @@ class MSEMediaController {
this.demuxer.push(data.payload, audioCodec, currentLevel.videoCodec, start, fragCurrent.cc, level, sn, duration, fragCurrent.decryptdata);
}
}
this.fragLoadError = 0;
}
onInitSegment(event, data) {
@@ -1095,6 +1100,24 @@ class MSEMediaController {
// abort fragment loading on errors
case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT:
var loadError = this.fragLoadError;
if(loadError) {
loadError++;
} else {
loadError=1;
}
if (loadError <= this.config.fragLoadingMaxRetry) {
this.fragLoadError = loadError;
// retry loading
this.state = State.IDLE;
} else {
logger.error(`mediaController: ${data.details} reaches max retry, redispatch as fatal ...`);
// redispatch same error but with fatal set to true
data.fatal = true;
this.hls.trigger(event, data);
this.state = State.ERROR;
}
break;
case ErrorDetails.FRAG_LOOP_LOADING_ERROR:
case ErrorDetails.LEVEL_LOAD_ERROR:
case ErrorDetails.LEVEL_LOAD_TIMEOUT:
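Together with the fragLoadError reset in the previous hunk, this moves fragment retry policy into MSEMediaController: each FRAG_LOAD_ERROR or FRAG_LOAD_TIMEOUT bumps a counter, the controller returns to IDLE to re-request the fragment while the counter stays within config.fragLoadingMaxRetry, and beyond that the same event is re-triggered with fatal set to true. A condensed sketch of the accounting (simplified, not the actual state machine):

    // simplified sketch of the retry bookkeeping added above
    function onFragLoadFailure(ctrl, event, data) {
        ctrl.fragLoadError = (ctrl.fragLoadError || 0) + 1;
        if (ctrl.fragLoadError <= ctrl.config.fragLoadingMaxRetry) {
            ctrl.state = State.IDLE;   // retry: the fragment will be requested again
        } else {
            data.fatal = true;         // out of retries: escalate the same error as fatal
            ctrl.hls.trigger(event, data);
            ctrl.state = State.ERROR;
        }
    }
    // a successful onFragLoaded clears the counter: ctrl.fragLoadError = 0;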

View file

@@ -79,7 +79,7 @@ import {ErrorTypes, ErrorDetails} from '../errors';
break;
}
}
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : []}, timeOffset);
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : [ { pts: pts, dts : pts, unit : id3.payload} ]}, timeOffset);
}
_ADTStoAudioConfig(data, offset, audioCodec) {

View file

@@ -41,6 +41,7 @@ import {logger} from '../utils/logger';
logger.warn('ID3 tag found, but no timestamp');
}
this._length = len;
this._payload = data.subarray(0,len);
}
return;
}
@@ -117,6 +118,10 @@ import {logger} from '../utils/logger';
return this._length;
}
get payload() {
return this._payload;
}
}
export default ID3;
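The ID3 parser now keeps the raw tag bytes (data.subarray(0, len)) and exposes them through the new payload getter; the AAC demuxer hunk above forwards them to the remuxer as a timed metadata sample. The object handed over looks roughly like:

    // shape of the id3 sample passed to remuxer.remux() above (illustrative)
    var id3Sample = {
        pts: pts,          // presentation time derived from the ID3 timestamp
        dts: pts,          // decode time mirrors pts for metadata
        unit: id3.payload  // raw ID3 tag bytes, a Uint8Array view on the elementary stream
    };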

View file

@@ -46,7 +46,7 @@ class Hls {
enableWorker: true,
enableSoftwareAES: true,
fragLoadingTimeOut: 20000,
fragLoadingMaxRetry: 1,
fragLoadingMaxRetry: 6,
fragLoadingRetryDelay: 1000,
fragLoadingLoopThreshold: 3,
manifestLoadingTimeOut: 10000,
@@ -54,7 +54,7 @@ class Hls {
manifestLoadingRetryDelay: 1000,
// fpsDroppedMonitoringPeriod: 5000,
// fpsDroppedMonitoringThreshold: 0.2,
appendErrorMaxRetry: 200,
appendErrorMaxRetry: 3,
loader: XhrLoader,
fLoader: undefined,
pLoader: undefined,
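The default changes follow from the controller changes above: fragment retries are now counted per fragment in MSEMediaController (fragLoadingMaxRetry 1 -> 6), and append retries no longer need to absorb QuotaExceededError loops (appendErrorMaxRetry 200 -> 3). Both values stay overridable through the config object given to the constructor, for example (values shown are only an illustration):

    var hls = new Hls({
        fragLoadingMaxRetry: 6,  // per-fragment retries before FRAG_LOAD_ERROR turns fatal
        appendErrorMaxRetry: 3   // SourceBuffer append retries before a fatal BUFFER_APPEND_ERROR
    });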

View file

@@ -27,9 +27,9 @@ class FragmentLoader {
this.frag.loaded = 0;
var config = this.hls.config;
frag.loader = this.loader = typeof(config.fLoader) !== 'undefined' ? new config.fLoader(config) : new config.loader(config);
this.loader.load(frag.url, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, config.fragLoadingMaxRetry, config.fragLoadingRetryDelay, this.loadprogress.bind(this), frag);
this.loader.load(frag.url, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, 1, config.fragLoadingRetryDelay, this.loadprogress.bind(this), frag);
}
loadsuccess(event, stats) {
var payload = event.currentTarget.response;
stats.length = payload.byteLength;
@@ -41,7 +41,7 @@ class FragmentLoader {
loaderror(event) {
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_ERROR, fatal: false, frag: this.frag, response: event});
}
}
loadtimeout() {
this.loader.abort();

View file

@@ -94,7 +94,7 @@ class PlaylistLoader {
if (avcdata.length > 2) {
result = avcdata.shift() + '.';
result += parseInt(avcdata.shift()).toString(16);
result += ('00' + parseInt(avcdata.shift()).toString(16)).substr(-4);
result += ('000' + parseInt(avcdata.shift()).toString(16)).substr(-4);
} else {
result = codec;
}
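This helper rewrites legacy CODECS values of the form avc1.<profile>.<level> (decimal) into hex avc1 strings; padding with '000' instead of '00' guarantees the last field always comes out as four hex digits. Worked by hand:

    // 'avc1.77.30': 77 -> '4d', 30 -> '1e', ('000' + '1e').substr(-4) -> '001e'  => 'avc1.4d001e'
    // 'avc1.66.10': 66 -> '42', 10 -> 'a'
    //   old padding: ('00'  + 'a').substr(-4) -> '00a'   => malformed 'avc1.4200a'
    //   new padding: ('000' + 'a').substr(-4) -> '000a'  => 'avc1.42000a'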

View file

@@ -152,6 +152,7 @@ class MP4 {
payload = Array.prototype.slice.call(arguments, 1),
size = 0,
i = payload.length,
len = i,
result,
view;
// calculate the total size we need to allocate
@@ -163,7 +164,7 @@ class MP4 {
view.setUint32(0, result.byteLength);
result.set(type, 4);
// copy the payload into the result
for (i = 0, size = 8; i < payload.length; i++) {
for (i = 0, size = 8; i < len; i++) {
result.set(payload[i], size);
size += payload[i].byteLength;
}
@@ -341,7 +342,9 @@ class MP4 {
0xE0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
].concat(sps).concat([
track.pps.length // numOfPictureParameterSets
]).concat(pps))); // "PPS"
]).concat(pps))), // "PPS"
width = track.width,
height = track.height;
//console.log('avcc:' + Hex.hexDump(avcc));
return MP4.box(MP4.types.avc1, new Uint8Array([
0x00, 0x00, 0x00, // reserved
@@ -352,10 +355,10 @@
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
(track.width >> 8) & 0xFF,
track.width & 0xff, // width
(track.height >> 8) & 0xFF,
track.height & 0xff, // height
(width >> 8) & 0xFF,
width & 0xff, // width
(height >> 8) & 0xFF,
height & 0xff, // height
0x00, 0x48, 0x00, 0x00, // horizresolution
0x00, 0x48, 0x00, 0x00, // vertresolution
0x00, 0x00, 0x00, 0x00, // reserved
@@ -380,17 +383,18 @@
}
static esds(track) {
var configlen = track.config.length;
return new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x03, // descriptor_type
0x17+track.config.length, // length
0x17+configlen, // length
0x00, 0x01, //es_id
0x00, // stream_priority
0x04, // descriptor_type
0x0f+track.config.length, // length
0x0f+configlen, // length
0x40, //codec : mpeg4_audio
0x15, // stream_type
0x00, 0x00, 0x00, // buffer_size
@@ -398,23 +402,24 @@
0x00, 0x00, 0x00, 0x00, // avgBitrate
0x05 // descriptor_type
].concat([track.config.length]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
}
static mp4a(track) {
return MP4.box(MP4.types.mp4a, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, track.channelCount, // channelcount
0x00, 0x10, // sampleSize:16bits
0x00, 0x00, 0x00, 0x00, // reserved2
(track.audiosamplerate >> 8) & 0xFF,
track.audiosamplerate & 0xff, //
0x00, 0x00]),
MP4.box(MP4.types.esds, MP4.esds(track)));
var audiosamplerate = track.audiosamplerate;
return MP4.box(MP4.types.mp4a, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, track.channelCount, // channelcount
0x00, 0x10, // sampleSize:16bits
0x00, 0x00, 0x00, 0x00, // reserved2
(audiosamplerate >> 8) & 0xFF,
audiosamplerate & 0xff, //
0x00, 0x00]),
MP4.box(MP4.types.esds, MP4.esds(track)));
}
static stsd(track) {
@@ -426,20 +431,24 @@
}
static tkhd(track) {
var id = track.id,
duration = track.duration,
width = track.width,
height = track.height;
return MP4.box(MP4.types.tkhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x07, // flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(track.id >> 24) & 0xFF,
(track.id >> 16) & 0xFF,
(track.id >> 8) & 0xFF,
track.id & 0xFF, // track_ID
(id >> 24) & 0xFF,
(id >> 16) & 0xFF,
(id >> 8) & 0xFF,
id & 0xFF, // track_ID
0x00, 0x00, 0x00, 0x00, // reserved
(track.duration >> 24),
(track.duration >> 16) & 0xFF,
(track.duration >> 8) & 0xFF,
track.duration & 0xFF, // duration
(duration >> 24),
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, // layer
@@ -455,25 +464,26 @@
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
(track.width >> 8) & 0xFF,
track.width & 0xFF,
(width >> 8) & 0xFF,
width & 0xFF,
0x00, 0x00, // width
(track.height >> 8) & 0xFF,
track.height & 0xFF,
(height >> 8) & 0xFF,
height & 0xFF,
0x00, 0x00 // height
]));
}
static traf(track,baseMediaDecodeTime) {
var sampleDependencyTable = MP4.sdtp(track);
var sampleDependencyTable = MP4.sdtp(track),
id = track.id;
return MP4.box(MP4.types.traf,
MP4.box(MP4.types.tfhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(track.id >> 24),
(track.id >> 16) & 0XFF,
(track.id >> 8) & 0XFF,
(track.id & 0xFF) // track_ID
(id >> 24),
(id >> 16) & 0XFF,
(id >> 8) & 0XFF,
(id & 0xFF) // track_ID
])),
MP4.box(MP4.types.tfdt, new Uint8Array([
0x00, // version 0
@ -505,13 +515,14 @@ class MP4 {
}
static trex(track) {
var id = track.id;
return MP4.box(MP4.types.trex, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(track.id >> 24),
(track.id >> 16) & 0XFF,
(track.id >> 8) & 0XFF,
(track.id & 0xFF), // track_ID
(id >> 24),
(id >> 16) & 0XFF,
(id >> 8) & 0XFF,
(id & 0xFF), // track_ID
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
0x00, 0x00, 0x00, 0x00, // default_sample_duration
0x00, 0x00, 0x00, 0x00, // default_sample_size
@ -520,44 +531,50 @@ class MP4 {
}
static trun(track, offset) {
var samples, sample, i, array;
samples = track.samples || [];
array = new Uint8Array(12 + (16 * samples.length));
offset += 8 + array.byteLength;
var samples= track.samples || [],
len = samples.length,
arraylen = 12 + (16 * len),
array = new Uint8Array(arraylen),
i,sample,duration,size,flags,cts;
offset += 8 + arraylen;
array.set([
0x00, // version 0
0x00, 0x0f, 0x01, // flags
(samples.length >>> 24) & 0xFF,
(samples.length >>> 16) & 0xFF,
(samples.length >>> 8) & 0xFF,
samples.length & 0xFF, // sample_count
(len >>> 24) & 0xFF,
(len >>> 16) & 0xFF,
(len >>> 8) & 0xFF,
len & 0xFF, // sample_count
(offset >>> 24) & 0xFF,
(offset >>> 16) & 0xFF,
(offset >>> 8) & 0xFF,
offset & 0xFF // data_offset
],0);
for (i = 0; i < samples.length; i++) {
for (i = 0; i < len; i++) {
sample = samples[i];
duration = sample.duration;
size = sample.size;
flags = sample.flags;
cts = sample.cts;
array.set([
(sample.duration >>> 24) & 0xFF,
(sample.duration >>> 16) & 0xFF,
(sample.duration >>> 8) & 0xFF,
sample.duration & 0xFF, // sample_duration
(sample.size >>> 24) & 0xFF,
(sample.size >>> 16) & 0xFF,
(sample.size >>> 8) & 0xFF,
sample.size & 0xFF, // sample_size
(sample.flags.isLeading << 2) | sample.flags.dependsOn,
(sample.flags.isDependedOn << 6) |
(sample.flags.hasRedundancy << 4) |
(sample.flags.paddingValue << 1) |
sample.flags.isNonSync,
sample.flags.degradPrio & 0xF0 << 8,
sample.flags.degradPrio & 0x0F, // sample_flags
(sample.cts >>> 24) & 0xFF,
(sample.cts >>> 16) & 0xFF,
(sample.cts >>> 8) & 0xFF,
sample.cts & 0xFF // sample_composition_time_offset
(duration >>> 24) & 0xFF,
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
duration & 0xFF, // sample_duration
(size >>> 24) & 0xFF,
(size >>> 16) & 0xFF,
(size >>> 8) & 0xFF,
size & 0xFF, // sample_size
(flags.isLeading << 2) | sample.flags.dependsOn,
(flags.isDependedOn << 6) |
(flags.hasRedundancy << 4) |
(flags.paddingValue << 1) |
flags.isNonSync,
flags.degradPrio & 0xF0 << 8,
flags.degradPrio & 0x0F, // sample_flags
(cts >>> 24) & 0xFF,
(cts >>> 16) & 0xFF,
(cts >>> 8) & 0xFF,
cts & 0xFF // sample_composition_time_offset
],12+16*i);
}
return MP4.box(MP4.types.trun, array);
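The remaining mp4-generator edits are behavior-preserving refactors: repeated property lookups (track.id, track.width, track.duration, sample.duration, ...) are hoisted into locals before the byte-packing code, and box() caches payload.length in len so the copy loop no longer re-reads the length on every iteration. Reduced to its essence (illustrative only):

    // before: the property is read once per emitted byte
    //   (track.id >> 24), (track.id >> 16) & 0xFF, (track.id >> 8) & 0xFF, track.id & 0xFF
    // after: read once, then pack
    var id = track.id;
    var trackIdBytes = [(id >> 24) & 0xFF, (id >> 16) & 0xFF, (id >> 8) & 0xFF, id & 0xFF];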