
get api libs from bower

Luke Pulverenti 2015-12-16 00:30:14 -05:00
parent def418714f
commit f36e664503
97 changed files with 16860 additions and 197 deletions


@@ -0,0 +1,65 @@
/**
* dummy remuxer
*/
class DummyRemuxer {
constructor(observer) {
this.PES_TIMESCALE = 90000;
this.observer = observer;
}
get timescale() {
return this.PES_TIMESCALE;
}
destroy() {
}
insertDiscontinuity() {
}
remux(audioTrack,videoTrack,id3Track,timeOffset) {
this._remuxAACSamples(audioTrack,timeOffset);
this._remuxAVCSamples(videoTrack,timeOffset);
this._remuxID3Samples(id3Track,timeOffset);
}
_remuxAVCSamples(track, timeOffset) {
var avcSample, unit;
// loop through track.samples
while (track.samples.length) {
avcSample = track.samples.shift();
// loop through AVC sample NALUs
while (avcSample.units.units.length) {
unit = avcSample.units.units.shift();
}
}
// reference timeOffset so the linter does not flag the unused parameter
timeOffset = timeOffset;
}
_remuxAACSamples(track,timeOffset) {
var aacSample,unit;
// loop through track.samples
while (track.samples.length) {
aacSample = track.samples.shift();
unit = aacSample.unit;
}
// reference timeOffset so the linter does not flag the unused parameter
timeOffset = timeOffset;
}
_remuxID3Samples(track,timeOffset) {
var id3Sample,unit;
// loop through track.samples
while (track.samples.length) {
id3Sample = track.samples.shift();
unit = id3Sample.unit;
}
// reference timeOffset so the linter does not flag the unused parameter
timeOffset = timeOffset;
}
}
export default DummyRemuxer;
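
A minimal driving sketch (illustrative, not part of the commit; the import path, the stub observer and the empty track shapes are assumptions inferred from how this class consumes them): a remuxer is constructed with an observer and fed demuxed track objects.

import DummyRemuxer from './dummy-remuxer';

// stub observer: the real one is an event emitter shared with the demuxer
var observer = { trigger: function(event, data) { console.log(event, data); } };
var remuxer = new DummyRemuxer(observer);

// empty demuxed tracks, shaped like the ones the remux methods drain
var audioTrack = { samples: [] };
var videoTrack = { samples: [] };
var id3Track = { samples: [] };
remuxer.remux(audioTrack, videoTrack, id3Track, 0);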


@@ -0,0 +1,578 @@
/**
* Generate MP4 Box
*/
//import Hex from '../utils/hex';
class MP4 {
static init() {
MP4.types = {
avc1: [], // codingname
avcC: [],
btrt: [],
dinf: [],
dref: [],
esds: [],
ftyp: [],
hdlr: [],
mdat: [],
mdhd: [],
mdia: [],
mfhd: [],
minf: [],
moof: [],
moov: [],
mp4a: [],
mvex: [],
mvhd: [],
sdtp: [],
stbl: [],
stco: [],
stsc: [],
stsd: [],
stsz: [],
stts: [],
tfdt: [],
tfhd: [],
traf: [],
trak: [],
trun: [],
trex: [],
tkhd: [],
vmhd: [],
smhd: []
};
var i;
for (i in MP4.types) {
if (MP4.types.hasOwnProperty(i)) {
MP4.types[i] = [
i.charCodeAt(0),
i.charCodeAt(1),
i.charCodeAt(2),
i.charCodeAt(3)
];
}
}
MP4.MAJOR_BRAND = new Uint8Array([
'i'.charCodeAt(0),
's'.charCodeAt(0),
'o'.charCodeAt(0),
'm'.charCodeAt(0)
]);
MP4.AVC1_BRAND = new Uint8Array([
'a'.charCodeAt(0),
'v'.charCodeAt(0),
'c'.charCodeAt(0),
'1'.charCodeAt(0)
]);
MP4.MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
MP4.VIDEO_HDLR = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x56, 0x69, 0x64, 0x65,
0x6f, 0x48, 0x61, 0x6e,
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
]);
MP4.AUDIO_HDLR = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x53, 0x6f, 0x75, 0x6e,
0x64, 0x48, 0x61, 0x6e,
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
]);
MP4.HDLR_TYPES = {
'video': MP4.VIDEO_HDLR,
'audio': MP4.AUDIO_HDLR
};
MP4.DREF = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01, // entry_count
0x00, 0x00, 0x00, 0x0c, // entry_size
0x75, 0x72, 0x6c, 0x20, // 'url' type
0x00, // version 0
0x00, 0x00, 0x01 // entry_flags
]);
MP4.STCO = new Uint8Array([
0x00, // version
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00 // entry_count
]);
MP4.STSC = MP4.STCO;
MP4.STTS = MP4.STCO;
MP4.STSZ = new Uint8Array([
0x00, // version
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // sample_size
0x00, 0x00, 0x00, 0x00, // sample_count
]);
MP4.VMHD = new Uint8Array([
0x00, // version
0x00, 0x00, 0x01, // flags
0x00, 0x00, // graphicsmode
0x00, 0x00,
0x00, 0x00,
0x00, 0x00 // opcolor
]);
MP4.SMHD = new Uint8Array([
0x00, // version
0x00, 0x00, 0x00, // flags
0x00, 0x00, // balance
0x00, 0x00 // reserved
]);
MP4.STSD = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01 // entry_count
]);
MP4.FTYP = MP4.box(MP4.types.ftyp, MP4.MAJOR_BRAND, MP4.MINOR_VERSION, MP4.MAJOR_BRAND, MP4.AVC1_BRAND);
MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, MP4.DREF));
}
static box(type) {
var
payload = Array.prototype.slice.call(arguments, 1),
size = 0,
i = payload.length,
result,
view;
// calculate the total size we need to allocate
while (i--) {
size += payload[i].byteLength;
}
result = new Uint8Array(size + 8);
view = new DataView(result.buffer);
view.setUint32(0, result.byteLength);
result.set(type, 4);
// copy the payload into the result
for (i = 0, size = 8; i < payload.length; i++) {
result.set(payload[i], size);
size += payload[i].byteLength;
}
return result;
}
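// Worked example (illustrative): a box is 4 bytes of big-endian size, 4 bytes
// of type, then the payload, so box(MP4.types.stco, MP4.STCO) with its 8-byte
// payload yields 16 bytes: 00 00 00 10 's' 't' 'c' 'o' plus the STCO bytes.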
static hdlr(type) {
return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
}
static mdat(data) {
return MP4.box(MP4.types.mdat, data);
}
static mdhd(timescale, duration) {
return MP4.box(MP4.types.mdhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x02, // creation_time
0x00, 0x00, 0x00, 0x03, // modification_time
(timescale >> 24) & 0xFF,
(timescale >> 16) & 0xFF,
(timescale >> 8) & 0xFF,
timescale & 0xFF, // timescale
(duration >> 24),
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x55, 0xc4, // 'und' language (undetermined)
0x00, 0x00
]));
}
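// Note on mdhd above: 0x55, 0xc4 packs the ISO-639-2 code 'und' into three
// 5-bit letters (each char code minus 0x60): 0 10101 01110 00100 === 0x55c4.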
static mdia(track) {
return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
}
static mfhd(sequenceNumber) {
return MP4.box(MP4.types.mfhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(sequenceNumber >> 24),
(sequenceNumber >> 16) & 0xFF,
(sequenceNumber >> 8) & 0xFF,
sequenceNumber & 0xFF, // sequence_number
]));
}
static minf(track) {
if (track.type === 'audio') {
return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track));
} else {
return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
}
}
static moof(sn, baseMediaDecodeTime, track) {
return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track,baseMediaDecodeTime));
}
/**
* @param tracks {array} the tracks associated with this movie
*/
static moov(tracks) {
var
i = tracks.length,
boxes = [];
while (i--) {
boxes[i] = MP4.trak(tracks[i]);
}
return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));
}
static mvex(tracks) {
var
i = tracks.length,
boxes = [];
while (i--) {
boxes[i] = MP4.trex(tracks[i]);
}
return MP4.box.apply(null, [MP4.types.mvex].concat(boxes));
}
static mvhd(timescale,duration) {
var
bytes = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01, // creation_time
0x00, 0x00, 0x00, 0x02, // modification_time
(timescale >> 24) & 0xFF,
(timescale >> 16) & 0xFF,
(timescale >> 8) & 0xFF,
timescale & 0xFF, // timescale
(duration >> 24) & 0xFF,
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x00, 0x01, 0x00, 0x00, // 1.0 rate
0x01, 0x00, // 1.0 volume
0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
0xff, 0xff, 0xff, 0xff // next_track_ID
]);
return MP4.box(MP4.types.mvhd, bytes);
}
static sdtp(track) {
var
samples = track.samples || [],
bytes = new Uint8Array(4 + samples.length),
flags,
i;
// leave the full box header (4 bytes) all zero
// write the sample table
for (i = 0; i < samples.length; i++) {
flags = samples[i].flags;
bytes[i + 4] = (flags.dependsOn << 4) |
(flags.isDependedOn << 2) |
(flags.hasRedundancy);
}
return MP4.box(MP4.types.sdtp, bytes);
}
static stbl(track) {
return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
}
static avc1(track) {
var sps = [], pps = [], i, data, len;
// assemble the SPSs
for (i = 0; i < track.sps.length; i++) {
data = track.sps[i];
len = data.byteLength;
sps.push((len >>> 8) & 0xFF);
sps.push((len & 0xFF));
sps = sps.concat(Array.prototype.slice.call(data)); // SPS
}
// assemble the PPSs
for (i = 0; i < track.pps.length; i++) {
data = track.pps[i];
len = data.byteLength;
pps.push((len >>> 8) & 0xFF);
pps.push((len & 0xFF));
pps = pps.concat(Array.prototype.slice.call(data));
}
var avcc = MP4.box(MP4.types.avcC, new Uint8Array([
0x01, // version
sps[3], // profile
sps[4], // profile compat
sps[5], // level
0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes
0xE0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
].concat(sps).concat([
track.pps.length // numOfPictureParameterSets
]).concat(pps))); // "PPS"
//console.log('avcc:' + Hex.hexDump(avcc));
return MP4.box(MP4.types.avc1, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, // pre_defined
0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
(track.width >> 8) & 0xFF,
track.width & 0xff, // width
(track.height >> 8) & 0xFF,
track.height & 0xff, // height
0x00, 0x48, 0x00, 0x00, // horizresolution
0x00, 0x48, 0x00, 0x00, // vertresolution
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, // frame_count
0x13, // compressorname string length (19)
0x76, 0x69, 0x64, 0x65,
0x6f, 0x6a, 0x73, 0x2d,
0x63, 0x6f, 0x6e, 0x74,
0x72, 0x69, 0x62, 0x2d,
0x68, 0x6c, 0x73, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, // compressorname
0x00, 0x18, // depth = 24
0x11, 0x11]), // pre_defined = -1
avcc,
MP4.box(MP4.types.btrt, new Uint8Array([
0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
0x00, 0x2d, 0xc6, 0xc0])) // avgBitrate
);
}
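// Clarifying note: each entry pushed into sps starts with a 2-byte length
// prefix, so sps[2] is the SPS NAL header byte and sps[3]..sps[5] are
// profile_idc, the constraint flags and level_idc straight from the
// bitstream, which is why avcC reuses them verbatim.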
static esds(track) {
return new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x03, // descriptor_type
0x17+track.config.length, // length
0x00, 0x01, //es_id
0x00, // stream_priority
0x04, // descriptor_type
0x0f+track.config.length, // length
0x40, //codec : mpeg4_audio
0x15, // stream_type
0x00, 0x00, 0x00, // buffer_size
0x00, 0x00, 0x00, 0x00, // maxBitrate
0x00, 0x00, 0x00, 0x00, // avgBitrate
0x05 // descriptor_type
].concat([track.config.length]).concat(track.config).concat([0x06, 0x01, 0x02])); // DecoderSpecificInfo length + AudioSpecificConfig bytes, then SLConfigDescriptor
}
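// Worked example (illustrative): for AAC-LC, 44.1 kHz, stereo, track.config
// is the 2-byte AudioSpecificConfig [0x12, 0x10]:
// 00010 (object type 2 = LC) | 0100 (freq index 4 = 44100) | 0010 (2 ch) | 000.
// The trailing [0x06, 0x01, 0x02] is the SLConfigDescriptor (tag 6, length 1,
// predefined 2).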
static mp4a(track) {
return MP4.box(MP4.types.mp4a, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, track.channelCount, // channelcount
0x00, 0x10, // sampleSize:16bits
0x00, 0x00, 0x00, 0x00, // reserved2
(track.audiosamplerate >> 8) & 0xFF,
track.audiosamplerate & 0xff, // samplerate: 16.16 fixed point, integer part
0x00, 0x00]), // samplerate: fractional part
MP4.box(MP4.types.esds, MP4.esds(track)));
}
static stsd(track) {
if (track.type === 'audio') {
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
} else {
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
}
}
static tkhd(track) {
return MP4.box(MP4.types.tkhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x07, // flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(track.id >> 24) & 0xFF,
(track.id >> 16) & 0xFF,
(track.id >> 8) & 0xFF,
track.id & 0xFF, // track_ID
0x00, 0x00, 0x00, 0x00, // reserved
(track.duration >> 24),
(track.duration >> 16) & 0xFF,
(track.duration >> 8) & 0xFF,
track.duration & 0xFF, // duration
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, // layer
0x00, 0x00, // alternate_group
0x00, 0x00, // non-audio track volume
0x00, 0x00, // reserved
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
(track.width >> 8) & 0xFF,
track.width & 0xFF,
0x00, 0x00, // width
(track.height >> 8) & 0xFF,
track.height & 0xFF,
0x00, 0x00 // height
]));
}
static traf(track,baseMediaDecodeTime) {
var sampleDependencyTable = MP4.sdtp(track);
return MP4.box(MP4.types.traf,
MP4.box(MP4.types.tfhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(track.id >> 24),
(track.id >> 16) & 0xFF,
(track.id >> 8) & 0xFF,
(track.id & 0xFF) // track_ID
])),
MP4.box(MP4.types.tfdt, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(baseMediaDecodeTime >> 24),
(baseMediaDecodeTime >> 16) & 0xFF,
(baseMediaDecodeTime >> 8) & 0xFF,
(baseMediaDecodeTime & 0xFF) // baseMediaDecodeTime
])),
MP4.trun(track,
sampleDependencyTable.length +
16 + // tfhd
16 + // tfdt
8 + // traf header
16 + // mfhd
8 + // moof header
8), // mdat header
sampleDependencyTable);
}
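// The constant added to sampleDependencyTable.length above is the byte count
// from the start of moof to the start of the mdat payload; trun() then adds
// its own box size, so the resulting data_offset points at the first byte of
// sample data inside the mdat that follows.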
/**
* Generate a track box.
* @param track {object} a track definition
* @return {Uint8Array} the track box
*/
static trak(track) {
track.duration = track.duration || 0xffffffff;
return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
}
static trex(track) {
return MP4.box(MP4.types.trex, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(track.id >> 24),
(track.id >> 16) & 0xFF,
(track.id >> 8) & 0xFF,
(track.id & 0xFF), // track_ID
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
0x00, 0x00, 0x00, 0x00, // default_sample_duration
0x00, 0x00, 0x00, 0x00, // default_sample_size
0x00, 0x01, 0x00, 0x01 // default_sample_flags
]));
}
static trun(track, offset) {
var samples, sample, i, array;
samples = track.samples || [];
array = new Uint8Array(12 + (16 * samples.length));
offset += 8 + array.byteLength;
array.set([
0x00, // version 0
0x00, 0x0f, 0x01, // flags
(samples.length >>> 24) & 0xFF,
(samples.length >>> 16) & 0xFF,
(samples.length >>> 8) & 0xFF,
samples.length & 0xFF, // sample_count
(offset >>> 24) & 0xFF,
(offset >>> 16) & 0xFF,
(offset >>> 8) & 0xFF,
offset & 0xFF // data_offset
],0);
for (i = 0; i < samples.length; i++) {
sample = samples[i];
array.set([
(sample.duration >>> 24) & 0xFF,
(sample.duration >>> 16) & 0xFF,
(sample.duration >>> 8) & 0xFF,
sample.duration & 0xFF, // sample_duration
(sample.size >>> 24) & 0xFF,
(sample.size >>> 16) & 0xFF,
(sample.size >>> 8) & 0xFF,
sample.size & 0xFF, // sample_size
(sample.flags.isLeading << 2) | sample.flags.dependsOn,
(sample.flags.isDependedOn << 6) |
(sample.flags.hasRedundancy << 4) |
(sample.flags.paddingValue << 1) |
sample.flags.isNonSync,
(sample.flags.degradPrio >>> 8) & 0xFF, // degradation_priority (high byte)
sample.flags.degradPrio & 0xFF, // degradation_priority (low byte)
(sample.cts >>> 24) & 0xFF,
(sample.cts >>> 16) & 0xFF,
(sample.cts >>> 8) & 0xFF,
sample.cts & 0xFF // sample_composition_time_offset
],12+16*i);
}
return MP4.box(MP4.types.trun, array);
}
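// The trun flags 0x000f01 written above declare data-offset-present
// (0x000001) plus per-sample duration (0x000100), size (0x000200), flags
// (0x000400) and composition time offset (0x000800), matching the 16 bytes
// emitted per sample in the loop.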
static initSegment(tracks) {
if (!MP4.types) {
MP4.init();
}
var movie = MP4.moov(tracks), result;
result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
result.set(MP4.FTYP);
result.set(movie, MP4.FTYP.byteLength);
return result;
}
}
export default MP4;
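
A minimal sketch of how these boxes reach playback (illustrative: the track field values and codec string are assumptions, not values produced by this file). The init segment is appended to a Media Source Extensions SourceBuffer first; moof/mdat fragments from the remuxer follow.

import MP4 from './mp4-generator';

var videoTrack = {
id: 1,
type: 'video',
codec: 'avc1.42e01e',
timescale: 22500, // 90000 / 4, as set up by the remuxer
duration: 22500 * 10,
width: 1280,
height: 720,
sps: [/* SPS NAL units as Uint8Arrays */],
pps: [/* PPS NAL units as Uint8Arrays */]
};
var video = document.createElement('video');
var mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function() {
var sb = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.42e01e"');
sb.appendBuffer(MP4.initSegment([videoTrack]));
// moof + mdat pairs produced by the remuxer are appended here afterwards
});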


@@ -0,0 +1,379 @@
/**
* fMP4 remuxer
*/
import Event from '../events';
import {logger} from '../utils/logger';
import MP4 from '../remux/mp4-generator';
import {ErrorTypes, ErrorDetails} from '../errors';
class MP4Remuxer {
constructor(observer) {
this.observer = observer;
this.ISGenerated = false;
this.PES2MP4SCALEFACTOR = 4;
this.PES_TIMESCALE = 90000;
this.MP4_TIMESCALE = this.PES_TIMESCALE / this.PES2MP4SCALEFACTOR;
}
get timescale() {
return this.MP4_TIMESCALE;
}
destroy() {
}
insertDiscontinuity() {
this._initPTS = this._initDTS = this.nextAacPts = this.nextAvcDts = undefined;
}
switchLevel() {
this.ISGenerated = false;
}
remux(audioTrack,videoTrack,id3Track,timeOffset, contiguous) {
// generate Init Segment if needed
if (!this.ISGenerated) {
this.generateIS(audioTrack,videoTrack,timeOffset);
}
//logger.log('nb AVC samples:' + videoTrack.samples.length);
if (videoTrack.samples.length) {
this.remuxVideo(videoTrack,timeOffset,contiguous);
}
//logger.log('nb AAC samples:' + audioTrack.samples.length);
if (audioTrack.samples.length) {
this.remuxAudio(audioTrack,timeOffset,contiguous);
}
//logger.log('nb ID3 samples:' + id3Track.samples.length);
if (id3Track.samples.length) {
this.remuxID3(id3Track,timeOffset);
}
//notify end of parsing
this.observer.trigger(Event.FRAG_PARSED);
}
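// Event flow (summary): remux() may emit FRAG_PARSING_INIT_SEGMENT (from
// generateIS), one FRAG_PARSING_DATA per video/audio fragment and
// FRAG_PARSING_METADATA for ID3 samples, and always finishes with
// FRAG_PARSED; generateIS emits ERROR when no samples are found.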
generateIS(audioTrack,videoTrack,timeOffset) {
var observer = this.observer,
audioSamples = audioTrack.samples,
videoSamples = videoTrack.samples,
nbAudio = audioSamples.length,
nbVideo = videoSamples.length,
pesTimeScale = this.PES_TIMESCALE;
if(nbAudio === 0 && nbVideo === 0) {
observer.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'no audio/video samples found'});
} else if (nbVideo === 0) {
//audio only
if (audioTrack.config) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
audioMoov: MP4.initSegment([audioTrack]),
audioCodec : audioTrack.codec,
audioChannelCount : audioTrack.channelCount
});
this.ISGenerated = true;
}
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = audioSamples[0].pts - pesTimeScale * timeOffset;
this._initDTS = audioSamples[0].dts - pesTimeScale * timeOffset;
}
} else if (nbAudio === 0) {
//video only
if (videoTrack.sps && videoTrack.pps) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
videoMoov: MP4.initSegment([videoTrack]),
videoCodec: videoTrack.codec,
videoWidth: videoTrack.width,
videoHeight: videoTrack.height
});
this.ISGenerated = true;
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = videoSamples[0].pts - pesTimeScale * timeOffset;
this._initDTS = videoSamples[0].dts - pesTimeScale * timeOffset;
}
}
} else {
//audio and video
if (audioTrack.config && videoTrack.sps && videoTrack.pps) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
audioMoov: MP4.initSegment([audioTrack]),
audioCodec: audioTrack.codec,
audioChannelCount: audioTrack.channelCount,
videoMoov: MP4.initSegment([videoTrack]),
videoCodec: videoTrack.codec,
videoWidth: videoTrack.width,
videoHeight: videoTrack.height
});
this.ISGenerated = true;
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = Math.min(videoSamples[0].pts, audioSamples[0].pts) - pesTimeScale * timeOffset;
this._initDTS = Math.min(videoSamples[0].dts, audioSamples[0].dts) - pesTimeScale * timeOffset;
}
}
}
}
remuxVideo(track, timeOffset, contiguous) {
var view,
i = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
avcSample,
mp4Sample,
mp4SampleLength,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [];
/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
while (track.samples.length) {
avcSample = track.samples.shift();
mp4SampleLength = 0;
// convert NALU bitstream to MP4 format (prepend NALU with size field)
while (avcSample.units.units.length) {
unit = avcSample.units.units.shift();
view.setUint32(i, unit.data.byteLength);
i += 4;
mdat.set(unit.data, i);
i += unit.data.byteLength;
mp4SampleLength += 4 + unit.data.byteLength;
}
pts = avcSample.pts - this._initDTS;
dts = avcSample.dts - this._initDTS;
//logger.log('Video/PTS/DTS:' + pts + '/' + dts);
// if not first AVC sample of video track, normalize PTS/DTS with previous sample value
// and ensure that sample duration is positive
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) {
//logger.log('invalid sample duration at PTS/DTS::' + avcSample.pts + '/' + avcSample.dts + ':' + mp4Sample.duration);
mp4Sample.duration = 0;
}
} else {
var nextAvcDts = this.nextAvcDts,delta;
// first AVC sample of video track, normalize PTS/DTS
ptsnorm = this._PTSNormalize(pts, nextAvcDts);
dtsnorm = this._PTSNormalize(dts, nextAvcDts);
delta = Math.round((dtsnorm - nextAvcDts) / 90);
// if fragments are contiguous, or delta is less than 600 ms, ensure there is no overlap/hole between fragments
if (contiguous || Math.abs(delta) < 600) {
if (delta) {
if (delta > 1) {
logger.log(`AVC: ${delta} ms hole between fragments detected, filling it`);
} else if (delta < -1) {
logger.log(`AVC: ${(-delta)} ms overlap between fragments detected`);
}
// set DTS to next DTS
dtsnorm = nextAvcDts;
// offset PTS as well, ensure that PTS is smaller than or equal to the new DTS
ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
logger.log('Video/PTS/DTS adjusted:' + ptsnorm + '/' + dtsnorm);
}
}
// remember first PTS of our avcSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
}
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)}');
mp4Sample = {
size: mp4SampleLength,
duration: 0,
cts: (ptsnorm - dtsnorm) / pes2mp4ScaleFactor,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0
}
};
if (avcSample.key === true) {
// the current sample is a key frame
mp4Sample.flags.dependsOn = 2;
mp4Sample.flags.isNonSync = 0;
} else {
mp4Sample.flags.dependsOn = 1;
mp4Sample.flags.isNonSync = 1;
}
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration;
}
// next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + mp4Sample.duration * pes2mp4ScaleFactor;
track.len = 0;
track.nbNalu = 0;
if(navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412
samples[0].flags.dependsOn = 2;
samples[0].flags.isNonSync = 0;
}
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
type: 'video',
nb: samples.length
});
}
remuxAudio(track,timeOffset, contiguous) {
var view,
i = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
aacSample, mp4Sample,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [];
/* concatenate the audio data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
while (track.samples.length) {
aacSample = track.samples.shift();
unit = aacSample.unit;
mdat.set(unit, i);
i += unit.byteLength;
pts = aacSample.pts - this._initDTS;
dts = aacSample.dts - this._initDTS;
//logger.log('Audio/PTS:' + aacSample.pts.toFixed(0));
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
// we use DTS to compute sample duration, but we use PTS to compute initPTS which is used to sync audio and video
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) {
logger.log(`invalid AAC sample duration at PTS:${aacSample.pts}:${mp4Sample.duration}`);
mp4Sample.duration = 0;
}
} else {
var nextAacPts = this.nextAacPts,delta;
ptsnorm = this._PTSNormalize(pts, nextAacPts);
dtsnorm = this._PTSNormalize(dts, nextAacPts);
delta = Math.round(1000 * (ptsnorm - nextAacPts) / pesTimeScale);
// if fragments are contiguous, or delta is less than 600 ms, ensure there is no overlap/hole between fragments
if (contiguous || Math.abs(delta) < 600) {
// log delta
if (delta) {
if (delta > 1) {
logger.log(`${delta} ms hole between AAC samples detected, filling it`);
// set PTS to next PTS, and ensure PTS is greater than or equal to the last DTS
} else if (delta < -1) {
logger.log(`${(-delta)} ms overlap between AAC samples detected`);
}
// set DTS to next DTS
ptsnorm = dtsnorm = nextAacPts;
}
}
// remember first PTS of our aacSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
}
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${aacSample.pts}/${aacSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(aacSample.pts/4294967296).toFixed(3)}');
mp4Sample = {
size: unit.byteLength,
cts: 0,
duration:0,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0,
dependsOn: 1,
}
};
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
//set last sample duration as being identical to previous sample
if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration;
}
// next AAC sample PTS should be equal to last sample PTS + duration
this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration;
//logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacPts.toFixed(0));
track.len = 0;
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: this.nextAacPts / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
type: 'audio',
nb: samples.length
});
}
remuxID3(track,timeOffset) {
var length = track.samples.length, sample;
// consume samples
if(length) {
for(var index = 0; index < length; index++) {
sample = track.samples[index];
// setting id3 pts, dts to relative time
// using this._initPTS and this._initDTS to calculate relative time
sample.pts = ((sample.pts - this._initPTS) / this.PES_TIMESCALE);
sample.dts = ((sample.dts - this._initDTS) / this.PES_TIMESCALE);
}
this.observer.trigger(Event.FRAG_PARSING_METADATA, {
samples:track.samples
});
}
track.samples = [];
// reference timeOffset so the linter does not flag the unused parameter
timeOffset = timeOffset;
}
_PTSNormalize(value, reference) {
var offset;
if (reference === undefined) {
return value;
}
if (reference < value) {
// - 2^33
offset = -8589934592;
} else {
// + 2^33
offset = 8589934592;
}
/* PTS is 33 bit (from 0 to 2^33 - 1);
if the diff between value and reference is bigger than half of the amplitude (2^32),
PTS wrapping occurred: fill the gap */
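// Worked example (illustrative): reference = 8589934000 (just below 2^33)
// and value = 1000 right after a wrap; the diff exceeds 2^32, so 2^33 is
// added once and value becomes 8589935592, keeping timestamps monotonic.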
while (Math.abs(value - reference) > 4294967296) {
value += offset;
}
return value;
}
}
export default MP4Remuxer;
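
A wiring sketch (illustrative: the tiny event emitter below is an assumption standing in for the observer this class expects, and the import paths are assumed). The caller subscribes to the events listed above and forwards each moof/mdat pair to a SourceBuffer.

import Event from '../events';
import MP4Remuxer from './mp4-remuxer';

var observer = {
handlers: {},
on: function(event, handler) { this.handlers[event] = handler; },
trigger: function(event, data) {
if (this.handlers[event]) { this.handlers[event](data); }
}
};
observer.on(Event.FRAG_PARSING_DATA, function(data) {
// data.moof and data.mdat are Uint8Arrays ready for SourceBuffer.appendBuffer
console.log(data.type, data.startPTS.toFixed(3), data.endPTS.toFixed(3), data.nb);
});
var remuxer = new MP4Remuxer(observer);
// remuxer.remux(audioTrack, videoTrack, id3Track, timeOffset, contiguous);
// with tracks produced by the TS demuxer sharing this observer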