add tv genre configuration options

Luke Pulverenti 2016-06-03 15:32:10 -04:00
parent 81ab24a44c
commit f13258a120
24 changed files with 261 additions and 143 deletions

View file

@@ -764,7 +764,7 @@ class StreamController extends EventHandler {
var currentLevel = this.levels[this.level],
details = currentLevel.details,
duration = details.totalduration,
-start = fragCurrent.start,
+start = fragCurrent.startDTS !== undefined ? fragCurrent.startDTS : fragCurrent.start,
level = fragCurrent.level,
sn = fragCurrent.sn,
audioCodec = currentLevel.audioCodec || this.config.defaultAudioCodec;
@@ -880,7 +880,7 @@ class StreamController extends EventHandler {
logger.log(`parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb}`);
-var drift = LevelHelper.updateFragPTS(level.details,frag.sn,data.startPTS,data.endPTS),
+var drift = LevelHelper.updateFragPTSDTS(level.details,frag.sn,data.startPTS,data.endPTS,data.startDTS,data.endDTS),
hls = this.hls;
hls.trigger(Event.LEVEL_PTS_UPDATED, {details: level.details, level: this.level, drift: drift});
@@ -1024,7 +1024,7 @@ _checkBuffer() {
logger.log(`target seek position:${targetSeekPosition}`);
}
var bufferInfo = BufferHelper.bufferInfo(media,currentTime,0),
-expectedPlaying = !(media.paused || media.ended || media.seeking || readyState < 2),
+expectedPlaying = !(media.paused || media.ended || media.seeking || media.buffered.length === 0),
jumpThreshold = 0.4, // tolerance needed as some browsers stalls playback before reaching buffered range end
playheadMoving = currentTime > media.playbackRate*this.lastCurrentTime;
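
The _checkBuffer change above swaps the readyState test for an explicit media.buffered.length === 0 check when deciding whether playback should be progressing, and pairs it with the playhead-movement test. A minimal sketch of that stall test as a standalone helper (the media element and lastCurrentTime bookkeeping are assumptions standing in for the controller's own state, not hls.js API):

// Hypothetical helper mirroring the expectedPlaying/playheadMoving pair above.
function looksStalled(media, lastCurrentTime) {
  // playback is expected only when the element is not paused/ended/seeking
  // and at least one range is actually buffered
  const expectedPlaying = !(media.paused || media.ended || media.seeking ||
                            media.buffered.length === 0);
  // the playhead should have advanced since the last tick (same comparison as the diff)
  const playheadMoving = media.currentTime > media.playbackRate * lastCurrentTime;
  return expectedPlaying && !playheadMoving;
}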

View file

@@ -37,14 +37,14 @@
switchLevel() {
this.pmtParsed = false;
this._pmtId = -1;
-this.lastAacPTS = null;
-this.aacOverFlow = null;
this._avcTrack = {container : 'video/mp2t', type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0};
this._aacTrack = {container : 'video/mp2t', type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._id3Track = {type: 'id3', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._txtTrack = {type: 'text', id: -1, sequenceNumber: 0, samples: [], len: 0};
+// flush any partial content
+this.aacOverFlow = null;
+this.aacLastPTS = null;
+this.avcNaluState = 0;
this.remuxer.switchLevel();
}
@@ -344,6 +344,23 @@
pes.data = null;
var debugString = '';
+var pushAccesUnit = function() {
+if (units2.length) {
+// only push AVC sample if keyframe already found in this fragment OR
+// keyframe found in last fragment (track.sps) AND
+// samples already appended (we already found a keyframe in this fragment) OR fragment is contiguous
+if (key === true ||
+(track.sps && (samples.length || this.contiguous))) {
+avcSample = {units: { units : units2, length : length}, pts: pes.pts, dts: pes.dts, key: key};
+samples.push(avcSample);
+track.len += length;
+track.nbNalu += units2.length;
+}
+units2 = [];
+length = 0;
+}
+};
units.forEach(unit => {
switch(unit.type) {
//NDR
@@ -463,6 +480,7 @@
if(debug) {
debugString += 'AUD ';
}
+pushAccesUnit();
break;
default:
push = false;
@@ -477,25 +495,12 @@
if(debug || debugString.length) {
logger.log(debugString);
}
-//build sample from PES
-// Annex B to MP4 conversion to be done
-if (units2.length) {
-// only push AVC sample if keyframe already found in this fragment OR
-// keyframe found in last fragment (track.sps) AND
-// samples already appended (we already found a keyframe in this fragment) OR fragment is contiguous
-if (key === true ||
-(track.sps && (samples.length || this.contiguous))) {
-avcSample = {units: { units : units2, length : length}, pts: pes.pts, dts: pes.dts, key: key};
-samples.push(avcSample);
-track.len += length;
-track.nbNalu += units2.length;
-}
-}
+pushAccesUnit();
}
_parseAVCNALu(array) {
-var i = 0, len = array.byteLength, value, overflow, state = 0;
+var i = 0, len = array.byteLength, value, overflow, state = this.avcNaluState;
var units = [], unit, unitType, lastUnitStart, lastUnitType;
//logger.log('PES:' + Hex.hexDump(array));
while (i < len) {
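
The hunk above replaces the inline "build sample from PES" block with a call to the pushAccesUnit closure introduced earlier, so an AVC sample can be flushed both when an access unit delimiter (AUD, NAL type 9) is seen and once more at the end of the PES payload. A simplified sketch of that pattern, with track, samples, contiguous and pes standing in for the demuxer's real state (assumed names, not the hls.js API):

// Flush-on-AUD pattern: one PES payload may carry several access units.
function collectAccessUnits(nalUnits, track, samples, contiguous, pes) {
  let units2 = [], length = 0, key = false;
  const pushAccessUnit = () => {
    if (!units2.length) { return; }
    // emit only if a keyframe was seen, or an SPS from a previous fragment exists
    // and this fragment is contiguous / has already produced samples
    if (key === true || (track.sps && (samples.length || contiguous))) {
      samples.push({units: {units: units2, length: length}, pts: pes.pts, dts: pes.dts, key: key});
      track.len += length;
      track.nbNalu += units2.length;
    }
    units2 = []; length = 0;
  };
  nalUnits.forEach(unit => {
    if (unit.type === 9) { pushAccessUnit(); }   // AUD closes the current access unit
    if (unit.type === 5) { key = true; }         // IDR slice marks a keyframe
    units2.push(unit);
    length += unit.data.byteLength;
  });
  pushAccessUnit();                              // flush whatever remains at end of PES
}
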
@@ -526,14 +531,37 @@
//logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
units.push(unit);
} else {
+// lastUnitStart is undefined => this is the first start code found in this PES packet
+// first check if start code delimiter is overlapping between 2 PES packets,
+// ie it started in last packet (lastState not zero)
+// and ended at the beginning of this PES packet (i <= 4 - lastState)
+let lastState = this.avcNaluState;
+if(lastState && (i <= 4 - lastState)) {
+// start delimiter overlapping between PES packets
+// strip start delimiter bytes from the end of last NAL unit
+let track = this._avcTrack,
+samples = track.samples;
+if (samples.length) {
+let lastavcSample = samples[samples.length - 1],
+lastUnits = lastavcSample.units.units,
+lastUnit = lastUnits[lastUnits.length - 1];
+// check if lastUnit had a state different from zero
+if (lastUnit.state) {
+// strip last bytes
+lastUnit.data = lastUnit.data.subarray(0,lastUnit.data.byteLength - lastState);
+lastavcSample.units.length -= lastState;
+track.len -= lastState;
+}
+}
+}
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
overflow = i - state - 1;
-if (overflow) {
-var track = this._avcTrack,
+if (overflow > 0) {
+let track = this._avcTrack,
samples = track.samples;
//logger.log('first NALU found with overflow:' + overflow);
if (samples.length) {
-var lastavcSample = samples[samples.length - 1],
+let lastavcSample = samples[samples.length - 1],
lastUnits = lastavcSample.units.units,
lastUnit = lastUnits[lastUnits.length - 1],
tmp = new Uint8Array(lastUnit.data.byteLength + overflow);
@@ -557,9 +585,10 @@
}
}
if (lastUnitStart) {
-unit = {data: array.subarray(lastUnitStart, len), type: lastUnitType};
+unit = {data: array.subarray(lastUnitStart, len), type: lastUnitType, state : state};
units.push(unit);
-//logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
+//logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
+this.avcNaluState = state;
}
return units;
}
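
The new avcNaluState field persists the Annex B start-code scanner state across PES packets; the hunks above use it to detect a start code that began at the end of one packet and finished at the beginning of the next, and to strip the stray zero bytes that were appended to the previous NAL unit. A worked example of just that stripping step (byte values and indices are made up for illustration):

// The previous payload ended with two zero bytes that are really the first half of
// a 0x00 00 01 start code, so the parser left avcNaluState = 2 and the zeros were
// glued onto the last NAL unit of the last sample.
let avcNaluState = 2;
let lastUnit = { data: new Uint8Array([0x65, 0x88, 0x84, 0x00, 0x00]), state: 2 };

// The next payload starts with 0x01; the completed start code is found within
// the first few bytes, i.e. i <= 4 - lastState.
const i = 1, lastState = avcNaluState;
if (lastState && i <= 4 - lastState && lastUnit.state) {
  // strip the partial start code from the previous NAL unit
  lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
}
// lastUnit.data is now [0x65, 0x88, 0x84]; parsing continues with a clean unit boundary.
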
@@ -572,7 +601,7 @@
duration = this._duration,
audioCodec = this.audioCodec,
aacOverFlow = this.aacOverFlow,
-lastAacPTS = this.lastAacPTS,
+aacLastPTS = this.aacLastPTS,
config, frameLength, frameDuration, frameIndex, offset, headerLength, stamp, len, aacSample;
if (aacOverFlow) {
var tmp = new Uint8Array(aacOverFlow.byteLength + data.byteLength);
@@ -616,8 +645,8 @@
// if last AAC frame is overflowing, we should ensure timestamps are contiguous:
// first sample PTS should be equal to last sample PTS + frameDuration
-if(aacOverFlow && lastAacPTS) {
-var newPTS = lastAacPTS+frameDuration;
+if(aacOverFlow && aacLastPTS) {
+var newPTS = aacLastPTS+frameDuration;
if(Math.abs(newPTS-pts) > 1) {
logger.log(`AAC: align PTS for overlapping frames by ${Math.round((newPTS-pts)/90)}`);
pts=newPTS;
@@ -659,7 +688,7 @@
aacOverFlow = null;
}
this.aacOverFlow = aacOverFlow;
-this.lastAacPTS = stamp;
+this.aacLastPTS = stamp;
}
_parseID3PES(pes) {
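
This file also renames lastAacPTS to aacLastPTS; when an ADTS frame overflows into the next PES packet, that timestamp is used to pin the first frame of the next chunk exactly one frame duration after the last emitted one. A worked example with assumed numbers (sample rate and PTS values are illustrative only):

// An AAC frame carries 1024 samples and MPEG-TS timestamps tick at 90 kHz.
const sampleRate = 44100;
const frameDuration = 1024 * 90000 / sampleRate;  // ~2089.8 ticks per frame
const aacLastPTS = 900000;                        // PTS of the last frame before the overflow
let pts = 900180;                                 // PTS derived from the next PES header
const newPTS = aacLastPTS + frameDuration;        // where the overflowing frame must land
if (Math.abs(newPTS - pts) > 1) {
  // same log as the diff, with the delta reported in milliseconds (90 ticks = 1 ms)
  console.log(`AAC: align PTS for overlapping frames by ${Math.round((newPTS - pts) / 90)}`);
  pts = newPTS;                                   // ~902090 instead of 900180
}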

View file

@@ -42,7 +42,7 @@ class LevelHelper {
// if at least one fragment contains PTS info, recompute PTS information for all fragments
if(PTSFrag) {
-LevelHelper.updateFragPTS(newDetails,PTSFrag.sn,PTSFrag.startPTS,PTSFrag.endPTS);
+LevelHelper.updateFragPTSDTS(newDetails,PTSFrag.sn,PTSFrag.startPTS,PTSFrag.endPTS,PTSFrag.startDTS,PTSFrag.endDTS);
} else {
// ensure that delta is within oldfragments range
// also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
@@ -61,7 +61,7 @@ class LevelHelper {
return;
}
-static updateFragPTS(details,sn,startPTS,endPTS) {
+static updateFragPTSDTS(details,sn,startPTS,endPTS,startDTS,endDTS) {
var fragIdx, fragments, frag, i;
// exit if sn out of range
if (sn < details.startSN || sn > details.endSN) {
@@ -73,12 +73,16 @@
if(!isNaN(frag.startPTS)) {
startPTS = Math.min(startPTS,frag.startPTS);
endPTS = Math.max(endPTS, frag.endPTS);
+startDTS = Math.min(startDTS,frag.startDTS);
+endDTS = Math.max(endDTS, frag.endDTS);
}
var drift = startPTS - frag.start;
frag.start = frag.startPTS = startPTS;
frag.endPTS = endPTS;
+frag.startDTS = startDTS;
+frag.endDTS = endDTS;
frag.duration = endPTS - startPTS;
// adjust fragment PTS/duration from seqnum-1 to frag 0
for(i = fragIdx ; i > 0 ; i--) {
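
To make the widened updateFragPTSDTS signature concrete, here is an illustrative walk-through of the range merge and drift computation with made-up values in seconds (not taken from the library):

let frag = { start: 10.0, startPTS: 10.30, endPTS: 16.30, startDTS: 10.26, endDTS: 16.26 };
let startPTS = 10.28, endPTS = 16.32, startDTS = 10.24, endDTS = 16.28; // freshly parsed

if (!isNaN(frag.startPTS)) {
  // widen the already-known range instead of overwriting it
  startPTS = Math.min(startPTS, frag.startPTS);   // 10.28
  endPTS   = Math.max(endPTS, frag.endPTS);       // 16.32
  startDTS = Math.min(startDTS, frag.startDTS);   // 10.24
  endDTS   = Math.max(endDTS, frag.endDTS);       // 16.28
}
const drift = startPTS - frag.start;              // 0.28 between playlist start and parsed PTS
frag.start = frag.startPTS = startPTS;
frag.endPTS = endPTS;
frag.startDTS = startDTS;
frag.endDTS = endDTS;
frag.duration = endPTS - startPTS;                // 6.04
// the caller forwards drift via LEVEL_PTS_UPDATED and uses it to re-anchor neighbouring fragments.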