trim bower libs

parent ce68029f25
commit 909258464f

98 changed files with 128 additions and 28399 deletions
@@ -1,47 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title></title>
</head>
<body>
<script src="../dist/hls.js"></script>
<video id="video" controls></video>
<script>
/* get stream from query string */
function getParameterByName(name) {
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
results = regex.exec(location.search);
return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
}

var stream = getParameterByName('stream') || 'http://www.streambox.fr/playlists/x36xhzz/url_9/193039199_mp4_h264_aac_fhd_7.m3u8';
</script>
<script>
if(Hls.isSupported()) {
var video = document.getElementById('video');
var hls = new Hls();
hls.loadSource(stream);
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED,function() {
video.play();
});
}
</script>
<script>
var video = document.getElementById('video');
window.onload = function(){
var i=0;
var el = document.getElementById('update');
function foo(){
i++;
el.innerHTML = 'animation:' + i+',decoded:' + video.webkitDecodedFrameCount + ',dropped:' + video.webkitDroppedFrameCount;
window.requestAnimationFrame(foo);
}
foo();
};
</script>
<div id="update"></div>
</body>
</html>
574 dashboard-ui/bower_components/hls.js/demo/canvas.js (vendored)
@@ -1,574 +0,0 @@

var eventLeftMargin = 180;
var eventRightMargin = 0;

function canvasLoadEventUpdate(canvas, minTime, maxTime, events) {
var ctx = canvas.getContext('2d');
for (var i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = event.time, end = event.time + event.duration + event.latency;
if((start >= minTime && start <= maxTime)) {
y_offset+=20;
}
}
canvas.height = y_offset;

ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;

//draw legend
var x_offset = 5;
ctx.font = "12px Arial";

legend = "load event";
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset = eventLeftMargin+5;


legend = 'start - end';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;

legend = '[latency';
ctx.fillStyle = "orange";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;

legend = 'loading';
ctx.fillStyle = "green";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;

legend = 'parsing';
ctx.fillStyle = "blue";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;

legend = 'appending]';
ctx.fillStyle = "red";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;

legend = 'size bitrate';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;

for (i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = Math.round(event.time), end = Math.round(event.time + event.duration + event.latency);
if((start >= minTime && start <= maxTime)) {
canvasDrawLoadEvent(ctx,y_offset,event,minTime,maxTime);
y_offset+=20;
}
}
}

function canvasVideoEventUpdate(canvas, minTime, maxTime, events) {
var ctx = canvas.getContext('2d');
for (var i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = event.time, end = event.time;
if((start >= minTime && start <= maxTime)) {
y_offset+=20;
}
}
canvas.height = y_offset;
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;

//draw legend
var x_offset = 5;
ctx.font = "12px Arial";

legend = 'video event';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);

x_offset = eventLeftMargin+5;
legend = 'time';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);

x_offset += ctx.measureText(legend).width+5;
legend = '[duration]';
ctx.fillStyle = "blue";
ctx.fillText(legend,x_offset,15);

for (i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = Math.round(event.time), end = Math.round(event.time);
if((start >= minTime && start <= maxTime)) {
canvasDrawVideoEvent(ctx,y_offset,event,minTime,maxTime);
y_offset+=20;
}
}
}

function canvasBufferWindowUpdate(canvas, minTime, maxTime, focusTime, events) {
var ctx = canvas.getContext('2d'),
minTimeBuffer, minTimePos,focusTimeBuffer,focusTimePos,
bufferChartStart = eventLeftMargin,
bufferChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin;
ctx.clearRect (0,0,canvas.width, canvas.height);

if(events.length === 0) {
return;
}

ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.globalAlpha = 1;

//draw legend
var x_offset = 5;
var y_offset = 0;
ctx.font = "15px Arial";

var maxBuffer = 0, firstEventIdx = -1, focusEventIdx= -1, event;
for (var i =0 ; i < events.length; i++) {
event = events[i];
maxBuffer = Math.max(maxBuffer, event.buffer+event.pos);
if(firstEventIdx === -1 && event.time >= minTime) {
firstEventIdx = Math.max(0,i-1);
}
if(focusEventIdx === -1 && event.time >= focusTime) {
focusEventIdx = Math.max(0,i-1);
}
}
// compute position and buffer length at pos minTime using linear approximation
if((firstEventIdx+1) < events.length) {
minTimePos = events[firstEventIdx].pos + (minTime-events[firstEventIdx].time)*(events[firstEventIdx+1].pos-events[firstEventIdx].pos)/(events[firstEventIdx+1].time-events[firstEventIdx].time);
minTimeBuffer = minTimePos + events[firstEventIdx].buffer + (minTime-events[firstEventIdx].time)*(events[firstEventIdx+1].buffer-events[firstEventIdx].buffer)/(events[firstEventIdx+1].time-events[firstEventIdx].time);
} else {
minTimeBuffer = 0;
minTimePos = 0;
}

// compute position and buffer length at pos focusTime using linear approximation
if((focusEventIdx+1) < events.length) {
focusTimePos = events[focusEventIdx].pos + (focusTime-events[focusEventIdx].time)*(events[focusEventIdx+1].pos-events[focusEventIdx].pos)/(events[focusEventIdx+1].time-events[focusEventIdx].time);
focusTimeBuffer = events[focusEventIdx].buffer + (focusTime-events[focusEventIdx].time)*(events[focusEventIdx+1].buffer-events[focusEventIdx].buffer)/(events[focusEventIdx+1].time-events[focusEventIdx].time);
} else {
focusTimePos = 0;
focusTimeBuffer = 0;
}

maxBuffer*=1.1;

y_offset += 15;
legend = 'play pos/buffer zoomed';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = '[' + minTime + ',' + maxTime + ']';
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = 'focus time:' + focusTime + ' ms';
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = 'focus position:' + Math.round(focusTimePos) + ' ms';
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = 'focus buffer:' + Math.round(focusTimeBuffer) + ' ms';
ctx.fillText(legend,x_offset,y_offset);

ctx.fillStyle = "blue";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
ctx.lineTo(bufferChartStart, ctx.canvas.height*(1 - minTimeBuffer/maxBuffer));
for (var i =firstEventIdx+1 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - (event.buffer+event.pos)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();

ctx.fillStyle = "brown";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
ctx.lineTo(bufferChartStart, ctx.canvas.height*(1 - minTimePos/maxBuffer));
for (var i =firstEventIdx+1 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - (event.pos)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();

ctx.fillStyle = "white";
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;

ctx.fillStyle = "black";
x_offset = bufferChartStart + (bufferChartWidth*(focusTime-minTime))/(maxTime-minTime);
ctx.moveTo(x_offset, ctx.canvas.height);
y_offset = ctx.canvas.height*(1 - (focusTimePos+focusTimeBuffer)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
ctx.stroke();
}

function canvasBufferTimeRangeUpdate(canvas, minTime, maxTime, windowMinTime, windowMaxTime, events) {
var ctx = canvas.getContext('2d'),
bufferChartStart = eventLeftMargin,
bufferChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin,
x_offset = 0,y_offset = 0,
event;
ctx.clearRect (0,0,canvas.width, canvas.height);

ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;

x_offset = 5;
y_offset = 15;
legend = 'play pos/buffer';
ctx.fillStyle = "black";
ctx.font = "15px Arial";
ctx.fillText(legend,x_offset,y_offset);

if(events.length === 0) {
return;
}

var maxBuffer = 0;
for (var i =0 ; i < events.length; i++) {
maxBuffer = Math.max(maxBuffer, events[i].buffer + events[i].pos);
}

y_offset+=15;
legend = 'last pos:' + events[events.length-1].pos + ' ms';
ctx.fillText(legend,x_offset,y_offset);

y_offset+=15;
legend = 'last buffer:' + events[events.length-1].buffer + ' ms';
ctx.fillText(legend,x_offset,y_offset);

y_offset+=15;
legend = 'max buffer:' + maxBuffer + ' ms';
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = 'nb samples:' + events.length;
ctx.fillText(legend,x_offset,y_offset);

maxBuffer*=1.1;

ctx.fillStyle = "blue";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
for (var i =0 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - (event.buffer+event.pos)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();

ctx.fillStyle = "brown";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
for (var i =0 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - event.pos/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();

ctx.globalAlpha = 0.7;
ctx.fillStyle = "grey";
var x_start = bufferChartStart;
var x_w = bufferChartWidth*(windowMinTime-minTime)/(maxTime-minTime);
ctx.fillRect(x_start,0,x_w, canvas.height);
var x_start = bufferChartStart+bufferChartWidth*(windowMaxTime-minTime)/(maxTime-minTime);
var x_w = canvas.width-x_start-eventRightMargin;
ctx.fillRect(x_start,0,x_w, canvas.height);
ctx.globalAlpha = 1;
}

function canvasBitrateEventUpdate(canvas, minTime, maxTime, windowMinTime, windowMaxTime, levelEvents, bitrateEvents) {
var ctx = canvas.getContext('2d'),
bufferChartStart = eventLeftMargin,
bufferChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin,
x_offset = 0,y_offset = 0,
event, maxLevel, minLevel, sumLevel, maxBitrate, minBitrate, sumDuration;
ctx.clearRect (0,0,canvas.width, canvas.height);

if(levelEvents.length === 0) {
return;
}

maxBitrate = minBitrate = bitrateEvents[0].bitrate;
sumLevel = sumDuration = 0;
for (var i =0 ; i < bitrateEvents.length; i++) {
sumLevel += bitrateEvents[i].duration*bitrateEvents[i].level;
sumDuration += bitrateEvents[i].duration;
maxBitrate = Math.max(maxBitrate, bitrateEvents[i].bitrate);
minBitrate = Math.min(minBitrate, bitrateEvents[i].bitrate);
}

maxLevel = minLevel = levelEvents[0].id;
for (var i =0 ; i < levelEvents.length; i++) {
maxLevel = Math.max(maxLevel, levelEvents[i].id);
minLevel = Math.min(minLevel, levelEvents[i].id);
}

ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;

x_offset = 5;
y_offset = 0;
ctx.fillStyle = "black";
ctx.font = "15px Arial";

y_offset+=15;
legend = 'last bitrate:' + (bitrateEvents[bitrateEvents.length-1].bitrate/1000).toFixed(2) + "Mb/s";
ctx.fillText(legend,x_offset,y_offset);

y_offset+=15;
legend = 'min bitrate:' + (minBitrate/1000).toFixed(2) + "Mb/s";
ctx.fillText(legend,x_offset,y_offset);

y_offset+=15;
legend = 'max bitrate:' + (maxBitrate/1000).toFixed(2) + "Mb/s";
ctx.fillText(legend,x_offset,y_offset);

y_offset+=15;
legend = 'min/last/max level:' + minLevel + '/' + levelEvents[levelEvents.length-1].id + '/' + maxLevel;
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = 'nb level switch:' + (levelEvents.length-1);
ctx.fillText(legend,x_offset,y_offset);

y_offset += 15;
legend = 'average level:' + (sumLevel/sumDuration).toFixed(2);
ctx.fillText(legend,x_offset,y_offset);

maxBitrate*=1.1;

ctx.strokeStyle = "blue";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
for (var i =0 ; i < bitrateEvents.length; i++) {
event = bitrateEvents[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - event.bitrate/maxBitrate);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(bufferChartStart+bufferChartWidth, y_offset);
ctx.stroke();

ctx.strokeStyle = "black";
ctx.beginPath();
x_offset = bufferChartStart;
y_offset = ctx.canvas.height;
ctx.moveTo(x_offset, y_offset);
for (var i =0 ; i < levelEvents.length; i++) {
event = levelEvents[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
ctx.lineTo(x_offset,y_offset);
y_offset = ctx.canvas.height*(1 - event.bitrate/maxBitrate);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(bufferChartStart+bufferChartWidth, y_offset);
ctx.stroke();

ctx.globalAlpha = 0.7;
ctx.fillStyle = "grey";
var x_start = bufferChartStart;
var x_w = bufferChartWidth*(windowMinTime-minTime)/(maxTime-minTime);
ctx.fillRect(x_start,0,x_w, canvas.height);
var x_start = bufferChartStart+bufferChartWidth*(windowMaxTime-minTime)/(maxTime-minTime);
var x_w = canvas.width-x_start-eventRightMargin;
ctx.fillRect(x_start,0,x_w, canvas.height);
ctx.globalAlpha = 1;

}


function canvasDrawLoadEvent(ctx,yoffset,event,minTime,maxTime) {
var legend,offset,x_start,x_w,
networkChartStart = eventLeftMargin,
networkChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin,
tend = Math.round(event.time + event.duration + event.latency);

//draw start
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = Math.round(event.time);
offset = ctx.measureText(legend).width+5;
x_start = networkChartStart-offset+networkChartWidth*(event.time-minTime)/(maxTime-minTime);
ctx.fillText(legend,x_start,yoffset+12);

//draw latency rectangle
ctx.fillStyle = "orange";
x_start = networkChartStart + networkChartWidth*(event.time-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.latency/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);
//draw download rectangle
ctx.fillStyle = "green";
x_start = networkChartStart + networkChartWidth*(event.time+event.latency-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.load/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);

if(event.parsing) {
//draw parsing rectangle
ctx.fillStyle = "blue";
x_start = networkChartStart + networkChartWidth*(event.time+event.latency+event.load-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.parsing/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);

if(event.buffer) {
//draw buffering rectangle
ctx.fillStyle = "red";
x_start = networkChartStart + networkChartWidth*(event.time+event.latency+event.load+event.parsing-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.buffer/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);
}
}

//draw end time
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = tend;
x_start += x_w + 5;
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;

legend = "[" + Math.round(event.latency);
ctx.fillStyle = "orange";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;

legend = Math.round(event.load);
if(!event.parsing) legend += "]";
ctx.fillStyle = "green";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;

if(event.parsing) {
legend = Math.round(event.parsing);
if(!event.buffer) legend +="]";
ctx.fillStyle = "blue";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;

if(event.buffer) {
legend = Math.round(event.buffer) + "]";
ctx.fillStyle = "red";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
}
}

if(event.size) {
if(event.size > 1000*1000) {
legend = (event.size/1000000).toFixed(1) + 'MB';
} else {
legend = Math.round(event.size/1000) + 'kB';
}
ctx.fillStyle = "black";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
}

if(event.bw) {
if(event.bw > 1000) {
legend = (event.bw/1000).toFixed(1) + 'Mbps';
} else {
legend = event.bw + ' kbps';
}
ctx.fillStyle = "black";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
}

// draw event name
ctx.fillStyle = "black";
ctx.font = "15px Arial";
legend = event.type;
if(event.id2 !== undefined) {
legend += ' ' + event.id2;
}
if(event.id !== undefined) {
if(event.type === 'fragment') {
legend += ' @';
}
legend += ' ' + event.id;
}
if(event.start !== undefined) {
legend += ' [' + event.start + ',' + event.end + ']';
}
ctx.fillText(legend,5,yoffset+15);
}

function canvasDrawVideoEvent(ctx,yoffset,event,minTime,maxTime) {
var legend,offset,x_start,x_w,
networkChartStart = eventLeftMargin,
networkChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin;

// draw event name
ctx.fillStyle = "black";
ctx.font = "15px Arial";
legend = event.type;
if (event.name !== undefined) legend+= ':' + event.name;
ctx.fillText(legend,5,yoffset+15);


//draw start time
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = Math.round(event.time);
offset = ctx.measureText(legend).width+5;
x_start = networkChartStart-offset+networkChartWidth*(event.time-minTime)/(maxTime-minTime);
ctx.fillText(legend,x_start,yoffset+12);


//draw event rectangle
x_start = networkChartStart + networkChartWidth*(event.time-minTime)/(maxTime-minTime);
if(event.duration) {
x_w = networkChartWidth*event.duration/(maxTime-minTime);
} else {
x_w = 1;
}
ctx.fillRect(x_start,yoffset,x_w, 15);

if(event.duration) {

//draw end time
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = Math.round(event.time+event.duration);
x_start += x_w + 5;
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;

legend = "[" + Math.round(event.duration) + "]";
ctx.fillStyle = "blue";
ctx.fillText(legend,x_start,yoffset+12);
}
}
973 dashboard-ui/bower_components/hls.js/demo/index.html (vendored)
@@ -1,973 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<head>
|
||||
<style>
|
||||
header {
|
||||
text-align: center;
|
||||
}
|
||||
#controls
|
||||
{
|
||||
width: 70%;
|
||||
min-width: 615px;
|
||||
padding: 3px;
|
||||
margin: 0px auto 20px auto;
|
||||
border: 1px solid #606060;
|
||||
overflow: hidden;
|
||||
}
|
||||
.innerControls
|
||||
{
|
||||
display:block;
|
||||
float: left;
|
||||
width: 99%;
|
||||
margin: 3px;
|
||||
padding-left: 3px;
|
||||
font-size: 8pt
|
||||
}
|
||||
.videoCentered
|
||||
{
|
||||
width: 720px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
display: block
|
||||
}
|
||||
|
||||
.center
|
||||
{
|
||||
width: 70%;
|
||||
min-width: 615px;
|
||||
overflow: hidden;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
display: block
|
||||
}
|
||||
|
||||
#customButtons input { width: 25%; display : inline-block; text-align: center; font-size: 8pt;}
|
||||
#toggleButtons button { width: 24%; display : inline-block; text-align: center; font-size: 8pt; background-color: #A0A0A0 }
|
||||
|
||||
</style>
|
||||
<title>hls.js demo</title>
|
||||
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-32x32.png" sizes="32x32" />
|
||||
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-16x16.png" sizes="16x16" />
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap-theme.min.css">
|
||||
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
<div class="header-container">
|
||||
<header class="wrapper clearfix">
|
||||
<h1 class="title"><a href="https://github.com/dailymotion/hls.js">hls.js</a> demo page</h1>
|
||||
</header>
|
||||
</div>
|
||||
|
||||
<div class="main-container">
|
||||
<header>
|
||||
<p>
|
||||
test with your HLS streams below in Chrome, Firefox, IE11 or Safari !
|
||||
<br>Advanced controls are also available at the bottom of this page.
|
||||
</p>
|
||||
</header>
|
||||
<div id="controls">
|
||||
<div id="customButtons"></div>
|
||||
<select id="streamSelect" class="innerControls"><option value="" selected>(Enter custom URL below)</option></select>
|
||||
<input id="streamURL" class="innerControls" type=text value=""/>
|
||||
<label class="innerControls"><input id="enableStreaming" type=checkbox checked/> Enable Streaming</label>
|
||||
<label class="innerControls"><input id="autoRecoverError" type=checkbox checked/> Auto-Recover Media Error</label>
|
||||
<label class="innerControls"><input id="enableWorker" type=checkbox checked/> Enable Worker</label>
|
||||
<label class="innerControls">Level Capping <input id="levelCapping" type=number/></label>
|
||||
<label class="innerControls">default Audio Codec <input id="defaultAudioCodec"/></label>
|
||||
<div id="StreamPermalink" class="innerControls"></div>
|
||||
<div>
|
||||
<select id="videoSize" style="float:left">
|
||||
<option value="240">player size: tiny (240p)</option>
|
||||
<option value="384">player size: small (384p)</option>
|
||||
<option value="480">player size: medium (480p)</option>
|
||||
<option value="720" selected>player size: large (720p)</option>
|
||||
<option value="1080">player size: huge (1080p)</option>
|
||||
</select>
|
||||
<span id="currentResolution" style="float:right;font-size: 8pt;">-</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<video id="video" controls autoplay class="videoCentered"></video><br>
|
||||
<canvas id="buffered_c" height="15" class="videoCentered" onclick="buffered_seek(event);"></canvas><br><br>
|
||||
<pre id="HlsStatus" class="center"></pre>
|
||||
|
||||
<div class="center" id="toggleButtons">
|
||||
<button type="button" class="btn btn-sm" onclick="$('#PlaybackControl').toggle();">toggle playback controls</button>
|
||||
<button type="button" class="btn btn-sm" onclick="$('#QualityLevelControl').toggle();">toggle Quality Level controls</button>
|
||||
<button type="button" class="btn btn-sm" onclick="$('#MetricsDisplay').toggle();toggleMetricsDisplay();">toggle Metrics Display</button>
|
||||
<button type="button" class="btn btn-sm" onclick="$('#StatsDisplay').toggle();">toggle Stats Display</button>
|
||||
</div>
|
||||
|
||||
<div id='PlaybackControl'>
|
||||
<h4> Playback Control </h4>
|
||||
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].play()">play</button>
|
||||
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].pause()">pause</button>
|
||||
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].currentTime+=10">currentTime+=10</button>
|
||||
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].currentTime-=10">currentTime-=10</button>
|
||||
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].currentTime=$('#seek_pos').val()">seek to </button>
|
||||
<input type="text" id='seek_pos' size="8" onkeydown="if(window.event.keyCode=='13'){$('#video')[0].currentTime=$('#seek_pos').val();}"><br><br>
|
||||
<button type="button" class="btn btn-xs btn-warning" onclick="hls.attachMedia($('#video')[0])">attach Video</button>
|
||||
<button type="button" class="btn btn-xs btn-warning" onclick="hls.detachMedia()">detach Video</button><br>
|
||||
<button type="button" class="btn btn-xs btn-warning" onclick="hls.startLoad()">start Load</button>
|
||||
<button type="button" class="btn btn-xs btn-warning" onclick="hls.stopLoad()">stop Load</button>
|
||||
<button type="button" class="btn btn-xs btn-warning" onclick="hls.recoverMediaError()">recover Media Error</button><br>
|
||||
</div>
|
||||
|
||||
<div id='QualityLevelControl'>
|
||||
<h4> Quality Control </h4>
|
||||
<table>
|
||||
<tr>
|
||||
<td>current level</td>
|
||||
<td width=10px></td>
|
||||
<td> <div id="currentLevelControl" style="display: inline;"></div> </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td> <p>next level</p></td>
|
||||
<td> </td>
|
||||
<td> <div id="nextLevelControl" style="display: inline;"></div> </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td> <p>load level</p></td>
|
||||
<td> </td>
|
||||
<td> <div id="loadLevelControl" style="display: inline;"></div> </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td> <p>cap level</p></td>
|
||||
<td> </td>
|
||||
<td> <div id="levelCappingControl" style="display: inline;"></div> </td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div id='MetricsDisplay'>
|
||||
<h4> Real Time Metrics Display </h4>
|
||||
<div id="metricsButton">
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="$('#metricsButtonWindow').toggle();$('#metricsButtonFixed').toggle();windowSliding=!windowSliding; refreshCanvas()">toggle sliding/fixed window</button><br>
|
||||
<div id="metricsButtonWindow">
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(0)">window ALL</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(2000)">2s</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(5000)">5s</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(10000)">10s</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(20000)">20s</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(30000)">30s</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(60000)">60s</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(120000)">120s</button><br>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeZoomIn()">Window Zoom In</button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeZoomOut()">Window Zoom Out</button><br>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSlideLeft()"> <<< Window Slide </button>
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSlideRight()">Window Slide >>> </button><br>
|
||||
</div>
|
||||
<div id="metricsButtonFixed">
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="windowStart=$('#windowStart').val()">fixed window start(ms)</button>
|
||||
<input type="text" id='windowStart' defaultValue="0" size="8" onkeydown="if(window.event.keyCode=='13'){windowStart=$('#windowStart').val();}">
|
||||
<button type="button" class="btn btn-xs btn-info" onclick="windowEnd=$('#windowEnd').val()">fixed window end(ms)</button>
|
||||
<input type="text" id='windowEnd' defaultValue="10000" size="8" onkeydown="if(window.event.keyCode=='13'){windowEnd=$('#windowEnd').val();}"><br>
|
||||
</div>
|
||||
<button type="button" class="btn btn-xs btn-success" onclick="goToMetrics()" style="font-size:18px">metrics link</button>
|
||||
<button type="button" class="btn btn-xs btn-success" onclick="goToMetricsPermaLink()" style="font-size:18px">metrics permalink</button>
|
||||
<button type="button" class="btn btn-xs btn-success" onclick="copyMetricsToClipBoard()" style="font-size:18px">copy metrics to clipboard</button>
|
||||
<canvas id="bufferTimerange_c" width="640" height="100" style="border:1px solid #000000" onmousedown="timeRangeCanvasonMouseDown(event)" onmousemove="timeRangeCanvasonMouseMove(event)" onmouseup="timeRangeCanvasonMouseUp(event)" onmouseout="timeRangeCanvasonMouseOut(event);"></canvas>
|
||||
<canvas id="bitrateTimerange_c" width="640" height="100" style="border:1px solid #000000;"></canvas>
|
||||
<canvas id="bufferWindow_c" width="640" height="100" style="border:1px solid #000000" onmousemove="windowCanvasonMouseMove(event);"></canvas>
|
||||
<canvas id="videoEvent_c" width="640" height="15" style="border:1px solid #000000;"></canvas>
|
||||
<canvas id="loadEvent_c" width="640" height="15" style="border:1px solid #000000;"></canvas><br>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id='StatsDisplay'>
|
||||
<h4> Stats Display </h4>
|
||||
<pre id='HlsStats'></pre>
|
||||
<div id="buffered_log"></div>
|
||||
</div>
|
||||
</div>
|
||||
<br><br>
|
||||
|
||||
<script src="../../streams.js"></script>
|
||||
<!-- live-reload script -->
|
||||
<script src="//localhost:8001"></script>
|
||||
<script src="../dist/hls.js"></script>
|
||||
<script src="canvas.js"></script>
|
||||
<script src="metrics.js"></script>
|
||||
<script src="jsonpack.js"></script>
|
||||
<script>
|
||||
|
||||
$(document).ready(function() {
|
||||
$('#streamSelect').change(function() { $('#streamURL').val($('#streamSelect').val());loadStream($('#streamURL').val());});
|
||||
$('#streamURL').change(function() { loadStream($('#streamURL').val());});
|
||||
$('#videoSize').change(function() { $('#video').width($('#videoSize').val()); $('#buffered_c').width($('#videoSize').val()); });
|
||||
$("#PlaybackControl").hide();
|
||||
$("#QualityLevelControl").hide();
|
||||
$("#MetricsDisplay").hide();
|
||||
$("#StatsDisplay").hide();
|
||||
$('#metricsButtonWindow').toggle(windowSliding);
|
||||
$('#metricsButtonFixed').toggle(!windowSliding);
|
||||
$('#enableStreaming').click(function() { enableStreaming = this.checked; loadStream($('#streamURL').val()); });
|
||||
$('#autoRecoverError').click(function() { autoRecoverError = this.checked; updatePermalink();});
|
||||
$('#enableWorker').click(function() { enableWorker = this.checked; updatePermalink();});
|
||||
$('#levelCapping').change(function() { levelCapping = this.value; updatePermalink();});
|
||||
$('#defaultAudioCodec').change(function() { defaultAudioCodec = this.value; updatePermalink();});
|
||||
$('#enableStreaming').prop( "checked", enableStreaming );
|
||||
$('#autoRecoverError').prop( "checked", autoRecoverError );
|
||||
$('#enableWorker').prop( "checked", enableWorker );
|
||||
$('#levelCapping').val(levelCapping);
|
||||
$('#defaultAudioCodec').val(defaultAudioCodec || "undefined");
|
||||
});
|
||||
|
||||
|
||||
'use strict';
|
||||
var hls,events, stats,
|
||||
enableStreaming = JSON.parse(getURLParam('enableStreaming',true))
|
||||
autoRecoverError = JSON.parse(getURLParam('autoRecoverError',true)),
|
||||
enableWorker = JSON.parse(getURLParam('enableWorker',true)),
|
||||
levelCapping = JSON.parse(getURLParam('levelCapping',-1)),
|
||||
defaultAudioCodec = getURLParam('defaultAudioCodec',undefined);
|
||||
var video = $('#video')[0];
|
||||
video.volume = 0.05;
|
||||
|
||||
loadStream(decodeURIComponent(getURLParam('src','http://www.streambox.fr/playlists/x36xhzz/x36xhzz.m3u8')));
|
||||
|
||||
function loadStream(url) {
|
||||
hideCanvas();
|
||||
if(Hls.isSupported()) {
|
||||
if(hls) {
|
||||
hls.destroy();
|
||||
if(hls.bufferTimer) {
|
||||
clearInterval(hls.bufferTimer);
|
||||
hls.bufferTimer = undefined;
|
||||
}
|
||||
hls = null;
|
||||
}
|
||||
|
||||
$('#streamURL').val(url);
|
||||
updatePermalink();
|
||||
if(!enableStreaming) {
|
||||
$("#HlsStatus").text("Streaming disabled");
|
||||
return;
|
||||
}
|
||||
|
||||
$("#HlsStatus").text('loading ' + url);
|
||||
events = { url : url, t0 : performance.now(), load : [], buffer : [], video : [], level : [], bitrate : []};
|
||||
recoverDecodingErrorDate = recoverSwapAudioCodecDate = null;
|
||||
hls = new Hls({debug:true, enableWorker : enableWorker, defaultAudioCodec : defaultAudioCodec});
|
||||
$("#HlsStatus").text('loading manifest and attaching video element...');
|
||||
hls.loadSource(url);
|
||||
hls.autoLevelCapping = levelCapping;
|
||||
hls.attachMedia(video);
|
||||
hls.on(Hls.Events.MEDIA_ATTACHED,function() {
|
||||
$("#HlsStatus").text('MediaSource attached...');
|
||||
bufferingIdx = -1;
|
||||
events.video.push({time : performance.now() - events.t0, type : "Media attached"});
|
||||
});
|
||||
hls.on(Hls.Events.MEDIA_DETACHED,function() {
|
||||
$("#HlsStatus").text('MediaSource detached...');
|
||||
bufferingIdx = -1;
|
||||
events.video.push({time : performance.now() - events.t0, type : "Media detached"});
|
||||
});
|
||||
hls.on(Hls.Events.FRAG_PARSING_INIT_SEGMENT,function(event,data) {
|
||||
showCanvas();
|
||||
var event = {time : performance.now() - events.t0, type : "init segment"};
|
||||
events.video.push(event);
|
||||
});
|
||||
hls.on(Hls.Events.FRAG_PARSING_METADATA, function(event, data) {
|
||||
console.log("Id3 samples ", data.samples);
|
||||
});
|
||||
hls.on(Hls.Events.LEVEL_SWITCH,function(event,data) {
|
||||
events.level.push({time : performance.now() - events.t0, id : data.level, bitrate : Math.round(hls.levels[data.level].bitrate/1000)});
|
||||
updateLevelInfo();
|
||||
});
|
||||
hls.on(Hls.Events.MANIFEST_PARSED,function(event,data) {
|
||||
var event = {
|
||||
type : "manifest",
|
||||
name : "",
|
||||
start : 0,
|
||||
end : data.levels.length,
|
||||
time : data.stats.trequest - events.t0,
|
||||
latency : data.stats.tfirst - data.stats.trequest,
|
||||
load : data.stats.tload - data.stats.tfirst,
|
||||
duration : data.stats.tload - data.stats.tfirst,
|
||||
};
|
||||
events.load.push(event);
|
||||
refreshCanvas();
|
||||
});
|
||||
hls.on(Hls.Events.MANIFEST_PARSED,function(event,data) {
|
||||
$("#HlsStatus").text("manifest successfully loaded," + hls.levels.length + " levels found");
|
||||
stats = {levelNb: data.levels.length};
|
||||
updateLevelInfo();
|
||||
});
|
||||
hls.on(Hls.Events.LEVEL_LOADED,function(event,data) {
|
||||
events.isLive = data.details.live;
|
||||
var event = {
|
||||
type : "level",
|
||||
id : data.level,
|
||||
start : data.details.startSN,
|
||||
end : data.details.endSN,
|
||||
time : data.stats.trequest - events.t0,
|
||||
latency : data.stats.tfirst - data.stats.trequest,
|
||||
load : data.stats.tload - data.stats.tfirst,
|
||||
parsing : data.stats.tparsed - data.stats.tload,
|
||||
duration : data.stats.tload - data.stats.tfirst
|
||||
};
|
||||
events.load.push(event);
|
||||
refreshCanvas();
|
||||
});
|
||||
hls.on(Hls.Events.FRAG_BUFFERED,function(event,data) {
|
||||
var event = {
|
||||
type : "fragment",
|
||||
id : data.frag.level,
|
||||
id2 : data.frag.sn,
|
||||
time : data.stats.trequest - events.t0,
|
||||
latency : data.stats.tfirst - data.stats.trequest,
|
||||
load : data.stats.tload - data.stats.tfirst,
|
||||
parsing : data.stats.tparsed - data.stats.tload,
|
||||
buffer : data.stats.tbuffered - data.stats.tparsed,
|
||||
duration : data.stats.tbuffered - data.stats.tfirst,
|
||||
bw : Math.round(8*data.stats.length/(data.stats.tbuffered - data.stats.tfirst)),
|
||||
size : data.stats.length
|
||||
};
|
||||
events.load.push(event);
|
||||
events.bitrate.push({time : performance.now() - events.t0, bitrate : event.bw , duration : data.frag.duration, level : event.id});
|
||||
if(hls.bufferTimer === undefined) {
|
||||
events.buffer.push({ time : 0, buffer : 0, pos: 0});
|
||||
hls.bufferTimer = window.setInterval(checkBuffer, 100);
|
||||
}
|
||||
refreshCanvas();
|
||||
updateLevelInfo();
|
||||
|
||||
var latency = data.stats.tfirst - data.stats.trequest, process = data.stats.tbuffered - data.stats.trequest, bitrate = Math.round(8 * data.stats.length / (data.stats.tbuffered - data.stats.tfirst));
|
||||
if (stats.fragBuffered) {
|
||||
stats.fragMinLatency = Math.min(stats.fragMinLatency, latency);
|
||||
stats.fragMaxLatency = Math.max(stats.fragMaxLatency, latency);
|
||||
stats.fragMinProcess = Math.min(stats.fragMinProcess, process);
|
||||
stats.fragMaxProcess = Math.max(stats.fragMaxProcess, process);
|
||||
stats.fragMinKbps = Math.min(stats.fragMinKbps, bitrate);
|
||||
stats.fragMaxKbps = Math.max(stats.fragMaxKbps, bitrate);
|
||||
stats.autoLevelCappingMin = Math.min(stats.autoLevelCappingMin, hls.autoLevelCapping);
|
||||
stats.autoLevelCappingMax = Math.max(stats.autoLevelCappingMax, hls.autoLevelCapping);
|
||||
stats.fragBuffered++;
|
||||
} else {
|
||||
stats.fragMinLatency = stats.fragMaxLatency = latency;
|
||||
stats.fragMinProcess = stats.fragMaxProcess = process;
|
||||
stats.fragMinKbps = stats.fragMaxKbps = bitrate;
|
||||
stats.fragBuffered = 1;
|
||||
stats.fragBufferedBytes = 0;
|
||||
stats.autoLevelCappingMin = stats.autoLevelCappingMax = hls.autoLevelCapping;
|
||||
this.sumLatency = 0;
|
||||
this.sumKbps = 0;
|
||||
this.sumProcess = 0;
|
||||
}
|
||||
stats.fraglastLatency = latency;
|
||||
this.sumLatency += latency;
|
||||
stats.fragAvgLatency = Math.round(this.sumLatency / stats.fragBuffered);
|
||||
stats.fragLastProcess = process;
|
||||
this.sumProcess += process;
|
||||
stats.fragAvgProcess = Math.round(this.sumProcess / stats.fragBuffered);
|
||||
stats.fragLastKbps = bitrate;
|
||||
this.sumKbps += bitrate;
|
||||
stats.fragAvgKbps = Math.round(this.sumKbps / stats.fragBuffered);
|
||||
stats.fragBufferedBytes += data.stats.length;
|
||||
stats.autoLevelCappingLast = hls.autoLevelCapping;
|
||||
});
|
||||
hls.on(Hls.Events.FRAG_CHANGED,function(event,data) {
|
||||
var event = {time : performance.now() - events.t0, type : 'frag changed', name : data.frag.sn + ' @ ' + data.frag.level };
|
||||
events.video.push(event);
|
||||
refreshCanvas();
|
||||
updateLevelInfo();
|
||||
|
||||
var level = data.frag.level, autoLevel = data.frag.autoLevel;
|
||||
if (stats.levelStart === undefined) {
|
||||
stats.levelStart = level;
|
||||
}
|
||||
if (autoLevel) {
|
||||
if (stats.fragChangedAuto) {
|
||||
stats.autoLevelMin = Math.min(stats.autoLevelMin, level);
|
||||
stats.autoLevelMax = Math.max(stats.autoLevelMax, level);
|
||||
stats.fragChangedAuto++;
|
||||
if (this.levelLastAuto && level !== stats.autoLevelLast) {
|
||||
stats.autoLevelSwitch++;
|
||||
}
|
||||
} else {
|
||||
stats.autoLevelMin = stats.autoLevelMax = level;
|
||||
stats.autoLevelSwitch = 0;
|
||||
stats.fragChangedAuto = 1;
|
||||
this.sumAutoLevel = 0;
|
||||
}
|
||||
this.sumAutoLevel += level;
|
||||
stats.autoLevelAvg = Math.round(1000 * this.sumAutoLevel / stats.fragChangedAuto) / 1000;
|
||||
stats.autoLevelLast = level;
|
||||
} else {
|
||||
if (stats.fragChangedManual) {
|
||||
stats.manualLevelMin = Math.min(stats.manualLevelMin, level);
|
||||
stats.manualLevelMax = Math.max(stats.manualLevelMax, level);
|
||||
stats.fragChangedManual++;
|
||||
if (!this.levelLastAuto && level !== stats.manualLevelLast) {
|
||||
stats.manualLevelSwitch++;
|
||||
}
|
||||
} else {
|
||||
stats.manualLevelMin = stats.manualLevelMax = level;
|
||||
stats.manualLevelSwitch = 0;
|
||||
stats.fragChangedManual = 1;
|
||||
}
|
||||
stats.manualLevelLast = level;
|
||||
}
|
||||
this.levelLastAuto = autoLevel;
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.FRAG_LOAD_EMERGENCY_ABORTED,function(event,data) {
|
||||
if (stats) {
|
||||
if (stats.fragLoadEmergencyAborted === undefined) {
|
||||
stats.fragLoadEmergencyAborted = 1;
|
||||
} else {
|
||||
stats.fragLoadEmergencyAborted++;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.ERROR, function(event,data) {
|
||||
switch(data.details) {
|
||||
case Hls.ErrorDetails.MANIFEST_LOAD_ERROR:
|
||||
try {
|
||||
$("#HlsStatus").html("cannot Load <a href=\"" + data.url + "\">" + url + "</a><br>HTTP response code:" + data.response.status + "<br>" + data.response.statusText);
|
||||
if(data.response.status === 0) {
|
||||
$("#HlsStatus").append("this might be a CORS issue, consider installing <a href=\"https://chrome.google.com/webstore/detail/allow-control-allow-origi/nlfbmbojpeacfghkpbjhddihlkkiljbi\">Allow-Control-Allow-Origin</a> Chrome Extension");
|
||||
}
|
||||
} catch(err) {
|
||||
$("#HlsStatus").html("cannot Load <a href=\"" + data.url + "\">" + url + "</a><br>Reason:Load " + data.event.type);
|
||||
}
|
||||
break;
|
||||
case Hls.ErrorDetails.MANIFEST_LOAD_TIMEOUT:
|
||||
$("#HlsStatus").text("timeout while loading manifest");
|
||||
break;
|
||||
case Hls.ErrorDetails.MANIFEST_PARSING_ERROR:
|
||||
$("#HlsStatus").text("error while parsing manifest:" + data.reason);
|
||||
break;
|
||||
case Hls.ErrorDetails.LEVEL_LOAD_ERROR:
|
||||
$("#HlsStatus").text("error while loading level playlist");
|
||||
break;
|
||||
case Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT:
|
||||
$("#HlsStatus").text("timeout while loading level playlist");
|
||||
break;
|
||||
case Hls.ErrorDetails.LEVEL_SWITCH_ERROR:
|
||||
$("#HlsStatus").text("error while trying to switch to level " + data.level);
|
||||
break;
|
||||
case Hls.ErrorDetails.FRAG_LOAD_ERROR:
|
||||
$("#HlsStatus").text("error while loading fragment " + data.frag.url);
|
||||
break;
|
||||
case Hls.ErrorDetails.FRAG_LOAD_TIMEOUT:
|
||||
$("#HlsStatus").text("timeout while loading fragment " + data.frag.url);
|
||||
break;
|
||||
case Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR:
|
||||
$("#HlsStatus").text("Frag Loop Loading Error");
|
||||
break;
|
||||
case Hls.ErrorDetails.FRAG_DECRYPT_ERROR:
|
||||
$("#HlsStatus").text("Decrypting Error:" + data.reason);
|
||||
break;
|
||||
case Hls.ErrorDetails.FRAG_PARSING_ERROR:
|
||||
$("#HlsStatus").text("Parsing Error:" + data.reason);
|
||||
break;
|
||||
case Hls.ErrorDetails.KEY_LOAD_ERROR:
|
||||
$("#HlsStatus").text("error while loading key " + data.frag.decryptdata.uri);
|
||||
break;
|
||||
case Hls.ErrorDetails.KEY_LOAD_TIMEOUT:
|
||||
$("#HlsStatus").text("timeout while loading key " + data.frag.decryptdata.uri);
|
||||
break;
|
||||
case Hls.ErrorDetails.BUFFER_APPEND_ERROR:
|
||||
$("#HlsStatus").text("Buffer Append Error");
|
||||
break;
|
||||
case Hls.ErrorDetails.BUFFER_APPENDING_ERROR:
|
||||
$("#HlsStatus").text("Buffer Appending Error");
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if(data.fatal) {
|
||||
switch(data.type) {
|
||||
case Hls.ErrorTypes.MEDIA_ERROR:
|
||||
handleMediaError();
|
||||
break;
|
||||
case Hls.ErrorTypes.NETWORK_ERROR:
|
||||
$("#HlsStatus").append(",network error ...");
|
||||
break;
|
||||
default:
|
||||
$("#HlsStatus").append(", unrecoverable error");
|
||||
hls.destroy();
|
||||
break;
|
||||
}
|
||||
console.log($("#HlsStatus").text());
|
||||
}
|
||||
if(!stats) stats = {};
|
||||
// track all errors independently
|
||||
if (stats[data.details] === undefined) {
|
||||
stats[data.details] = 1;
|
||||
} else {
|
||||
stats[data.details] += 1;
|
||||
}
|
||||
// track fatal error
|
||||
if (data.fatal) {
|
||||
if (stats.fatalError === undefined) {
|
||||
stats.fatalError = 1;
|
||||
} else {
|
||||
stats.fatalError += 1;
|
||||
}
|
||||
}
|
||||
$("#HlsStats").text(JSON.stringify(sortObject(stats),null,"\t"));
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.FPS_DROP,function(event,data) {
|
||||
var evt = {time : performance.now() - events.t0, type : "frame drop", name : data.currentDropped + "/" + data.currentDecoded};
|
||||
events.video.push(evt);
|
||||
if (stats) {
|
||||
if (stats.fpsDropEvent === undefined) {
|
||||
stats.fpsDropEvent = 1;
|
||||
} else {
|
||||
stats.fpsDropEvent++;
|
||||
}
|
||||
stats.fpsTotalDroppedFrames = data.totalDroppedFrames;
|
||||
}
|
||||
});
|
||||
video.addEventListener('resize', handleVideoEvent);
|
||||
video.addEventListener('seeking', handleVideoEvent);
|
||||
video.addEventListener('seeked', handleVideoEvent);
|
||||
video.addEventListener('pause', handleVideoEvent);
|
||||
video.addEventListener('play', handleVideoEvent);
|
||||
video.addEventListener('canplay', handleVideoEvent);
|
||||
video.addEventListener('canplaythrough', handleVideoEvent);
|
||||
video.addEventListener('ended', handleVideoEvent);
|
||||
video.addEventListener('playing', handleVideoEvent);
|
||||
video.addEventListener('error', handleVideoEvent);
|
||||
video.addEventListener('loadedmetadata', handleVideoEvent);
|
||||
video.addEventListener('loadeddata', handleVideoEvent);
|
||||
video.addEventListener('durationchange', handleVideoEvent);
|
||||
} else {
|
||||
if(navigator.userAgent.toLowerCase().indexOf('firefox') !== -1) {
|
||||
$("#HlsStatus").text("you are using Firefox, it looks like MediaSource is not enabled,<br>please ensure the following keys are set appropriately in <b>about:config</b><br>media.mediasource.enabled=true<br>media.mediasource.mp4.enabled=true<br><b>media.mediasource.whitelist=false</b>");
|
||||
} else {
|
||||
$("#HlsStatus").text("your Browser does not support MediaSourceExtension / MP4 mediasource");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var lastSeekingIdx, lastStartPosition,lastDuration;
|
||||
function handleVideoEvent(evt) {
|
||||
var data = '';
|
||||
switch(evt.type) {
|
||||
case 'durationchange':
|
||||
if(evt.target.duration - lastDuration <= 0.5) {
|
||||
// some browsers reports several duration change events with almost the same value ... avoid spamming video events
|
||||
return;
|
||||
}
|
||||
lastDuration = evt.target.duration;
|
||||
data = Math.round(evt.target.duration*1000);
|
||||
break;
|
||||
case 'resize':
|
||||
data = evt.target.videoWidth + '/' + evt.target.videoHeight;
|
||||
break;
|
||||
case 'loadedmetadata':
|
||||
// data = 'duration:' + evt.target.duration + '/videoWidth:' + evt.target.videoWidth + '/videoHeight:' + evt.target.videoHeight;
|
||||
// break;
|
||||
case 'loadeddata':
|
||||
case 'canplay':
|
||||
case 'canplaythrough':
|
||||
case 'ended':
|
||||
case 'seeking':
|
||||
case 'seeked':
|
||||
case 'play':
|
||||
case 'playing':
|
||||
lastStartPosition = evt.target.currentTime;
|
||||
case 'pause':
|
||||
case 'waiting':
|
||||
case 'stalled':
|
||||
case 'error':
|
||||
data = Math.round(evt.target.currentTime*1000);
|
||||
if(evt.type === 'error') {
|
||||
var errorTxt,mediaError=evt.currentTarget.error;
|
||||
switch(mediaError.code) {
|
||||
case mediaError.MEDIA_ERR_ABORTED:
|
||||
errorTxt = "You aborted the video playback";
|
||||
break;
|
||||
case mediaError.MEDIA_ERR_DECODE:
|
||||
errorTxt = "The video playback was aborted due to a corruption problem or because the video used features your browser did not support";
|
||||
handleMediaError();
|
||||
break;
|
||||
case mediaError.MEDIA_ERR_NETWORK:
|
||||
errorTxt = "A network error caused the video download to fail part-way";
|
||||
break;
|
||||
case mediaError.MEDIA_ERR_SRC_NOT_SUPPORTED:
|
||||
errorTxt = "The video could not be loaded, either because the server or network failed or because the format is not supported";
|
||||
break;
|
||||
}
|
||||
$("#HlsStatus").text(errorTxt);
|
||||
console.error(errorTxt);
|
||||
}
|
||||
break;
|
||||
// case 'progress':
|
||||
// data = 'currentTime:' + evt.target.currentTime + ',bufferRange:[' + this.video.buffered.start(0) + ',' + this.video.buffered.end(0) + ']';
|
||||
// break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
var event = {time : performance.now() - events.t0, type : evt.type, name : data};
|
||||
events.video.push(event);
|
||||
if(evt.type === 'seeking') {
|
||||
lastSeekingIdx = events.video.length-1;
|
||||
}
|
||||
if(evt.type === 'seeked') {
|
||||
events.video[lastSeekingIdx].duration = event.time - events.video[lastSeekingIdx].time;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var recoverDecodingErrorDate,recoverSwapAudioCodecDate;
|
||||
function handleMediaError() {
|
||||
if(autoRecoverError) {
|
||||
var now = performance.now();
|
||||
if(!recoverDecodingErrorDate || (now - recoverDecodingErrorDate) > 3000) {
|
||||
recoverDecodingErrorDate = performance.now();
|
||||
$("#HlsStatus").append(",try to recover media Error ...");
|
||||
hls.recoverMediaError();
|
||||
} else {
|
||||
if(!recoverSwapAudioCodecDate || (now - recoverSwapAudioCodecDate) > 3000) {
|
||||
recoverSwapAudioCodecDate = performance.now();
|
||||
$("#HlsStatus").append(",try to swap Audio Codec and recover media Error ...");
|
||||
hls.swapAudioCodec();
|
||||
hls.recoverMediaError();
|
||||
} else {
|
||||
$("#HlsStatus").append(",cannot recover, last media error recovery failed ...");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function timeRangesToString(r) {
|
||||
var log = "";
|
||||
for (var i=0; i<r.length; i++) {
|
||||
log += "[" + r.start(i) + "," + r.end(i) + "]";
|
||||
}
|
||||
return log;
|
||||
}
|
||||
|
||||
var bufferingIdx = -1;
|
||||
|
||||
function checkBuffer() {
|
||||
var v = $('#video')[0];
|
||||
var canvas = $('#buffered_c')[0];
|
||||
var ctx = canvas.getContext('2d');
|
||||
var r = v.buffered;
|
||||
var bufferingDuration;
|
||||
ctx.fillStyle = "black";
|
||||
ctx.fillRect(0,0,canvas.width,canvas.height);
|
||||
ctx.fillStyle = "gray";
|
||||
if (r) {
|
||||
if(!canvas.width || canvas.width !== v.clientWidth) {
|
||||
canvas.width = v.clientWidth;
|
||||
}
|
||||
var pos = v.currentTime,bufferLen;
|
||||
for (var i=0, bufferLen=0; i<r.length; i++) {
|
||||
var start = r.start(i)/v.duration * canvas.width;
|
||||
var end = r.end(i)/v.duration * canvas.width;
|
||||
ctx.fillRect(start, 3, Math.max(2, end-start), 10);
|
||||
if(pos >= r.start(i) && pos < r.end(i)) {
|
||||
// play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
|
||||
bufferLen = r.end(i) - pos;
|
||||
}
|
||||
}
|
||||
// check if we are in buffering / or playback ended state
|
||||
if(bufferLen <= 0.1 && v.paused === false && (pos-lastStartPosition) > 0.5) {
|
||||
// don't create buffering event if we are at the end of the playlist, don't report ended for live playlist
|
||||
if(lastDuration -pos <= 0.5 && events.isLive === false) {
|
||||
} else {
|
||||
// we are not at the end of the playlist ... real buffering
|
||||
if(bufferingIdx !== -1) {
|
||||
bufferingDuration = performance.now() - events.t0 - events.video[bufferingIdx].time;
|
||||
events.video[bufferingIdx].duration = bufferingDuration;
|
||||
events.video[bufferingIdx].name = bufferingDuration;
|
||||
} else {
|
||||
events.video.push({ type : 'buffering' , time : performance.now() - events.t0 });
|
||||
// we are in buffering state
|
||||
bufferingIdx = events.video.length-1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(bufferLen > 0.1 && bufferingIdx !=-1) {
|
||||
bufferingDuration = performance.now() - events.t0 - events.video[bufferingIdx].time;
|
||||
events.video[bufferingIdx].duration = bufferingDuration;
|
||||
events.video[bufferingIdx].name = bufferingDuration;
|
||||
// we are out of buffering state
|
||||
bufferingIdx = -1;
|
||||
}
|
||||
|
||||
// update buffer/position for current Time
|
||||
var event = { time : performance.now() - events.t0, buffer : Math.round(bufferLen*1000), pos: Math.round(pos*1000)};
|
||||
var bufEvents = events.buffer, bufEventLen = bufEvents.length;
|
||||
if(bufEventLen > 1) {
|
||||
var event0 = bufEvents[bufEventLen-2],event1 = bufEvents[bufEventLen-1];
|
||||
var slopeBuf0 = (event0.buffer - event1.buffer)/(event0.time-event1.time);
|
||||
var slopeBuf1 = (event1.buffer - event.buffer)/(event1.time-event.time);
|
||||
|
||||
var slopePos0 = (event0.pos - event1.pos)/(event0.time-event1.time);
|
||||
var slopePos1 = (event1.pos - event.pos)/(event1.time-event.time);
|
||||
// compute slopes. if less than 30% difference, remove event1
|
||||
if((slopeBuf0 === slopeBuf1 || Math.abs(slopeBuf0/slopeBuf1 -1) <= 0.3) &&
|
||||
(slopePos0 === slopePos1 || Math.abs(slopePos0/slopePos1 -1) <= 0.3))
|
||||
{
|
||||
bufEvents.pop();
|
||||
}
|
||||
}
|
||||
events.buffer.push(event);
|
||||
refreshCanvas();
|
||||
|
||||
var log = "Duration:"
|
||||
+ v.duration + "<br>"
|
||||
+ "Buffered:"
|
||||
+ timeRangesToString(v.buffered) + "<br>"
|
||||
+ "Seekable:"
|
||||
+ timeRangesToString(v.seekable) + "<br>"
|
||||
+ "Played:"
|
||||
+ timeRangesToString(v.played) + "<br>";
|
||||
|
||||
|
||||
var videoPlaybackQuality = v.getVideoPlaybackQuality;
|
||||
if(videoPlaybackQuality && typeof(videoPlaybackQuality) === typeof(Function)) {
|
||||
log+="Dropped Frames:"+ v.getVideoPlaybackQuality().droppedVideoFrames + "<br>";
|
||||
log+="Corrupted Frames:"+ v.getVideoPlaybackQuality().corruptedVideoFrames + "<br>";
|
||||
} else if(v.webkitDroppedFrameCount) {
|
||||
log+="Dropped Frames:"+ v.webkitDroppedFrameCount + "<br>";
|
||||
}
|
||||
$("#buffered_log").html(log);
|
||||
$("#HlsStats").text(JSON.stringify(sortObject(stats),null,"\t"));
|
||||
ctx.fillStyle = "blue";
|
||||
var x = v.currentTime / v.duration * canvas.width;
|
||||
ctx.fillRect(x, 0, 2, 15);
|
||||
}
|
||||
|
||||
}
|
||||
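// Minimal standalone sketch (not part of the original demo) of the down-sampling
// rule used above: a new buffer/position sample replaces the previous one when the
// buffer and position slopes both stay within ~30% of the preceding segment.
function isRedundantSample(prev2, prev1, next) {
    var slopeBuf0 = (prev2.buffer - prev1.buffer) / (prev2.time - prev1.time);
    var slopeBuf1 = (prev1.buffer - next.buffer) / (prev1.time - next.time);
    var slopePos0 = (prev2.pos - prev1.pos) / (prev2.time - prev1.time);
    var slopePos1 = (prev1.pos - next.pos) / (prev1.time - next.time);
    return (slopeBuf0 === slopeBuf1 || Math.abs(slopeBuf0 / slopeBuf1 - 1) <= 0.3) &&
           (slopePos0 === slopePos1 || Math.abs(slopePos0 / slopePos1 - 1) <= 0.3);
}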
|
||||
function sortObject(obj) {
|
||||
if(typeof obj !== 'object')
|
||||
return obj;
|
||||
var temp = {};
|
||||
var keys = [];
|
||||
for(var key in obj)
|
||||
keys.push(key);
|
||||
keys.sort();
|
||||
for(var index in keys)
|
||||
temp[keys[index]] = sortObject(obj[keys[index]]);
|
||||
return temp;
|
||||
}
|
||||
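// Illustrative usage: key order is normalized recursively, which keeps the
// JSON.stringify output shown in #HlsStats stable regardless of insertion order.
//   sortObject({ video: { fps: 25, codec: 'avc1' }, audio: 'aac' })
//     -> { audio: 'aac', video: { codec: 'avc1', fps: 25 } }
// Note that arrays are also treated as plain objects by this helper.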
|
||||
|
||||
function showCanvas() {
|
||||
showMetrics();
|
||||
$("#buffered_log").show();
|
||||
$("#buffered_c").show();
|
||||
}
|
||||
|
||||
function hideCanvas() {
|
||||
hideMetrics();
|
||||
$("#buffered_log").hide();
|
||||
$("#buffered_c").hide();
|
||||
}
|
||||
|
||||
function getMetrics() {
|
||||
var json = JSON.stringify(events);
|
||||
var jsonpacked = jsonpack.pack(json);
|
||||
console.log("packing JSON from " + json.length + " to " + jsonpacked.length + " bytes");
|
||||
return btoa(jsonpacked);
|
||||
}
|
||||
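// The base64 blob produced here is consumed by metrics.html further below, where
// the inverse chain restores the events object: jsonpack.unpack(atob(getMetrics())).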
|
||||
function copyMetricsToClipBoard() {
|
||||
copyTextToClipboard(getMetrics());
|
||||
}
|
||||
|
||||
function copyTextToClipboard(text) {
|
||||
var textArea = document.createElement("textarea");
|
||||
textArea.value = text;
|
||||
document.body.appendChild(textArea);
|
||||
textArea.select();
|
||||
try {
|
||||
var successful = document.execCommand('copy');
|
||||
var msg = successful ? 'successful' : 'unsuccessful';
|
||||
console.log('Copying text command was ' + msg);
|
||||
} catch (err) {
|
||||
console.log('Oops, unable to copy');
|
||||
}
|
||||
document.body.removeChild(textArea);
|
||||
}
|
||||
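// A modern, promise-based alternative (not used by this demo, which targets older
// browsers) would be:
//   navigator.clipboard.writeText(text).catch(function(err) { console.log(err); });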
|
||||
function goToMetrics() {
|
||||
var url = document.URL;
|
||||
url = url.substr(0,url.lastIndexOf("/")+1) + 'metrics.html';
|
||||
console.log(url);
|
||||
window.open(url,'_blank');
|
||||
}
|
||||
|
||||
function goToMetricsPermaLink() {
|
||||
var url = document.URL;
|
||||
var b64 = getMetrics();
|
||||
url = url.substr(0,url.lastIndexOf("/")+1) + 'metrics.html?data=' + b64;
|
||||
console.log(url);
|
||||
window.open(url,'_blank');
|
||||
}
|
||||
|
||||
function minsecs(ts) {
|
||||
var m = Math.floor(Math.floor(ts % 3600) / 60);
|
||||
var s = Math.floor(ts % 60);
|
||||
return m + ":" + (s < 10 ? "0" : "") + s;
|
||||
}
|
||||
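// Worked example: minsecs(3725) -> "2:05" (3725 s is 1 h 2 min 5 s; hours are folded
// away by the % 3600, so only minutes and zero-padded seconds are shown).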
|
||||
function buffered_seek(event) {
|
||||
var canvas = $("#buffered_c")[0];
|
||||
var v = $('#video')[0];
|
||||
var target = (event.clientX - canvas.offsetLeft) / canvas.width * v.duration;
|
||||
v.currentTime = target;
|
||||
}
|
||||
|
||||
function updateLevelInfo() {
|
||||
var button_template = '<button type="button" class="btn btn-sm ';
|
||||
var button_enabled = 'btn-primary" ';
|
||||
var button_disabled = 'btn-success" ';
|
||||
|
||||
var html1 = button_template;
|
||||
if(hls.autoLevelEnabled) {
|
||||
html1 += button_enabled;
|
||||
} else {
|
||||
html1 += button_disabled;
|
||||
}
|
||||
html1 += 'onclick="hls.currentLevel=-1">auto</button>';
|
||||
|
||||
|
||||
var html2 = button_template;
|
||||
if(hls.autoLevelEnabled) {
|
||||
html2 += button_enabled;
|
||||
} else {
|
||||
html2 += button_disabled;
|
||||
}
|
||||
html2 += 'onclick="hls.loadLevel=-1">auto</button>';
|
||||
|
||||
var html3 = button_template;
|
||||
if(hls.autoLevelCapping === -1) {
|
||||
html3 += button_enabled;
|
||||
} else {
|
||||
html3 += button_disabled;
|
||||
}
|
||||
html3 += 'onclick="levelCapping=hls.autoLevelCapping=-1;updateLevelInfo();updatePermalink();">auto</button>';
|
||||
|
||||
var html4 = button_template;
|
||||
if(hls.autoLevelEnabled) {
|
||||
html4 += button_enabled;
|
||||
} else {
|
||||
html4 += button_disabled;
|
||||
}
|
||||
html4 += 'onclick="hls.nextLevel=-1">auto</button>';
|
||||
|
||||
for (var i=0; i < hls.levels.length; i++) {
|
||||
html1 += button_template;
|
||||
if(hls.currentLevel === i) {
|
||||
html1 += button_enabled;
|
||||
} else {
|
||||
html1 += button_disabled;
|
||||
}
|
||||
var levelName = i, label = level2label(i);
|
||||
if(label) {
|
||||
levelName += '(' + level2label(i) + ')';
|
||||
}
|
||||
html1 += 'onclick="hls.currentLevel=' + i + '">' + levelName + '</button>';
|
||||
|
||||
html2 += button_template;
|
||||
if(hls.loadLevel === i) {
|
||||
html2 += button_enabled;
|
||||
} else {
|
||||
html2 += button_disabled;
|
||||
}
|
||||
html2 += 'onclick="hls.loadLevel=' + i + '">' + levelName + '</button>';
|
||||
|
||||
html3 += button_template;
|
||||
if(hls.autoLevelCapping === i) {
|
||||
html3 += button_enabled;
|
||||
} else {
|
||||
html3 += button_disabled;
|
||||
}
|
||||
html3 += 'onclick="levelCapping=hls.autoLevelCapping=' + i + ';updateLevelInfo();updatePermalink();">' + levelName + '</button>';
|
||||
|
||||
html4 += button_template;
|
||||
if(hls.nextLevel === i) {
|
||||
html4 += button_enabled;
|
||||
} else {
|
||||
html4 += button_disabled;
|
||||
}
|
||||
html4 += 'onclick="hls.nextLevel=' + i + '">' + levelName + '</button>';
|
||||
}
|
||||
var v = $('#video')[0];
|
||||
if(v.videoWidth) {
|
||||
$("#currentResolution").html("video resolution:" + v.videoWidth + 'x' + v.videoHeight);
|
||||
}
|
||||
if($("#currentLevelControl").html() != html1) {
|
||||
$("#currentLevelControl").html(html1);
|
||||
}
|
||||
|
||||
if($("#loadLevelControl").html() != html2) {
|
||||
$("#loadLevelControl").html(html2);
|
||||
}
|
||||
|
||||
if($("#levelCappingControl").html() != html3) {
|
||||
$("#levelCappingControl").html(html3);
|
||||
}
|
||||
|
||||
if($("#nextLevelControl").html() != html4) {
|
||||
$("#nextLevelControl").html(html4);
|
||||
}
|
||||
}
|
||||
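// Quick reference for the four hls.js quality knobs driven by the buttons built above
// (all are standard hls.js properties, shown here with a hypothetical level index):
//   hls.currentLevel = 2;      // switch immediately, flushing the buffer
//   hls.loadLevel = 2;         // level used for the next fragment load
//   hls.nextLevel = 2;         // switch at the next fragment boundary
//   hls.autoLevelCapping = 2;  // cap automatic level selection at index 2
// Setting any of the first three back to -1 re-enables automatic selection.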
|
||||
function level2label(index) {
|
||||
if(hls && hls.levels.length-1 >= index) {
|
||||
var level = hls.levels[index];
|
||||
if (level.name) {
|
||||
return level.name;
|
||||
} else {
|
||||
if (level.height) {
|
||||
return(level.height + 'p / ' + Math.round(level.bitrate / 1024) + 'kb');
|
||||
} else {
|
||||
if(level.bitrate) {
|
||||
return(Math.round(level.bitrate / 1024) + 'kb');
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getURLParam(sParam, defaultValue) {
|
||||
var sPageURL = window.location.search.substring(1);
|
||||
var sURLVariables = sPageURL.split('&');
|
||||
for (var i = 0; i < sURLVariables.length; i++) {
|
||||
var sParameterName = sURLVariables[i].split('=');
|
||||
if (sParameterName[0] == sParam) {
|
||||
return "undefined" == sParameterName[1] ? undefined : sParameterName[1];
|
||||
}
|
||||
}
|
||||
return defaultValue;
|
||||
}
|
||||
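// A modern equivalent of getURLParam() for comparison (URLSearchParams is a standard
// browser API, not something the original demo relies on); note it does not reproduce
// the special handling of the literal string "undefined" above.
function getURLParamModern(sParam, defaultValue) {
    var params = new URLSearchParams(window.location.search);
    return params.has(sParam) ? params.get(sParam) : defaultValue;
}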
|
||||
function updatePermalink() {
|
||||
var url = $('#streamURL').val();
|
||||
var hlsLink = document.URL.split('?')[0] + '?src=' + encodeURIComponent(url) +
|
||||
'&enableStreaming=' + enableStreaming +
|
||||
'&autoRecoverError=' + autoRecoverError +
|
||||
'&enableWorker=' + enableWorker +
|
||||
'&levelCapping=' + levelCapping +
|
||||
'&defaultAudioCodec=' + defaultAudioCodec;
|
||||
var description = 'permalink: ' + "<a href=\"" + hlsLink + "\">" + hlsLink + "</a>";
|
||||
$("#StreamPermalink").html(description);
|
||||
}
|
||||
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
|
@ -1,578 +0,0 @@
|
|||
/*
|
||||
Copyright (c) 2013, Rodrigo González, Sapienlab All Rights Reserved.
|
||||
Available via MIT LICENSE. See https://github.com/roro89/jsonpack/blob/master/LICENSE.md for details.
|
||||
*/
|
||||
(function(define) {
|
||||
|
||||
define([], function() {
|
||||
|
||||
var TOKEN_TRUE = -1;
|
||||
var TOKEN_FALSE = -2;
|
||||
var TOKEN_NULL = -3;
|
||||
var TOKEN_EMPTY_STRING = -4;
|
||||
var TOKEN_UNDEFINED = -5;
|
||||
|
||||
var pack = function(json, options) {
|
||||
|
||||
// Canonizes the options
|
||||
options = options || {};
|
||||
|
||||
// A shorthand for debugging
|
||||
var verbose = options.verbose || false;
|
||||
|
||||
verbose && console.log('Normalize the JSON Object');
|
||||
|
||||
// JSON as Javascript Object (Not string representation)
|
||||
json = typeof json === 'string' ? this.JSON.parse(json) : json;
|
||||
|
||||
verbose && console.log('Creating a empty dictionary');
|
||||
|
||||
// The dictionary
|
||||
var dictionary = {
|
||||
strings : [],
|
||||
integers : [],
|
||||
floats : []
|
||||
};
|
||||
|
||||
verbose && console.log('Creating the AST');
|
||||
|
||||
// The AST
|
||||
var ast = (function recursiveAstBuilder(item) {
|
||||
|
||||
verbose && console.log('Calling recursiveAstBuilder with ' + this.JSON.stringify(item));
|
||||
|
||||
// The type of the item
|
||||
var type = typeof item;
|
||||
|
||||
// Case 7: The item is null
|
||||
if (item === null) {
|
||||
return {
|
||||
type : 'null',
|
||||
index : TOKEN_NULL
|
||||
};
|
||||
}
|
||||
|
||||
//add undefined
|
||||
if (typeof item === 'undefined') {
|
||||
return {
|
||||
type : 'undefined',
|
||||
index : TOKEN_UNDEFINED
|
||||
};
|
||||
}
|
||||
|
||||
// Case 1: The item is Array Object
|
||||
if ( item instanceof Array) {
|
||||
|
||||
// Create a new sub-AST of type Array (@)
|
||||
var ast = ['@'];
|
||||
|
||||
// Add each items
|
||||
for (var i in item) {
|
||||
|
||||
if (!item.hasOwnProperty(i)) continue;
|
||||
|
||||
ast.push(recursiveAstBuilder(item[i]));
|
||||
}
|
||||
|
||||
// And return
|
||||
return ast;
|
||||
|
||||
}
|
||||
|
||||
// Case 2: The item is Object
|
||||
if (type === 'object') {
|
||||
|
||||
// Create a new sub-AST of type Object ($)
|
||||
var ast = ['$'];
|
||||
|
||||
// Add each items
|
||||
for (var key in item) {
|
||||
|
||||
if (!item.hasOwnProperty(key))
|
||||
continue;
|
||||
|
||||
ast.push(recursiveAstBuilder(key));
|
||||
ast.push(recursiveAstBuilder(item[key]));
|
||||
}
|
||||
|
||||
// And return
|
||||
return ast;
|
||||
|
||||
}
|
||||
|
||||
// Case 3: The item empty string
|
||||
if (item === '') {
|
||||
return {
|
||||
type : 'empty',
|
||||
index : TOKEN_EMPTY_STRING
|
||||
};
|
||||
}
|
||||
|
||||
// Case 4: The item is String
|
||||
if (type === 'string') {
|
||||
|
||||
// The index of that word in the dictionary
|
||||
var index = _indexOf.call(dictionary.strings, item);
|
||||
|
||||
// If not, add to the dictionary and actualize the index
|
||||
if (index == -1) {
|
||||
dictionary.strings.push(_encode(item));
|
||||
index = dictionary.strings.length - 1;
|
||||
}
|
||||
|
||||
// Return the token
|
||||
return {
|
||||
type : 'strings',
|
||||
index : index
|
||||
};
|
||||
}
|
||||
|
||||
// Case 5: The item is integer
|
||||
if (type === 'number' && item % 1 === 0) {
|
||||
|
||||
// The index of that number in the dictionary
|
||||
var index = _indexOf.call(dictionary.integers, item);
|
||||
|
||||
// If not, add to the dictionary and actualize the index
|
||||
if (index == -1) {
|
||||
dictionary.integers.push(_base10To36(item));
|
||||
index = dictionary.integers.length - 1;
|
||||
}
|
||||
|
||||
// Return the token
|
||||
return {
|
||||
type : 'integers',
|
||||
index : index
|
||||
};
|
||||
}
|
||||
|
||||
// Case 6: The item is float
|
||||
if (type === 'number') {
|
||||
// The index of that number in the dictionary
|
||||
var index = _indexOf.call(dictionary.floats, item);
|
||||
|
||||
// If not, add to the dictionary and actualize the index
|
||||
if (index == -1) {
|
||||
// Float not use base 36
|
||||
dictionary.floats.push(item);
|
||||
index = dictionary.floats.length - 1;
|
||||
}
|
||||
|
||||
// Return the token
|
||||
return {
|
||||
type : 'floats',
|
||||
index : index
|
||||
};
|
||||
}
|
||||
|
||||
// Case 7: The item is boolean
|
||||
if (type === 'boolean') {
|
||||
return {
|
||||
type : 'boolean',
|
||||
index : item ? TOKEN_TRUE : TOKEN_FALSE
|
||||
};
|
||||
}
|
||||
|
||||
// Default
|
||||
throw new Error('Unexpected argument of type ' + typeof (item));
|
||||
|
||||
})(json);
|
||||
|
||||
// A set of shorthands proxies for the length of the dictionaries
|
||||
var stringLength = dictionary.strings.length;
|
||||
var integerLength = dictionary.integers.length;
|
||||
var floatLength = dictionary.floats.length;
|
||||
|
||||
verbose && console.log('Parsing the dictionary');
|
||||
|
||||
// Create a raw dictionary
|
||||
var packed = dictionary.strings.join('|');
|
||||
packed += '^' + dictionary.integers.join('|');
|
||||
packed += '^' + dictionary.floats.join('|');
|
||||
|
||||
verbose && console.log('Parsing the structure');
|
||||
|
||||
// And add the structure
|
||||
packed += '^' + (function recursiveParser(item) {
|
||||
|
||||
verbose && console.log('Calling a recursiveParser with ' + this.JSON.stringify(item));
|
||||
|
||||
// If the item is Array, then is a object of
|
||||
// type [object Object] or [object Array]
|
||||
if ( item instanceof Array) {
|
||||
|
||||
// The packed resulting
|
||||
var packed = item.shift();
|
||||
|
||||
for (var i in item) {
|
||||
|
||||
if (!item.hasOwnProperty(i))
|
||||
continue;
|
||||
|
||||
packed += recursiveParser(item[i]) + '|';
|
||||
}
|
||||
|
||||
return (packed[packed.length - 1] === '|' ? packed.slice(0, -1) : packed) + ']';
|
||||
|
||||
}
|
||||
|
||||
// A shorthand proxies
|
||||
var type = item.type, index = item.index;
|
||||
|
||||
if (type === 'strings') {
|
||||
// Just return the base 36 of index
|
||||
return _base10To36(index);
|
||||
}
|
||||
|
||||
if (type === 'integers') {
|
||||
// Return a base 36 of index plus stringLength offset
|
||||
return _base10To36(stringLength + index);
|
||||
}
|
||||
|
||||
if (type === 'floats') {
|
||||
// Return a base 36 of index plus stringLength and integerLength offset
|
||||
return _base10To36(stringLength + integerLength + index);
|
||||
}
|
||||
|
||||
if (type === 'boolean') {
|
||||
return item.index;
|
||||
}
|
||||
|
||||
if (type === 'null') {
|
||||
return TOKEN_NULL;
|
||||
}
|
||||
|
||||
if (type === 'undefined') {
|
||||
return TOKEN_UNDEFINED;
|
||||
}
|
||||
|
||||
if (type === 'empty') {
|
||||
return TOKEN_EMPTY_STRING;
|
||||
}
|
||||
|
||||
throw new TypeError('The item is alien!');
|
||||
|
||||
})(ast);
|
||||
|
||||
verbose && console.log('Ending parser');
|
||||
|
||||
// If debug, return a internal representation of dictionary and stuff
|
||||
if (options.debug)
|
||||
return {
|
||||
dictionary : dictionary,
|
||||
ast : ast,
|
||||
packed : packed
|
||||
};
|
||||
|
||||
return packed;
|
||||
|
||||
};
|
||||
|
||||
var unpack = function(packed, options) {
|
||||
|
||||
// Canonizes the options
|
||||
options = options || {};
|
||||
|
||||
// A raw buffer
|
||||
var rawBuffers = packed.split('^');
|
||||
|
||||
// Create a dictionary
|
||||
options.verbose && console.log('Building dictionary');
|
||||
var dictionary = [];
|
||||
|
||||
// Add the strings values
|
||||
var buffer = rawBuffers[0];
|
||||
if (buffer !== '') {
|
||||
buffer = buffer.split('|');
|
||||
options.verbose && console.log('Parse the strings dictionary');
|
||||
for (var i=0, n=buffer.length; i<n; i++){
|
||||
dictionary.push(_decode(buffer[i]));
|
||||
}
|
||||
}
|
||||
|
||||
// Add the integers values
|
||||
buffer = rawBuffers[1];
|
||||
if (buffer !== '') {
|
||||
buffer = buffer.split('|');
|
||||
options.verbose && console.log('Parse the integers dictionary');
|
||||
for (var i=0, n=buffer.length; i<n; i++){
|
||||
dictionary.push(_base36To10(buffer[i]));
|
||||
}
|
||||
}
|
||||
|
||||
// Add the floats values
|
||||
buffer = rawBuffers[2];
|
||||
if (buffer !== '') {
|
||||
buffer = buffer.split('|');
|
||||
options.verbose && console.log('Parse the floats dictionary');
|
||||
for (var i=0, n=buffer.length; i<n; i++){
|
||||
dictionary.push(parseFloat(buffer[i]));
|
||||
}
|
||||
}
|
||||
// Free memory
|
||||
buffer = null; // 'delete' cannot remove a var binding, so drop the reference instead
|
||||
|
||||
options.verbose && console.log('Tokenizing the structure');
|
||||
|
||||
// Tokenizer the structure
|
||||
var number36 = '';
|
||||
var tokens = [];
|
||||
var len=rawBuffers[3].length;
|
||||
for (var i = 0; i < len; i++) {
|
||||
var symbol = rawBuffers[3].charAt(i);
|
||||
if (symbol === '|' || symbol === '$' || symbol === '@' || symbol === ']') {
|
||||
if (number36) {
|
||||
tokens.push(_base36To10(number36));
|
||||
number36 = '';
|
||||
}
|
||||
symbol !== '|' && tokens.push(symbol);
|
||||
} else {
|
||||
number36 += symbol;
|
||||
}
|
||||
}
|
||||
|
||||
// A shorthand proxy for tokens.length
|
||||
var tokensLength = tokens.length;
|
||||
|
||||
// The index of the next token to read
|
||||
var tokensIndex = 0;
|
||||
|
||||
options.verbose && console.log('Starting recursive parser');
|
||||
|
||||
return (function recursiveUnpackerParser() {
|
||||
|
||||
// Maybe '$' (object) or '@' (array)
|
||||
var type = tokens[tokensIndex++];
|
||||
|
||||
options.verbose && console.log('Reading collection type ' + (type === '$' ? 'object' : 'Array'));
|
||||
|
||||
// Parse an array
|
||||
if (type === '@') {
|
||||
|
||||
var node = [];
|
||||
|
||||
for (; tokensIndex < tokensLength; tokensIndex++) {
|
||||
var value = tokens[tokensIndex];
|
||||
options.verbose && console.log('Read ' + value + ' symbol');
|
||||
if (value === ']')
|
||||
return node;
|
||||
if (value === '@' || value === '$') {
|
||||
node.push(recursiveUnpackerParser());
|
||||
} else {
|
||||
switch(value) {
|
||||
case TOKEN_TRUE:
|
||||
node.push(true);
|
||||
break;
|
||||
case TOKEN_FALSE:
|
||||
node.push(false);
|
||||
break;
|
||||
case TOKEN_NULL:
|
||||
node.push(null);
|
||||
break;
|
||||
case TOKEN_UNDEFINED:
|
||||
node.push(undefined);
|
||||
break;
|
||||
case TOKEN_EMPTY_STRING:
|
||||
node.push('');
|
||||
break;
|
||||
default:
|
||||
node.push(dictionary[value]);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
options.verbose && console.log('Parsed ' + this.JSON.stringify(node));
|
||||
|
||||
return node;
|
||||
|
||||
}
|
||||
|
||||
// Parse a object
|
||||
if (type === '$') {
|
||||
var node = {};
|
||||
|
||||
for (; tokensIndex < tokensLength; tokensIndex++) {
|
||||
|
||||
var key = tokens[tokensIndex];
|
||||
|
||||
if (key === ']')
|
||||
return node;
|
||||
|
||||
if (key === TOKEN_EMPTY_STRING)
|
||||
key = '';
|
||||
else
|
||||
key = dictionary[key];
|
||||
|
||||
var value = tokens[++tokensIndex];
|
||||
|
||||
if (value === '@' || value === '$') {
|
||||
node[key] = recursiveUnpackerParser();
|
||||
} else {
|
||||
switch(value) {
|
||||
case TOKEN_TRUE:
|
||||
node[key] = true;
|
||||
break;
|
||||
case TOKEN_FALSE:
|
||||
node[key] = false;
|
||||
break;
|
||||
case TOKEN_NULL:
|
||||
node[key] = null;
|
||||
break;
|
||||
case TOKEN_UNDEFINED:
|
||||
node[key] = undefined;
|
||||
break;
|
||||
case TOKEN_EMPTY_STRING:
|
||||
node[key] = '';
|
||||
break;
|
||||
default:
|
||||
node[key] = dictionary[value];
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
options.verbose && console.log('Parsed ' + this.JSON.stringify(node));
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
throw new TypeError('Bad token ' + type + ' isn\'t a type');
|
||||
|
||||
})();
|
||||
|
||||
};
|
||||
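// Illustrative round trip, assuming the module is available as window.jsonpack:
//   var packed = jsonpack.pack({ url: 'http://example.com/x.m3u8', levels: [1, 2, 3] });
//   var restored = jsonpack.unpack(packed);   // deep-equal to the original object
// The packed string carries three '|'-separated dictionaries (strings, integers,
// floats) joined by '^', followed by the structure section tokenized above.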
/**
|
||||
* Get the index value of the dictionary
|
||||
* @param {Object} dictionary a object that have two array attributes: 'string' and 'number'
|
||||
* @param {Object} data
|
||||
*/
|
||||
var _indexOfDictionary = function(dictionary, value) {
|
||||
|
||||
// The type of the value
|
||||
var type = typeof value;
|
||||
|
||||
// If is boolean, return a boolean token
|
||||
if (type === 'boolean')
|
||||
return value ? TOKEN_TRUE : TOKEN_FALSE;
|
||||
|
||||
// If is null, return a... yes! the null token
|
||||
if (value === null)
|
||||
return TOKEN_NULL;
|
||||
|
||||
//add undefined
|
||||
if (typeof value === 'undefined')
|
||||
return TOKEN_UNDEFINED;
|
||||
|
||||
|
||||
if (value === '') {
|
||||
return TOKEN_EMPTY_STRING;
|
||||
}
|
||||
|
||||
if (type === 'string') {
|
||||
value = _encode(value);
|
||||
var index = _indexOf.call(dictionary.strings, value);
|
||||
if (index === -1) {
|
||||
dictionary.strings.push(value);
|
||||
index = dictionary.strings.length - 1;
|
||||
}
|
||||
}
|
||||
|
||||
// If has an invalid JSON type (example a function)
|
||||
if (type !== 'string' && type !== 'number') {
|
||||
throw new Error('The type is not a JSON type');
|
||||
}
|
||||
|
||||
if (type === 'string') {// string
|
||||
value = _encode(value);
|
||||
} else if (value % 1 === 0) {// integer
|
||||
value = _base10To36(value);
|
||||
} else {// float
|
||||
|
||||
}
|
||||
|
||||
// If is number, "serialize" the value
|
||||
value = type === 'number' ? _base10To36(value) : _encode(value);
|
||||
|
||||
// Retrieve the index of that value in the dictionary
|
||||
var index = _indexOf.call(dictionary[type], value);
|
||||
|
||||
// If that value is not in the dictionary
|
||||
if (index === -1) {
|
||||
// Push the value
|
||||
dictionary[type].push(value);
|
||||
// And return their index
|
||||
index = dictionary[type].length - 1;
|
||||
}
|
||||
|
||||
// If the type is a number, then add the '+' prefix character
|
||||
// to differentiate that they is a number index. If not, then
|
||||
// just return a 36-based representation of the index
|
||||
return type === 'number' ? '+' + index : index;
|
||||
|
||||
};
|
||||
|
||||
var _encode = function(str) {
|
||||
if ( typeof str !== 'string')
|
||||
return str;
|
||||
|
||||
return str.replace(/[\+ \|\^\%]/g, function(a) {
|
||||
return ({
|
||||
' ' : '+',
|
||||
'+' : '%2B',
|
||||
'|' : '%7C',
|
||||
'^' : '%5E',
|
||||
'%' : '%25'
|
||||
})[a]
|
||||
});
|
||||
};
|
||||
|
||||
var _decode = function(str) {
|
||||
if ( typeof str !== 'string')
|
||||
return str;
|
||||
|
||||
return str.replace(/\+|%2B|%7C|%5E|%25/g, function(a) {
|
||||
return ({
|
||||
'+' : ' ',
|
||||
'%2B' : '+',
|
||||
'%7C' : '|',
|
||||
'%5E' : '^',
|
||||
'%25' : '%'
|
||||
})[a]
|
||||
})
|
||||
};
|
||||
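// Example: the reserved characters are percent-escaped so they cannot collide with
// the '|' and '^' separators used by pack()/unpack():
//   _encode('a b|c')   -> 'a+b%7Cc'
//   _decode('a+b%7Cc') -> 'a b|c'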
|
||||
var _base10To36 = function(number) {
|
||||
return Number.prototype.toString.call(number, 36).toUpperCase();
|
||||
};
|
||||
|
||||
var _base36To10 = function(number) {
|
||||
return parseInt(number, 36);
|
||||
};
|
||||
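// Example: dictionary indices and integers are stored in base 36 to keep the packed
// string short: _base10To36(255) -> '73' and _base36To10('73') -> 255.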
|
||||
var _indexOf = Array.prototype.indexOf ||
|
||||
function(obj, start) {
|
||||
for (var i = (start || 0), j = this.length; i < j; i++) {
|
||||
if (this[i] === obj) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
|
||||
return {
|
||||
JSON : JSON,
|
||||
pack : pack,
|
||||
unpack : unpack
|
||||
};
|
||||
|
||||
});
|
||||
|
||||
})( typeof define == 'undefined' || !define.amd ? function(deps, factory) {
|
||||
var jsonpack = factory();
|
||||
if ( typeof exports != 'undefined')
|
||||
for (var key in jsonpack)
|
||||
exports[key] = jsonpack[key];
|
||||
else
|
||||
window.jsonpack = jsonpack;
|
||||
} : define);
|
|
@ -1,74 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<head>
|
||||
<title>hls.js metrics page</title>
|
||||
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-32x32.png" sizes="32x32" />
|
||||
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-16x16.png" sizes="16x16" />
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap-theme.min.css">
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
|
||||
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="header-container">
|
||||
<header class="wrapper clearfix">
|
||||
<h1 class="title">hls.js metrics page</h1>
|
||||
</header>
|
||||
</div>
|
||||
<pre id='HlsDate'></pre>
|
||||
<pre id='StreamPermalink'></pre>
|
||||
<input id="metricsData" class="innerControls" type=text value=""/>
|
||||
window size
|
||||
<div id="metricsButton">
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(0)">window ALL</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(2000)">2s</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(5000)">5s</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(10000)">10s</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(20000)">20s</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(30000)">30s</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(60000)">60s</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(120000)">120s</button><br>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeZoomIn()">Window Zoom In</button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeZoomOut()">Window Zoom Out</button><br>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSlideLeft()"> <<< Window Slide </button>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSlideRight()">Window Slide >>> </button><br>
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="windowStart=$('#windowStart').val()">fixed window start(ms)</button>
|
||||
<input type="text" id='windowStart' defaultValue="0" size="8" onkeydown="if(window.event.keyCode=='13'){windowStart=$('#windowStart').val();}">
|
||||
<button type="button" class="btn btn-xs btn-primary" onclick="windowEnd=$('#windowEnd').val()">fixed window end(ms)</button>
|
||||
<input type="text" id='windowEnd' defaultValue="10000" size="8" onkeydown="if(window.event.keyCode=='13'){windowEnd=$('#windowEnd').val();}"><br>
|
||||
<canvas id="bufferTimerange_c" width="640" height="100" style="border:1px solid #000000" onmousedown="timeRangeCanvasonMouseDown(event)" onmousemove="timeRangeCanvasonMouseMove(event)" onmouseup="timeRangeCanvasonMouseUp(event)" onmouseout="timeRangeCanvasonMouseOut(event)";></canvas>
|
||||
<canvas id="bitrateTimerange_c" width="640" height="100" style="border:1px solid #000000";></canvas>
|
||||
<canvas id="bufferWindow_c" width="640" height="100" style="border:1px solid #000000" onmousemove="windowCanvasonMouseMove(event)";></canvas>
|
||||
<canvas id="videoEvent_c" width="640" height="15" style="border:1px solid #000000";></canvas>
|
||||
<canvas id="loadEvent_c" width="640" height="15" style="border:1px solid #000000";></canvas><br>
|
||||
</div>
|
||||
|
||||
<script src="canvas.js"></script>
|
||||
<script src="metrics.js"></script>
|
||||
<script src="jsonpack.js"></script>
|
||||
<script>
|
||||
|
||||
|
||||
$(document).ready(function() {
|
||||
$('#metricsData').change(function() { events = jsonpack.unpack(atob($('#metricsData').val())); updateMetrics(); });
|
||||
});
|
||||
|
||||
var data = location.search.split('data=')[1],events;
|
||||
if (data) {
|
||||
events = jsonpack.unpack(atob(decodeURIComponent(data)));
|
||||
updateMetrics();
|
||||
}
|
||||
|
||||
function updateMetrics() {
|
||||
var hlsLink = document.URL.substr(0,document.URL.lastIndexOf("/")+1) + 'index.html?src=' + encodeURIComponent(events.url);
|
||||
var description = 'playlist: ' + "<a href=\"" + events.url + "\">" + events.url + "</a>" + '<br>replay: ' + "<a href=\"" + hlsLink + "\">" + hlsLink + "</a>";
|
||||
$("#StreamPermalink").html(description);
|
||||
$("#HlsDate").text("session Start Date:" + new Date(events.t0));
|
||||
metricsDisplayed=true;
|
||||
showMetrics();
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
</body>
|
201
dashboard-ui/bower_components/hls.js/demo/metrics.js
vendored
|
@ -1,201 +0,0 @@
|
|||
function showMetrics() {
|
||||
if(metricsDisplayed) {
|
||||
var width = window.innerWidth-30;
|
||||
$("#bufferWindow_c")[0].width =
|
||||
$("#bitrateTimerange_c")[0].width =
|
||||
$("#bufferTimerange_c")[0].width =
|
||||
$("#videoEvent_c")[0].width =
|
||||
$("#metricsButton")[0].width =
|
||||
$("#loadEvent_c")[0].width = width;
|
||||
$("#bufferWindow_c").show();
|
||||
$("#bitrateTimerange_c").show();
|
||||
$("#bufferTimerange_c").show();
|
||||
$("#videoEvent_c").show();
|
||||
$("#metricsButton").show();
|
||||
$("#loadEvent_c").show();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function toggleMetricsDisplay() {
|
||||
metricsDisplayed = !metricsDisplayed;
|
||||
if(metricsDisplayed) {
|
||||
showMetrics();
|
||||
} else {
|
||||
hideMetrics();
|
||||
}
|
||||
}
|
||||
|
||||
function hideMetrics() {
|
||||
if(!metricsDisplayed) {
|
||||
$("#bufferWindow_c").hide();
|
||||
$("#bitrateTimerange_c").hide();
|
||||
$("#bufferTimerange_c").hide();
|
||||
$("#videoEvent_c").hide();
|
||||
$("#metricsButton").hide();
|
||||
$("#loadEvent_c").hide();
|
||||
}
|
||||
}
|
||||
|
||||
function timeRangeSetSliding(duration) {
|
||||
windowDuration = duration;
|
||||
windowSliding = true;
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
var timeRangeMouseDown=false;
|
||||
function timeRangeCanvasonMouseDown(evt) {
|
||||
var canvas = evt.currentTarget,
|
||||
bRect = canvas.getBoundingClientRect(),
|
||||
mouseX = Math.round((evt.clientX - bRect.left)*(canvas.width/bRect.width));
|
||||
windowStart = Math.max(0,Math.round((mouseX-eventLeftMargin) * getWindowTimeRange().now / (canvas.width-eventLeftMargin)));
|
||||
windowEnd = windowStart+500;
|
||||
timeRangeMouseDown = true;
|
||||
windowSliding = false;
|
||||
//console.log('windowStart/windowEnd:' + '/' + windowStart + '/' + windowEnd);
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
function timeRangeCanvasonMouseMove(evt) {
|
||||
if(timeRangeMouseDown) {
|
||||
var canvas = evt.currentTarget,
|
||||
bRect = canvas.getBoundingClientRect(),
|
||||
mouseX = Math.round((evt.clientX - bRect.left)*(canvas.width/bRect.width)),
|
||||
pos = Math.max(0,Math.round((mouseX-eventLeftMargin) * getWindowTimeRange().now / (canvas.width-eventLeftMargin)));
|
||||
if(pos < windowStart) {
|
||||
windowStart = pos;
|
||||
} else {
|
||||
windowEnd = pos;
|
||||
}
|
||||
if(windowStart === windowEnd) {
|
||||
// to avoid division by zero ...
|
||||
windowEnd +=50;
|
||||
}
|
||||
//console.log('windowStart/windowEnd:' + '/' + windowStart + '/' + windowEnd);
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
refreshCanvas();
|
||||
}
|
||||
}
|
||||
|
||||
function timeRangeCanvasonMouseUp(evt) {
|
||||
timeRangeMouseDown = false;
|
||||
}
|
||||
|
||||
function timeRangeCanvasonMouseOut(evt) {
|
||||
timeRangeMouseDown = false;
|
||||
}
|
||||
|
||||
function windowCanvasonMouseMove(evt) {
|
||||
var canvas = evt.currentTarget,
|
||||
bRect = canvas.getBoundingClientRect(),
|
||||
mouseX = Math.round((evt.clientX - bRect.left)*(canvas.width/bRect.width)),
|
||||
timeRange = getWindowTimeRange();
|
||||
windowFocus = timeRange.min + Math.max(0,Math.round((mouseX-eventLeftMargin) * (timeRange.max - timeRange.min) / (canvas.width-eventLeftMargin)));
|
||||
//console.log(windowFocus);
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
var windowDuration=20000,windowSliding=true,windowStart=0,windowEnd=10000,windowFocus,metricsDisplayed=false;
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
function refreshCanvas() {
|
||||
if(metricsDisplayed) {
|
||||
try {
|
||||
var windowTime = getWindowTimeRange();
|
||||
canvasBufferTimeRangeUpdate($("#bufferTimerange_c")[0], 0, windowTime.now, windowTime.min,windowTime.max, events.buffer);
|
||||
if(windowTime.min !== 0 || windowTime.max !== windowTime.now) {
|
||||
$("#bufferWindow_c").show();
|
||||
canvasBufferWindowUpdate($("#bufferWindow_c")[0], windowTime.min,windowTime.max, windowTime.focus, events.buffer);
|
||||
} else {
|
||||
$("#bufferWindow_c").hide();
|
||||
}
|
||||
canvasBitrateEventUpdate($("#bitrateTimerange_c")[0], 0, windowTime.now, windowTime.min,windowTime.max, events.level, events.bitrate);
|
||||
canvasVideoEventUpdate($("#videoEvent_c")[0], windowTime.min,windowTime.max, events.video);
|
||||
canvasLoadEventUpdate($("#loadEvent_c")[0], windowTime.min,windowTime.max, events.load);
|
||||
} catch(err) {
|
||||
console.log("refreshCanvas error:" +err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getWindowTimeRange() {
|
||||
var tnow,minTime,maxTime;
|
||||
if(events.buffer.length) {
|
||||
tnow = events.buffer[events.buffer.length-1].time;
|
||||
} else {
|
||||
tnow = 0;
|
||||
}
|
||||
if(windowSliding) {
|
||||
// let's show the requested window
|
||||
if(windowDuration) {
|
||||
minTime = Math.max(0, tnow-windowDuration),
|
||||
maxTime = Math.min(minTime + windowDuration, tnow);
|
||||
} else {
|
||||
minTime = 0;
|
||||
maxTime = tnow;
|
||||
}
|
||||
} else {
|
||||
minTime = windowStart;
|
||||
maxTime = windowEnd;
|
||||
}
|
||||
if(windowFocus === undefined || windowFocus < minTime || windowFocus > maxTime) {
|
||||
windowFocus = minTime;
|
||||
}
|
||||
return { min : minTime, max: maxTime, now : tnow, focus : windowFocus}
|
||||
}
|
||||
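// Worked example (hypothetical values): with windowSliding === true,
// windowDuration === 20000 and the last buffer sample at t = 65000 ms,
// getWindowTimeRange() returns { min: 45000, max: 65000, now: 65000, focus: 45000 };
// focus falls back to min because windowFocus starts out undefined.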
|
||||
function timeRangeZoomIn() {
|
||||
if(windowSliding) {
|
||||
windowDuration/=2;
|
||||
} else {
|
||||
var duration = windowEnd-windowStart;
|
||||
windowStart+=duration/4;
|
||||
windowEnd-=duration/4;
|
||||
if(windowStart === windowEnd) {
|
||||
windowEnd+=50;
|
||||
}
|
||||
}
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
function timeRangeZoomOut() {
|
||||
if(windowSliding) {
|
||||
windowDuration*=2;
|
||||
} else {
|
||||
var duration = windowEnd-windowStart;
|
||||
windowStart-=duration/2;
|
||||
windowEnd+=duration/2;
|
||||
windowStart=Math.max(0,windowStart);
|
||||
windowEnd=Math.min(events.buffer[events.buffer.length-1].time,windowEnd);
|
||||
}
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
function timeRangeSlideLeft() {
|
||||
var duration = windowEnd-windowStart;
|
||||
windowStart-=duration/4;
|
||||
windowEnd-=duration/4;
|
||||
windowStart=Math.max(0,windowStart);
|
||||
windowEnd=Math.min(events.buffer[events.buffer.length-1].time,windowEnd);
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
refreshCanvas();
|
||||
}
|
||||
|
||||
function timeRangeSlideRight() {
|
||||
var duration = windowEnd-windowStart;
|
||||
windowStart+=duration/4;
|
||||
windowEnd+=duration/4;
|
||||
windowStart=Math.max(0,windowStart);
|
||||
windowEnd=Math.min(events.buffer[events.buffer.length-1].time,windowEnd);
|
||||
$("#windowStart").val(windowStart);
|
||||
$("#windowEnd").val(windowEnd);
|
||||
refreshCanvas();
|
||||
}
|
8992
dashboard-ui/bower_components/hls.js/dist/hls.js
vendored
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
|
@ -1,688 +0,0 @@
|
|||
(function() {
|
||||
'use strict';
|
||||
|
||||
var
|
||||
DataView = window.DataView,
|
||||
/**
|
||||
* Returns the string representation of an ASCII encoded four byte buffer.
|
||||
* @param buffer {Uint8Array} a four-byte buffer to translate
|
||||
* @return {string} the corresponding string
|
||||
*/
|
||||
parseType = function(buffer) {
|
||||
var result = '';
|
||||
result += String.fromCharCode(buffer[0]);
|
||||
result += String.fromCharCode(buffer[1]);
|
||||
result += String.fromCharCode(buffer[2]);
|
||||
result += String.fromCharCode(buffer[3]);
|
||||
return result;
|
||||
},
|
||||
parseMp4Date = function(seconds) {
|
||||
return new Date(seconds * 1000 - 2082844800000);
|
||||
},
|
||||
parseSampleFlags = function(flags) {
|
||||
return {
|
||||
isLeading: (flags[0] & 0x0c) >>> 2,
|
||||
dependsOn: flags[0] & 0x03,
|
||||
isDependedOn: (flags[1] & 0xc0) >>> 6,
|
||||
hasRedundancy: (flags[1] & 0x30) >>> 4,
|
||||
paddingValue: (flags[1] & 0x0e) >>> 1,
|
||||
isNonSyncSample: flags[1] & 0x01,
|
||||
degradationPriority: (flags[2] << 8) | flags[3]
|
||||
};
|
||||
},
|
||||
nalParse = function(avcStream) {
|
||||
var
|
||||
avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
|
||||
result = [],
|
||||
i,
|
||||
length;
|
||||
for (i = 0; i < avcStream.length; i += length) {
|
||||
length = avcView.getUint32(i);
|
||||
i += 4;
|
||||
switch(avcStream[i] & 0x1F) {
|
||||
case 0x01:
|
||||
result.push('NDR');
|
||||
break;
|
||||
case 0x05:
|
||||
result.push('IDR');
|
||||
break;
|
||||
case 0x06:
|
||||
result.push('SEI');
|
||||
break;
|
||||
case 0x07:
|
||||
result.push('SPS');
|
||||
break;
|
||||
case 0x08:
|
||||
result.push('PPS');
|
||||
break;
|
||||
case 0x09:
|
||||
result.push('AUD');
|
||||
break;
|
||||
default:
|
||||
result.push(avcStream[i] & 0x1F);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
},
|
||||
|
||||
// registry of handlers for individual mp4 box types
|
||||
parse = {
|
||||
// codingname, not a first-class box type. stsd entries share the
|
||||
// same format as real boxes so the parsing infrastructure can be
|
||||
// shared
|
||||
avc1: function(data) {
|
||||
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
|
||||
return {
|
||||
dataReferenceIndex: view.getUint16(6),
|
||||
width: view.getUint16(24),
|
||||
height: view.getUint16(26),
|
||||
horizresolution: view.getUint16(28) + (view.getUint16(30) / 16),
|
||||
vertresolution: view.getUint16(32) + (view.getUint16(34) / 16),
|
||||
frameCount: view.getUint16(40),
|
||||
depth: view.getUint16(74),
|
||||
config: mp4toJSON(data.subarray(78, data.byteLength))
|
||||
};
|
||||
},
|
||||
avcC: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
configurationVersion: data[0],
|
||||
avcProfileIndication: data[1],
|
||||
profileCompatibility: data[2],
|
||||
avcLevelIndication: data[3],
|
||||
lengthSizeMinusOne: data[4] & 0x03,
|
||||
sps: [],
|
||||
pps: []
|
||||
},
|
||||
numOfSequenceParameterSets = data[5] & 0x1f,
|
||||
numOfPictureParameterSets,
|
||||
nalSize,
|
||||
offset,
|
||||
i;
|
||||
|
||||
// iterate past any SPSs
|
||||
offset = 6;
|
||||
for (i = 0; i < numOfSequenceParameterSets; i++) {
|
||||
nalSize = view.getUint16(offset);
|
||||
offset += 2;
|
||||
result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
|
||||
offset += nalSize;
|
||||
}
|
||||
// iterate past any PPSs
|
||||
numOfPictureParameterSets = data[offset];
|
||||
offset++;
|
||||
for (i = 0; i < numOfPictureParameterSets; i++) {
|
||||
nalSize = view.getUint16(offset);
|
||||
offset += 2;
|
||||
result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
|
||||
offset += nalSize;
|
||||
}
|
||||
return result;
|
||||
},
|
||||
btrt: function(data) {
|
||||
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
|
||||
return {
|
||||
bufferSizeDB: view.getUint32(0),
|
||||
maxBitrate: view.getUint32(4),
|
||||
avgBitrate: view.getUint32(8)
|
||||
};
|
||||
},
|
||||
ftyp: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
majorBrand: parseType(data.subarray(0, 4)),
|
||||
minorVersion: view.getUint32(4),
|
||||
compatibleBrands: []
|
||||
},
|
||||
i = 8;
|
||||
while (i < data.byteLength) {
|
||||
result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
|
||||
i += 4;
|
||||
}
|
||||
return result;
|
||||
},
|
||||
dinf: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
dref: function(data) {
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
dataReferences: mp4toJSON(data.subarray(8))
|
||||
};
|
||||
},
|
||||
hdlr: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
version: view.getUint8(0),
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
handlerType: parseType(data.subarray(8, 12)),
|
||||
name: ''
|
||||
},
|
||||
i = 8;
|
||||
|
||||
// parse out the name field
|
||||
for (i = 24; i < data.byteLength; i++) {
|
||||
if (data[i] === 0x00) {
|
||||
// the name field is null-terminated
|
||||
i++;
|
||||
break;
|
||||
}
|
||||
result.name += String.fromCharCode(data[i]);
|
||||
}
|
||||
// decode UTF-8 to javascript's internal representation
|
||||
// see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
|
||||
result.name = window.decodeURIComponent(window.escape(result.name));
|
||||
|
||||
return result;
|
||||
},
|
||||
mdat: function(data) {
|
||||
return {
|
||||
byteLength: data.byteLength,
|
||||
nals: nalParse(data)
|
||||
};
|
||||
},
|
||||
mdhd: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
i = 4,
|
||||
language,
|
||||
result = {
|
||||
version: view.getUint8(0),
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
language: ''
|
||||
};
|
||||
if (result.version === 1) {
|
||||
i += 4;
|
||||
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
|
||||
i += 8;
|
||||
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
|
||||
i += 4;
|
||||
result.timescale = view.getUint32(i);
|
||||
i += 8;
|
||||
result.duration = view.getUint32(i); // truncating top 4 bytes
|
||||
} else {
|
||||
result.creationTime = parseMp4Date(view.getUint32(i));
|
||||
i += 4;
|
||||
result.modificationTime = parseMp4Date(view.getUint32(i));
|
||||
i += 4;
|
||||
result.timescale = view.getUint32(i);
|
||||
i += 4;
|
||||
result.duration = view.getUint32(i);
|
||||
}
|
||||
i += 4;
|
||||
// language is stored as an ISO-639-2/T code in an array of three 5-bit fields
|
||||
// each field is the packed difference between its ASCII value and 0x60
|
||||
language = view.getUint16(i);
|
||||
result.language += String.fromCharCode((language >> 10) + 0x60);
|
||||
result.language += String.fromCharCode(((language & 0x03c0) >> 5) + 0x60);
|
||||
result.language += String.fromCharCode((language & 0x1f) + 0x60);
|
||||
|
||||
return result;
|
||||
},
|
||||
mdia: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
mfhd: function(data) {
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
sequenceNumber: (data[4] << 24) |
|
||||
(data[5] << 16) |
|
||||
(data[6] << 8) |
|
||||
(data[7])
|
||||
};
|
||||
},
|
||||
minf: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
moof: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
moov: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
mvex: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
mvhd: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
i = 4,
|
||||
result = {
|
||||
version: view.getUint8(0),
|
||||
flags: new Uint8Array(data.subarray(1, 4))
|
||||
};
|
||||
|
||||
if (result.version === 1) {
|
||||
i += 4;
|
||||
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
|
||||
i += 8;
|
||||
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
|
||||
i += 4;
|
||||
result.timescale = view.getUint32(i);
|
||||
i += 8;
|
||||
result.duration = view.getUint32(i); // truncating top 4 bytes
|
||||
} else {
|
||||
result.creationTime = parseMp4Date(view.getUint32(i));
|
||||
i += 4;
|
||||
result.modificationTime = parseMp4Date(view.getUint32(i));
|
||||
i += 4;
|
||||
result.timescale = view.getUint32(i);
|
||||
i += 4;
|
||||
result.duration = view.getUint32(i);
|
||||
}
|
||||
i += 4;
|
||||
|
||||
// convert fixed-point, base 16 back to a number
|
||||
result.rate = view.getUint16(i) + (view.getUint16(i + 2) / 16);
|
||||
i += 4;
|
||||
result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
|
||||
i += 2;
|
||||
i += 2;
|
||||
i += 2 * 4;
|
||||
result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
|
||||
i += 9 * 4;
|
||||
i += 6 * 4;
|
||||
result.nextTrackId = view.getUint32(i);
|
||||
return result;
|
||||
},
|
||||
pdin: function(data) {
|
||||
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
|
||||
return {
|
||||
version: view.getUint8(0),
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
rate: view.getUint32(4),
|
||||
initialDelay: view.getUint32(8)
|
||||
};
|
||||
},
|
||||
sdtp: function(data) {
|
||||
var
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
samples: []
|
||||
}, i;
|
||||
|
||||
for (i = 4; i < data.byteLength; i++) {
|
||||
result.samples.push({
|
||||
dependsOn: (data[i] & 0x30) >> 4,
|
||||
isDependedOn: (data[i] & 0x0c) >> 2,
|
||||
hasRedundancy: data[i] & 0x03
|
||||
});
|
||||
}
|
||||
return result;
|
||||
},
|
||||
sidx: function(data) {
|
||||
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
references: [],
|
||||
referenceId: view.getUint32(4),
|
||||
timescale: view.getUint32(8),
|
||||
earliestPresentationTime: view.getUint32(12),
|
||||
firstOffset: view.getUint32(16)
|
||||
},
|
||||
referenceCount = view.getUint16(22),
|
||||
i;
|
||||
|
||||
for (i = 24; referenceCount; i += 12, referenceCount-- ) {
|
||||
result.references.push({
|
||||
referenceType: (data[i] & 0x80) >>> 7,
|
||||
referencedSize: view.getUint32(i) & 0x7FFFFFFF,
|
||||
subsegmentDuration: view.getUint32(i + 4),
|
||||
startsWithSap: !!(data[i + 8] & 0x80),
|
||||
sapType: (data[i + 8] & 0x70) >>> 4,
|
||||
sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
stbl: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
stco: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
chunkOffsets: []
|
||||
},
|
||||
entryCount = view.getUint32(4),
|
||||
i;
|
||||
for (i = 8; entryCount; i += 4, entryCount--) {
|
||||
result.chunkOffsets.push(view.getUint32(i));
|
||||
}
|
||||
return result;
|
||||
},
|
||||
stsc: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
entryCount = view.getUint32(4),
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
sampleToChunks: []
|
||||
},
|
||||
i;
|
||||
for (i = 8; entryCount; i += 12, entryCount--) {
|
||||
result.sampleToChunks.push({
|
||||
firstChunk: view.getUint32(i),
|
||||
samplesPerChunk: view.getUint32(i + 4),
|
||||
sampleDescriptionIndex: view.getUint32(i + 8)
|
||||
});
|
||||
}
|
||||
return result;
|
||||
},
|
||||
stsd: function(data) {
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
sampleDescriptions: mp4toJSON(data.subarray(8))
|
||||
};
|
||||
},
|
||||
stsz: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
sampleSize: view.getUint32(4),
|
||||
entries: []
|
||||
},
|
||||
i;
|
||||
for (i = 12; i < data.byteLength; i += 4) {
|
||||
result.entries.push(view.getUint32(i));
|
||||
}
|
||||
return result;
|
||||
},
|
||||
stts: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
timeToSamples: []
|
||||
},
|
||||
entryCount = view.getUint32(4),
|
||||
i;
|
||||
|
||||
for (i = 8; entryCount; i += 8, entryCount--) {
|
||||
result.timeToSamples.push({
|
||||
sampleCount: view.getUint32(i),
|
||||
sampleDelta: view.getUint32(i + 4)
|
||||
});
|
||||
}
|
||||
return result;
|
||||
},
|
||||
styp: function(data) {
|
||||
return parse.ftyp(data);
|
||||
},
|
||||
tfdt: function(data) {
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
baseMediaDecodeTime: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
|
||||
};
|
||||
},
|
||||
tfhd: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
trackId: view.getUint32(4)
|
||||
},
|
||||
baseDataOffsetPresent = result.flags[2] & 0x01,
|
||||
sampleDescriptionIndexPresent = result.flags[2] & 0x02,
|
||||
defaultSampleDurationPresent = result.flags[2] & 0x08,
|
||||
defaultSampleSizePresent = result.flags[2] & 0x10,
|
||||
defaultSampleFlagsPresent = result.flags[2] & 0x20,
|
||||
i;
|
||||
|
||||
i = 8;
|
||||
if (baseDataOffsetPresent) {
|
||||
i += 4; // truncate top 4 bytes
|
||||
result.baseDataOffset = view.getUint32(12);
|
||||
i += 4;
|
||||
}
|
||||
if (sampleDescriptionIndexPresent) {
|
||||
result.sampleDescriptionIndex = view.getUint32(i);
|
||||
i += 4;
|
||||
}
|
||||
if (defaultSampleDurationPresent) {
|
||||
result.defaultSampleDuration = view.getUint32(i);
|
||||
i += 4;
|
||||
}
|
||||
if (defaultSampleSizePresent) {
|
||||
result.defaultSampleSize = view.getUint32(i);
|
||||
i += 4;
|
||||
}
|
||||
if (defaultSampleFlagsPresent) {
|
||||
result.defaultSampleFlags = view.getUint32(i);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
tkhd: function(data) {
|
||||
var
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
i = 4,
|
||||
result = {
|
||||
version: view.getUint8(0),
|
||||
flags: new Uint8Array(data.subarray(1, 4))
|
||||
};
|
||||
if (result.version === 1) {
|
||||
i += 4;
|
||||
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
|
||||
i += 8;
|
||||
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
|
||||
i += 4;
|
||||
result.trackId = view.getUint32(i);
|
||||
i += 4;
|
||||
i += 8;
|
||||
result.duration = view.getUint32(i); // truncating top 4 bytes
|
||||
} else {
|
||||
result.creationTime = parseMp4Date(view.getUint32(i));
|
||||
i += 4;
|
||||
result.modificationTime = parseMp4Date(view.getUint32(i));
|
||||
i += 4;
|
||||
result.trackId = view.getUint32(i);
|
||||
i += 4;
|
||||
i += 4;
|
||||
result.duration = view.getUint32(i);
|
||||
}
|
||||
i += 4;
|
||||
i += 2 * 4;
|
||||
result.layer = view.getUint16(i);
|
||||
i += 2;
|
||||
result.alternateGroup = view.getUint16(i);
|
||||
i += 2;
|
||||
// convert fixed-point, base 16 back to a number
|
||||
result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
|
||||
i += 2;
|
||||
i += 2;
|
||||
result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
|
||||
i += 9 * 4;
|
||||
result.width = view.getUint16(i) + (view.getUint16(i + 2) / 16);
|
||||
i += 4;
|
||||
result.height = view.getUint16(i) + (view.getUint16(i + 2) / 16);
|
||||
return result;
|
||||
},
|
||||
traf: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
trak: function(data) {
|
||||
return {
|
||||
boxes: mp4toJSON(data)
|
||||
};
|
||||
},
|
||||
trex: function(data) {
|
||||
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
trackId: view.getUint32(4),
|
||||
defaultSampleDescriptionIndex: view.getUint32(8),
|
||||
defaultSampleDuration: view.getUint32(12),
|
||||
defaultSampleSize: view.getUint32(16),
|
||||
sampleDependsOn: data[20] & 0x03,
|
||||
sampleIsDependedOn: (data[21] & 0xc0) >> 6,
|
||||
sampleHasRedundancy: (data[21] & 0x30) >> 4,
|
||||
samplePaddingValue: (data[21] & 0x0e) >> 1,
|
||||
sampleIsDifferenceSample: !!(data[21] & 0x01),
|
||||
sampleDegradationPriority: view.getUint16(22)
|
||||
};
|
||||
},
|
||||
trun: function(data) {
|
||||
var
|
||||
result = {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
samples: []
|
||||
},
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
dataOffsetPresent = result.flags[2] & 0x01,
|
||||
firstSampleFlagsPresent = result.flags[2] & 0x04,
|
||||
sampleDurationPresent = result.flags[1] & 0x01,
|
||||
sampleSizePresent = result.flags[1] & 0x02,
|
||||
sampleFlagsPresent = result.flags[1] & 0x04,
|
||||
sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
|
||||
sampleCount = view.getUint32(4),
|
||||
offset = 8,
|
||||
sample;
|
||||
|
||||
if (dataOffsetPresent) {
|
||||
result.dataOffset = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
if (firstSampleFlagsPresent && sampleCount) {
|
||||
sample = {
|
||||
flags: parseSampleFlags(data.subarray(offset, offset + 4))
|
||||
};
|
||||
offset += 4;
|
||||
if (sampleDurationPresent) {
|
||||
sample.duration = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
if (sampleSizePresent) {
|
||||
sample.size = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
if (sampleCompositionTimeOffsetPresent) {
|
||||
sample.compositionTimeOffset = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
result.samples.push(sample);
|
||||
sampleCount--;
|
||||
}
|
||||
while (sampleCount--) {
|
||||
sample = {};
|
||||
if (sampleDurationPresent) {
|
||||
sample.duration = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
if (sampleSizePresent) {
|
||||
sample.size = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
if (sampleFlagsPresent) {
|
||||
sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
|
||||
offset += 4;
|
||||
}
|
||||
if (sampleCompositionTimeOffsetPresent) {
|
||||
sample.compositionTimeOffset = view.getUint32(offset);
|
||||
offset += 4;
|
||||
}
|
||||
result.samples.push(sample);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
'url ': function(data) {
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4))
|
||||
};
|
||||
},
|
||||
vmhd: function(data) {
|
||||
//var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
|
||||
return {
|
||||
version: data[0],
|
||||
flags: new Uint8Array(data.subarray(1, 4)),
|
||||
//graphicsmode: view.getUint16(4),
|
||||
//opcolor: new Uint16Array([view.getUint16(6),
|
||||
// view.getUint16(8),
|
||||
// view.getUint16(10)])
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return a javascript array of box objects parsed from an ISO base
|
||||
* media file.
|
||||
* @param data {Uint8Array} the binary data of the media to be inspected
|
||||
* @return {array} a javascript array of potentially nested box objects
|
||||
*/
|
||||
var mp4toJSON = function(data) {
|
||||
var
|
||||
i = 0,
|
||||
result = [],
|
||||
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
|
||||
size,
|
||||
type,
|
||||
end,
|
||||
box;
|
||||
|
||||
while (i < data.byteLength) {
|
||||
// parse box data
|
||||
size = view.getUint32(i);
|
||||
type = parseType(data.subarray(i + 4, i + 8));
|
||||
end = size > 1 ? i + size : data.byteLength;
|
||||
|
||||
// parse type-specific data
|
||||
box = (parse[type] || function(data) {
|
||||
return {
|
||||
data: data
|
||||
};
|
||||
})(data.subarray(i + 8, end));
|
||||
box.size = size;
|
||||
box.type = type;
|
||||
|
||||
// store this box and move to the next
|
||||
result.push(box);
|
||||
i = end;
|
||||
}
|
||||
return result;
|
||||
};
|
||||
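// Illustrative usage, assuming a fragment has already been fetched as an ArrayBuffer:
//   var boxes = mp4toJSON(new Uint8Array(arrayBuffer));
//   // e.g. [{ type: 'moof', size: ..., boxes: [...] }, { type: 'mdat', size: ..., nals: [...] }]
// Unrecognised box types fall through to the default handler and keep their raw bytes.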
|
||||
|
||||
let MP4Inspect = {
|
||||
mp4toJSON: mp4toJSON
|
||||
};
|
||||
|
||||
export default MP4Inspect;
|
||||
|
||||
|
||||
})();
|
|
@ -1,5 +0,0 @@
|
|||
#!/bin/sh
|
||||
git checkout gh-pages
|
||||
git rebase v0.5.x
|
||||
git push origin gh-pages --force
|
||||
git checkout v0.5.x
|
|
@ -1,214 +0,0 @@
|
|||
/*
|
||||
* simple ABR Controller
|
||||
* - compute next level based on last fragment bw heuristics
|
||||
 * - implement abandon rules, triggered if we have less than 2 fragments buffered and the computed bw shows that we risk buffer stalling
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import EventHandler from '../event-handler';
|
||||
import BufferHelper from '../helper/buffer-helper';
|
||||
import {ErrorDetails} from '../errors';
|
||||
import {logger} from '../utils/logger';
|
||||
import EwmaBandWidthEstimator from './ewma-bandwidth-estimator';
|
||||
|
||||
class AbrController extends EventHandler {
|
||||
|
||||
constructor(hls) {
|
||||
super(hls, Event.FRAG_LOADING,
|
||||
Event.FRAG_LOADED,
|
||||
Event.ERROR);
|
||||
this.lastLoadedFragLevel = 0;
|
||||
this._autoLevelCapping = -1;
|
||||
this._nextAutoLevel = -1;
|
||||
this.hls = hls;
|
||||
this.onCheck = this.abandonRulesCheck.bind(this);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.clearTimer();
|
||||
EventHandler.prototype.destroy.call(this);
|
||||
}
|
||||
|
||||
onFragLoading(data) {
|
||||
if (!this.timer) {
|
||||
this.timer = setInterval(this.onCheck, 100);
|
||||
}
|
||||
|
||||
// lazy init of bw Estimator, rationale is that we use different params for Live/VoD
|
||||
// so we need to wait for stream manifest / playlist type to instantiate it.
|
||||
if (!this.bwEstimator) {
|
||||
let hls = this.hls,
|
||||
level = data.frag.level,
|
||||
isLive = hls.levels[level].details.live,
|
||||
config = hls.config,
|
||||
ewmaFast, ewmaSlow;
|
||||
|
||||
if (isLive) {
|
||||
ewmaFast = config.abrEwmaFastLive;
|
||||
ewmaSlow = config.abrEwmaSlowLive;
|
||||
} else {
|
||||
ewmaFast = config.abrEwmaFastVoD;
|
||||
ewmaSlow = config.abrEwmaSlowVoD;
|
||||
}
|
||||
this.bwEstimator = new EwmaBandWidthEstimator(hls,ewmaSlow,ewmaFast,config.abrEwmaDefaultEstimate);
|
||||
}
|
||||
|
||||
let frag = data.frag;
|
||||
frag.trequest = performance.now();
|
||||
this.fragCurrent = frag;
|
||||
}
|
||||
|
||||
abandonRulesCheck() {
|
||||
/*
|
||||
monitor fragment retrieval time...
|
||||
we compute expected time of arrival of the complete fragment.
|
||||
we compare it to expected time of buffer starvation
|
||||
*/
|
||||
let hls = this.hls, v = hls.media,frag = this.fragCurrent;
|
||||
|
||||
// if loader has been destroyed or loading has been aborted, stop timer and return
|
||||
if(!frag.loader || ( frag.loader.stats && frag.loader.stats.aborted)) {
|
||||
logger.warn(`frag loader destroy or aborted, disarm abandonRulesCheck`);
|
||||
this.clearTimer();
|
||||
return;
|
||||
}
|
||||
/* only monitor frag retrieval time if
|
||||
(video not paused OR first fragment being loaded(ready state === HAVE_NOTHING = 0)) AND autoswitching enabled AND not lowest level (=> means that we have several levels) */
|
||||
if (v && (!v.paused || !v.readyState) && frag.autoLevel && frag.level) {
|
||||
let requestDelay = performance.now() - frag.trequest;
|
||||
// monitor fragment load progress after half of the expected fragment duration, to stabilize the bitrate estimate
|
||||
if (requestDelay > (500 * frag.duration)) {
|
||||
let levels = hls.levels,
|
||||
loadRate = Math.max(1,frag.loaded * 1000 / requestDelay), // byte/s; at least 1 byte/s to avoid division by zero
|
||||
// compute expected fragment length using frag duration and level bitrate. also ensure that the expected length is greater than or equal to the already loaded size
|
||||
expectedLen = Math.max(frag.loaded, Math.round(frag.duration * levels[frag.level].bitrate / 8));
|
||||
|
||||
let pos = v.currentTime;
|
||||
let fragLoadedDelay = (expectedLen - frag.loaded) / loadRate;
|
||||
let bufferStarvationDelay = BufferHelper.bufferInfo(v,pos,hls.config.maxBufferHole).end - pos;
|
||||
// consider emergency switch down only if we have less than 2 frag buffered AND
|
||||
// time to finish loading current fragment is bigger than buffer starvation delay
|
||||
// ie if we risk buffer starvation if bw does not increase quickly
|
||||
if (bufferStarvationDelay < 2*frag.duration && fragLoadedDelay > bufferStarvationDelay) {
|
||||
let fragLevelNextLoadedDelay, nextLoadLevel;
|
||||
// let's iterate through the lower levels and try to find the biggest one that could avoid rebuffering
// we start from current level - 1 and step down, until we find a matching level
|
||||
for (nextLoadLevel = frag.level - 1 ; nextLoadLevel >=0 ; nextLoadLevel--) {
|
||||
// compute time to load next fragment at lower level
|
||||
// 0.8 : consider only 80% of current bw to be conservative
|
||||
// 8 = bits per byte (bps/Bps)
|
||||
fragLevelNextLoadedDelay = frag.duration * levels[nextLoadLevel].bitrate / (8 * 0.8 * loadRate);
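// e.g. (illustrative numbers, not from the source): a 10 s fragment at a
// 2 Mbps level with a measured loadRate of 250 000 B/s gives
// 10 * 2 000 000 / (8 * 0.8 * 250 000) = 12.5 s to fetch it at that level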
|
||||
logger.log(`fragLoadedDelay/bufferStarvationDelay/fragLevelNextLoadedDelay[${nextLoadLevel}] :${fragLoadedDelay.toFixed(1)}/${bufferStarvationDelay.toFixed(1)}/${fragLevelNextLoadedDelay.toFixed(1)}`);
|
||||
if (fragLevelNextLoadedDelay < bufferStarvationDelay) {
|
||||
// we found a lower level that would be rebuffering-free with the current estimated bw!
|
||||
break;
|
||||
}
|
||||
}
|
||||
// only emergency switch down if it takes less time to load the new fragment at the lower level
// than to finish loading the current one ...
|
||||
if (fragLevelNextLoadedDelay < fragLoadedDelay) {
|
||||
// ensure nextLoadLevel is not negative
|
||||
nextLoadLevel = Math.max(0,nextLoadLevel);
|
||||
// force next load level in auto mode
|
||||
hls.nextLoadLevel = nextLoadLevel;
|
||||
// update bw estimate for this fragment before cancelling load (this will help reduce the bw estimate)
|
||||
this.bwEstimator.sample(requestDelay,frag.loaded);
|
||||
// abort fragment loading ...
|
||||
logger.warn(`loading too slow, abort fragment loading and switch to level ${nextLoadLevel}`);
|
||||
//abort fragment loading
|
||||
frag.loader.abort();
|
||||
this.clearTimer();
|
||||
hls.trigger(Event.FRAG_LOAD_EMERGENCY_ABORTED, {frag: frag});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
onFragLoaded(data) {
|
||||
var stats = data.stats;
|
||||
// only update stats on first frag loading
|
||||
// if the same frag is loaded multiple times, it might come from the browser cache and load quickly,
// leading to a wrong bw estimate
|
||||
if (stats.aborted === undefined && data.frag.loadCounter === 1) {
|
||||
this.bwEstimator.sample(performance.now() - stats.trequest,stats.loaded);
|
||||
}
|
||||
|
||||
// stop monitoring bw once frag loaded
|
||||
this.clearTimer();
|
||||
// store level id after successful fragment load
|
||||
this.lastLoadedFragLevel = data.frag.level;
|
||||
// reset forced auto level value so that next level will be selected
|
||||
this._nextAutoLevel = -1;
|
||||
}
|
||||
|
||||
onError(data) {
|
||||
// stop timer in case of frag loading error
|
||||
switch(data.details) {
|
||||
case ErrorDetails.FRAG_LOAD_ERROR:
|
||||
case ErrorDetails.FRAG_LOAD_TIMEOUT:
|
||||
this.clearTimer();
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
clearTimer() {
|
||||
if (this.timer) {
|
||||
clearInterval(this.timer);
|
||||
this.timer = null;
|
||||
}
|
||||
}
|
||||
|
||||
/** Return the capping/max level value that could be used by automatic level selection algorithm **/
|
||||
get autoLevelCapping() {
|
||||
return this._autoLevelCapping;
|
||||
}
|
||||
|
||||
/** set the capping/max level value that could be used by automatic level selection algorithm **/
|
||||
set autoLevelCapping(newLevel) {
|
||||
this._autoLevelCapping = newLevel;
|
||||
}
|
||||
|
||||
get nextAutoLevel() {
|
||||
var hls = this.hls, i, maxAutoLevel, levels = hls.levels, config = hls.config;
|
||||
if (this._autoLevelCapping === -1 && levels && levels.length) {
|
||||
maxAutoLevel = levels.length - 1;
|
||||
} else {
|
||||
maxAutoLevel = this._autoLevelCapping;
|
||||
}
|
||||
|
||||
// in case next auto level has been forced, return it straight-away (but capped)
|
||||
if (this._nextAutoLevel !== -1) {
|
||||
return Math.min(this._nextAutoLevel,maxAutoLevel);
|
||||
}
|
||||
|
||||
let avgbw = this.bwEstimator ? this.bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate,
|
||||
adjustedbw;
|
||||
// follow algorithm captured from stagefright :
|
||||
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
|
||||
// Pick the highest bandwidth stream below or equal to estimated bandwidth.
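// e.g. (hypothetical figures): with level bitrates [500k, 1M, 2M, 4M] and an
// adjusted estimate around 1.6 Mbps, the first level whose bitrate exceeds the
// estimate is the 2M one at index 2, so the loop returns index 1 (the 1M level)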
|
||||
for (i = 0; i <= maxAutoLevel; i++) {
|
||||
// consider only 80% of the available bandwidth, but if we are switching up,
|
||||
// be even more conservative (70%) to avoid overestimating and immediately
|
||||
// switching back.
|
||||
if (i <= this.lastLoadedFragLevel) {
|
||||
adjustedbw = config.abrBandWidthFactor * avgbw;
|
||||
} else {
|
||||
adjustedbw = config.abrBandWidthUpFactor * avgbw;
|
||||
}
|
||||
if (adjustedbw < levels[i].bitrate) {
|
||||
return Math.max(0, i - 1);
|
||||
}
|
||||
}
|
||||
return i - 1;
|
||||
}
|
||||
|
||||
set nextAutoLevel(nextLevel) {
|
||||
this._nextAutoLevel = nextLevel;
|
||||
}
|
||||
}
|
||||
|
||||
export default AbrController;
|
||||
|
|
@ -1,404 +0,0 @@
|
|||
/*
|
||||
* Buffer Controller
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import EventHandler from '../event-handler';
|
||||
import {logger} from '../utils/logger';
|
||||
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
|
||||
|
||||
class BufferController extends EventHandler {
|
||||
|
||||
constructor(hls) {
|
||||
super(hls,
|
||||
Event.MEDIA_ATTACHING,
|
||||
Event.MEDIA_DETACHING,
|
||||
Event.BUFFER_RESET,
|
||||
Event.BUFFER_APPENDING,
|
||||
Event.BUFFER_CODECS,
|
||||
Event.BUFFER_EOS,
|
||||
Event.BUFFER_FLUSHING,
|
||||
Event.LEVEL_UPDATED);
|
||||
|
||||
// the value that we have set mediasource.duration to
|
||||
// (the actual duration may be tweaked slighly by the browser)
|
||||
this._msDuration = null;
|
||||
// the value that we want to set mediaSource.duration to
|
||||
this._levelDuration = null;
|
||||
|
||||
// Source Buffer listeners
|
||||
this.onsbue = this.onSBUpdateEnd.bind(this);
|
||||
this.onsbe = this.onSBUpdateError.bind(this);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
EventHandler.prototype.destroy.call(this);
|
||||
}
|
||||
|
||||
onMediaAttaching(data) {
|
||||
let media = this.media = data.media;
|
||||
if (media) {
|
||||
// setup the media source
|
||||
var ms = this.mediaSource = new MediaSource();
|
||||
//Media Source listeners
|
||||
this.onmso = this.onMediaSourceOpen.bind(this);
|
||||
this.onmse = this.onMediaSourceEnded.bind(this);
|
||||
this.onmsc = this.onMediaSourceClose.bind(this);
|
||||
ms.addEventListener('sourceopen', this.onmso);
|
||||
ms.addEventListener('sourceended', this.onmse);
|
||||
ms.addEventListener('sourceclose', this.onmsc);
|
||||
// link video and media Source
|
||||
media.src = URL.createObjectURL(ms);
|
||||
}
|
||||
}
|
||||
|
||||
onMediaDetaching() {
|
||||
var ms = this.mediaSource;
|
||||
if (ms) {
|
||||
if (ms.readyState === 'open') {
|
||||
try {
|
||||
// endOfStream could trigger exception if any sourcebuffer is in updating state
|
||||
// we don't really care about checking sourcebuffer state here,
|
||||
// as we are anyway detaching the MediaSource
|
||||
// let's just avoid this exception to propagate
|
||||
ms.endOfStream();
|
||||
} catch(err) {
|
||||
logger.warn(`onMediaDetaching:${err.message} while calling endOfStream`);
|
||||
}
|
||||
}
|
||||
ms.removeEventListener('sourceopen', this.onmso);
|
||||
ms.removeEventListener('sourceended', this.onmse);
|
||||
ms.removeEventListener('sourceclose', this.onmsc);
|
||||
|
||||
try {
|
||||
// unlink MediaSource from video tag
|
||||
this.media.src = '';
|
||||
this.media.removeAttribute('src');
|
||||
} catch(err) {
|
||||
logger.warn(`onMediaDetaching:${err.message} while unlinking video.src`);
|
||||
}
|
||||
this.mediaSource = null;
|
||||
this.media = null;
|
||||
this.pendingTracks = null;
|
||||
this.sourceBuffer = null;
|
||||
}
|
||||
this.onmso = this.onmse = this.onmsc = null;
|
||||
this.hls.trigger(Event.MEDIA_DETACHED);
|
||||
}
|
||||
|
||||
onMediaSourceOpen() {
|
||||
logger.log('media source opened');
|
||||
this.hls.trigger(Event.MEDIA_ATTACHED, { media : this.media });
|
||||
// once received, don't listen anymore to sourceopen event
|
||||
this.mediaSource.removeEventListener('sourceopen', this.onmso);
|
||||
// if any buffer codecs pending, treat it here.
|
||||
var pendingTracks = this.pendingTracks;
|
||||
if (pendingTracks) {
|
||||
this.onBufferCodecs(pendingTracks);
|
||||
this.pendingTracks = null;
|
||||
this.doAppending();
|
||||
}
|
||||
}
|
||||
|
||||
onMediaSourceClose() {
|
||||
logger.log('media source closed');
|
||||
}
|
||||
|
||||
onMediaSourceEnded() {
|
||||
logger.log('media source ended');
|
||||
}
|
||||
|
||||
|
||||
onSBUpdateEnd() {
|
||||
|
||||
if (this._needsFlush) {
|
||||
this.doFlush();
|
||||
}
|
||||
|
||||
if (this._needsEos) {
|
||||
this.onBufferEos();
|
||||
}
|
||||
|
||||
this.hls.trigger(Event.BUFFER_APPENDED);
|
||||
|
||||
this.doAppending();
|
||||
}
|
||||
|
||||
onSBUpdateError(event) {
|
||||
logger.error(`sourceBuffer error:${event}`);
|
||||
// according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
|
||||
// this error might not always be fatal (it is fatal if decode error is set, in that case
|
||||
// it will be followed by a mediaElement error ...)
|
||||
this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPENDING_ERROR, fatal: false});
|
||||
// we don't need to do more than that, as according to the spec, updateend will be fired just after
|
||||
}
|
||||
|
||||
onBufferReset() {
|
||||
var sourceBuffer = this.sourceBuffer;
|
||||
if (sourceBuffer) {
|
||||
for(var type in sourceBuffer) {
|
||||
var sb = sourceBuffer[type];
|
||||
try {
|
||||
this.mediaSource.removeSourceBuffer(sb);
|
||||
sb.removeEventListener('updateend', this.onsbue);
|
||||
sb.removeEventListener('error', this.onsbe);
|
||||
} catch(err) {
|
||||
}
|
||||
}
|
||||
this.sourceBuffer = null;
|
||||
}
|
||||
this.flushRange = [];
|
||||
this.appended = 0;
|
||||
}
|
||||
|
||||
onBufferCodecs(tracks) {
|
||||
let mediaSource = this.mediaSource;
|
||||
|
||||
// delay sourcebuffer creation if media source not opened yet
|
||||
if(!mediaSource || mediaSource.readyState !== 'open') {
|
||||
this.pendingTracks = tracks;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.sourceBuffer) {
|
||||
let sourceBuffer = {};
|
||||
for (let trackName in tracks) {
|
||||
let track = tracks[trackName];
|
||||
// use levelCodec as first priority
|
||||
let codec = track.levelCodec || track.codec;
|
||||
let mimeType = `${track.container};codecs=${codec}`;
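// e.g. typically something like 'video/mp4;codecs=avc1.42e01e' or 'audio/mp4;codecs=mp4a.40.2'
// (illustrative values; the real container/codec strings come from the parsed manifest)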
|
||||
logger.log(`creating sourceBuffer with mimeType:${mimeType}`);
|
||||
let sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
|
||||
sb.addEventListener('updateend', this.onsbue);
|
||||
sb.addEventListener('error', this.onsbe);
|
||||
}
|
||||
this.sourceBuffer = sourceBuffer;
|
||||
}
|
||||
}
|
||||
|
||||
onBufferAppending(data) {
|
||||
if (!this.segments) {
|
||||
this.segments = [ data ];
|
||||
} else {
|
||||
this.segments.push(data);
|
||||
}
|
||||
this.doAppending();
|
||||
}
|
||||
|
||||
onBufferAppendFail(data) {
|
||||
logger.error(`sourceBuffer error:${data.event}`);
|
||||
// according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
|
||||
// this error might not always be fatal (it is fatal if decode error is set, in that case
|
||||
// it will be followed by a mediaElement error ...)
|
||||
this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPENDING_ERROR, fatal: false, frag: this.fragCurrent});
|
||||
}
|
||||
|
||||
onBufferEos() {
|
||||
var sb = this.sourceBuffer, mediaSource = this.mediaSource;
|
||||
if (!mediaSource || mediaSource.readyState !== 'open') {
|
||||
return;
|
||||
}
|
||||
if (!((sb.audio && sb.audio.updating) || (sb.video && sb.video.updating))) {
|
||||
logger.log('all media data available, signal endOfStream() to MediaSource and stop loading fragment');
|
||||
//Notify the media element that it now has all of the media data
|
||||
mediaSource.endOfStream();
|
||||
this._needsEos = false;
|
||||
} else {
|
||||
this._needsEos = true;
|
||||
}
|
||||
}
|
||||
|
||||
onBufferFlushing(data) {
|
||||
this.flushRange.push({start: data.startOffset, end: data.endOffset});
|
||||
// attempt flush immediately
|
||||
this.flushBufferCounter = 0;
|
||||
this.doFlush();
|
||||
}
|
||||
|
||||
onLevelUpdated(event) {
|
||||
let details = event.details;
|
||||
if (details.fragments.length === 0) {
|
||||
return;
|
||||
}
|
||||
this._levelDuration = details.totalduration + details.fragments[0].start;
|
||||
this.updateMediaElementDuration();
|
||||
}
|
||||
|
||||
// https://github.com/dailymotion/hls.js/issues/355
|
||||
updateMediaElementDuration() {
|
||||
if (this._levelDuration === null) {
|
||||
return;
|
||||
}
|
||||
let media = this.media;
|
||||
let mediaSource = this.mediaSource;
|
||||
if (!media || !mediaSource || media.readyState === 0 || mediaSource.readyState !== 'open') {
|
||||
return;
|
||||
}
|
||||
for (let type in mediaSource.sourceBuffers) {
|
||||
if (mediaSource.sourceBuffers[type].updating) {
|
||||
// can't set duration whilst a buffer is updating
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (this._msDuration === null) {
|
||||
// initialise to the value that the media source is reporting
|
||||
this._msDuration = mediaSource.duration;
|
||||
}
|
||||
// this._levelDuration was the last value we set.
|
||||
// not using mediaSource.duration as the browser may tweak this value
|
||||
// only update the mediaSource duration if its value increases; this avoids
// flushing the already buffered portion when switching between quality levels
|
||||
if (this._levelDuration > this._msDuration) {
|
||||
logger.log(`Updating mediasource duration to ${this._levelDuration}`);
|
||||
mediaSource.duration = this._levelDuration;
|
||||
this._msDuration = this._levelDuration;
|
||||
}
|
||||
}
|
||||
|
||||
doFlush() {
|
||||
// loop through all buffer ranges to flush
|
||||
while(this.flushRange.length) {
|
||||
var range = this.flushRange[0];
|
||||
// flushBuffer will abort any buffer append in progress and flush Audio/Video Buffer
|
||||
if (this.flushBuffer(range.start, range.end)) {
|
||||
// range flushed, remove from flush array
|
||||
this.flushRange.shift();
|
||||
this.flushBufferCounter = 0;
|
||||
} else {
|
||||
this._needsFlush = true;
|
||||
// avoid looping, wait for SB update end to retrigger a flush
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (this.flushRange.length === 0) {
|
||||
// everything flushed
|
||||
this._needsFlush = false;
|
||||
|
||||
// let's recompute this.appended, which is used to avoid flush looping
|
||||
var appended = 0;
|
||||
var sourceBuffer = this.sourceBuffer;
|
||||
if (sourceBuffer) {
|
||||
for (var type in sourceBuffer) {
|
||||
appended += sourceBuffer[type].buffered.length;
|
||||
}
|
||||
}
|
||||
this.appended = appended;
|
||||
this.hls.trigger(Event.BUFFER_FLUSHED);
|
||||
}
|
||||
}
|
||||
|
||||
doAppending() {
|
||||
var hls = this.hls, sourceBuffer = this.sourceBuffer, segments = this.segments;
|
||||
if (sourceBuffer) {
|
||||
if (this.media.error) {
|
||||
segments = [];
|
||||
logger.error('trying to append although a media error occurred, flush segment and abort');
|
||||
return;
|
||||
}
|
||||
for (var type in sourceBuffer) {
|
||||
if (sourceBuffer[type].updating) {
|
||||
//logger.log('sb update in progress');
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (segments.length) {
|
||||
var segment = segments.shift();
|
||||
try {
|
||||
//logger.log(`appending ${segment.type} SB, size:${segment.data.length}`);
|
||||
sourceBuffer[segment.type].appendBuffer(segment.data);
|
||||
this.appendError = 0;
|
||||
this.appended++;
|
||||
} catch(err) {
|
||||
// in case any error occurred while appending, put the segment back in the segments table
|
||||
logger.error(`error while trying to append buffer:${err.message}`);
|
||||
segments.unshift(segment);
|
||||
var event = {type: ErrorTypes.MEDIA_ERROR};
|
||||
if(err.code !== 22) {
|
||||
if (this.appendError) {
|
||||
this.appendError++;
|
||||
} else {
|
||||
this.appendError = 1;
|
||||
}
|
||||
event.details = ErrorDetails.BUFFER_APPEND_ERROR;
|
||||
event.frag = this.fragCurrent;
|
||||
/* with UHD content, we could get loop of quota exceeded error until
|
||||
browser is able to evict some data from the sourceBuffer. retrying helps recover from this
|
||||
*/
|
||||
if (this.appendError > hls.config.appendErrorMaxRetry) {
|
||||
logger.log(`failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`);
|
||||
segments = [];
|
||||
event.fatal = true;
|
||||
hls.trigger(Event.ERROR, event);
|
||||
return;
|
||||
} else {
|
||||
event.fatal = false;
|
||||
hls.trigger(Event.ERROR, event);
|
||||
}
|
||||
} else {
|
||||
// QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
|
||||
// let's stop appending any segments, and report BUFFER_FULL_ERROR error
|
||||
this.segments = [];
|
||||
event.details = ErrorDetails.BUFFER_FULL_ERROR;
|
||||
hls.trigger(Event.ERROR,event);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
flush specified buffered range,
|
||||
return true once range has been flushed.
|
||||
as sourceBuffer.remove() is asynchronous, flushBuffer will be retriggered on sourceBuffer update end
|
||||
*/
|
||||
flushBuffer(startOffset, endOffset) {
|
||||
var sb, i, bufStart, bufEnd, flushStart, flushEnd;
|
||||
//logger.log('flushBuffer,pos/start/end: ' + this.media.currentTime + '/' + startOffset + '/' + endOffset);
|
||||
// safeguard to avoid infinite looping : don't try to flush more than the nb of appended segments
|
||||
if (this.flushBufferCounter < this.appended && this.sourceBuffer) {
|
||||
for (var type in this.sourceBuffer) {
|
||||
sb = this.sourceBuffer[type];
|
||||
if (!sb.updating) {
|
||||
for (i = 0; i < sb.buffered.length; i++) {
|
||||
bufStart = sb.buffered.start(i);
|
||||
bufEnd = sb.buffered.end(i);
|
||||
// workaround: Firefox is not able to properly flush multiple buffered ranges.
|
||||
if (navigator.userAgent.toLowerCase().indexOf('firefox') !== -1 && endOffset === Number.POSITIVE_INFINITY) {
|
||||
flushStart = startOffset;
|
||||
flushEnd = endOffset;
|
||||
} else {
|
||||
flushStart = Math.max(bufStart, startOffset);
|
||||
flushEnd = Math.min(bufEnd, endOffset);
|
||||
}
|
||||
/* sometimes sourcebuffer.remove() does not flush
|
||||
the exact expected time range.
|
||||
to avoid rounding issues/infinite loop,
|
||||
only flush buffer range of length greater than 500ms.
|
||||
*/
|
||||
if (Math.min(flushEnd,bufEnd) - flushStart > 0.5 ) {
|
||||
this.flushBufferCounter++;
|
||||
logger.log(`flush ${type} [${flushStart},${flushEnd}], of [${bufStart},${bufEnd}], pos:${this.media.currentTime}`);
|
||||
sb.remove(flushStart, flushEnd);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//logger.log('abort ' + type + ' append in progress');
|
||||
// this will abort any appending in progress
|
||||
//sb.abort();
|
||||
logger.warn('cannot flush, sb updating in progress');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logger.warn('abort flushing, too many retries');
|
||||
}
|
||||
logger.log('buffer flushed');
|
||||
// everything flushed !
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
export default BufferController;
|
|
@ -1,106 +0,0 @@
|
|||
/*
|
||||
* cap stream level to media size dimension controller
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import EventHandler from '../event-handler';
|
||||
|
||||
class CapLevelController extends EventHandler {
|
||||
constructor(hls) {
|
||||
super(hls,
|
||||
Event.MEDIA_ATTACHING,
|
||||
Event.MANIFEST_PARSED);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
if (this.hls.config.capLevelToPlayerSize) {
|
||||
this.media = null;
|
||||
this.autoLevelCapping = Number.POSITIVE_INFINITY;
|
||||
if (this.timer) {
|
||||
this.timer = clearInterval(this.timer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
onMediaAttaching(data) {
|
||||
this.media = data.media instanceof HTMLVideoElement ? data.media : null;
|
||||
}
|
||||
|
||||
onManifestParsed(data) {
|
||||
if (this.hls.config.capLevelToPlayerSize) {
|
||||
this.autoLevelCapping = Number.POSITIVE_INFINITY;
|
||||
this.levels = data.levels;
|
||||
this.hls.firstLevel = this.getMaxLevel(data.firstLevel);
|
||||
clearInterval(this.timer);
|
||||
this.timer = setInterval(this.detectPlayerSize.bind(this), 1000);
|
||||
this.detectPlayerSize();
|
||||
}
|
||||
}
|
||||
|
||||
detectPlayerSize() {
|
||||
if (this.media) {
|
||||
let levelsLength = this.levels ? this.levels.length : 0;
|
||||
if (levelsLength) {
|
||||
this.hls.autoLevelCapping = this.getMaxLevel(levelsLength - 1);
|
||||
if (this.hls.autoLevelCapping > this.autoLevelCapping) {
|
||||
// if the new auto level capping is higher than the previous one, flush the buffer using nextLevelSwitch
// this usually happens when the user goes to fullscreen mode.
|
||||
this.hls.streamController.nextLevelSwitch();
|
||||
}
|
||||
this.autoLevelCapping = this.hls.autoLevelCapping;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
 * the returned level should be the one whose dimensions are equal to or greater than the media (player) dimensions (so the video will be downscaled)
|
||||
*/
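// e.g. (hypothetical levels): with a 1280x720 player and levels at 640x360,
// 1280x720 and 1920x1080, the loop below stops at index 1 (the 1280x720 level)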
|
||||
getMaxLevel(capLevelIndex) {
|
||||
let result,
|
||||
i,
|
||||
level,
|
||||
mWidth = this.mediaWidth,
|
||||
mHeight = this.mediaHeight,
|
||||
lWidth = 0,
|
||||
lHeight = 0;
|
||||
|
||||
for (i = 0; i <= capLevelIndex; i++) {
|
||||
level = this.levels[i];
|
||||
result = i;
|
||||
lWidth = level.width;
|
||||
lHeight = level.height;
|
||||
if (mWidth <= lWidth || mHeight <= lHeight) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
get contentScaleFactor() {
|
||||
let pixelRatio = 1;
|
||||
try {
|
||||
pixelRatio = window.devicePixelRatio;
|
||||
} catch(e) {}
|
||||
return pixelRatio;
|
||||
}
|
||||
|
||||
get mediaWidth() {
|
||||
let width;
|
||||
if (this.media) {
|
||||
width = this.media.width || this.media.clientWidth || this.media.offsetWidth;
|
||||
width *= this.contentScaleFactor;
|
||||
}
|
||||
return width;
|
||||
}
|
||||
|
||||
get mediaHeight() {
|
||||
let height;
|
||||
if (this.media) {
|
||||
height = this.media.height || this.media.clientHeight || this.media.offsetHeight;
|
||||
height *= this.contentScaleFactor;
|
||||
}
|
||||
return height;
|
||||
}
|
||||
}
|
||||
|
||||
export default CapLevelController;
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
* EWMA Bandwidth Estimator
|
||||
* - heavily inspired from shaka-player
|
||||
* Tracks bandwidth samples and estimates available bandwidth.
|
||||
* Based on the minimum of two exponentially-weighted moving averages with
|
||||
* different half-lives.
|
||||
*/
|
||||
|
||||
import EWMA from '../utils/ewma';
|
||||
|
||||
|
||||
class EwmaBandWidthEstimator {
|
||||
|
||||
constructor(hls,slow,fast,defaultEstimate) {
|
||||
this.hls = hls;
|
||||
this.defaultEstimate_ = defaultEstimate;
|
||||
this.minWeight_ = 0.001;
|
||||
this.minDelayMs_ = 50;
|
||||
this.slow_ = new EWMA(slow);
|
||||
this.fast_ = new EWMA(fast);
|
||||
}
|
||||
|
||||
sample(durationMs,numBytes) {
|
||||
durationMs = Math.max(durationMs, this.minDelayMs_);
|
||||
var bandwidth = 8000* numBytes / durationMs,
|
||||
//console.log('instant bw:'+ Math.round(bandwidth));
|
||||
// we weight sample using loading duration....
|
||||
weight = durationMs / 1000;
|
||||
this.fast_.sample(weight,bandwidth);
|
||||
this.slow_.sample(weight,bandwidth);
|
||||
}
|
||||
|
||||
|
||||
getEstimate() {
|
||||
if (!this.fast_ || !this.slow_ || this.fast_.getTotalWeight() < this.minWeight_) {
|
||||
return this.defaultEstimate_;
|
||||
}
|
||||
//console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
|
||||
//console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
|
||||
// Take the minimum of these two estimates. This should have the effect of
|
||||
// adapting down quickly, but up more slowly.
|
||||
return Math.min(this.fast_.getEstimate(),this.slow_.getEstimate());
|
||||
}
|
||||
|
||||
destroy() {
|
||||
}
|
||||
}
|
||||
export default EwmaBandWidthEstimator;
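// Illustrative usage sketch (not part of the original file); the constructor
// arguments mirror the config values referenced above (abrEwmaSlowVoD, abrEwmaFastVoD,
// abrEwmaDefaultEstimate), with hypothetical numbers:
//
//   var estimator = new EwmaBandWidthEstimator(hls, 15, 4, 500000);
//   estimator.sample(1200, 300000); // a 300 kB fragment loaded in 1.2 s => ~2 Mbps instant bw
//   estimator.getEstimate();        // min of the fast/slow EWMAs, in bits per second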
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
* FPS Controller
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import {logger} from '../utils/logger';
|
||||
|
||||
class FPSController {
|
||||
|
||||
constructor(hls) {
|
||||
this.hls = hls;
|
||||
this.timer = setInterval(this.checkFPS, hls.config.fpsDroppedMonitoringPeriod);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
if (this.timer) {
|
||||
clearInterval(this.timer);
|
||||
}
|
||||
}
|
||||
|
||||
checkFPS() {
|
||||
var v = this.hls.video;
|
||||
if (v) {
|
||||
var decodedFrames = v.webkitDecodedFrameCount, droppedFrames = v.webkitDroppedFrameCount, currentTime = new Date();
|
||||
if (decodedFrames) {
|
||||
if (this.lastTime) {
|
||||
var currentPeriod = currentTime - this.lastTime;
|
||||
var currentDropped = droppedFrames - this.lastDroppedFrames;
|
||||
var currentDecoded = decodedFrames - this.lastDecodedFrames;
|
||||
var decodedFPS = 1000 * currentDecoded / currentPeriod;
|
||||
var droppedFPS = 1000 * currentDropped / currentPeriod;
|
||||
if (droppedFPS > 0) {
|
||||
logger.log(`checkFPS : droppedFPS/decodedFPS:${droppedFPS.toFixed(1)}/${decodedFPS.toFixed(1)}`);
|
||||
if (currentDropped > this.hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
|
||||
logger.warn('drop FPS ratio greater than max allowed value');
|
||||
this.hls.trigger(Event.FPS_DROP, {currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames});
|
||||
}
|
||||
}
|
||||
}
|
||||
this.lastTime = currentTime;
|
||||
this.lastDroppedFrames = droppedFrames;
|
||||
this.lastDecodedFrames = decodedFrames;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default FPSController;
|
||||
|
|
@ -1,300 +0,0 @@
|
|||
/*
|
||||
* Level Controller
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import EventHandler from '../event-handler';
|
||||
import {logger} from '../utils/logger';
|
||||
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
|
||||
class LevelController extends EventHandler {
|
||||
|
||||
constructor(hls) {
|
||||
super(hls,
|
||||
Event.MANIFEST_LOADED,
|
||||
Event.LEVEL_LOADED,
|
||||
Event.ERROR);
|
||||
this.ontick = this.tick.bind(this);
|
||||
this._manualLevel = this._autoLevelCapping = -1;
|
||||
}
|
||||
|
||||
destroy() {
|
||||
if (this.timer) {
|
||||
clearTimeout(this.timer);
|
||||
this.timer = null;
|
||||
}
|
||||
this._manualLevel = -1;
|
||||
}
|
||||
|
||||
startLoad() {
|
||||
this.canload = true;
|
||||
// speed up live playlist refresh if timer exists
|
||||
if (this.timer) {
|
||||
this.tick();
|
||||
}
|
||||
}
|
||||
|
||||
stopLoad() {
|
||||
this.canload = false;
|
||||
}
|
||||
|
||||
onManifestLoaded(data) {
|
||||
var levels0 = [], levels = [], bitrateStart, i, bitrateSet = {}, videoCodecFound = false, audioCodecFound = false, hls = this.hls;
|
||||
|
||||
// group redundant levels together
|
||||
data.levels.forEach(level => {
|
||||
if(level.videoCodec) {
|
||||
videoCodecFound = true;
|
||||
}
|
||||
if(level.audioCodec) {
|
||||
audioCodecFound = true;
|
||||
}
|
||||
var redundantLevelId = bitrateSet[level.bitrate];
|
||||
if (redundantLevelId === undefined) {
|
||||
bitrateSet[level.bitrate] = levels0.length;
|
||||
level.url = [level.url];
|
||||
level.urlId = 0;
|
||||
levels0.push(level);
|
||||
} else {
|
||||
levels0[redundantLevelId].url.push(level.url);
|
||||
}
|
||||
});
|
||||
|
||||
// remove audio-only level if we also have levels with audio+video codecs signalled
|
||||
if(videoCodecFound && audioCodecFound) {
|
||||
levels0.forEach(level => {
|
||||
if(level.videoCodec) {
|
||||
levels.push(level);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
levels = levels0;
|
||||
}
|
||||
|
||||
// only keep level with supported audio/video codecs
|
||||
levels = levels.filter(function(level) {
|
||||
var checkSupportedAudio = function(codec) { return MediaSource.isTypeSupported(`audio/mp4;codecs=${codec}`);};
|
||||
var checkSupportedVideo = function(codec) { return MediaSource.isTypeSupported(`video/mp4;codecs=${codec}`);};
|
||||
var audioCodec = level.audioCodec, videoCodec = level.videoCodec;
|
||||
|
||||
return (!audioCodec || checkSupportedAudio(audioCodec)) &&
|
||||
(!videoCodec || checkSupportedVideo(videoCodec));
|
||||
});
|
||||
|
||||
if(levels.length) {
|
||||
// start bitrate is the first bitrate of the manifest
|
||||
bitrateStart = levels[0].bitrate;
|
||||
// sort levels by bitrate
|
||||
levels.sort(function (a, b) {
|
||||
return a.bitrate - b.bitrate;
|
||||
});
|
||||
this._levels = levels;
|
||||
// find index of first level in sorted levels
|
||||
for (i = 0; i < levels.length; i++) {
|
||||
if (levels[i].bitrate === bitrateStart) {
|
||||
this._firstLevel = i;
|
||||
logger.log(`manifest loaded,${levels.length} level(s) found, first bitrate:${bitrateStart}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
hls.trigger(Event.MANIFEST_PARSED, {levels: this._levels, firstLevel: this._firstLevel, stats: data.stats});
|
||||
} else {
|
||||
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR, fatal: true, url: hls.url, reason: 'no level with compatible codecs found in manifest'});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
get levels() {
|
||||
return this._levels;
|
||||
}
|
||||
|
||||
get level() {
|
||||
return this._level;
|
||||
}
|
||||
|
||||
set level(newLevel) {
|
||||
let levels = this._levels;
|
||||
if (levels && levels.length > newLevel) {
|
||||
if (this._level !== newLevel || levels[newLevel].details === undefined) {
|
||||
this.setLevelInternal(newLevel);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setLevelInternal(newLevel) {
|
||||
let levels = this._levels;
|
||||
// check if level idx is valid
|
||||
if (newLevel >= 0 && newLevel < levels.length) {
|
||||
// stopping live reloading timer if any
|
||||
if (this.timer) {
|
||||
clearTimeout(this.timer);
|
||||
this.timer = null;
|
||||
}
|
||||
this._level = newLevel;
|
||||
logger.log(`switching to level ${newLevel}`);
|
||||
this.hls.trigger(Event.LEVEL_SWITCH, {level: newLevel});
|
||||
var level = levels[newLevel];
|
||||
// check if we need to load playlist for this level
|
||||
if (level.details === undefined || level.details.live === true) {
|
||||
// level not retrieved yet, or live playlist: we need to (re)load it
|
||||
logger.log(`(re)loading playlist for level ${newLevel}`);
|
||||
var urlId = level.urlId;
|
||||
this.hls.trigger(Event.LEVEL_LOADING, {url: level.url[urlId], level: newLevel, id: urlId});
|
||||
}
|
||||
} else {
|
||||
// invalid level id given, trigger error
|
||||
this.hls.trigger(Event.ERROR, {type : ErrorTypes.OTHER_ERROR, details: ErrorDetails.LEVEL_SWITCH_ERROR, level: newLevel, fatal: false, reason: 'invalid level idx'});
|
||||
}
|
||||
}
|
||||
|
||||
get manualLevel() {
|
||||
return this._manualLevel;
|
||||
}
|
||||
|
||||
set manualLevel(newLevel) {
|
||||
this._manualLevel = newLevel;
|
||||
if (this._startLevel === undefined) {
|
||||
this._startLevel = newLevel;
|
||||
}
|
||||
if (newLevel !== -1) {
|
||||
this.level = newLevel;
|
||||
}
|
||||
}
|
||||
|
||||
get firstLevel() {
|
||||
return this._firstLevel;
|
||||
}
|
||||
|
||||
set firstLevel(newLevel) {
|
||||
this._firstLevel = newLevel;
|
||||
}
|
||||
|
||||
get startLevel() {
|
||||
if (this._startLevel === undefined) {
|
||||
return this._firstLevel;
|
||||
} else {
|
||||
return this._startLevel;
|
||||
}
|
||||
}
|
||||
|
||||
set startLevel(newLevel) {
|
||||
this._startLevel = newLevel;
|
||||
}
|
||||
|
||||
onError(data) {
|
||||
if(data.fatal) {
|
||||
return;
|
||||
}
|
||||
|
||||
let details = data.details, hls = this.hls, levelId, level, levelError = false;
|
||||
// try to recover not fatal errors
|
||||
switch(details) {
|
||||
case ErrorDetails.FRAG_LOAD_ERROR:
|
||||
case ErrorDetails.FRAG_LOAD_TIMEOUT:
|
||||
case ErrorDetails.FRAG_LOOP_LOADING_ERROR:
|
||||
case ErrorDetails.KEY_LOAD_ERROR:
|
||||
case ErrorDetails.KEY_LOAD_TIMEOUT:
|
||||
levelId = data.frag.level;
|
||||
break;
|
||||
case ErrorDetails.LEVEL_LOAD_ERROR:
|
||||
case ErrorDetails.LEVEL_LOAD_TIMEOUT:
|
||||
levelId = data.level;
|
||||
levelError = true;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
/* try to switch to a redundant stream if any available.
|
||||
* if no redundant stream available, emergency switch down (if in auto mode and current level not 0)
|
||||
* otherwise, we cannot recover this network error ...
|
||||
* don't raise FRAG_LOAD_ERROR and FRAG_LOAD_TIMEOUT as fatal, as it is handled by mediaController
|
||||
*/
|
||||
if (levelId !== undefined) {
|
||||
level = this._levels[levelId];
|
||||
if (level.urlId < (level.url.length - 1)) {
|
||||
level.urlId++;
|
||||
level.details = undefined;
|
||||
logger.warn(`level controller,${details} for level ${levelId}: switching to redundant stream id ${level.urlId}`);
|
||||
} else {
|
||||
// we could try to recover if in auto mode and current level not lowest level (0)
|
||||
let recoverable = ((this._manualLevel === -1) && levelId);
|
||||
if (recoverable) {
|
||||
logger.warn(`level controller,${details}: emergency switch-down for next fragment`);
|
||||
hls.abrController.nextAutoLevel = 0;
|
||||
} else if(level && level.details && level.details.live) {
|
||||
logger.warn(`level controller,${details} on live stream, discard`);
|
||||
if (levelError) {
|
||||
// reset this._level so that another call to set level() will retrigger a frag load
|
||||
this._level = undefined;
|
||||
}
|
||||
// FRAG_LOAD_ERROR and FRAG_LOAD_TIMEOUT are handled by mediaController
|
||||
} else if (details !== ErrorDetails.FRAG_LOAD_ERROR && details !== ErrorDetails.FRAG_LOAD_TIMEOUT) {
|
||||
logger.error(`cannot recover ${details} error`);
|
||||
this._level = undefined;
|
||||
// stopping live reloading timer if any
|
||||
if (this.timer) {
|
||||
clearTimeout(this.timer);
|
||||
this.timer = null;
|
||||
}
|
||||
// redispatch same error but with fatal set to true
|
||||
data.fatal = true;
|
||||
hls.trigger(Event.ERROR, data);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
onLevelLoaded(data) {
|
||||
// only process level loaded events matching with expected level
|
||||
if (data.level === this._level) {
|
||||
let newDetails = data.details;
|
||||
// if current playlist is a live playlist, arm a timer to reload it
|
||||
if (newDetails.live) {
|
||||
let reloadInterval = 1000*newDetails.targetduration,
|
||||
curLevel = this._levels[data.level],
|
||||
curDetails = curLevel.details;
|
||||
if (curDetails && newDetails.endSN === curDetails.endSN) {
|
||||
// follow HLS Spec, If the client reloads a Playlist file and finds that it has not
|
||||
// changed then it MUST wait for a period of one-half the target
|
||||
// duration before retrying.
|
||||
reloadInterval /=2;
|
||||
logger.log(`same live playlist, reload twice faster`);
|
||||
}
|
||||
// decrement reloadInterval with level loading delay
|
||||
reloadInterval -= performance.now() - data.stats.trequest;
|
||||
// in any case, don't reload more than every second
|
||||
reloadInterval = Math.max(1000,Math.round(reloadInterval));
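// e.g. (illustrative): a 6 s targetduration with an unchanged playlist gives
// 6000 / 2 = 3000 ms, minus the playlist load delay, but never below 1000 ms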
|
||||
logger.log(`live playlist, reload in ${reloadInterval} ms`);
|
||||
this.timer = setTimeout(this.ontick,reloadInterval);
|
||||
} else {
|
||||
this.timer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tick() {
|
||||
var levelId = this._level;
|
||||
if (levelId !== undefined && this.canload) {
|
||||
var level = this._levels[levelId], urlId = level.urlId;
|
||||
this.hls.trigger(Event.LEVEL_LOADING, {url: level.url[urlId], level: levelId, id: urlId});
|
||||
}
|
||||
}
|
||||
|
||||
get nextLoadLevel() {
|
||||
if (this._manualLevel !== -1) {
|
||||
return this._manualLevel;
|
||||
} else {
|
||||
return this.hls.abrController.nextAutoLevel;
|
||||
}
|
||||
}
|
||||
|
||||
set nextLoadLevel(nextLevel) {
|
||||
this.level = nextLevel;
|
||||
if (this._manualLevel === -1) {
|
||||
this.hls.abrController.nextAutoLevel = nextLevel;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default LevelController;
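// Illustrative note (hypothetical manifest, not from the source): two 2 Mbps variants
// pointing at different CDNs are grouped above into a single level with
// url = [urlA, urlB]; on a load error the controller bumps urlId so the next
// playlist/fragment requests fail over to the redundant stream before switching down.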
|
||||
|
File diff suppressed because it is too large
|
@ -1,69 +0,0 @@
|
|||
/*
|
||||
* Timeline Controller
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import EventHandler from '../event-handler';
|
||||
import CEA708Interpreter from '../utils/cea-708-interpreter';
|
||||
|
||||
class TimelineController extends EventHandler {
|
||||
|
||||
constructor(hls) {
|
||||
super(hls, Event.MEDIA_ATTACHING,
|
||||
Event.MEDIA_DETACHING,
|
||||
Event.FRAG_PARSING_USERDATA,
|
||||
Event.MANIFEST_LOADING,
|
||||
Event.FRAG_LOADED);
|
||||
|
||||
this.hls = hls;
|
||||
this.config = hls.config;
|
||||
|
||||
if (this.config.enableCEA708Captions)
|
||||
{
|
||||
this.cea708Interpreter = new CEA708Interpreter();
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
EventHandler.prototype.destroy.call(this);
|
||||
}
|
||||
|
||||
onMediaAttaching(data) {
|
||||
var media = this.media = data.media;
|
||||
this.cea708Interpreter.attach(media);
|
||||
}
|
||||
|
||||
onMediaDetaching() {
|
||||
this.cea708Interpreter.detach();
|
||||
}
|
||||
|
||||
onManifestLoading()
|
||||
{
|
||||
this.lastPts = Number.POSITIVE_INFINITY;
|
||||
}
|
||||
|
||||
onFragLoaded(data)
|
||||
{
|
||||
var pts = data.frag.start; //Number.POSITIVE_INFINITY;
|
||||
|
||||
// if this is a frag for a previously loaded timerange, remove all captions
|
||||
// TODO: consider just removing captions for the timerange
|
||||
if (pts <= this.lastPts)
|
||||
{
|
||||
this.cea708Interpreter.clear();
|
||||
}
|
||||
|
||||
this.lastPts = pts;
|
||||
}
|
||||
|
||||
onFragParsingUserdata(data) {
|
||||
// push all of the CEA-708 messages into the interpreter
|
||||
// immediately. It will create the proper timestamps based on our PTS value
|
||||
for (var i=0; i<data.samples.length; i++)
|
||||
{
|
||||
this.cea708Interpreter.push(data.samples[i].pts, data.samples[i].bytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default TimelineController;
|
|
@ -1,205 +0,0 @@
|
|||
/*
|
||||
*
|
||||
* This file contains an adaptation of the AES decryption algorithm
|
||||
 * from the Stanford Javascript Cryptography Library. That work is
|
||||
* covered by the following copyright and permissions notice:
|
||||
*
|
||||
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
|
||||
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
||||
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
||||
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* The views and conclusions contained in the software and documentation
|
||||
* are those of the authors and should not be interpreted as representing
|
||||
* official policies, either expressed or implied, of the authors.
|
||||
*/
|
||||
class AES {
|
||||
|
||||
/**
|
||||
* Schedule out an AES key for both encryption and decryption. This
|
||||
* is a low-level class. Use a cipher mode to do bulk encryption.
|
||||
*
|
||||
* @constructor
|
||||
* @param key {Array} The key as an array of 4, 6 or 8 words.
|
||||
*/
|
||||
constructor(key) {
|
||||
/**
|
||||
* The expanded S-box and inverse S-box tables. These will be computed
|
||||
* on the client so that we don't have to send them down the wire.
|
||||
*
|
||||
* There are two tables, _tables[0] is for encryption and
|
||||
* _tables[1] is for decryption.
|
||||
*
|
||||
* The first 4 sub-tables are the expanded S-box with MixColumns. The
|
||||
* last (_tables[01][4]) is the S-box itself.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
this._tables = [[[],[],[],[],[]],[[],[],[],[],[]]];
|
||||
|
||||
this._precompute();
|
||||
|
||||
var i, j, tmp,
|
||||
encKey, decKey,
|
||||
sbox = this._tables[0][4], decTable = this._tables[1],
|
||||
keyLen = key.length, rcon = 1;
|
||||
|
||||
if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
|
||||
throw new Error('Invalid aes key size=' + keyLen);
|
||||
}
|
||||
|
||||
encKey = key.slice(0);
|
||||
decKey = [];
|
||||
this._key = [encKey, decKey];
|
||||
|
||||
// schedule encryption keys
|
||||
for (i = keyLen; i < 4 * keyLen + 28; i++) {
|
||||
tmp = encKey[i-1];
|
||||
|
||||
// apply sbox
|
||||
if (i%keyLen === 0 || (keyLen === 8 && i%keyLen === 4)) {
|
||||
tmp = sbox[tmp>>>24]<<24 ^ sbox[tmp>>16&255]<<16 ^ sbox[tmp>>8&255]<<8 ^ sbox[tmp&255];
|
||||
|
||||
// shift rows and add rcon
|
||||
if (i%keyLen === 0) {
|
||||
tmp = tmp<<8 ^ tmp>>>24 ^ rcon<<24;
|
||||
rcon = rcon<<1 ^ (rcon>>7)*283;
|
||||
}
|
||||
}
|
||||
|
||||
encKey[i] = encKey[i-keyLen] ^ tmp;
|
||||
}
|
||||
|
||||
// schedule decryption keys
|
||||
for (j = 0; i; j++, i--) {
|
||||
tmp = encKey[j&3 ? i : i - 4];
|
||||
if (i<=4 || j<4) {
|
||||
decKey[j] = tmp;
|
||||
} else {
|
||||
decKey[j] = decTable[0][sbox[tmp>>>24 ]] ^
|
||||
decTable[1][sbox[tmp>>16 & 255]] ^
|
||||
decTable[2][sbox[tmp>>8 & 255]] ^
|
||||
decTable[3][sbox[tmp & 255]];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Expand the S-box tables.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
_precompute() {
|
||||
var encTable = this._tables[0], decTable = this._tables[1],
|
||||
sbox = encTable[4], sboxInv = decTable[4],
|
||||
i, x, xInv, d=[], th=[], x2, x4, x8, s, tEnc, tDec;
|
||||
|
||||
// Compute double and third tables
|
||||
for (i = 0; i < 256; i++) {
|
||||
th[( d[i] = i<<1 ^ (i>>7)*283 )^i]=i;
|
||||
}
|
||||
|
||||
for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
|
||||
// Compute sbox
|
||||
s = xInv ^ xInv<<1 ^ xInv<<2 ^ xInv<<3 ^ xInv<<4;
|
||||
s = s>>8 ^ s&255 ^ 99;
|
||||
sbox[x] = s;
|
||||
sboxInv[s] = x;
|
||||
|
||||
// Compute MixColumns
|
||||
x8 = d[x4 = d[x2 = d[x]]];
|
||||
tDec = x8*0x1010101 ^ x4*0x10001 ^ x2*0x101 ^ x*0x1010100;
|
||||
tEnc = d[s]*0x101 ^ s*0x1010100;
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
encTable[i][x] = tEnc = tEnc<<24 ^ tEnc>>>8;
|
||||
decTable[i][s] = tDec = tDec<<24 ^ tDec>>>8;
|
||||
}
|
||||
}
|
||||
|
||||
// Compactify. Considerable speedup on Firefox.
|
||||
for (i = 0; i < 5; i++) {
|
||||
encTable[i] = encTable[i].slice(0);
|
||||
decTable[i] = decTable[i].slice(0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt 16 bytes, specified as four 32-bit words.
|
||||
* @param encrypted0 {number} the first word to decrypt
|
||||
* @param encrypted1 {number} the second word to decrypt
|
||||
* @param encrypted2 {number} the third word to decrypt
|
||||
* @param encrypted3 {number} the fourth word to decrypt
|
||||
* @param out {Int32Array} the array to write the decrypted words
|
||||
* into
|
||||
* @param offset {number} the offset into the output array to start
|
||||
* writing results
|
||||
* @return {Array} The plaintext.
|
||||
*/
|
||||
decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
|
||||
var key = this._key[1],
|
||||
// state variables a,b,c,d are loaded with pre-whitened data
|
||||
a = encrypted0 ^ key[0],
|
||||
b = encrypted3 ^ key[1],
|
||||
c = encrypted2 ^ key[2],
|
||||
d = encrypted1 ^ key[3],
|
||||
a2, b2, c2,
|
||||
|
||||
nInnerRounds = key.length / 4 - 2, // key.length === 2 ?
|
||||
i,
|
||||
kIndex = 4,
|
||||
table = this._tables[1],
|
||||
|
||||
// load up the tables
|
||||
table0 = table[0],
|
||||
table1 = table[1],
|
||||
table2 = table[2],
|
||||
table3 = table[3],
|
||||
sbox = table[4];
|
||||
|
||||
// Inner rounds. Cribbed from OpenSSL.
|
||||
for (i = 0; i < nInnerRounds; i++) {
|
||||
a2 = table0[a>>>24] ^ table1[b>>16 & 255] ^ table2[c>>8 & 255] ^ table3[d & 255] ^ key[kIndex];
|
||||
b2 = table0[b>>>24] ^ table1[c>>16 & 255] ^ table2[d>>8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
|
||||
c2 = table0[c>>>24] ^ table1[d>>16 & 255] ^ table2[a>>8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
|
||||
d = table0[d>>>24] ^ table1[a>>16 & 255] ^ table2[b>>8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
|
||||
kIndex += 4;
|
||||
a=a2; b=b2; c=c2;
|
||||
}
|
||||
|
||||
// Last round.
|
||||
for (i = 0; i < 4; i++) {
|
||||
out[(3 & -i) + offset] =
|
||||
sbox[a>>>24 ]<<24 ^
|
||||
sbox[b>>16 & 255]<<16 ^
|
||||
sbox[c>>8 & 255]<<8 ^
|
||||
sbox[d & 255] ^
|
||||
key[kIndex++];
|
||||
a2=a; a=b; b=c; c=d; d=a2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default AES;
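// Illustrative call shape (hypothetical variables): given four big-endian words of
// ciphertext, decrypt() writes four plaintext words into `out` starting at `offset`:
//   aes.decrypt(w0, w1, w2, w3, out, 0);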
|
|
@ -1,167 +0,0 @@
|
|||
/*
|
||||
*
|
||||
* This file contains an adaptation of the AES decryption algorithm
|
||||
 * from the Stanford Javascript Cryptography Library. That work is
|
||||
* covered by the following copyright and permissions notice:
|
||||
*
|
||||
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
|
||||
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
||||
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
||||
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* The views and conclusions contained in the software and documentation
|
||||
* are those of the authors and should not be interpreted as representing
|
||||
* official policies, either expressed or implied, of the authors.
|
||||
*/
|
||||
|
||||
import AES from './aes';
|
||||
|
||||
class AES128Decrypter {
|
||||
|
||||
constructor(key, initVector) {
|
||||
this.key = key;
|
||||
this.iv = initVector;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert network-order (big-endian) bytes into their little-endian
|
||||
* representation.
|
||||
*/
|
||||
ntoh(word) {
|
||||
return (word << 24) |
|
||||
((word & 0xff00) << 8) |
|
||||
((word & 0xff0000) >> 8) |
|
||||
(word >>> 24);
|
||||
}
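// e.g. ntoh(0x11223344) === 0x44332211 (values kept within the signed 32-bit range)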
|
||||
|
||||
|
||||
/**
|
||||
* Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
|
||||
* @param encrypted {Uint8Array} the encrypted bytes
|
||||
* @param key {Uint32Array} the bytes of the decryption key
|
||||
* @param initVector {Uint32Array} the initialization vector (IV) to
|
||||
* use for the first round of CBC.
|
||||
* @return {Uint8Array} the decrypted bytes
|
||||
*
|
||||
* @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
|
||||
* @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
|
||||
* @see https://tools.ietf.org/html/rfc2315
|
||||
*/
|
||||
doDecrypt(encrypted, key, initVector) {
|
||||
var
|
||||
// word-level access to the encrypted bytes
|
||||
encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2),
|
||||
|
||||
decipher = new AES(Array.prototype.slice.call(key)),
|
||||
|
||||
// byte and word-level access for the decrypted output
|
||||
decrypted = new Uint8Array(encrypted.byteLength),
|
||||
decrypted32 = new Int32Array(decrypted.buffer),
|
||||
|
||||
// temporary variables for working with the IV, encrypted, and
|
||||
// decrypted data
|
||||
init0, init1, init2, init3,
|
||||
encrypted0, encrypted1, encrypted2, encrypted3,
|
||||
|
||||
// iteration variable
|
||||
wordIx;
|
||||
|
||||
// pull out the words of the IV to ensure we don't modify the
|
||||
// passed-in reference and for easier access
|
||||
init0 = ~~initVector[0];
|
||||
init1 = ~~initVector[1];
|
||||
init2 = ~~initVector[2];
|
||||
init3 = ~~initVector[3];
|
||||
|
||||
// decrypt four word sequences, applying cipher-block chaining (CBC)
|
||||
// to each decrypted block
|
||||
for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
|
||||
// convert big-endian (network order) words into little-endian
|
||||
// (javascript order)
|
||||
encrypted0 = ~~this.ntoh(encrypted32[wordIx]);
|
||||
encrypted1 = ~~this.ntoh(encrypted32[wordIx + 1]);
|
||||
encrypted2 = ~~this.ntoh(encrypted32[wordIx + 2]);
|
||||
encrypted3 = ~~this.ntoh(encrypted32[wordIx + 3]);
|
||||
|
||||
// decrypt the block
|
||||
decipher.decrypt(encrypted0,
|
||||
encrypted1,
|
||||
encrypted2,
|
||||
encrypted3,
|
||||
decrypted32,
|
||||
wordIx);
|
||||
|
||||
// XOR with the IV, and restore network byte-order to obtain the
|
||||
// plaintext
|
||||
decrypted32[wordIx] = this.ntoh(decrypted32[wordIx] ^ init0);
|
||||
decrypted32[wordIx + 1] = this.ntoh(decrypted32[wordIx + 1] ^ init1);
|
||||
decrypted32[wordIx + 2] = this.ntoh(decrypted32[wordIx + 2] ^ init2);
|
||||
decrypted32[wordIx + 3] = this.ntoh(decrypted32[wordIx + 3] ^ init3);
|
||||
|
||||
// setup the IV for the next round
|
||||
init0 = encrypted0;
|
||||
init1 = encrypted1;
|
||||
init2 = encrypted2;
|
||||
init3 = encrypted3;
|
||||
}
|
||||
|
||||
return decrypted;
|
||||
}
|
||||
|
||||
localDecrypt(encrypted, key, initVector, decrypted) {
|
||||
var bytes = this.doDecrypt(encrypted,
|
||||
key,
|
||||
initVector);
|
||||
decrypted.set(bytes, encrypted.byteOffset);
|
||||
}
|
||||
|
||||
decrypt(encrypted) {
|
||||
var
|
||||
step = 4 * 8000,
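// i.e. 32000 Int32 words per chunk, so each localDecrypt call handles 128000 bytes of ciphertext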
|
||||
//encrypted32 = new Int32Array(encrypted.buffer),
|
||||
encrypted32 = new Int32Array(encrypted),
|
||||
decrypted = new Uint8Array(encrypted.byteLength),
|
||||
i = 0;
|
||||
|
||||
// split up the decryption job and process the individual chunks
|
||||
var key = this.key;
|
||||
var initVector = this.iv;
|
||||
this.localDecrypt(encrypted32.subarray(i, i + step), key, initVector, decrypted);
|
||||
|
||||
for (i = step; i < encrypted32.length; i += step) {
|
||||
initVector = new Uint32Array([
|
||||
this.ntoh(encrypted32[i - 4]),
|
||||
this.ntoh(encrypted32[i - 3]),
|
||||
this.ntoh(encrypted32[i - 2]),
|
||||
this.ntoh(encrypted32[i - 1])
|
||||
]);
|
||||
this.localDecrypt(encrypted32.subarray(i, i + step), key, initVector, decrypted);
|
||||
}
|
||||
|
||||
return decrypted;
|
||||
}
|
||||
}
|
||||
|
||||
export default AES128Decrypter;
|
|
@ -1,86 +0,0 @@
|
|||
/*
|
||||
* AES128 decryption.
|
||||
*/
|
||||
|
||||
import AES128Decrypter from './aes128-decrypter';
|
||||
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
import {logger} from '../utils/logger';
|
||||
|
||||
class Decrypter {
|
||||
|
||||
constructor(hls) {
|
||||
this.hls = hls;
|
||||
try {
|
||||
const browserCrypto = window ? window.crypto : crypto;
|
||||
this.subtle = browserCrypto.subtle || browserCrypto.webkitSubtle;
|
||||
this.disableWebCrypto = !this.subtle;
|
||||
} catch (e) {
|
||||
this.disableWebCrypto = true;
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
}
|
||||
|
||||
decrypt(data, key, iv, callback) {
|
||||
if (this.disableWebCrypto && this.hls.config.enableSoftwareAES) {
|
||||
this.decryptBySoftware(data, key, iv, callback);
|
||||
} else {
|
||||
this.decryptByWebCrypto(data, key, iv, callback);
|
||||
}
|
||||
}
|
||||
|
||||
decryptByWebCrypto(data, key, iv, callback) {
|
||||
logger.log('decrypting by WebCrypto API');
|
||||
|
||||
this.subtle.importKey('raw', key, { name : 'AES-CBC', length : 128 }, false, ['decrypt']).
|
||||
then((importedKey) => {
|
||||
this.subtle.decrypt({ name : 'AES-CBC', iv : iv.buffer }, importedKey, data).
|
||||
then(callback).
|
||||
catch ((err) => {
|
||||
this.onWebCryptoError(err, data, key, iv, callback);
|
||||
});
|
||||
}).
|
||||
catch ((err) => {
|
||||
this.onWebCryptoError(err, data, key, iv, callback);
|
||||
});
|
||||
}
|
||||
|
||||
decryptBySoftware(data, key8, iv8, callback) {
|
||||
logger.log('decrypting by JavaScript Implementation');
|
||||
|
||||
var view = new DataView(key8.buffer);
|
||||
var key = new Uint32Array([
|
||||
view.getUint32(0),
|
||||
view.getUint32(4),
|
||||
view.getUint32(8),
|
||||
view.getUint32(12)
|
||||
]);
|
||||
|
||||
view = new DataView(iv8.buffer);
|
||||
var iv = new Uint32Array([
|
||||
view.getUint32(0),
|
||||
view.getUint32(4),
|
||||
view.getUint32(8),
|
||||
view.getUint32(12)
|
||||
]);
|
||||
|
||||
var decrypter = new AES128Decrypter(key, iv);
|
||||
callback(decrypter.decrypt(data).buffer);
|
||||
}
|
||||
|
||||
onWebCryptoError(err, data, key, iv, callback) {
|
||||
if (this.hls.config.enableSoftwareAES) {
|
||||
logger.log('disabling WebCrypto API, falling back to software decryption');
|
||||
this.disableWebCrypto = true;
|
||||
this.decryptBySoftware(data, key, iv, callback);
|
||||
}
|
||||
else {
|
||||
logger.error(`decrypting error : ${err.message}`);
|
||||
this.hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details : ErrorDetails.FRAG_DECRYPT_ERROR, fatal : true, reason : err.message});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default Decrypter;
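// Aside: a hedged usage sketch of the class above. Argument names are
// placeholders; the real call site is Demuxer.push() elsewhere in this library.
function decryptFragmentSketch(hlsInstance, encryptedBytes, keyBytes, ivBytes, done) {
  var decrypter = new Decrypter(hlsInstance);
  // the callback receives an ArrayBuffer holding the decrypted payload
  decrypter.decrypt(encryptedBytes, keyBytes, ivBytes, function (plainBuffer) {
    done(new Uint8Array(plainBuffer));
  });
}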
|
|
@ -1,93 +0,0 @@
|
|||
/**
|
||||
* AAC demuxer
|
||||
*/
|
||||
import ADTS from './adts';
|
||||
import {logger} from '../utils/logger';
|
||||
import ID3 from '../demux/id3';
|
||||
|
||||
class AACDemuxer {
|
||||
|
||||
constructor(observer,remuxerClass) {
|
||||
this.observer = observer;
|
||||
this.remuxerClass = remuxerClass;
|
||||
this.remuxer = new this.remuxerClass(observer);
|
||||
this._aacTrack = {container : 'audio/adts', type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
|
||||
}
|
||||
|
||||
static probe(data) {
|
||||
// check if data contains an ID3 timestamp and an ADTS sync word
|
||||
var id3 = new ID3(data), offset,len;
|
||||
if(id3.hasTimeStamp) {
|
||||
// look for ADTS header (0xFFFx)
|
||||
for (offset = id3.length, len = data.length; offset < len - 1; offset++) {
|
||||
if ((data[offset] === 0xff) && (data[offset+1] & 0xf0) === 0xf0) {
|
||||
//logger.log('ADTS sync word found !');
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// feed incoming data to the front of the parsing pipeline
|
||||
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
|
||||
var track = this._aacTrack,
|
||||
id3 = new ID3(data),
|
||||
pts = 90*id3.timeStamp,
|
||||
config, frameLength, frameDuration, frameIndex, offset, headerLength, stamp, len, aacSample;
|
||||
// look for ADTS header (0xFFFx)
|
||||
for (offset = id3.length, len = data.length; offset < len - 1; offset++) {
|
||||
if ((data[offset] === 0xff) && (data[offset+1] & 0xf0) === 0xf0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!track.audiosamplerate) {
|
||||
config = ADTS.getAudioConfig(this.observer,data, offset, audioCodec);
|
||||
track.config = config.config;
|
||||
track.audiosamplerate = config.samplerate;
|
||||
track.channelCount = config.channelCount;
|
||||
track.codec = config.codec;
|
||||
track.duration = duration;
|
||||
logger.log(`parsed codec:${track.codec},rate:${config.samplerate},nb channel:${config.channelCount}`);
|
||||
}
|
||||
frameIndex = 0;
|
||||
frameDuration = 1024 * 90000 / track.audiosamplerate;
|
||||
while ((offset + 5) < len) {
|
||||
// The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
|
||||
headerLength = (!!(data[offset + 1] & 0x01) ? 7 : 9);
|
||||
// retrieve frame size
|
||||
frameLength = ((data[offset + 3] & 0x03) << 11) |
|
||||
(data[offset + 4] << 3) |
|
||||
((data[offset + 5] & 0xE0) >>> 5);
|
||||
frameLength -= headerLength;
|
||||
//stamp = pes.pts;
|
||||
|
||||
if ((frameLength > 0) && ((offset + headerLength + frameLength) <= len)) {
|
||||
stamp = pts + frameIndex * frameDuration;
|
||||
//logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}/${(stamp/90).toFixed(0)}`);
|
||||
aacSample = {unit: data.subarray(offset + headerLength, offset + headerLength + frameLength), pts: stamp, dts: stamp};
|
||||
track.samples.push(aacSample);
|
||||
track.len += frameLength;
|
||||
offset += frameLength + headerLength;
|
||||
frameIndex++;
|
||||
// look for ADTS header (0xFFFx)
|
||||
for ( ; offset < (len - 1); offset++) {
|
||||
if ((data[offset] === 0xff) && ((data[offset + 1] & 0xf0) === 0xf0)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : [ { pts: pts, dts : pts, unit : id3.payload} ]}, { samples: [] }, timeOffset);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default AACDemuxer;
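// Aside: a standalone version of the 13-bit ADTS frame_length extraction used
// in the loop above (function name is illustrative).
function adtsFrameLengthSketch(data, offset) {
  return ((data[offset + 3] & 0x03) << 11) |
         (data[offset + 4] << 3) |
         ((data[offset + 5] & 0xE0) >>> 5);
}
// e.g. header bytes ..., 0x01, 0x40, 0x20, ... give (1 << 11) | (0x40 << 3) | 1 = 2561,
// a length that still includes the 7 or 9 byte header the demuxer subtracts afterwards.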
|
|
@ -1,132 +0,0 @@
|
|||
/**
|
||||
* ADTS parser helper
|
||||
*/
|
||||
import {logger} from '../utils/logger';
|
||||
import Event from '../events'; // needed for the Event.ERROR trigger below
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
|
||||
class ADTS {
|
||||
|
||||
static getAudioConfig(observer, data, offset, audioCodec) {
|
||||
var adtsObjectType, // :int
|
||||
adtsSampleingIndex, // :int
|
||||
adtsExtensionSampleingIndex, // :int
|
||||
adtsChanelConfig, // :int
|
||||
config,
|
||||
userAgent = navigator.userAgent.toLowerCase(),
|
||||
adtsSampleingRates = [
|
||||
96000, 88200,
|
||||
64000, 48000,
|
||||
44100, 32000,
|
||||
24000, 22050,
|
||||
16000, 12000,
|
||||
11025, 8000,
|
||||
7350];
|
||||
// byte 2
|
||||
adtsObjectType = ((data[offset + 2] & 0xC0) >>> 6) + 1;
|
||||
adtsSampleingIndex = ((data[offset + 2] & 0x3C) >>> 2);
|
||||
if(adtsSampleingIndex > adtsSampleingRates.length-1) {
|
||||
observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSampleingIndex}`});
|
||||
return;
|
||||
}
|
||||
adtsChanelConfig = ((data[offset + 2] & 0x01) << 2);
|
||||
// byte 3
|
||||
adtsChanelConfig |= ((data[offset + 3] & 0xC0) >>> 6);
|
||||
logger.log(`manifest codec:${audioCodec},ADTS data:type:${adtsObjectType},sampleingIndex:${adtsSampleingIndex}[${adtsSampleingRates[adtsSampleingIndex]}Hz],channelConfig:${adtsChanelConfig}`);
|
||||
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
|
||||
if (userAgent.indexOf('firefox') !== -1) {
|
||||
if (adtsSampleingIndex >= 6) {
|
||||
adtsObjectType = 5;
|
||||
config = new Array(4);
|
||||
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
|
||||
// there is a factor 2 between frame sample rate and output sample rate
|
||||
// multiply frequency by 2 (see table below, equivalent to subtracting 3 from the sampling index)
|
||||
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
|
||||
} else {
|
||||
adtsObjectType = 2;
|
||||
config = new Array(2);
|
||||
adtsExtensionSampleingIndex = adtsSampleingIndex;
|
||||
}
|
||||
// Android : always use AAC
|
||||
} else if (userAgent.indexOf('android') !== -1) {
|
||||
adtsObjectType = 2;
|
||||
config = new Array(2);
|
||||
adtsExtensionSampleingIndex = adtsSampleingIndex;
|
||||
} else {
|
||||
/* for other browsers (chrome ...)
|
||||
always force audio type to HE-AAC SBR, as some browsers (e.g. Chrome) do not handle audio codec switching properly
|
||||
*/
|
||||
adtsObjectType = 5;
|
||||
config = new Array(4);
|
||||
// if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
|
||||
if ((audioCodec && ((audioCodec.indexOf('mp4a.40.29') !== -1) ||
|
||||
(audioCodec.indexOf('mp4a.40.5') !== -1))) ||
|
||||
(!audioCodec && adtsSampleingIndex >= 6)) {
|
||||
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
|
||||
// there is a factor 2 between frame sample rate and output sample rate
|
||||
// multiply frequency by 2 (see table below, equivalent to subtracting 3 from the sampling index)
|
||||
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
|
||||
} else {
|
||||
// if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
|
||||
// Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
|
||||
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSampleingIndex >= 6 && adtsChanelConfig === 1) ||
|
||||
(!audioCodec && adtsChanelConfig === 1)) {
|
||||
adtsObjectType = 2;
|
||||
config = new Array(2);
|
||||
}
|
||||
adtsExtensionSampleingIndex = adtsSampleingIndex;
|
||||
}
|
||||
}
|
||||
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
|
||||
ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig()
|
||||
Audio Profile / Audio Object Type
|
||||
0: Null
|
||||
1: AAC Main
|
||||
2: AAC LC (Low Complexity)
|
||||
3: AAC SSR (Scalable Sample Rate)
|
||||
4: AAC LTP (Long Term Prediction)
|
||||
5: SBR (Spectral Band Replication)
|
||||
6: AAC Scalable
|
||||
sampling freq
|
||||
0: 96000 Hz
|
||||
1: 88200 Hz
|
||||
2: 64000 Hz
|
||||
3: 48000 Hz
|
||||
4: 44100 Hz
|
||||
5: 32000 Hz
|
||||
6: 24000 Hz
|
||||
7: 22050 Hz
|
||||
8: 16000 Hz
|
||||
9: 12000 Hz
|
||||
10: 11025 Hz
|
||||
11: 8000 Hz
|
||||
12: 7350 Hz
|
||||
13: Reserved
|
||||
14: Reserved
|
||||
15: frequency is written explicitly
|
||||
Channel Configurations
|
||||
These are the channel configurations:
|
||||
0: Defined in AOT Specific Config
|
||||
1: 1 channel: front-center
|
||||
2: 2 channels: front-left, front-right
|
||||
*/
|
||||
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
|
||||
config[0] = adtsObjectType << 3;
|
||||
// samplingFrequencyIndex
|
||||
config[0] |= (adtsSampleingIndex & 0x0E) >> 1;
|
||||
config[1] |= (adtsSampleingIndex & 0x01) << 7;
|
||||
// channelConfiguration
|
||||
config[1] |= adtsChanelConfig << 3;
|
||||
if (adtsObjectType === 5) {
|
||||
// adtsExtensionSampleingIndex
|
||||
config[1] |= (adtsExtensionSampleingIndex & 0x0E) >> 1;
|
||||
config[2] = (adtsExtensionSampleingIndex & 0x01) << 7;
|
||||
// adtsObjectType (force to 2, Chrome checks that the object type is less than 5)
|
||||
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
|
||||
config[2] |= 2 << 2;
|
||||
config[3] = 0;
|
||||
}
|
||||
return {config: config, samplerate: adtsSampleingRates[adtsSampleingIndex], channelCount: adtsChanelConfig, codec: ('mp4a.40.' + adtsObjectType)};
|
||||
}
|
||||
}
|
||||
|
||||
export default ADTS;
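// Aside: a worked example of the two AudioSpecificConfig bytes assembled above,
// reduced to a helper for the plain (non-SBR) case; helper name is illustrative.
function ascSketch(objectType, samplingIndex, channelConfig) {
  var c0 = (objectType << 3) | ((samplingIndex & 0x0E) >> 1);
  var c1 = ((samplingIndex & 0x01) << 7) | (channelConfig << 3);
  return [c0, c1];
}
// ascSketch(2, 4, 2) returns [0x12, 0x10]: AAC-LC, 44100 Hz, stereo ('mp4a.40.2').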
|
|
@ -1,49 +0,0 @@
|
|||
/* inline demuxer.
|
||||
* probe fragments and instantiate appropriate demuxer depending on content type (TSDemuxer, AACDemuxer, ...)
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
import AACDemuxer from '../demux/aacdemuxer';
|
||||
import TSDemuxer from '../demux/tsdemuxer';
|
||||
import MP4Remuxer from '../remux/mp4-remuxer';
|
||||
import PassThroughRemuxer from '../remux/passthrough-remuxer';
|
||||
|
||||
class DemuxerInline {
|
||||
|
||||
constructor(hls,typeSupported) {
|
||||
this.hls = hls;
|
||||
this.typeSupported = typeSupported;
|
||||
}
|
||||
|
||||
destroy() {
|
||||
var demuxer = this.demuxer;
|
||||
if (demuxer) {
|
||||
demuxer.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
|
||||
var demuxer = this.demuxer;
|
||||
if (!demuxer) {
|
||||
var hls = this.hls;
|
||||
// probe for content type
|
||||
if (TSDemuxer.probe(data)) {
|
||||
if (this.typeSupported.mp2t === true) {
|
||||
demuxer = new TSDemuxer(hls,PassThroughRemuxer);
|
||||
} else {
|
||||
demuxer = new TSDemuxer(hls,MP4Remuxer);
|
||||
}
|
||||
} else if(AACDemuxer.probe(data)) {
|
||||
demuxer = new AACDemuxer(hls,MP4Remuxer);
|
||||
} else {
|
||||
hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: 'no demux matching with content found'});
|
||||
return;
|
||||
}
|
||||
this.demuxer = demuxer;
|
||||
}
|
||||
demuxer.push(data,audioCodec,videoCodec,timeOffset,cc,level,sn,duration);
|
||||
}
|
||||
}
|
||||
|
||||
export default DemuxerInline;
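// Aside: the probe order used above, reduced to a pure helper (return labels
// are illustrative only).
function pickDemuxerSketch(data) {
  if (TSDemuxer.probe(data)) {
    return 'ts';
  }
  if (AACDemuxer.probe(data)) {
    return 'aac';
  }
  return null; // corresponds to the FRAG_PARSING_ERROR path above
}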
|
|
@ -1,67 +0,0 @@
|
|||
/* demuxer web worker.
|
||||
* - listen to worker messages, and trigger DemuxerInline upon reception of fragments.
|
||||
* - provides MP4 Boxes back to main thread using [transferable objects](https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast) in order to minimize message passing overhead.
|
||||
*/
|
||||
|
||||
import DemuxerInline from '../demux/demuxer-inline';
|
||||
import Event from '../events';
|
||||
import EventEmitter from 'events';
|
||||
|
||||
var DemuxerWorker = function (self) {
|
||||
// observer setup
|
||||
var observer = new EventEmitter();
|
||||
observer.trigger = function trigger (event, ...data) {
|
||||
observer.emit(event, event, ...data);
|
||||
};
|
||||
|
||||
observer.off = function off (event, ...data) {
|
||||
observer.removeListener(event, ...data);
|
||||
};
|
||||
self.addEventListener('message', function (ev) {
|
||||
var data = ev.data;
|
||||
//console.log('demuxer cmd:' + data.cmd);
|
||||
switch (data.cmd) {
|
||||
case 'init':
|
||||
self.demuxer = new DemuxerInline(observer, data.typeSupported);
|
||||
break;
|
||||
case 'demux':
|
||||
self.demuxer.push(new Uint8Array(data.data), data.audioCodec, data.videoCodec, data.timeOffset, data.cc, data.level, data.sn, data.duration);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// listen to events triggered by Demuxer
|
||||
observer.on(Event.FRAG_PARSING_INIT_SEGMENT, function(ev, data) {
|
||||
self.postMessage({event: ev, tracks : data.tracks, unique : data.unique });
|
||||
});
|
||||
|
||||
observer.on(Event.FRAG_PARSING_DATA, function(ev, data) {
|
||||
var objData = {event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, data1: data.data1.buffer, data2: data.data2.buffer, nb: data.nb, dropped : data.dropped};
|
||||
// pass data1/data2 as transferable object (no copy)
|
||||
self.postMessage(objData, [objData.data1, objData.data2]);
|
||||
});
|
||||
|
||||
observer.on(Event.FRAG_PARSED, function(event) {
|
||||
self.postMessage({event: event});
|
||||
});
|
||||
|
||||
observer.on(Event.ERROR, function(event, data) {
|
||||
self.postMessage({event: event, data: data});
|
||||
});
|
||||
|
||||
observer.on(Event.FRAG_PARSING_METADATA, function(event, data) {
|
||||
var objData = {event: event, samples: data.samples};
|
||||
self.postMessage(objData);
|
||||
});
|
||||
|
||||
observer.on(Event.FRAG_PARSING_USERDATA, function(event, data) {
|
||||
var objData = {event: event, samples: data.samples};
|
||||
self.postMessage(objData);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
export default DemuxerWorker;
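// Aside: what the transferable-object postMessage calls above buy. Transferring
// an ArrayBuffer moves ownership to the receiver instead of copying it; the
// sender's view is left detached afterwards (names below are illustrative).
function postTransferSketch(worker, payload /* Uint8Array */) {
  worker.postMessage({cmd: 'demux', data: payload}, [payload.buffer]);
  // payload.byteLength === 0 here: the buffer now belongs to the worker
}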
|
||||
|
|
@ -1,113 +0,0 @@
|
|||
import Event from '../events';
|
||||
import DemuxerInline from '../demux/demuxer-inline';
|
||||
import DemuxerWorker from '../demux/demuxer-worker';
|
||||
import {logger} from '../utils/logger';
|
||||
import Decrypter from '../crypt/decrypter';
|
||||
|
||||
class Demuxer {
|
||||
|
||||
constructor(hls) {
|
||||
this.hls = hls;
|
||||
var typeSupported = {
|
||||
mp4 : MediaSource.isTypeSupported('video/mp4'),
|
||||
mp2t : hls.config.enableMP2TPassThrough && MediaSource.isTypeSupported('video/mp2t')
|
||||
};
|
||||
if (hls.config.enableWorker && (typeof(Worker) !== 'undefined')) {
|
||||
logger.log('demuxing in webworker');
|
||||
try {
|
||||
var work = require('webworkify');
|
||||
this.w = work(DemuxerWorker);
|
||||
this.onwmsg = this.onWorkerMessage.bind(this);
|
||||
this.w.addEventListener('message', this.onwmsg);
|
||||
this.w.postMessage({cmd: 'init', typeSupported : typeSupported});
|
||||
} catch(err) {
|
||||
logger.error('error while initializing DemuxerWorker, fallback on DemuxerInline');
|
||||
this.demuxer = new DemuxerInline(hls,typeSupported);
|
||||
}
|
||||
} else {
|
||||
this.demuxer = new DemuxerInline(hls,typeSupported);
|
||||
}
|
||||
this.demuxInitialized = true;
|
||||
}
|
||||
|
||||
destroy() {
|
||||
if (this.w) {
|
||||
this.w.removeEventListener('message', this.onwmsg);
|
||||
this.w.terminate();
|
||||
this.w = null;
|
||||
} else {
|
||||
this.demuxer.destroy();
|
||||
this.demuxer = null;
|
||||
}
|
||||
if (this.decrypter) {
|
||||
this.decrypter.destroy();
|
||||
this.decrypter = null;
|
||||
}
|
||||
}
|
||||
|
||||
pushDecrypted(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
|
||||
if (this.w) {
|
||||
// post fragment payload as transferable objects (no copy)
|
||||
this.w.postMessage({cmd: 'demux', data: data, audioCodec: audioCodec, videoCodec: videoCodec, timeOffset: timeOffset, cc: cc, level: level, sn : sn, duration: duration}, [data]);
|
||||
} else {
|
||||
this.demuxer.push(new Uint8Array(data), audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
|
||||
}
|
||||
}
|
||||
|
||||
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration, decryptdata) {
|
||||
if ((data.byteLength > 0) && (decryptdata != null) && (decryptdata.key != null) && (decryptdata.method === 'AES-128')) {
|
||||
if (this.decrypter == null) {
|
||||
this.decrypter = new Decrypter(this.hls);
|
||||
}
|
||||
|
||||
var localthis = this;
|
||||
this.decrypter.decrypt(data, decryptdata.key, decryptdata.iv, function(decryptedData){
|
||||
localthis.pushDecrypted(decryptedData, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
|
||||
});
|
||||
} else {
|
||||
this.pushDecrypted(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
|
||||
}
|
||||
}
|
||||
|
||||
onWorkerMessage(ev) {
|
||||
var data = ev.data;
|
||||
//console.log('onWorkerMessage:' + data.event);
|
||||
switch(data.event) {
|
||||
case Event.FRAG_PARSING_INIT_SEGMENT:
|
||||
var obj = {};
|
||||
obj.tracks = data.tracks;
|
||||
obj.unique = data.unique;
|
||||
this.hls.trigger(Event.FRAG_PARSING_INIT_SEGMENT, obj);
|
||||
break;
|
||||
case Event.FRAG_PARSING_DATA:
|
||||
this.hls.trigger(Event.FRAG_PARSING_DATA,{
|
||||
data1: new Uint8Array(data.data1),
|
||||
data2: new Uint8Array(data.data2),
|
||||
startPTS: data.startPTS,
|
||||
endPTS: data.endPTS,
|
||||
startDTS: data.startDTS,
|
||||
endDTS: data.endDTS,
|
||||
type: data.type,
|
||||
nb: data.nb,
|
||||
dropped: data.dropped,
|
||||
});
|
||||
break;
|
||||
case Event.FRAG_PARSING_METADATA:
|
||||
this.hls.trigger(Event.FRAG_PARSING_METADATA, {
|
||||
samples: data.samples
|
||||
});
|
||||
break;
|
||||
case Event.FRAG_PARSING_USERDATA:
|
||||
this.hls.trigger(Event.FRAG_PARSING_USERDATA, {
|
||||
samples: data.samples
|
||||
});
|
||||
break;
|
||||
default:
|
||||
this.hls.trigger(data.event, data.data);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default Demuxer;
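// Aside: the worker/inline decision made in the constructor above, isolated
// into a predicate (illustrative name; the flag comes from hls.config).
function shouldUseWorkerSketch(config) {
  return !!(config.enableWorker && typeof Worker !== 'undefined');
}
// Even when this returns true, the constructor still falls back to
// DemuxerInline if creating the worker throws.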
|
||||
|
|
@ -1,294 +0,0 @@
|
|||
/**
|
||||
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by H.264.
|
||||
*/
|
||||
|
||||
import {logger} from '../utils/logger';
|
||||
|
||||
class ExpGolomb {
|
||||
|
||||
constructor(data) {
|
||||
this.data = data;
|
||||
// the number of bytes left to examine in this.data
|
||||
this.bytesAvailable = this.data.byteLength;
|
||||
// the current word being examined
|
||||
this.word = 0; // :uint
|
||||
// the number of bits left to examine in the current word
|
||||
this.bitsAvailable = 0; // :uint
|
||||
}
|
||||
|
||||
// ():void
|
||||
loadWord() {
|
||||
var
|
||||
position = this.data.byteLength - this.bytesAvailable,
|
||||
workingBytes = new Uint8Array(4),
|
||||
availableBytes = Math.min(4, this.bytesAvailable);
|
||||
if (availableBytes === 0) {
|
||||
throw new Error('no bytes available');
|
||||
}
|
||||
workingBytes.set(this.data.subarray(position, position + availableBytes));
|
||||
this.word = new DataView(workingBytes.buffer).getUint32(0);
|
||||
// track the amount of this.data that has been processed
|
||||
this.bitsAvailable = availableBytes * 8;
|
||||
this.bytesAvailable -= availableBytes;
|
||||
}
|
||||
|
||||
// (count:int):void
|
||||
skipBits(count) {
|
||||
var skipBytes; // :int
|
||||
if (this.bitsAvailable > count) {
|
||||
this.word <<= count;
|
||||
this.bitsAvailable -= count;
|
||||
} else {
|
||||
count -= this.bitsAvailable;
|
||||
skipBytes = count >> 3;
|
||||
count -= (skipBytes << 3); // convert the skipped whole bytes back into bits
|
||||
this.bytesAvailable -= skipBytes;
|
||||
this.loadWord();
|
||||
this.word <<= count;
|
||||
this.bitsAvailable -= count;
|
||||
}
|
||||
}
|
||||
|
||||
// (size:int):uint
|
||||
readBits(size) {
|
||||
var
|
||||
bits = Math.min(this.bitsAvailable, size), // :uint
|
||||
valu = this.word >>> (32 - bits); // :uint
|
||||
if (size > 32) {
|
||||
logger.error('Cannot read more than 32 bits at a time');
|
||||
}
|
||||
this.bitsAvailable -= bits;
|
||||
if (this.bitsAvailable > 0) {
|
||||
this.word <<= bits;
|
||||
} else if (this.bytesAvailable > 0) {
|
||||
this.loadWord();
|
||||
}
|
||||
bits = size - bits;
|
||||
if (bits > 0) {
|
||||
return valu << bits | this.readBits(bits);
|
||||
} else {
|
||||
return valu;
|
||||
}
|
||||
}
|
||||
|
||||
// ():uint
|
||||
skipLZ() {
|
||||
var leadingZeroCount; // :uint
|
||||
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
|
||||
if (0 !== (this.word & (0x80000000 >>> leadingZeroCount))) {
|
||||
// the first bit of working word is 1
|
||||
this.word <<= leadingZeroCount;
|
||||
this.bitsAvailable -= leadingZeroCount;
|
||||
return leadingZeroCount;
|
||||
}
|
||||
}
|
||||
// we exhausted word and still have not found a 1
|
||||
this.loadWord();
|
||||
return leadingZeroCount + this.skipLZ();
|
||||
}
|
||||
|
||||
// ():void
|
||||
skipUEG() {
|
||||
this.skipBits(1 + this.skipLZ());
|
||||
}
|
||||
|
||||
// ():void
|
||||
skipEG() {
|
||||
this.skipBits(1 + this.skipLZ());
|
||||
}
|
||||
|
||||
// ():uint
|
||||
readUEG() {
|
||||
var clz = this.skipLZ(); // :uint
|
||||
return this.readBits(clz + 1) - 1;
|
||||
}
|
||||
|
||||
// ():int
|
||||
readEG() {
|
||||
var valu = this.readUEG(); // :int
|
||||
if (0x01 & valu) {
|
||||
// the number is odd if the low order bit is set
|
||||
return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
|
||||
} else {
|
||||
return -1 * (valu >>> 1); // divide by two then make it negative
|
||||
}
|
||||
}
|
||||
|
||||
// Some convenience functions
|
||||
// :Boolean
|
||||
readBoolean() {
|
||||
return 1 === this.readBits(1);
|
||||
}
|
||||
|
||||
// ():int
|
||||
readUByte() {
|
||||
return this.readBits(8);
|
||||
}
|
||||
|
||||
// ():int
|
||||
readUShort() {
|
||||
return this.readBits(16);
|
||||
}
|
||||
// ():int
|
||||
readUInt() {
|
||||
return this.readBits(32);
|
||||
}
|
||||
|
||||
/**
|
||||
* Advance the ExpGolomb decoder past a scaling list. The scaling
|
||||
* list is optionally transmitted as part of a sequence parameter
|
||||
* set and is not relevant to transmuxing.
|
||||
* @param count {number} the number of entries in this scaling list
|
||||
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
||||
*/
|
||||
skipScalingList(count) {
|
||||
var
|
||||
lastScale = 8,
|
||||
nextScale = 8,
|
||||
j,
|
||||
deltaScale;
|
||||
for (j = 0; j < count; j++) {
|
||||
if (nextScale !== 0) {
|
||||
deltaScale = this.readEG();
|
||||
nextScale = (lastScale + deltaScale + 256) % 256;
|
||||
}
|
||||
lastScale = (nextScale === 0) ? lastScale : nextScale;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a sequence parameter set and return some interesting video
|
||||
* properties. A sequence parameter set is the H264 metadata that
|
||||
* describes the properties of upcoming video frames.
|
||||
* @param data {Uint8Array} the bytes of a sequence parameter set
|
||||
* @return {object} an object with configuration parsed from the
|
||||
* sequence parameter set, including the dimensions of the
|
||||
* associated video frames.
|
||||
*/
|
||||
readSPS() {
|
||||
var
|
||||
frameCropLeftOffset = 0,
|
||||
frameCropRightOffset = 0,
|
||||
frameCropTopOffset = 0,
|
||||
frameCropBottomOffset = 0,
|
||||
sarScale = 1,
|
||||
profileIdc,profileCompat,levelIdc,
|
||||
numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
|
||||
picHeightInMapUnitsMinus1,
|
||||
frameMbsOnlyFlag,
|
||||
scalingListCount,
|
||||
i;
|
||||
this.readUByte();
|
||||
profileIdc = this.readUByte(); // profile_idc
|
||||
profileCompat = this.readBits(5); // constraint_set[0-4]_flag, u(5)
|
||||
this.skipBits(3); // reserved_zero_3bits u(3),
|
||||
levelIdc = this.readUByte(); //level_idc u(8)
|
||||
this.skipUEG(); // seq_parameter_set_id
|
||||
// some profiles have more optional data we don't need
|
||||
if (profileIdc === 100 ||
|
||||
profileIdc === 110 ||
|
||||
profileIdc === 122 ||
|
||||
profileIdc === 244 ||
|
||||
profileIdc === 44 ||
|
||||
profileIdc === 83 ||
|
||||
profileIdc === 86 ||
|
||||
profileIdc === 118 ||
|
||||
profileIdc === 128) {
|
||||
var chromaFormatIdc = this.readUEG();
|
||||
if (chromaFormatIdc === 3) {
|
||||
this.skipBits(1); // separate_colour_plane_flag
|
||||
}
|
||||
this.skipUEG(); // bit_depth_luma_minus8
|
||||
this.skipUEG(); // bit_depth_chroma_minus8
|
||||
this.skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
||||
if (this.readBoolean()) { // seq_scaling_matrix_present_flag
|
||||
scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
|
||||
for (i = 0; i < scalingListCount; i++) {
|
||||
if (this.readBoolean()) { // seq_scaling_list_present_flag[ i ]
|
||||
if (i < 6) {
|
||||
this.skipScalingList(16);
|
||||
} else {
|
||||
this.skipScalingList(64);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
this.skipUEG(); // log2_max_frame_num_minus4
|
||||
var picOrderCntType = this.readUEG();
|
||||
if (picOrderCntType === 0) {
|
||||
this.readUEG(); //log2_max_pic_order_cnt_lsb_minus4
|
||||
} else if (picOrderCntType === 1) {
|
||||
this.skipBits(1); // delta_pic_order_always_zero_flag
|
||||
this.skipEG(); // offset_for_non_ref_pic
|
||||
this.skipEG(); // offset_for_top_to_bottom_field
|
||||
numRefFramesInPicOrderCntCycle = this.readUEG();
|
||||
for(i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
||||
this.skipEG(); // offset_for_ref_frame[ i ]
|
||||
}
|
||||
}
|
||||
this.skipUEG(); // max_num_ref_frames
|
||||
this.skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
||||
picWidthInMbsMinus1 = this.readUEG();
|
||||
picHeightInMapUnitsMinus1 = this.readUEG();
|
||||
frameMbsOnlyFlag = this.readBits(1);
|
||||
if (frameMbsOnlyFlag === 0) {
|
||||
this.skipBits(1); // mb_adaptive_frame_field_flag
|
||||
}
|
||||
this.skipBits(1); // direct_8x8_inference_flag
|
||||
if (this.readBoolean()) { // frame_cropping_flag
|
||||
frameCropLeftOffset = this.readUEG();
|
||||
frameCropRightOffset = this.readUEG();
|
||||
frameCropTopOffset = this.readUEG();
|
||||
frameCropBottomOffset = this.readUEG();
|
||||
}
|
||||
if (this.readBoolean()) {
|
||||
// vui_parameters_present_flag
|
||||
if (this.readBoolean()) {
|
||||
// aspect_ratio_info_present_flag
|
||||
let sarRatio;
|
||||
const aspectRatioIdc = this.readUByte();
|
||||
switch (aspectRatioIdc) {
|
||||
case 1: sarRatio = [1,1]; break;
|
||||
case 2: sarRatio = [12,11]; break;
|
||||
case 3: sarRatio = [10,11]; break;
|
||||
case 4: sarRatio = [16,11]; break;
|
||||
case 5: sarRatio = [40,33]; break;
|
||||
case 6: sarRatio = [24,11]; break;
|
||||
case 7: sarRatio = [20,11]; break;
|
||||
case 8: sarRatio = [32,11]; break;
|
||||
case 9: sarRatio = [80,33]; break;
|
||||
case 10: sarRatio = [18,11]; break;
|
||||
case 11: sarRatio = [15,11]; break;
|
||||
case 12: sarRatio = [64,33]; break;
|
||||
case 13: sarRatio = [160,99]; break;
|
||||
case 14: sarRatio = [4,3]; break;
|
||||
case 15: sarRatio = [3,2]; break;
|
||||
case 16: sarRatio = [2,1]; break;
|
||||
case 255: {
|
||||
sarRatio = [this.readUByte() << 8 | this.readUByte(), this.readUByte() << 8 | this.readUByte()];
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (sarRatio) {
|
||||
sarScale = sarRatio[0] / sarRatio[1];
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
|
||||
height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - ((frameMbsOnlyFlag? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset))
|
||||
};
|
||||
}
|
||||
|
||||
readSliceType() {
|
||||
// skip NALu type
|
||||
this.readUByte();
|
||||
// discard first_mb_in_slice
|
||||
this.readUEG();
|
||||
// return slice_type
|
||||
return this.readUEG();
|
||||
}
|
||||
}
|
||||
|
||||
export default ExpGolomb;
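// Aside: a worked Exp-Golomb read (illustrative helper, not used by the library).
// The ue(v) codeword 00101 has two leading zeros, so the reader consumes
// 2 + 1 more bits (101 = 5) and returns 5 - 1 = 4. Packed at the top of a byte
// with zero padding, that codeword is 0x28.
function expGolombExample() {
  var reader = new ExpGolomb(new Uint8Array([0x28]));
  return reader.readUEG(); // === 4
}
// Decoding those same bits with readEG() instead would yield -2
// (signed mapping: 0, 1, -1, 2, -2, ...).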
|
|
@ -1,128 +0,0 @@
|
|||
/**
|
||||
* ID3 parser
|
||||
*/
|
||||
import {logger} from '../utils/logger';
|
||||
//import Hex from '../utils/hex';
|
||||
|
||||
class ID3 {
|
||||
|
||||
constructor(data) {
|
||||
this._hasTimeStamp = false;
|
||||
var offset = 0, byte1,byte2,byte3,byte4,tagSize,endPos,header,len;
|
||||
do {
|
||||
header = this.readUTF(data,offset,3);
|
||||
offset+=3;
|
||||
// first check for ID3 header
|
||||
if (header === 'ID3') {
|
||||
// skip 24 bits
|
||||
offset += 3;
|
||||
// retrieve tag(s) length
|
||||
byte1 = data[offset++] & 0x7f;
|
||||
byte2 = data[offset++] & 0x7f;
|
||||
byte3 = data[offset++] & 0x7f;
|
||||
byte4 = data[offset++] & 0x7f;
|
||||
tagSize = (byte1 << 21) + (byte2 << 14) + (byte3 << 7) + byte4;
|
||||
endPos = offset + tagSize;
|
||||
//logger.log(`ID3 tag found, size/end: ${tagSize}/${endPos}`);
|
||||
|
||||
// read ID3 tags
|
||||
this._parseID3Frames(data, offset,endPos);
|
||||
offset = endPos;
|
||||
} else if (header === '3DI') {
|
||||
// http://id3.org/id3v2.4.0-structure chapter 3.4. ID3v2 footer
|
||||
offset += 7;
|
||||
logger.log(`3DI footer found, end: ${offset}`);
|
||||
} else {
|
||||
offset -= 3;
|
||||
len = offset;
|
||||
if (len) {
|
||||
//logger.log(`ID3 len: ${len}`);
|
||||
if (!this.hasTimeStamp) {
|
||||
logger.warn('ID3 tag found, but no timestamp');
|
||||
}
|
||||
this._length = len;
|
||||
this._payload = data.subarray(0,len);
|
||||
}
|
||||
return;
|
||||
}
|
||||
} while (true);
|
||||
}
|
||||
|
||||
readUTF(data,start,len) {
|
||||
|
||||
var result = '',offset = start, end = start + len;
|
||||
do {
|
||||
result += String.fromCharCode(data[offset++]);
|
||||
} while(offset < end);
|
||||
return result;
|
||||
}
|
||||
|
||||
_parseID3Frames(data,offset,endPos) {
|
||||
var tagId,tagLen,tagStart,tagFlags,timestamp;
|
||||
while(offset + 8 <= endPos) {
|
||||
tagId = this.readUTF(data,offset,4);
|
||||
offset +=4;
|
||||
|
||||
// frame size and flags follow the 4-byte frame id (big-endian byte order)
tagLen = (data[offset++] << 24) +
         (data[offset++] << 16) +
         (data[offset++] << 8) +
         data[offset++];

tagFlags = (data[offset++] << 8) +
           data[offset++];
|
||||
|
||||
tagStart = offset;
|
||||
//logger.log("ID3 tag id:" + tagId);
|
||||
switch(tagId) {
|
||||
case 'PRIV':
|
||||
//logger.log('parse frame:' + Hex.hexDump(data.subarray(offset,endPos)));
|
||||
// owner should be "com.apple.streaming.transportStreamTimestamp"
|
||||
if (this.readUTF(data,offset,44) === 'com.apple.streaming.transportStreamTimestamp') {
|
||||
offset+=44;
|
||||
// the owner matches: this is the Apple transport stream timestamp descriptor
|
||||
// skip null character (string end) + 3 first bytes
|
||||
offset+= 4;
|
||||
|
||||
// timestamp is 33 bit expressed as a big-endian eight-octet number, with the upper 31 bits set to zero.
|
||||
var pts33Bit = data[offset++] & 0x1;
|
||||
this._hasTimeStamp = true;
|
||||
|
||||
timestamp = ((data[offset++] << 23) +
|
||||
(data[offset++] << 15) +
|
||||
(data[offset++] << 7) +
|
||||
data[offset++]) /45;
|
||||
|
||||
if (pts33Bit) {
|
||||
timestamp += 47721858.84; // 2^32 / 90
|
||||
}
|
||||
timestamp = Math.round(timestamp);
|
||||
logger.trace(`ID3 timestamp found: ${timestamp}`);
|
||||
this._timeStamp = timestamp;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get hasTimeStamp() {
|
||||
return this._hasTimeStamp;
|
||||
}
|
||||
|
||||
get timeStamp() {
|
||||
return this._timeStamp;
|
||||
}
|
||||
|
||||
get length() {
|
||||
return this._length;
|
||||
}
|
||||
|
||||
get payload() {
|
||||
return this._payload;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default ID3;
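// Aside: the ID3v2 tag size read above is a 28-bit "synchsafe" integer - four
// bytes with the high bit of each masked off and the 7-bit groups concatenated
// (helper name is illustrative).
function id3TagSizeSketch(b1, b2, b3, b4) {
  return ((b1 & 0x7f) << 21) + ((b2 & 0x7f) << 14) + ((b3 & 0x7f) << 7) + (b4 & 0x7f);
}
// e.g. id3TagSizeSketch(0x00, 0x00, 0x02, 0x01) === 257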
|
||||
|
|
@ -1,703 +0,0 @@
|
|||
/**
|
||||
* highly optimized TS demuxer:
|
||||
* parse PAT, PMT
|
||||
* extract PES packet from audio and video PIDs
|
||||
* extract AVC/H264 NAL units and AAC/ADTS samples from PES packet
|
||||
* trigger the remuxer upon parsing completion
|
||||
* it also tries, as best it can, to work around audio codec switches (HE-AAC to AAC and vice versa) without having to restart the MediaSource.
|
||||
* it also controls the remuxing process :
|
||||
* upon discontinuity or level switch detection, it notifies the remuxer so that it can reset its state.
|
||||
*/
|
||||
|
||||
import ADTS from './adts';
|
||||
import Event from '../events';
|
||||
import ExpGolomb from './exp-golomb';
|
||||
// import Hex from '../utils/hex';
|
||||
import {logger} from '../utils/logger';
|
||||
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
|
||||
class TSDemuxer {
|
||||
|
||||
constructor(observer,remuxerClass) {
|
||||
this.observer = observer;
|
||||
this.remuxerClass = remuxerClass;
|
||||
this.lastCC = 0;
|
||||
this.remuxer = new this.remuxerClass(observer);
|
||||
}
|
||||
|
||||
static probe(data) {
|
||||
// a TS fragment should contain at least 3 TS packets, a PAT, a PMT, and one PID, each starting with 0x47
|
||||
if (data.length >= 3*188 && data[0] === 0x47 && data[188] === 0x47 && data[2*188] === 0x47) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
switchLevel() {
|
||||
this.pmtParsed = false;
|
||||
this._pmtId = -1;
|
||||
this._avcTrack = {container : 'video/mp2t', type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0, dropped : 0};
|
||||
this._aacTrack = {container : 'video/mp2t', type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
|
||||
this._id3Track = {type: 'id3', id :-1, sequenceNumber: 0, samples : [], len : 0};
|
||||
this._txtTrack = {type: 'text', id: -1, sequenceNumber: 0, samples: [], len: 0};
|
||||
// flush any partial content
|
||||
this.aacOverFlow = null;
|
||||
this.aacLastPTS = null;
|
||||
this.avcNaluState = 0;
|
||||
this.remuxer.switchLevel();
|
||||
}
|
||||
|
||||
insertDiscontinuity() {
|
||||
this.switchLevel();
|
||||
this.remuxer.insertDiscontinuity();
|
||||
}
|
||||
|
||||
// feed incoming data to the front of the parsing pipeline
|
||||
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
|
||||
var avcData, aacData, id3Data,
|
||||
start, len = data.length, stt, pid, atf, offset,
|
||||
codecsOnly = this.remuxer.passthrough,
|
||||
unknownPIDs = false;
|
||||
|
||||
this.audioCodec = audioCodec;
|
||||
this.videoCodec = videoCodec;
|
||||
this.timeOffset = timeOffset;
|
||||
this._duration = duration;
|
||||
this.contiguous = false;
|
||||
if (cc !== this.lastCC) {
|
||||
logger.log('discontinuity detected');
|
||||
this.insertDiscontinuity();
|
||||
this.lastCC = cc;
|
||||
}
|
||||
if (level !== this.lastLevel) {
|
||||
logger.log('level switch detected');
|
||||
this.switchLevel();
|
||||
this.lastLevel = level;
|
||||
} else if (sn === (this.lastSN+1)) {
|
||||
this.contiguous = true;
|
||||
}
|
||||
this.lastSN = sn;
|
||||
|
||||
var pmtParsed = this.pmtParsed,
|
||||
avcId = this._avcTrack.id,
|
||||
aacId = this._aacTrack.id,
|
||||
id3Id = this._id3Track.id;
|
||||
|
||||
// don't parse last TS packet if incomplete
|
||||
len -= len % 188;
|
||||
// loop through TS packets
|
||||
for (start = 0; start < len; start += 188) {
|
||||
if (data[start] === 0x47) {
|
||||
stt = !!(data[start + 1] & 0x40);
|
||||
// pid is a 13-bit field starting at the last bit of TS[1]
|
||||
pid = ((data[start + 1] & 0x1f) << 8) + data[start + 2];
|
||||
atf = (data[start + 3] & 0x30) >> 4;
|
||||
// if an adaptation field is present, its length is specified by the fifth byte of the TS packet header.
|
||||
if (atf > 1) {
|
||||
offset = start + 5 + data[start + 4];
|
||||
// continue if there is only adaptation field
|
||||
if (offset === (start + 188)) {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
offset = start + 4;
|
||||
}
|
||||
if (pmtParsed) {
|
||||
if (pid === avcId) {
|
||||
if (stt) {
|
||||
if (avcData) {
|
||||
this._parseAVCPES(this._parsePES(avcData));
|
||||
if (codecsOnly) {
|
||||
// if we have video codec info AND
|
||||
// if audio PID is undefined OR if we have audio codec info,
|
||||
// we have all codec info !
|
||||
if (this._avcTrack.codec && (aacId === -1 || this._aacTrack.codec)) {
|
||||
this.remux(data);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
avcData = {data: [], size: 0};
|
||||
}
|
||||
if (avcData) {
|
||||
avcData.data.push(data.subarray(offset, start + 188));
|
||||
avcData.size += start + 188 - offset;
|
||||
}
|
||||
} else if (pid === aacId) {
|
||||
if (stt) {
|
||||
if (aacData) {
|
||||
this._parseAACPES(this._parsePES(aacData));
|
||||
if (codecsOnly) {
|
||||
// here we know that we have audio codec info
|
||||
// if video PID is undefined OR if we have video codec info,
|
||||
// we have all the codec info we need
|
||||
if (this._aacTrack.codec && (avcId === -1 || this._avcTrack.codec)) {
|
||||
this.remux(data);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
aacData = {data: [], size: 0};
|
||||
}
|
||||
if (aacData) {
|
||||
aacData.data.push(data.subarray(offset, start + 188));
|
||||
aacData.size += start + 188 - offset;
|
||||
}
|
||||
} else if (pid === id3Id) {
|
||||
if (stt) {
|
||||
if (id3Data) {
|
||||
this._parseID3PES(this._parsePES(id3Data));
|
||||
}
|
||||
id3Data = {data: [], size: 0};
|
||||
}
|
||||
if (id3Data) {
|
||||
id3Data.data.push(data.subarray(offset, start + 188));
|
||||
id3Data.size += start + 188 - offset;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (stt) {
|
||||
offset += data[offset] + 1;
|
||||
}
|
||||
if (pid === 0) {
|
||||
this._parsePAT(data, offset);
|
||||
} else if (pid === this._pmtId) {
|
||||
this._parsePMT(data, offset);
|
||||
pmtParsed = this.pmtParsed = true;
|
||||
avcId = this._avcTrack.id;
|
||||
aacId = this._aacTrack.id;
|
||||
id3Id = this._id3Track.id;
|
||||
if (unknownPIDs) {
|
||||
logger.log('reparse from beginning');
|
||||
unknownPIDs = false;
|
||||
// we set it to -188, the += 188 in the for loop will reset start to 0
|
||||
start = -188;
|
||||
}
|
||||
} else {
|
||||
logger.log('unknown PID found before PAT/PMT');
|
||||
unknownPIDs = true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.observer.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'TS packet did not start with 0x47'});
|
||||
}
|
||||
}
|
||||
// parse last PES packet
|
||||
if (avcData) {
|
||||
this._parseAVCPES(this._parsePES(avcData));
|
||||
}
|
||||
if (aacData) {
|
||||
this._parseAACPES(this._parsePES(aacData));
|
||||
}
|
||||
if (id3Data) {
|
||||
this._parseID3PES(this._parsePES(id3Data));
|
||||
}
|
||||
this.remux(null);
|
||||
}
|
||||
|
||||
remux(data) {
|
||||
this.remuxer.remux(this._aacTrack, this._avcTrack, this._id3Track, this._txtTrack, this.timeOffset, this.contiguous, data);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.switchLevel();
|
||||
this._initPTS = this._initDTS = undefined;
|
||||
this._duration = 0;
|
||||
}
|
||||
|
||||
_parsePAT(data, offset) {
|
||||
// skip the PSI header and parse the first PMT entry
|
||||
this._pmtId = (data[offset + 10] & 0x1F) << 8 | data[offset + 11];
|
||||
//logger.log('PMT PID:' + this._pmtId);
|
||||
}
|
||||
|
||||
_parsePMT(data, offset) {
|
||||
var sectionLength, tableEnd, programInfoLength, pid;
|
||||
sectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2];
|
||||
tableEnd = offset + 3 + sectionLength - 4;
|
||||
// to determine where the table is, we have to figure out how
|
||||
// long the program info descriptors are
|
||||
programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11];
|
||||
// advance the offset to the first entry in the mapping table
|
||||
offset += 12 + programInfoLength;
|
||||
while (offset < tableEnd) {
|
||||
pid = (data[offset + 1] & 0x1F) << 8 | data[offset + 2];
|
||||
switch(data[offset]) {
|
||||
// ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio)
|
||||
case 0x0f:
|
||||
//logger.log('AAC PID:' + pid);
|
||||
this._aacTrack.id = pid;
|
||||
break;
|
||||
// Packetized metadata (ID3)
|
||||
case 0x15:
|
||||
//logger.log('ID3 PID:' + pid);
|
||||
this._id3Track.id = pid;
|
||||
break;
|
||||
// ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video)
|
||||
case 0x1b:
|
||||
//logger.log('AVC PID:' + pid);
|
||||
this._avcTrack.id = pid;
|
||||
break;
|
||||
default:
|
||||
logger.log('unknown stream type:' + data[offset]);
|
||||
break;
|
||||
}
|
||||
// move to the next table entry
|
||||
// skip past the elementary stream descriptors, if present
|
||||
offset += ((data[offset + 3] & 0x0F) << 8 | data[offset + 4]) + 5;
|
||||
}
|
||||
}
|
||||
|
||||
_parsePES(stream) {
|
||||
var i = 0, frag, pesFlags, pesPrefix, pesLen, pesHdrLen, pesData, pesPts, pesDts, payloadStartOffset, data = stream.data;
|
||||
//retrieve PTS/DTS from first fragment
|
||||
frag = data[0];
|
||||
pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2];
|
||||
if (pesPrefix === 1) {
|
||||
pesLen = (frag[4] << 8) + frag[5];
|
||||
pesFlags = frag[7];
|
||||
if (pesFlags & 0xC0) {
|
||||
/* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
|
||||
as PTS / DTS is 33 bit we cannot use bitwise operator in JS,
|
||||
as Bitwise operators treat their operands as a sequence of 32 bits */
|
||||
pesPts = (frag[9] & 0x0E) * 536870912 +// 1 << 29
|
||||
(frag[10] & 0xFF) * 4194304 +// 1 << 22
|
||||
(frag[11] & 0xFE) * 16384 +// 1 << 14
|
||||
(frag[12] & 0xFF) * 128 +// 1 << 7
|
||||
(frag[13] & 0xFE) / 2;
|
||||
// check if greater than 2^32 -1
|
||||
if (pesPts > 4294967295) {
|
||||
// decrement 2^33
|
||||
pesPts -= 8589934592;
|
||||
}
|
||||
if (pesFlags & 0x40) {
|
||||
pesDts = (frag[14] & 0x0E ) * 536870912 +// 1 << 29
|
||||
(frag[15] & 0xFF ) * 4194304 +// 1 << 22
|
||||
(frag[16] & 0xFE ) * 16384 +// 1 << 14
|
||||
(frag[17] & 0xFF ) * 128 +// 1 << 7
|
||||
(frag[18] & 0xFE ) / 2;
|
||||
// check if greater than 2^32 -1
|
||||
if (pesDts > 4294967295) {
|
||||
// decrement 2^33
|
||||
pesDts -= 8589934592;
|
||||
}
|
||||
} else {
|
||||
pesDts = pesPts;
|
||||
}
|
||||
}
|
||||
pesHdrLen = frag[8];
|
||||
payloadStartOffset = pesHdrLen + 9;
|
||||
|
||||
stream.size -= payloadStartOffset;
|
||||
//reassemble PES packet
|
||||
pesData = new Uint8Array(stream.size);
|
||||
while (data.length) {
|
||||
frag = data.shift();
|
||||
var len = frag.byteLength;
|
||||
if (payloadStartOffset) {
|
||||
if (payloadStartOffset > len) {
|
||||
// trim full frag if PES header bigger than frag
|
||||
payloadStartOffset-=len;
|
||||
continue;
|
||||
} else {
|
||||
// trim partial frag if PES header smaller than frag
|
||||
frag = frag.subarray(payloadStartOffset);
|
||||
len-=payloadStartOffset;
|
||||
payloadStartOffset = 0;
|
||||
}
|
||||
}
|
||||
pesData.set(frag, i);
|
||||
i+=len;
|
||||
}
|
||||
return {data: pesData, pts: pesPts, dts: pesDts, len: pesLen};
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
_parseAVCPES(pes) {
|
||||
var track = this._avcTrack,
|
||||
samples = track.samples,
|
||||
units = this._parseAVCNALu(pes.data),
|
||||
units2 = [],
|
||||
debug = false,
|
||||
key = false,
|
||||
length = 0,
|
||||
expGolombDecoder,
|
||||
avcSample,
|
||||
push,
|
||||
i;
|
||||
// no NALu found
|
||||
if (units.length === 0 && samples.length > 0) {
|
||||
// append pes.data to previous NAL unit
|
||||
var lastavcSample = samples[samples.length - 1];
|
||||
var lastUnit = lastavcSample.units.units[lastavcSample.units.units.length - 1];
|
||||
var tmp = new Uint8Array(lastUnit.data.byteLength + pes.data.byteLength);
|
||||
tmp.set(lastUnit.data, 0);
|
||||
tmp.set(pes.data, lastUnit.data.byteLength);
|
||||
lastUnit.data = tmp;
|
||||
lastavcSample.units.length += pes.data.byteLength;
|
||||
track.len += pes.data.byteLength;
|
||||
}
|
||||
//free pes.data to save up some memory
|
||||
pes.data = null;
|
||||
var debugString = '';
|
||||
|
||||
var pushAccesUnit = function() {
|
||||
if (units2.length) {
|
||||
// only push AVC sample if keyframe already found in this fragment OR
|
||||
// keyframe found in last fragment (track.sps) AND
|
||||
// samples already appended (we already found a keyframe in this fragment) OR fragment is contiguous
|
||||
if (key === true ||
|
||||
(track.sps && (samples.length || this.contiguous))) {
|
||||
avcSample = {units: { units : units2, length : length}, pts: pes.pts, dts: pes.dts, key: key};
|
||||
samples.push(avcSample);
|
||||
track.len += length;
|
||||
track.nbNalu += units2.length;
|
||||
} else {
|
||||
// dropped samples, track it
|
||||
track.dropped++;
|
||||
}
|
||||
units2 = [];
|
||||
length = 0;
|
||||
}
|
||||
}.bind(this);
|
||||
|
||||
units.forEach(unit => {
|
||||
switch(unit.type) {
|
||||
//NDR
|
||||
case 1:
|
||||
push = true;
|
||||
if(debug) {
|
||||
debugString += 'NDR ';
|
||||
}
|
||||
break;
|
||||
//IDR
|
||||
case 5:
|
||||
push = true;
|
||||
if(debug) {
|
||||
debugString += 'IDR ';
|
||||
}
|
||||
key = true;
|
||||
break;
|
||||
//SEI
|
||||
case 6:
|
||||
push = true;
|
||||
if(debug) {
|
||||
debugString += 'SEI ';
|
||||
}
|
||||
expGolombDecoder = new ExpGolomb(unit.data);
|
||||
|
||||
// skip frameType
|
||||
expGolombDecoder.readUByte();
|
||||
|
||||
var payloadType = expGolombDecoder.readUByte();
|
||||
|
||||
// TODO: there can be more than one payload in an SEI packet...
|
||||
// TODO: need to read type and size in a while loop to get them all
|
||||
if (payloadType === 4)
|
||||
{
|
||||
var payloadSize = 0;
|
||||
|
||||
do {
|
||||
payloadSize = expGolombDecoder.readUByte();
|
||||
}
|
||||
while (payloadSize === 255);
|
||||
|
||||
var countryCode = expGolombDecoder.readUByte();
|
||||
|
||||
if (countryCode === 181)
|
||||
{
|
||||
var providerCode = expGolombDecoder.readUShort();
|
||||
|
||||
if (providerCode === 49)
|
||||
{
|
||||
var userStructure = expGolombDecoder.readUInt();
|
||||
|
||||
if (userStructure === 0x47413934)
|
||||
{
|
||||
var userDataType = expGolombDecoder.readUByte();
|
||||
|
||||
// Raw CEA-608 bytes wrapped in CEA-708 packet
|
||||
if (userDataType === 3)
|
||||
{
|
||||
var firstByte = expGolombDecoder.readUByte();
|
||||
var secondByte = expGolombDecoder.readUByte();
|
||||
|
||||
var totalCCs = 31 & firstByte;
|
||||
var byteArray = [firstByte, secondByte];
|
||||
|
||||
for (i=0; i<totalCCs; i++)
|
||||
{
|
||||
// 3 bytes per CC
|
||||
byteArray.push(expGolombDecoder.readUByte());
|
||||
byteArray.push(expGolombDecoder.readUByte());
|
||||
byteArray.push(expGolombDecoder.readUByte());
|
||||
}
|
||||
|
||||
this._txtTrack.samples.push({type: 3, pts: pes.pts, bytes: byteArray});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
//SPS
|
||||
case 7:
|
||||
push = true;
|
||||
if(debug) {
|
||||
debugString += 'SPS ';
|
||||
}
|
||||
if(!track.sps) {
|
||||
expGolombDecoder = new ExpGolomb(unit.data);
|
||||
var config = expGolombDecoder.readSPS();
|
||||
track.width = config.width;
|
||||
track.height = config.height;
|
||||
track.sps = [unit.data];
|
||||
track.duration = this._duration;
|
||||
var codecarray = unit.data.subarray(1, 4);
|
||||
var codecstring = 'avc1.';
|
||||
for (i = 0; i < 3; i++) {
|
||||
var h = codecarray[i].toString(16);
|
||||
if (h.length < 2) {
|
||||
h = '0' + h;
|
||||
}
|
||||
codecstring += h;
|
||||
}
|
||||
track.codec = codecstring;
|
||||
}
|
||||
break;
|
||||
//PPS
|
||||
case 8:
|
||||
push = true;
|
||||
if(debug) {
|
||||
debugString += 'PPS ';
|
||||
}
|
||||
if (!track.pps) {
|
||||
track.pps = [unit.data];
|
||||
}
|
||||
break;
|
||||
case 9:
|
||||
push = false;
|
||||
if(debug) {
|
||||
debugString += 'AUD ';
|
||||
}
|
||||
pushAccesUnit();
|
||||
break;
|
||||
default:
|
||||
push = false;
|
||||
debugString += 'unknown NAL ' + unit.type + ' ';
|
||||
break;
|
||||
}
|
||||
if(push) {
|
||||
units2.push(unit);
|
||||
length+=unit.data.byteLength;
|
||||
}
|
||||
});
|
||||
if(debug || debugString.length) {
|
||||
logger.log(debugString);
|
||||
}
|
||||
pushAccesUnit();
|
||||
}
|
||||
|
||||
|
||||
_parseAVCNALu(array) {
|
||||
var i = 0, len = array.byteLength, value, overflow, state = this.avcNaluState;
|
||||
var units = [], unit, unitType, lastUnitStart, lastUnitType;
|
||||
//logger.log('PES:' + Hex.hexDump(array));
|
||||
while (i < len) {
|
||||
value = array[i++];
|
||||
// finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01)
|
||||
switch (state) {
|
||||
case 0:
|
||||
if (value === 0) {
|
||||
state = 1;
|
||||
}
|
||||
break;
|
||||
case 1:
|
||||
if( value === 0) {
|
||||
state = 2;
|
||||
} else {
|
||||
state = 0;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
case 3:
|
||||
if( value === 0) {
|
||||
state = 3;
|
||||
} else if (value === 1 && i < len) {
|
||||
unitType = array[i] & 0x1f;
|
||||
//logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
||||
if (lastUnitStart) {
|
||||
unit = {data: array.subarray(lastUnitStart, i - state - 1), type: lastUnitType};
|
||||
//logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
||||
units.push(unit);
|
||||
} else {
|
||||
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
||||
// first check if start code delimiter is overlapping between 2 PES packets,
|
||||
// ie it started in last packet (lastState not zero)
|
||||
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
||||
let lastState = this.avcNaluState;
|
||||
if(lastState && (i <= 4 - lastState)) {
|
||||
// start delimiter overlapping between PES packets
|
||||
// strip start delimiter bytes from the end of last NAL unit
|
||||
let track = this._avcTrack,
|
||||
samples = track.samples;
|
||||
if (samples.length) {
|
||||
let lastavcSample = samples[samples.length - 1],
|
||||
lastUnits = lastavcSample.units.units,
|
||||
lastUnit = lastUnits[lastUnits.length - 1];
|
||||
// check if lastUnit had a state different from zero
|
||||
if (lastUnit.state) {
|
||||
// strip last bytes
|
||||
lastUnit.data = lastUnit.data.subarray(0,lastUnit.data.byteLength - lastState);
|
||||
lastavcSample.units.length -= lastState;
|
||||
track.len -= lastState;
|
||||
}
|
||||
}
|
||||
}
|
||||
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
||||
overflow = i - state - 1;
|
||||
if (overflow > 0) {
|
||||
let track = this._avcTrack,
|
||||
samples = track.samples;
|
||||
//logger.log('first NALU found with overflow:' + overflow);
|
||||
if (samples.length) {
|
||||
let lastavcSample = samples[samples.length - 1],
|
||||
lastUnits = lastavcSample.units.units,
|
||||
lastUnit = lastUnits[lastUnits.length - 1],
|
||||
tmp = new Uint8Array(lastUnit.data.byteLength + overflow);
|
||||
tmp.set(lastUnit.data, 0);
|
||||
tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength);
|
||||
lastUnit.data = tmp;
|
||||
lastavcSample.units.length += overflow;
|
||||
track.len += overflow;
|
||||
}
|
||||
}
|
||||
}
|
||||
lastUnitStart = i;
|
||||
lastUnitType = unitType;
|
||||
state = 0;
|
||||
} else {
|
||||
state = 0;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (lastUnitStart) {
|
||||
unit = {data: array.subarray(lastUnitStart, len), type: lastUnitType, state : state};
|
||||
units.push(unit);
|
||||
//logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
||||
this.avcNaluState = state;
|
||||
}
|
||||
return units;
|
||||
}
|
||||
|
||||
_parseAACPES(pes) {
|
||||
var track = this._aacTrack,
|
||||
data = pes.data,
|
||||
pts = pes.pts,
|
||||
startOffset = 0,
|
||||
duration = this._duration,
|
||||
audioCodec = this.audioCodec,
|
||||
aacOverFlow = this.aacOverFlow,
|
||||
        aacLastPTS = this.aacLastPTS,
        config, frameLength, frameDuration, frameIndex, offset, headerLength, stamp, len, aacSample;

    if (aacOverFlow) {
      var tmp = new Uint8Array(aacOverFlow.byteLength + data.byteLength);
      tmp.set(aacOverFlow, 0);
      tmp.set(data, aacOverFlow.byteLength);
      //logger.log(`AAC: append overflowing ${aacOverFlow.byteLength} bytes to beginning of new PES`);
      data = tmp;
    }
    // look for ADTS header (0xFFFx)
    for (offset = startOffset, len = data.length; offset < len - 1; offset++) {
      if ((data[offset] === 0xff) && (data[offset+1] & 0xf0) === 0xf0) {
        break;
      }
    }
    // if ADTS header does not start straight from the beginning of the PES payload, raise an error
    if (offset) {
      var reason, fatal;
      if (offset < len - 1) {
        reason = `AAC PES did not start with ADTS header,offset:${offset}`;
        fatal = false;
      } else {
        reason = 'no ADTS header found in AAC PES';
        fatal = true;
      }
      this.observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: fatal, reason: reason});
      if (fatal) {
        return;
      }
    }
    if (!track.audiosamplerate) {
      config = ADTS.getAudioConfig(this.observer, data, offset, audioCodec);
      track.config = config.config;
      track.audiosamplerate = config.samplerate;
      track.channelCount = config.channelCount;
      track.codec = config.codec;
      track.duration = duration;
      logger.log(`parsed codec:${track.codec},rate:${config.samplerate},nb channel:${config.channelCount}`);
    }
    frameIndex = 0;
    frameDuration = 1024 * 90000 / track.audiosamplerate;

    // if last AAC frame is overflowing, we should ensure timestamps are contiguous:
    // first sample PTS should be equal to last sample PTS + frameDuration
    if (aacOverFlow && aacLastPTS) {
      var newPTS = aacLastPTS + frameDuration;
      if (Math.abs(newPTS - pts) > 1) {
        logger.log(`AAC: align PTS for overlapping frames by ${Math.round((newPTS - pts) / 90)}`);
        pts = newPTS;
      }
    }

    while ((offset + 5) < len) {
      // The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
      headerLength = (!!(data[offset + 1] & 0x01) ? 7 : 9);
      // retrieve frame size
      frameLength = ((data[offset + 3] & 0x03) << 11) |
                    (data[offset + 4] << 3) |
                    ((data[offset + 5] & 0xE0) >>> 5);
      frameLength -= headerLength;
      //stamp = pes.pts;

      if ((frameLength > 0) && ((offset + headerLength + frameLength) <= len)) {
        stamp = pts + frameIndex * frameDuration;
        //logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}/${(stamp/90).toFixed(0)}`);
        aacSample = {unit: data.subarray(offset + headerLength, offset + headerLength + frameLength), pts: stamp, dts: stamp};
        track.samples.push(aacSample);
        track.len += frameLength;
        offset += frameLength + headerLength;
        frameIndex++;
        // look for ADTS header (0xFFFx)
        for ( ; offset < (len - 1); offset++) {
          if ((data[offset] === 0xff) && ((data[offset + 1] & 0xf0) === 0xf0)) {
            break;
          }
        }
      } else {
        break;
      }
    }
    if (offset < len) {
      aacOverFlow = data.subarray(offset, len);
      //logger.log(`AAC: overflow detected:${len-offset}`);
    } else {
      aacOverFlow = null;
    }
    this.aacOverFlow = aacOverFlow;
    this.aacLastPTS = stamp;
  }

  _parseID3PES(pes) {
    this._id3Track.samples.push(pes);
  }
}

export default TSDemuxer;
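For reference, a standalone sketch of the ADTS header math used in _parseAACPES above; the function name and sample usage are illustrative only, not part of the library.

// Sync word, protection bit and frame length, as read by the demuxer above.
// Each AAC frame carries 1024 samples, hence frameDuration = 1024 * 90000 / samplerate
// in the 90 kHz MPEG-TS timescale.
function readAdtsHeader(data, offset) {
  // sync word: 12 bits set to 1 (0xFFFx)
  if (data[offset] !== 0xff || (data[offset + 1] & 0xf0) !== 0xf0) {
    return null;
  }
  // protection_absent bit set => 7 byte header, cleared => 7 bytes + 2 bytes of CRC
  var headerLength = (data[offset + 1] & 0x01) ? 7 : 9;
  // frame_length (13 bits over bytes 3..5) includes the header itself
  var frameLength = ((data[offset + 3] & 0x03) << 11) |
                    (data[offset + 4] << 3) |
                    ((data[offset + 5] & 0xe0) >>> 5);
  return {headerLength: headerLength, frameLength: frameLength, payloadLength: frameLength - headerLength};
}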
@ -1,51 +0,0 @@
export const ErrorTypes = {
  // Identifier for a network error (loading error / timeout ...)
  NETWORK_ERROR: 'networkError',
  // Identifier for a media Error (video/parsing/mediasource error)
  MEDIA_ERROR: 'mediaError',
  // Identifier for all other errors
  OTHER_ERROR: 'otherError'
};

export const ErrorDetails = {
  // Identifier for a manifest load error - data: { url : faulty URL, response : XHR response}
  MANIFEST_LOAD_ERROR: 'manifestLoadError',
  // Identifier for a manifest load timeout - data: { url : faulty URL, response : XHR response}
  MANIFEST_LOAD_TIMEOUT: 'manifestLoadTimeOut',
  // Identifier for a manifest parsing error - data: { url : faulty URL, reason : error reason}
  MANIFEST_PARSING_ERROR: 'manifestParsingError',
  // Identifier for a manifest with only incompatible codecs error - data: { url : faulty URL, reason : error reason}
  MANIFEST_INCOMPATIBLE_CODECS_ERROR: 'manifestIncompatibleCodecsError',
  // Identifier for playlist load error - data: { url : faulty URL, response : XHR response}
  LEVEL_LOAD_ERROR: 'levelLoadError',
  // Identifier for playlist load timeout - data: { url : faulty URL, response : XHR response}
  LEVEL_LOAD_TIMEOUT: 'levelLoadTimeOut',
  // Identifier for a level switch error - data: { level : faulty level Id, event : error description}
  LEVEL_SWITCH_ERROR: 'levelSwitchError',
  // Identifier for fragment load error - data: { frag : fragment object, response : XHR response}
  FRAG_LOAD_ERROR: 'fragLoadError',
  // Identifier for fragment loop loading error - data: { frag : fragment object}
  FRAG_LOOP_LOADING_ERROR: 'fragLoopLoadingError',
  // Identifier for fragment load timeout error - data: { frag : fragment object}
  FRAG_LOAD_TIMEOUT: 'fragLoadTimeOut',
  // Identifier for a fragment decryption error event - data: parsing error description
  FRAG_DECRYPT_ERROR: 'fragDecryptError',
  // Identifier for a fragment parsing error event - data: parsing error description
  FRAG_PARSING_ERROR: 'fragParsingError',
  // Identifier for decrypt key load error - data: { frag : fragment object, response : XHR response}
  KEY_LOAD_ERROR: 'keyLoadError',
  // Identifier for decrypt key load timeout error - data: { frag : fragment object}
  KEY_LOAD_TIMEOUT: 'keyLoadTimeOut',
  // Identifier for a buffer append error - data: append error description
  BUFFER_APPEND_ERROR: 'bufferAppendError',
  // Identifier for a buffer appending error event - data: appending error description
  BUFFER_APPENDING_ERROR: 'bufferAppendingError',
  // Identifier for a buffer stalled error event
  BUFFER_STALLED_ERROR: 'bufferStalledError',
  // Identifier for a buffer full event
  BUFFER_FULL_ERROR: 'bufferFullError',
  // Identifier for a buffer seek over hole event
  BUFFER_SEEK_OVER_HOLE: 'bufferSeekOverHole',
  // Identifier for an internal exception happening inside hls.js while handling an event
  INTERNAL_EXCEPTION: 'internalException'
};
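A minimal sketch of how an application could react to these error types; it assumes an existing Hls instance named hls and follows the fatal/non-fatal split documented above.

hls.on(Hls.Events.ERROR, function (event, data) {
  if (!data.fatal) {
    return; // non-fatal errors are usually recovered internally
  }
  switch (data.type) {
    case Hls.ErrorTypes.NETWORK_ERROR:
      hls.startLoad();         // e.g. manifestLoadError / levelLoadTimeOut: retry loading
      break;
    case Hls.ErrorTypes.MEDIA_ERROR:
      hls.recoverMediaError(); // e.g. bufferAppendError: detach/reattach the media element
      break;
    default:
      hls.destroy();           // give up on anything else
      break;
  }
});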
@ -1,73 +0,0 @@
/*
 *
 * All objects in the event handling chain should inherit from this class
 *
 */

import {logger} from './utils/logger';
import {ErrorTypes, ErrorDetails} from './errors';
import Event from './events';

class EventHandler {

  constructor(hls, ...events) {
    this.hls = hls;
    this.onEvent = this.onEvent.bind(this);
    this.handledEvents = events;
    this.useGenericHandler = true;

    this.registerListeners();
  }

  destroy() {
    this.unregisterListeners();
  }

  isEventHandler() {
    return typeof this.handledEvents === 'object' && this.handledEvents.length && typeof this.onEvent === 'function';
  }

  registerListeners() {
    if (this.isEventHandler()) {
      this.handledEvents.forEach(function(event) {
        if (event === 'hlsEventGeneric') {
          throw new Error('Forbidden event name: ' + event);
        }
        this.hls.on(event, this.onEvent);
      }.bind(this));
    }
  }

  unregisterListeners() {
    if (this.isEventHandler()) {
      this.handledEvents.forEach(function(event) {
        this.hls.off(event, this.onEvent);
      }.bind(this));
    }
  }

  /*
   * arguments: event (string), data (any)
   */
  onEvent(event, data) {
    this.onEventGeneric(event, data);
  }

  onEventGeneric(event, data) {
    var eventToFunction = function(event, data) {
      var funcName = 'on' + event.replace('hls', '');
      if (typeof this[funcName] !== 'function') {
        throw new Error(`Event ${event} has no generic handler in this ${this.constructor.name} class (tried ${funcName})`);
      }
      return this[funcName].bind(this, data);
    };
    try {
      eventToFunction.call(this, event, data).call();
    } catch (err) {
      logger.error(`internal error happened while processing ${event}:${err.message}`);
      this.hls.trigger(Event.ERROR, {type: ErrorTypes.OTHER_ERROR, details: ErrorDetails.INTERNAL_EXCEPTION, fatal: false, event : event, err : err});
    }
  }
}

export default EventHandler;
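A minimal sketch of a component built on this class: onEventGeneric() maps 'hlsMediaAttached' to onMediaAttached(), so a subclass only declares the events it handles. The class name and logging are illustrative, and the import paths assume the file sits next to event-handler.js.

import Event from './events';
import EventHandler from './event-handler';

class AttachLogger extends EventHandler {
  constructor(hls) {
    // these event names are routed to onMediaAttached()/onMediaDetached() by the generic handler
    super(hls, Event.MEDIA_ATTACHED, Event.MEDIA_DETACHED);
  }
  onMediaAttached() {
    console.log('media element attached');
  }
  onMediaDetached() {
    console.log('media element detached');
  }
}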
@ -1,72 +0,0 @@
|
|||
module.exports = {
|
||||
// fired before MediaSource is attaching to media element - data: { media }
|
||||
MEDIA_ATTACHING: 'hlsMediaAttaching',
|
||||
// fired when MediaSource has been succesfully attached to media element - data: { }
|
||||
MEDIA_ATTACHED: 'hlsMediaAttached',
|
||||
// fired before detaching MediaSource from media element - data: { }
|
||||
MEDIA_DETACHING: 'hlsMediaDetaching',
|
||||
// fired when MediaSource has been detached from media element - data: { }
|
||||
MEDIA_DETACHED: 'hlsMediaDetached',
|
||||
// fired when we buffer is going to be resetted
|
||||
BUFFER_RESET: 'hlsBufferReset',
|
||||
// fired when we know about the codecs that we need buffers for to push into - data: {tracks : { container, codec, levelCodec, initSegment, metadata }}
|
||||
BUFFER_CODECS: 'hlsBufferCodecs',
|
||||
// fired when we append a segment to the buffer - data: { segment: segment object }
|
||||
BUFFER_APPENDING: 'hlsBufferAppending',
|
||||
// fired when we are done with appending a media segment to the buffer
|
||||
BUFFER_APPENDED: 'hlsBufferAppended',
|
||||
// fired when the stream is finished and we want to notify the media buffer that there will be no more data
|
||||
BUFFER_EOS: 'hlsBufferEos',
|
||||
// fired when the media buffer should be flushed - data {startOffset, endOffset}
|
||||
BUFFER_FLUSHING: 'hlsBufferFlushing',
|
||||
// fired when the media has been flushed
|
||||
BUFFER_FLUSHED: 'hlsBufferFlushed',
|
||||
// fired to signal that a manifest loading starts - data: { url : manifestURL}
|
||||
MANIFEST_LOADING: 'hlsManifestLoading',
|
||||
// fired after manifest has been loaded - data: { levels : [available quality levels] , url : manifestURL, stats : { trequest, tfirst, tload, mtime}}
|
||||
MANIFEST_LOADED: 'hlsManifestLoaded',
|
||||
// fired after manifest has been parsed - data: { levels : [available quality levels] , firstLevel : index of first quality level appearing in Manifest}
|
||||
MANIFEST_PARSED: 'hlsManifestParsed',
|
||||
// fired when a level playlist loading starts - data: { url : level URL level : id of level being loaded}
|
||||
LEVEL_LOADING: 'hlsLevelLoading',
|
||||
// fired when a level playlist loading finishes - data: { details : levelDetails object, level : id of loaded level, stats : { trequest, tfirst, tload, mtime} }
|
||||
LEVEL_LOADED: 'hlsLevelLoaded',
|
||||
// fired when a level's details have been updated based on previous details, after it has been loaded. - data: { details : levelDetails object, level : id of updated level }
|
||||
LEVEL_UPDATED: 'hlsLevelUpdated',
|
||||
// fired when a level's PTS information has been updated after parsing a fragment - data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }
|
||||
LEVEL_PTS_UPDATED: 'hlsLevelPtsUpdated',
|
||||
// fired when a level switch is requested - data: { level : id of new level }
|
||||
LEVEL_SWITCH: 'hlsLevelSwitch',
|
||||
// fired when a fragment loading starts - data: { frag : fragment object}
|
||||
FRAG_LOADING: 'hlsFragLoading',
|
||||
// fired when a fragment loading is progressing - data: { frag : fragment object, { trequest, tfirst, loaded}}
|
||||
FRAG_LOAD_PROGRESS: 'hlsFragLoadProgress',
|
||||
// Identifier for fragment load aborting for emergency switch down - data: {frag : fragment object}
|
||||
FRAG_LOAD_EMERGENCY_ABORTED: 'hlsFragLoadEmergencyAborted',
|
||||
// fired when a fragment loading is completed - data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length}}
|
||||
FRAG_LOADED: 'hlsFragLoaded',
|
||||
// fired when Init Segment has been extracted from fragment - data: { moov : moov MP4 box, codecs : codecs found while parsing fragment}
|
||||
FRAG_PARSING_INIT_SEGMENT: 'hlsFragParsingInitSegment',
|
||||
// fired when parsing sei text is completed - data: { samples : [ sei samples pes ] }
|
||||
FRAG_PARSING_USERDATA: 'hlsFragParsingUserdata',
|
||||
// fired when parsing id3 is completed - data: { samples : [ id3 samples pes ] }
|
||||
FRAG_PARSING_METADATA: 'hlsFragParsingMetadata',
|
||||
// fired when data have been extracted from fragment - data: { data1 : moof MP4 box or TS fragments, data2 : mdat MP4 box or null}
|
||||
FRAG_PARSING_DATA: 'hlsFragParsingData',
|
||||
// fired when fragment parsing is completed - data: undefined
|
||||
FRAG_PARSED: 'hlsFragParsed',
|
||||
// fired when fragment remuxed MP4 boxes have all been appended into SourceBuffer - data: { frag : fragment object, stats : { trequest, tfirst, tload, tparsed, tbuffered, length} }
|
||||
FRAG_BUFFERED: 'hlsFragBuffered',
|
||||
// fired when fragment matching with current media position is changing - data : { frag : fragment object }
|
||||
FRAG_CHANGED: 'hlsFragChanged',
|
||||
// Identifier for a FPS drop event - data: {curentDropped, currentDecoded, totalDroppedFrames}
|
||||
FPS_DROP: 'hlsFpsDrop',
|
||||
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover,other error specific data}
|
||||
ERROR: 'hlsError',
|
||||
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example
|
||||
DESTROYING: 'hlsDestroying',
|
||||
// fired when a decrypt key loading starts - data: { frag : fragment object}
|
||||
KEY_LOADING: 'hlsKeyLoading',
|
||||
// fired when a decrypt key loading is completed - data: { frag : fragment object, payload : key payload, stats : { trequest, tfirst, tload, length}}
|
||||
KEY_LOADED: 'hlsKeyLoaded',
|
||||
};
|
|
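A short sketch using the fragment stats documented above (trequest/tfirst/tload/tbuffered are performance.now() timestamps, length is in bytes); it assumes an existing Hls instance named hls.

hls.on(Hls.Events.FRAG_BUFFERED, function (event, data) {
  var stats = data.stats;
  var loadMs  = Math.round(stats.tload - stats.trequest);
  var totalMs = Math.round(stats.tbuffered - stats.trequest);
  // bytes per millisecond * 8 = kbit/s
  var kbps = Math.round(8 * stats.length / (stats.tload - stats.tfirst));
  console.log('frag ' + data.frag.sn + ': loaded in ' + loadMs + 'ms, buffered after ' + totalMs + 'ms, ~' + kbps + ' kbit/s');
});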
@ -1,77 +0,0 @@
/**
 * Buffer Helper class, providing methods dealing buffer length retrieval
 */

class BufferHelper {

  static bufferInfo(media, pos, maxHoleDuration) {
    if (media) {
      var vbuffered = media.buffered, buffered = [], i;
      for (i = 0; i < vbuffered.length; i++) {
        buffered.push({start: vbuffered.start(i), end: vbuffered.end(i)});
      }
      return this.bufferedInfo(buffered, pos, maxHoleDuration);
    } else {
      return {len: 0, start: 0, end: 0, nextStart: undefined};
    }
  }

  static bufferedInfo(buffered, pos, maxHoleDuration) {
    var buffered2 = [],
        // bufferStart and bufferEnd are buffer boundaries around current video position
        bufferLen, bufferStart, bufferEnd, bufferStartNext, i;
    // sort on buffer.start/smaller end (IE does not always return sorted buffered ranges)
    buffered.sort(function (a, b) {
      var diff = a.start - b.start;
      if (diff) {
        return diff;
      } else {
        return b.end - a.end;
      }
    });
    // there might be some small holes between buffered time ranges;
    // consider that holes smaller than maxHoleDuration are irrelevant and build another
    // buffered time range representation that discards those holes
    for (i = 0; i < buffered.length; i++) {
      var buf2len = buffered2.length;
      if (buf2len) {
        var buf2end = buffered2[buf2len - 1].end;
        // if small hole (value between 0 and maxHoleDuration) or overlapping (negative)
        if ((buffered[i].start - buf2end) < maxHoleDuration) {
          // merge overlapping time ranges
          // update lastRange.end only if smaller than item.end
          // e.g. [1,15] with [2,8]  => [1,15] (no need to modify lastRange.end)
          // whereas [1,8] with [2,15] => [1,15] (lastRange should switch from [1,8] to [1,15])
          if (buffered[i].end > buf2end) {
            buffered2[buf2len - 1].end = buffered[i].end;
          }
        } else {
          // big hole
          buffered2.push(buffered[i]);
        }
      } else {
        // first value
        buffered2.push(buffered[i]);
      }
    }
    for (i = 0, bufferLen = 0, bufferStart = bufferEnd = pos; i < buffered2.length; i++) {
      var start = buffered2[i].start,
          end = buffered2[i].end;
      //logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
      if ((pos + maxHoleDuration) >= start && pos < end) {
        // play position is inside this buffered TimeRange, retrieve end of buffer position and buffer length
        bufferStart = start;
        bufferEnd = end;
        bufferLen = bufferEnd - pos;
      } else if ((pos + maxHoleDuration) < start) {
        bufferStartNext = start;
        break;
      }
    }
    return {len: bufferLen, start: bufferStart, end: bufferEnd, nextStart: bufferStartNext};
  }

}

export default BufferHelper;
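A worked example of the hole handling above, with made-up ranges; the import path is illustrative. The 0.2 s gap at t=10 is below maxHoleDuration and gets bridged, while the range starting at t=30 is reported as nextStart.

import BufferHelper from './buffer-helper'; // path depends on where this sketch lives

var info = BufferHelper.bufferedInfo(
  [{start: 0, end: 10}, {start: 10.2, end: 20}, {start: 30, end: 40}],
  5,    // current playback position
  0.5   // maxHoleDuration
);
// info => {len: 15, start: 0, end: 20, nextStart: 30}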
@ -1,137 +0,0 @@
|
|||
/**
|
||||
* Level Helper class, providing methods dealing with playlist sliding and drift
|
||||
*/
|
||||
|
||||
import {logger} from '../utils/logger';
|
||||
|
||||
class LevelHelper {
|
||||
|
||||
static mergeDetails(oldDetails,newDetails) {
|
||||
var start = Math.max(oldDetails.startSN,newDetails.startSN)-newDetails.startSN,
|
||||
end = Math.min(oldDetails.endSN,newDetails.endSN)-newDetails.startSN,
|
||||
delta = newDetails.startSN - oldDetails.startSN,
|
||||
oldfragments = oldDetails.fragments,
|
||||
newfragments = newDetails.fragments,
|
||||
ccOffset =0,
|
||||
PTSFrag;
|
||||
|
||||
// check if old/new playlists have fragments in common
|
||||
if ( end < start) {
|
||||
newDetails.PTSKnown = false;
|
||||
return;
|
||||
}
|
||||
// loop through overlapping SN and update startPTS , cc, and duration if any found
|
||||
for(var i = start ; i <= end ; i++) {
|
||||
var oldFrag = oldfragments[delta+i],
|
||||
newFrag = newfragments[i];
|
||||
ccOffset = oldFrag.cc - newFrag.cc;
|
||||
if (!isNaN(oldFrag.startPTS)) {
|
||||
newFrag.start = newFrag.startPTS = oldFrag.startPTS;
|
||||
newFrag.endPTS = oldFrag.endPTS;
|
||||
newFrag.duration = oldFrag.duration;
|
||||
PTSFrag = newFrag;
|
||||
}
|
||||
}
|
||||
|
||||
if(ccOffset) {
|
||||
logger.log(`discontinuity sliding from playlist, take drift into account`);
|
||||
for(i = 0 ; i < newfragments.length ; i++) {
|
||||
newfragments[i].cc += ccOffset;
|
||||
}
|
||||
}
|
||||
|
||||
// if at least one fragment contains PTS info, recompute PTS information for all fragments
|
||||
if(PTSFrag) {
|
||||
LevelHelper.updateFragPTSDTS(newDetails,PTSFrag.sn,PTSFrag.startPTS,PTSFrag.endPTS,PTSFrag.startDTS,PTSFrag.endDTS);
|
||||
} else {
|
||||
// ensure that delta is within oldfragments range
|
||||
// also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
|
||||
// in that case we also need to adjust start offset of all fragments
|
||||
if (delta >= 0 && delta < oldfragments.length) {
|
||||
// adjust start by sliding offset
|
||||
var sliding = oldfragments[delta].start;
|
||||
for(i = 0 ; i < newfragments.length ; i++) {
|
||||
newfragments[i].start += sliding;
|
||||
}
|
||||
}
|
||||
}
|
||||
// if we are here, it means we have fragments overlapping between
|
||||
// old and new level. reliable PTS info is thus relying on old level
|
||||
newDetails.PTSKnown = oldDetails.PTSKnown;
|
||||
return;
|
||||
}
|
||||
|
||||
static updateFragPTSDTS(details,sn,startPTS,endPTS,startDTS,endDTS) {
|
||||
var fragIdx, fragments, frag, i;
|
||||
// exit if sn out of range
|
||||
if (sn < details.startSN || sn > details.endSN) {
|
||||
return 0;
|
||||
}
|
||||
fragIdx = sn - details.startSN;
|
||||
fragments = details.fragments;
|
||||
frag = fragments[fragIdx];
|
||||
if(!isNaN(frag.startPTS)) {
|
||||
// delta PTS between audio and video
|
||||
let deltaPTS = Math.abs(frag.startPTS-startPTS);
|
||||
if (isNaN(frag.deltaPTS)) {
|
||||
frag.deltaPTS = deltaPTS;
|
||||
} else {
|
||||
frag.deltaPTS = Math.max(deltaPTS,frag.deltaPTS);
|
||||
}
|
||||
startPTS = Math.min(startPTS,frag.startPTS);
|
||||
endPTS = Math.max(endPTS, frag.endPTS);
|
||||
startDTS = Math.min(startDTS,frag.startDTS);
|
||||
endDTS = Math.max(endDTS, frag.endDTS);
|
||||
}
|
||||
|
||||
var drift = startPTS - frag.start;
|
||||
|
||||
frag.start = frag.startPTS = startPTS;
|
||||
frag.endPTS = endPTS;
|
||||
frag.startDTS = startDTS;
|
||||
frag.endDTS = endDTS;
|
||||
frag.duration = endPTS - startPTS;
|
||||
// adjust fragment PTS/duration from seqnum-1 to frag 0
|
||||
for(i = fragIdx ; i > 0 ; i--) {
|
||||
LevelHelper.updatePTS(fragments,i,i-1);
|
||||
}
|
||||
|
||||
// adjust fragment PTS/duration from seqnum to last frag
|
||||
for(i = fragIdx ; i < fragments.length - 1 ; i++) {
|
||||
LevelHelper.updatePTS(fragments,i,i+1);
|
||||
}
|
||||
details.PTSKnown = true;
|
||||
//logger.log(` frag start/end:${startPTS.toFixed(3)}/${endPTS.toFixed(3)}`);
|
||||
|
||||
return drift;
|
||||
}
|
||||
|
||||
static updatePTS(fragments,fromIdx, toIdx) {
|
||||
var fragFrom = fragments[fromIdx],fragTo = fragments[toIdx], fragToPTS = fragTo.startPTS;
|
||||
// if we know startPTS[toIdx]
|
||||
if(!isNaN(fragToPTS)) {
|
||||
// update fragment duration.
|
||||
// it helps to fix drifts between playlist reported duration and fragment real duration
|
||||
if (toIdx > fromIdx) {
|
||||
fragFrom.duration = fragToPTS-fragFrom.start;
|
||||
if(fragFrom.duration < 0) {
|
||||
logger.error(`negative duration computed for frag ${fragFrom.sn},level ${fragFrom.level}, there should be some duration drift between playlist and fragment!`);
|
||||
}
|
||||
} else {
|
||||
fragTo.duration = fragFrom.start - fragToPTS;
|
||||
if(fragTo.duration < 0) {
|
||||
logger.error(`negative duration computed for frag ${fragTo.sn},level ${fragTo.level}, there should be some duration drift between playlist and fragment!`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// we dont know startPTS[toIdx]
|
||||
if (toIdx > fromIdx) {
|
||||
fragTo.start = fragFrom.start + fragFrom.duration;
|
||||
} else {
|
||||
fragTo.start = fragFrom.start - fragTo.duration;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default LevelHelper;
|
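A standalone sketch of the sliding adjustment performed in mergeDetails above when a refreshed live playlist has dropped delta old fragments but no fragment with known PTS is shared yet; the playlist objects are simplified stand-ins.

function applySliding(oldFragments, newFragments, delta) {
  // same guard as mergeDetails: delta must point inside the old fragment list
  if (delta >= 0 && delta < oldFragments.length) {
    var sliding = oldFragments[delta].start;
    newFragments.forEach(function (frag) {
      frag.start += sliding;
    });
  }
  return newFragments;
}

// old playlist covered SN 50..52, refreshed playlist starts at SN 52 (delta = 2)
applySliding(
  [{start: 0}, {start: 10}, {start: 20}],
  [{start: 0}, {start: 10}],
  2
); // => [{start: 20}, {start: 30}]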
312
dashboard-ui/bower_components/hls.js/src/hls.js
vendored
|
@ -1,312 +0,0 @@
|
|||
/**
|
||||
* HLS interface
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
import Event from './events';
|
||||
import {ErrorTypes, ErrorDetails} from './errors';
|
||||
import PlaylistLoader from './loader/playlist-loader';
|
||||
import FragmentLoader from './loader/fragment-loader';
|
||||
import AbrController from './controller/abr-controller';
|
||||
import BufferController from './controller/buffer-controller';
|
||||
import CapLevelController from './controller/cap-level-controller';
|
||||
import StreamController from './controller/stream-controller';
|
||||
import LevelController from './controller/level-controller';
|
||||
import TimelineController from './controller/timeline-controller';
|
||||
//import FPSController from './controller/fps-controller';
|
||||
import {logger, enableLogs} from './utils/logger';
|
||||
import XhrLoader from './utils/xhr-loader';
|
||||
import EventEmitter from 'events';
|
||||
import KeyLoader from './loader/key-loader';
|
||||
|
||||
class Hls {
|
||||
|
||||
static isSupported() {
|
||||
return (window.MediaSource &&
|
||||
typeof window.MediaSource.isTypeSupported === 'function' &&
|
||||
window.MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'));
|
||||
}
|
||||
|
||||
static get Events() {
|
||||
return Event;
|
||||
}
|
||||
|
||||
static get ErrorTypes() {
|
||||
return ErrorTypes;
|
||||
}
|
||||
|
||||
static get ErrorDetails() {
|
||||
return ErrorDetails;
|
||||
}
|
||||
|
||||
static get DefaultConfig() {
|
||||
if(!Hls.defaultConfig) {
|
||||
Hls.defaultConfig = {
|
||||
autoStartLoad: true,
|
||||
startPosition: -1,
|
||||
debug: false,
|
||||
capLevelToPlayerSize: false,
|
||||
maxBufferLength: 30,
|
||||
maxBufferSize: 60 * 1000 * 1000,
|
||||
maxBufferHole: 0.5,
|
||||
maxSeekHole: 2,
|
||||
seekHoleNudgeDuration : 0.01,
|
||||
stalledInBufferedNudgeThreshold: 10,
|
||||
maxFragLookUpTolerance : 0.2,
|
||||
liveSyncDurationCount:3,
|
||||
liveMaxLatencyDurationCount: Infinity,
|
||||
liveSyncDuration: undefined,
|
||||
liveMaxLatencyDuration: undefined,
|
||||
maxMaxBufferLength: 600,
|
||||
enableWorker: true,
|
||||
enableSoftwareAES: true,
|
||||
manifestLoadingTimeOut: 10000,
|
||||
manifestLoadingMaxRetry: 1,
|
||||
manifestLoadingRetryDelay: 1000,
|
||||
levelLoadingTimeOut: 10000,
|
||||
levelLoadingMaxRetry: 4,
|
||||
levelLoadingRetryDelay: 1000,
|
||||
fragLoadingTimeOut: 20000,
|
||||
fragLoadingMaxRetry: 6,
|
||||
fragLoadingRetryDelay: 1000,
|
||||
fragLoadingLoopThreshold: 3,
|
||||
startFragPrefetch : false,
|
||||
// fpsDroppedMonitoringPeriod: 5000,
|
||||
// fpsDroppedMonitoringThreshold: 0.2,
|
||||
appendErrorMaxRetry: 3,
|
||||
loader: XhrLoader,
|
||||
fLoader: undefined,
|
||||
pLoader: undefined,
|
||||
abrController : AbrController,
|
||||
bufferController : BufferController,
|
||||
capLevelController : CapLevelController,
|
||||
streamController: StreamController,
|
||||
timelineController: TimelineController,
|
||||
enableCEA708Captions: true,
|
||||
enableMP2TPassThrough : false,
|
||||
abrEwmaFastLive: 5,
|
||||
abrEwmaSlowLive: 9,
|
||||
abrEwmaFastVoD: 4,
|
||||
abrEwmaSlowVoD: 15,
|
||||
abrEwmaDefaultEstimate: 5e5, // 500 kbps
|
||||
abrBandWidthFactor : 0.8,
|
||||
abrBandWidthUpFactor : 0.7
|
||||
};
|
||||
}
|
||||
return Hls.defaultConfig;
|
||||
}
|
||||
|
||||
static set DefaultConfig(defaultConfig) {
|
||||
Hls.defaultConfig = defaultConfig;
|
||||
}
|
||||
|
||||
constructor(config = {}) {
|
||||
var defaultConfig = Hls.DefaultConfig;
|
||||
|
||||
if ((config.liveSyncDurationCount || config.liveMaxLatencyDurationCount) && (config.liveSyncDuration || config.liveMaxLatencyDuration)) {
|
||||
throw new Error('Illegal hls.js config: don\'t mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration');
|
||||
}
|
||||
|
||||
for (var prop in defaultConfig) {
|
||||
if (prop in config) { continue; }
|
||||
config[prop] = defaultConfig[prop];
|
||||
}
|
||||
|
||||
if (config.liveMaxLatencyDurationCount !== undefined && config.liveMaxLatencyDurationCount <= config.liveSyncDurationCount) {
|
||||
throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be gt "liveSyncDurationCount"');
|
||||
}
|
||||
|
||||
if (config.liveMaxLatencyDuration !== undefined && (config.liveMaxLatencyDuration <= config.liveSyncDuration || config.liveSyncDuration === undefined)) {
|
||||
throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be gt "liveSyncDuration"');
|
||||
}
|
||||
|
||||
enableLogs(config.debug);
|
||||
this.config = config;
|
||||
// observer setup
|
||||
var observer = this.observer = new EventEmitter();
|
||||
observer.trigger = function trigger (event, ...data) {
|
||||
observer.emit(event, event, ...data);
|
||||
};
|
||||
|
||||
observer.off = function off (event, ...data) {
|
||||
observer.removeListener(event, ...data);
|
||||
};
|
||||
this.on = observer.on.bind(observer);
|
||||
this.off = observer.off.bind(observer);
|
||||
this.trigger = observer.trigger.bind(observer);
|
||||
this.playlistLoader = new PlaylistLoader(this);
|
||||
this.fragmentLoader = new FragmentLoader(this);
|
||||
this.levelController = new LevelController(this);
|
||||
this.abrController = new config.abrController(this);
|
||||
this.bufferController = new config.bufferController(this);
|
||||
this.capLevelController = new config.capLevelController(this);
|
||||
this.streamController = new config.streamController(this);
|
||||
this.timelineController = new config.timelineController(this);
|
||||
this.keyLoader = new KeyLoader(this);
|
||||
//this.fpsController = new FPSController(this);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
logger.log('destroy');
|
||||
this.trigger(Event.DESTROYING);
|
||||
this.detachMedia();
|
||||
this.playlistLoader.destroy();
|
||||
this.fragmentLoader.destroy();
|
||||
this.levelController.destroy();
|
||||
this.abrController.destroy();
|
||||
this.bufferController.destroy();
|
||||
this.capLevelController.destroy();
|
||||
this.streamController.destroy();
|
||||
this.timelineController.destroy();
|
||||
this.keyLoader.destroy();
|
||||
//this.fpsController.destroy();
|
||||
this.url = null;
|
||||
this.observer.removeAllListeners();
|
||||
}
|
||||
|
||||
attachMedia(media) {
|
||||
logger.log('attachMedia');
|
||||
this.media = media;
|
||||
this.trigger(Event.MEDIA_ATTACHING, {media: media});
|
||||
}
|
||||
|
||||
detachMedia() {
|
||||
logger.log('detachMedia');
|
||||
this.trigger(Event.MEDIA_DETACHING);
|
||||
this.media = null;
|
||||
}
|
||||
|
||||
loadSource(url) {
|
||||
logger.log(`loadSource:${url}`);
|
||||
this.url = url;
|
||||
// when attaching to a source URL, trigger a playlist load
|
||||
this.trigger(Event.MANIFEST_LOADING, {url: url});
|
||||
}
|
||||
|
||||
startLoad(startPosition=-1) {
|
||||
logger.log('startLoad');
|
||||
this.levelController.startLoad();
|
||||
this.streamController.startLoad(startPosition);
|
||||
}
|
||||
|
||||
stopLoad() {
|
||||
logger.log('stopLoad');
|
||||
this.levelController.stopLoad();
|
||||
this.streamController.stopLoad();
|
||||
}
|
||||
|
||||
swapAudioCodec() {
|
||||
logger.log('swapAudioCodec');
|
||||
this.streamController.swapAudioCodec();
|
||||
}
|
||||
|
||||
recoverMediaError() {
|
||||
logger.log('recoverMediaError');
|
||||
var media = this.media;
|
||||
this.detachMedia();
|
||||
this.attachMedia(media);
|
||||
}
|
||||
|
||||
/** Return all quality levels **/
|
||||
get levels() {
|
||||
return this.levelController.levels;
|
||||
}
|
||||
|
||||
/** Return current playback quality level **/
|
||||
get currentLevel() {
|
||||
return this.streamController.currentLevel;
|
||||
}
|
||||
|
||||
/* set quality level immediately (-1 for automatic level selection) */
|
||||
set currentLevel(newLevel) {
|
||||
logger.log(`set currentLevel:${newLevel}`);
|
||||
this.loadLevel = newLevel;
|
||||
this.streamController.immediateLevelSwitch();
|
||||
}
|
||||
|
||||
/** Return next playback quality level (quality level of next fragment) **/
|
||||
get nextLevel() {
|
||||
return this.streamController.nextLevel;
|
||||
}
|
||||
|
||||
/* set quality level for next fragment (-1 for automatic level selection) */
|
||||
set nextLevel(newLevel) {
|
||||
logger.log(`set nextLevel:${newLevel}`);
|
||||
this.levelController.manualLevel = newLevel;
|
||||
this.streamController.nextLevelSwitch();
|
||||
}
|
||||
|
||||
/** Return the quality level of current/last loaded fragment **/
|
||||
get loadLevel() {
|
||||
return this.levelController.level;
|
||||
}
|
||||
|
||||
/* set quality level for current/next loaded fragment (-1 for automatic level selection) */
|
||||
set loadLevel(newLevel) {
|
||||
logger.log(`set loadLevel:${newLevel}`);
|
||||
this.levelController.manualLevel = newLevel;
|
||||
}
|
||||
|
||||
/** Return the quality level of next loaded fragment **/
|
||||
get nextLoadLevel() {
|
||||
return this.levelController.nextLoadLevel;
|
||||
}
|
||||
|
||||
/** set quality level of next loaded fragment **/
|
||||
set nextLoadLevel(level) {
|
||||
this.levelController.nextLoadLevel = level;
|
||||
}
|
||||
|
||||
/** Return first level (index of first level referenced in manifest)
|
||||
**/
|
||||
get firstLevel() {
|
||||
return this.levelController.firstLevel;
|
||||
}
|
||||
|
||||
/** set first level (index of first level referenced in manifest)
|
||||
**/
|
||||
set firstLevel(newLevel) {
|
||||
logger.log(`set firstLevel:${newLevel}`);
|
||||
this.levelController.firstLevel = newLevel;
|
||||
}
|
||||
|
||||
/** Return start level (level of first fragment that will be played back)
|
||||
if not overrided by user, first level appearing in manifest will be used as start level
|
||||
if -1 : automatic start level selection, playback will start from level matching download bandwidth (determined from download of first segment)
|
||||
**/
|
||||
get startLevel() {
|
||||
return this.levelController.startLevel;
|
||||
}
|
||||
|
||||
/** set start level (level of first fragment that will be played back)
|
||||
if not overrided by user, first level appearing in manifest will be used as start level
|
||||
if -1 : automatic start level selection, playback will start from level matching download bandwidth (determined from download of first segment)
|
||||
**/
|
||||
set startLevel(newLevel) {
|
||||
logger.log(`set startLevel:${newLevel}`);
|
||||
this.levelController.startLevel = newLevel;
|
||||
}
|
||||
|
||||
/** Return the capping/max level value that could be used by automatic level selection algorithm **/
|
||||
get autoLevelCapping() {
|
||||
return this.abrController.autoLevelCapping;
|
||||
}
|
||||
|
||||
/** set the capping/max level value that could be used by automatic level selection algorithm **/
|
||||
set autoLevelCapping(newLevel) {
|
||||
logger.log(`set autoLevelCapping:${newLevel}`);
|
||||
this.abrController.autoLevelCapping = newLevel;
|
||||
}
|
||||
|
||||
/* check if we are in automatic level selection mode */
|
||||
get autoLevelEnabled() {
|
||||
return (this.levelController.manualLevel === -1);
|
||||
}
|
||||
|
||||
/* return manual level */
|
||||
get manualLevel() {
|
||||
return this.levelController.manualLevel;
|
||||
}
|
||||
}
|
||||
|
||||
export default Hls;
|
|
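A short sketch of overriding a few of the defaults listed in DefaultConfig above; any key left out keeps its default because the constructor copies missing properties from DefaultConfig.

var hls = new Hls({
  debug: true,
  maxBufferLength: 60,        // seconds of forward buffer to maintain
  fragLoadingMaxRetry: 3,
  capLevelToPlayerSize: true
});
hls.autoLevelCapping = 2;     // never auto-switch above level 2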
@ -1,4 +0,0 @@
// This is mostly for support of the es6 module export
// syntax with the babel compiler; it looks like it doesn't support
// function exports like we are used to in node/commonjs
module.exports = require('./hls.js').default;
@ -1,60 +0,0 @@
/*
 * Fragment Loader
 */

import Event from '../events';
import EventHandler from '../event-handler';
import {ErrorTypes, ErrorDetails} from '../errors';

class FragmentLoader extends EventHandler {

  constructor(hls) {
    super(hls, Event.FRAG_LOADING);
  }

  destroy() {
    if (this.loader) {
      this.loader.destroy();
      this.loader = null;
    }
    EventHandler.prototype.destroy.call(this);
  }

  onFragLoading(data) {
    var frag = data.frag;
    this.frag = frag;
    this.frag.loaded = 0;
    var config = this.hls.config;
    frag.loader = this.loader = typeof(config.fLoader) !== 'undefined' ? new config.fLoader(config) : new config.loader(config);
    this.loader.load(frag.url, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, 1, 0, this.loadprogress.bind(this), frag);
  }

  loadsuccess(event, stats) {
    var payload = event.currentTarget.response;
    stats.length = payload.byteLength;
    // detach fragment loader on load success
    this.frag.loader = undefined;
    this.hls.trigger(Event.FRAG_LOADED, {payload: payload, frag: this.frag, stats: stats});
  }

  loaderror(event) {
    if (this.loader) {
      this.loader.abort();
    }
    this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_ERROR, fatal: false, frag: this.frag, response: event});
  }

  loadtimeout() {
    if (this.loader) {
      this.loader.abort();
    }
    this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_TIMEOUT, fatal: false, frag: this.frag});
  }

  loadprogress(event, stats) {
    this.frag.loaded = stats.loaded;
    this.hls.trigger(Event.FRAG_LOAD_PROGRESS, {frag: this.frag, stats: stats});
  }
}

export default FragmentLoader;
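A sketch of plugging in a custom fragment loader via config.fLoader. It assumes the stock XhrLoader class exposes the same load() signature that onFragLoading calls above; the class name and logging are illustrative.

import XhrLoader from '../utils/xhr-loader';

class LoggingFragLoader extends XhrLoader {
  load(url, responseType, onSuccess, onError, onTimeout, timeout, maxRetry, retryDelay, onProgress, frag) {
    console.log('loading fragment', frag && frag.sn, url);
    super.load(url, responseType, onSuccess, onError, onTimeout, timeout, maxRetry, retryDelay, onProgress, frag);
  }
}

// usage: new Hls({fLoader: LoggingFragLoader})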
@ -1,70 +0,0 @@
/*
 * Decrypt key Loader
 */

import Event from '../events';
import EventHandler from '../event-handler';
import {ErrorTypes, ErrorDetails} from '../errors';

class KeyLoader extends EventHandler {

  constructor(hls) {
    super(hls, Event.KEY_LOADING);
    this.decryptkey = null;
    this.decrypturl = null;
  }

  destroy() {
    if (this.loader) {
      this.loader.destroy();
      this.loader = null;
    }
    EventHandler.prototype.destroy.call(this);
  }

  onKeyLoading(data) {
    var frag = this.frag = data.frag,
        decryptdata = frag.decryptdata,
        uri = decryptdata.uri;
    // if uri is different from previous one or if decrypt key not retrieved yet
    if (uri !== this.decrypturl || this.decryptkey === null) {
      var config = this.hls.config;
      frag.loader = this.loader = new config.loader(config);
      this.decrypturl = uri;
      this.decryptkey = null;
      frag.loader.load(uri, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, config.fragLoadingMaxRetry, config.fragLoadingRetryDelay, this.loadprogress.bind(this), frag);
    } else if (this.decryptkey) {
      // we already loaded this key, return it
      decryptdata.key = this.decryptkey;
      this.hls.trigger(Event.KEY_LOADED, {frag: frag});
    }
  }

  loadsuccess(event) {
    var frag = this.frag;
    this.decryptkey = frag.decryptdata.key = new Uint8Array(event.currentTarget.response);
    // detach fragment loader on load success
    frag.loader = undefined;
    this.hls.trigger(Event.KEY_LOADED, {frag: frag});
  }

  loaderror(event) {
    if (this.loader) {
      this.loader.abort();
    }
    this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.KEY_LOAD_ERROR, fatal: false, frag: this.frag, response: event});
  }

  loadtimeout() {
    if (this.loader) {
      this.loader.abort();
    }
    this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.KEY_LOAD_TIMEOUT, fatal: false, frag: this.frag});
  }

  loadprogress() {

  }
}

export default KeyLoader;
@ -1,318 +0,0 @@
|
|||
/**
|
||||
* Playlist Loader
|
||||
*/
|
||||
|
||||
import Event from '../events';
|
||||
import EventHandler from '../event-handler';
|
||||
import {ErrorTypes, ErrorDetails} from '../errors';
|
||||
import URLHelper from '../utils/url';
|
||||
import AttrList from '../utils/attr-list';
|
||||
//import {logger} from '../utils/logger';
|
||||
|
||||
class PlaylistLoader extends EventHandler {
|
||||
|
||||
constructor(hls) {
|
||||
super(hls,
|
||||
Event.MANIFEST_LOADING,
|
||||
Event.LEVEL_LOADING);
|
||||
}
|
||||
|
||||
destroy() {
|
||||
if (this.loader) {
|
||||
this.loader.destroy();
|
||||
this.loader = null;
|
||||
}
|
||||
this.url = this.id = null;
|
||||
EventHandler.prototype.destroy.call(this);
|
||||
}
|
||||
|
||||
onManifestLoading(data) {
|
||||
this.load(data.url, null);
|
||||
}
|
||||
|
||||
onLevelLoading(data) {
|
||||
this.load(data.url, data.level, data.id);
|
||||
}
|
||||
|
||||
load(url, id1, id2) {
|
||||
var config = this.hls.config,
|
||||
retry,
|
||||
timeout,
|
||||
retryDelay;
|
||||
|
||||
if (this.loading && this.loader) {
|
||||
if (this.url === url && this.id === id1 && this.id2 === id2) {
|
||||
// same request than last pending one, don't do anything
|
||||
return;
|
||||
} else {
|
||||
// one playlist load request is pending, but with different params, abort it before loading new playlist
|
||||
this.loader.abort();
|
||||
}
|
||||
}
|
||||
|
||||
this.url = url;
|
||||
this.id = id1;
|
||||
this.id2 = id2;
|
||||
if(this.id === null) {
|
||||
retry = config.manifestLoadingMaxRetry;
|
||||
timeout = config.manifestLoadingTimeOut;
|
||||
retryDelay = config.manifestLoadingRetryDelay;
|
||||
} else {
|
||||
retry = config.levelLoadingMaxRetry;
|
||||
timeout = config.levelLoadingTimeOut;
|
||||
retryDelay = config.levelLoadingRetryDelay;
|
||||
}
|
||||
this.loader = typeof(config.pLoader) !== 'undefined' ? new config.pLoader(config) : new config.loader(config);
|
||||
this.loading = true;
|
||||
this.loader.load(url, '', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), timeout, retry, retryDelay);
|
||||
}
|
||||
|
||||
resolve(url, baseUrl) {
|
||||
return URLHelper.buildAbsoluteURL(baseUrl, url);
|
||||
}
|
||||
|
||||
parseMasterPlaylist(string, baseurl) {
|
||||
let levels = [], result;
|
||||
|
||||
// https://regex101.com is your friend
|
||||
const re = /#EXT-X-STREAM-INF:([^\n\r]*)[\r\n]+([^\r\n]+)/g;
|
||||
while ((result = re.exec(string)) != null){
|
||||
const level = {};
|
||||
|
||||
var attrs = level.attrs = new AttrList(result[1]);
|
||||
level.url = this.resolve(result[2], baseurl);
|
||||
|
||||
var resolution = attrs.decimalResolution('RESOLUTION');
|
||||
if(resolution) {
|
||||
level.width = resolution.width;
|
||||
level.height = resolution.height;
|
||||
}
|
||||
level.bitrate = attrs.decimalInteger('AVERAGE-BANDWIDTH') || attrs.decimalInteger('BANDWIDTH');
|
||||
level.name = attrs.NAME;
|
||||
|
||||
var codecs = attrs.CODECS;
|
||||
if(codecs) {
|
||||
codecs = codecs.split(',');
|
||||
for (let i = 0; i < codecs.length; i++) {
|
||||
const codec = codecs[i];
|
||||
if (codec.indexOf('avc1') !== -1) {
|
||||
level.videoCodec = this.avc1toavcoti(codec);
|
||||
} else {
|
||||
level.audioCodec = codec;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
levels.push(level);
|
||||
}
|
||||
return levels;
|
||||
}
|
||||
|
||||
avc1toavcoti(codec) {
|
||||
var result, avcdata = codec.split('.');
|
||||
if (avcdata.length > 2) {
|
||||
result = avcdata.shift() + '.';
|
||||
result += parseInt(avcdata.shift()).toString(16);
|
||||
result += ('000' + parseInt(avcdata.shift()).toString(16)).substr(-4);
|
||||
} else {
|
||||
result = codec;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
cloneObj(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
parseLevelPlaylist(string, baseurl, id) {
|
||||
var currentSN = 0,
|
||||
totalduration = 0,
|
||||
level = {url: baseurl, fragments: [], live: true, startSN: 0},
|
||||
levelkey = {method : null, key : null, iv : null, uri : null},
|
||||
cc = 0,
|
||||
programDateTime = null,
|
||||
frag = null,
|
||||
result,
|
||||
regexp,
|
||||
byteRangeEndOffset,
|
||||
byteRangeStartOffset;
|
||||
|
||||
regexp = /(?:#EXT-X-(MEDIA-SEQUENCE):(\d+))|(?:#EXT-X-(TARGETDURATION):(\d+))|(?:#EXT-X-(KEY):(.*))|(?:#EXT-X-(START):(.*))|(?:#EXT(INF):([\d\.]+)[^\r\n]*([\r\n]+[^#|\r\n]+)?)|(?:#EXT-X-(BYTERANGE):([\d]+[@[\d]*)]*[\r\n]+([^#|\r\n]+)?|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DIS)CONTINUITY))|(?:#EXT-X-(PROGRAM-DATE-TIME):(.*)[\r\n]+([^#|\r\n]+)?)/g;
|
||||
while ((result = regexp.exec(string)) !== null) {
|
||||
result.shift();
|
||||
result = result.filter(function(n) { return (n !== undefined); });
|
||||
switch (result[0]) {
|
||||
case 'MEDIA-SEQUENCE':
|
||||
currentSN = level.startSN = parseInt(result[1]);
|
||||
break;
|
||||
case 'TARGETDURATION':
|
||||
level.targetduration = parseFloat(result[1]);
|
||||
break;
|
||||
case 'ENDLIST':
|
||||
level.live = false;
|
||||
break;
|
||||
case 'DIS':
|
||||
cc++;
|
||||
break;
|
||||
case 'BYTERANGE':
|
||||
var params = result[1].split('@');
|
||||
if (params.length === 1) {
|
||||
byteRangeStartOffset = byteRangeEndOffset;
|
||||
} else {
|
||||
byteRangeStartOffset = parseInt(params[1]);
|
||||
}
|
||||
byteRangeEndOffset = parseInt(params[0]) + byteRangeStartOffset;
|
||||
if (frag && !frag.url) {
|
||||
frag.byteRangeStartOffset = byteRangeStartOffset;
|
||||
frag.byteRangeEndOffset = byteRangeEndOffset;
|
||||
frag.url = this.resolve(result[2], baseurl);
|
||||
}
|
||||
break;
|
||||
case 'INF':
|
||||
var duration = parseFloat(result[1]);
|
||||
if (!isNaN(duration)) {
|
||||
var fragdecryptdata,
|
||||
sn = currentSN++;
|
||||
if (levelkey.method && levelkey.uri && !levelkey.iv) {
|
||||
fragdecryptdata = this.cloneObj(levelkey);
|
||||
var uint8View = new Uint8Array(16);
|
||||
for (var i = 12; i < 16; i++) {
|
||||
uint8View[i] = (sn >> 8*(15-i)) & 0xff;
|
||||
}
|
||||
fragdecryptdata.iv = uint8View;
|
||||
} else {
|
||||
fragdecryptdata = levelkey;
|
||||
}
|
||||
var url = result[2] ? this.resolve(result[2], baseurl) : null;
|
||||
frag = {url: url, duration: duration, start: totalduration, sn: sn, level: id, cc: cc, byteRangeStartOffset: byteRangeStartOffset, byteRangeEndOffset: byteRangeEndOffset, decryptdata : fragdecryptdata, programDateTime: programDateTime};
|
||||
level.fragments.push(frag);
|
||||
totalduration += duration;
|
||||
byteRangeStartOffset = null;
|
||||
programDateTime = null;
|
||||
}
|
||||
break;
|
||||
case 'KEY':
|
||||
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-08#section-3.4.4
|
||||
var decryptparams = result[1];
|
||||
var keyAttrs = new AttrList(decryptparams);
|
||||
var decryptmethod = keyAttrs.enumeratedString('METHOD'),
|
||||
decrypturi = keyAttrs.URI,
|
||||
decryptiv = keyAttrs.hexadecimalInteger('IV');
|
||||
if (decryptmethod) {
|
||||
levelkey = { method: null, key: null, iv: null, uri: null };
|
||||
if ((decrypturi) && (decryptmethod === 'AES-128')) {
|
||||
levelkey.method = decryptmethod;
|
||||
// URI to get the key
|
||||
levelkey.uri = this.resolve(decrypturi, baseurl);
|
||||
levelkey.key = null;
|
||||
// Initialization Vector (IV)
|
||||
levelkey.iv = decryptiv;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 'START':
|
||||
let startParams = result[1];
|
||||
let startAttrs = new AttrList(startParams);
|
||||
let startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET');
|
||||
if (startTimeOffset) {
|
||||
level.startTimeOffset = startTimeOffset;
|
||||
}
|
||||
break;
|
||||
case 'PROGRAM-DATE-TIME':
|
||||
programDateTime = new Date(Date.parse(result[1]));
|
||||
if (frag && !frag.url && result.length >= 3) {
|
||||
frag.url = this.resolve(result[2], baseurl);
|
||||
frag.programDateTime = programDateTime;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
//logger.log('found ' + level.fragments.length + ' fragments');
|
||||
if(frag && !frag.url) {
|
||||
level.fragments.pop();
|
||||
totalduration-=frag.duration;
|
||||
}
|
||||
level.totalduration = totalduration;
|
||||
level.endSN = currentSN - 1;
|
||||
return level;
|
||||
}
|
||||
|
||||
loadsuccess(event, stats) {
|
||||
var target = event.currentTarget,
|
||||
string = target.responseText,
|
||||
url = target.responseURL,
|
||||
id = this.id,
|
||||
id2 = this.id2,
|
||||
hls = this.hls,
|
||||
levels;
|
||||
|
||||
this.loading = false;
|
||||
// responseURL not supported on some browsers (it is used to detect URL redirection)
|
||||
if (url === undefined) {
|
||||
// fallback to initial URL
|
||||
url = this.url;
|
||||
}
|
||||
stats.tload = performance.now();
|
||||
stats.mtime = new Date(target.getResponseHeader('Last-Modified'));
|
||||
if (string.indexOf('#EXTM3U') === 0) {
|
||||
if (string.indexOf('#EXTINF:') > 0) {
|
||||
// 1 level playlist
|
||||
// if first request, fire manifest loaded event, level will be reloaded afterwards
|
||||
// (this is to have a uniform logic for 1 level/multilevel playlists)
|
||||
if (this.id === null) {
|
||||
hls.trigger(Event.MANIFEST_LOADED, {levels: [{url: url}], url: url, stats: stats});
|
||||
} else {
|
||||
var levelDetails = this.parseLevelPlaylist(string, url, id);
|
||||
stats.tparsed = performance.now();
|
||||
hls.trigger(Event.LEVEL_LOADED, {details: levelDetails, level: id, id: id2, stats: stats});
|
||||
}
|
||||
} else {
|
||||
levels = this.parseMasterPlaylist(string, url);
|
||||
// multi level playlist, parse level info
|
||||
if (levels.length) {
|
||||
hls.trigger(Event.MANIFEST_LOADED, {levels: levels, url: url, stats: stats});
|
||||
} else {
|
||||
hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.MANIFEST_PARSING_ERROR, fatal: true, url: url, reason: 'no level found in manifest'});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.MANIFEST_PARSING_ERROR, fatal: true, url: url, reason: 'no EXTM3U delimiter'});
|
||||
}
|
||||
}
|
||||
|
||||
loaderror(event) {
|
||||
var details, fatal;
|
||||
if (this.id === null) {
|
||||
details = ErrorDetails.MANIFEST_LOAD_ERROR;
|
||||
fatal = true;
|
||||
} else {
|
||||
details = ErrorDetails.LEVEL_LOAD_ERROR;
|
||||
fatal = false;
|
||||
}
|
||||
if (this.loader) {
|
||||
this.loader.abort();
|
||||
}
|
||||
this.loading = false;
|
||||
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: details, fatal: fatal, url: this.url, loader: this.loader, response: event.currentTarget, level: this.id, id: this.id2});
|
||||
}
|
||||
|
||||
loadtimeout() {
|
||||
var details, fatal;
|
||||
if (this.id === null) {
|
||||
details = ErrorDetails.MANIFEST_LOAD_TIMEOUT;
|
||||
fatal = true;
|
||||
} else {
|
||||
details = ErrorDetails.LEVEL_LOAD_TIMEOUT;
|
||||
fatal = false;
|
||||
}
|
||||
if (this.loader) {
|
||||
this.loader.abort();
|
||||
}
|
||||
this.loading = false;
|
||||
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: details, fatal: fatal, url: this.url, loader: this.loader, level: this.id, id: this.id2});
|
||||
}
|
||||
}
|
||||
|
||||
export default PlaylistLoader;
|
|
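A standalone sketch of the default IV computed in parseLevelPlaylist above: when an EXT-X-KEY carries no IV attribute, the 16-byte IV is derived from the fragment's media sequence number written big-endian into the last four bytes.

function defaultIV(sn) {
  var iv = new Uint8Array(16);
  for (var i = 12; i < 16; i++) {
    iv[i] = (sn >> 8 * (15 - i)) & 0xff;
  }
  return iv;
}

defaultIV(7); // => [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,7]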
@ -1,77 +0,0 @@
/**
 * dummy remuxer
 */

class DummyRemuxer {
  constructor(observer) {
    this.PES_TIMESCALE = 90000;
    this.observer = observer;
  }

  get passthrough() {
    return false;
  }

  destroy() {
  }

  insertDiscontinuity() {
  }

  remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset) {
    this._remuxAACSamples(audioTrack, timeOffset);
    this._remuxAVCSamples(videoTrack, timeOffset);
    this._remuxID3Samples(id3Track, timeOffset);
    this._remuxTextSamples(textTrack, timeOffset);
  }

  _remuxAVCSamples(track, timeOffset) {
    var avcSample, unit;
    // loop through track.samples
    while (track.samples.length) {
      avcSample = track.samples.shift();
      // loop through AVC sample NALUs
      while (avcSample.units.units.length) {
        unit = avcSample.units.units.shift();
      }
    }
    //please lint
    timeOffset = timeOffset;
  }

  _remuxAACSamples(track, timeOffset) {
    var aacSample, unit;
    // loop through track.samples
    while (track.samples.length) {
      aacSample = track.samples.shift();
      unit = aacSample.unit;
    }
    //please lint
    timeOffset = timeOffset;
  }

  _remuxID3Samples(track, timeOffset) {
    var id3Sample, unit;
    // loop through track.samples
    while (track.samples.length) {
      id3Sample = track.samples.shift();
      unit = id3Sample.unit;
    }
    //please lint
    timeOffset = timeOffset;
  }

  _remuxTextSamples(track, timeOffset) {
    var textSample, bytes;
    // loop through track.samples
    while (track.samples.length) {
      textSample = track.samples.shift();
      bytes = textSample.bytes;
    }
    //please lint
    timeOffset = timeOffset;
  }
}

export default DummyRemuxer;
@ -1,589 +0,0 @@
|
|||
/**
|
||||
* Generate MP4 Box
|
||||
*/
|
||||
|
||||
//import Hex from '../utils/hex';
|
||||
class MP4 {
|
||||
static init() {
|
||||
MP4.types = {
|
||||
avc1: [], // codingname
|
||||
avcC: [],
|
||||
btrt: [],
|
||||
dinf: [],
|
||||
dref: [],
|
||||
esds: [],
|
||||
ftyp: [],
|
||||
hdlr: [],
|
||||
mdat: [],
|
||||
mdhd: [],
|
||||
mdia: [],
|
||||
mfhd: [],
|
||||
minf: [],
|
||||
moof: [],
|
||||
moov: [],
|
||||
mp4a: [],
|
||||
mvex: [],
|
||||
mvhd: [],
|
||||
sdtp: [],
|
||||
stbl: [],
|
||||
stco: [],
|
||||
stsc: [],
|
||||
stsd: [],
|
||||
stsz: [],
|
||||
stts: [],
|
||||
tfdt: [],
|
||||
tfhd: [],
|
||||
traf: [],
|
||||
trak: [],
|
||||
trun: [],
|
||||
trex: [],
|
||||
tkhd: [],
|
||||
vmhd: [],
|
||||
smhd: []
|
||||
};
|
||||
|
||||
var i;
|
||||
for (i in MP4.types) {
|
||||
if (MP4.types.hasOwnProperty(i)) {
|
||||
MP4.types[i] = [
|
||||
i.charCodeAt(0),
|
||||
i.charCodeAt(1),
|
||||
i.charCodeAt(2),
|
||||
i.charCodeAt(3)
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
var videoHdlr = new Uint8Array([
|
||||
0x00, // version 0
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, 0x00, 0x00, // pre_defined
|
||||
0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
|
||||
0x00, 0x00, 0x00, 0x00, // reserved
|
||||
0x00, 0x00, 0x00, 0x00, // reserved
|
||||
0x00, 0x00, 0x00, 0x00, // reserved
|
||||
0x56, 0x69, 0x64, 0x65,
|
||||
0x6f, 0x48, 0x61, 0x6e,
|
||||
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
|
||||
]);
|
||||
|
||||
var audioHdlr = new Uint8Array([
|
||||
0x00, // version 0
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, 0x00, 0x00, // pre_defined
|
||||
0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
|
||||
0x00, 0x00, 0x00, 0x00, // reserved
|
||||
0x00, 0x00, 0x00, 0x00, // reserved
|
||||
0x00, 0x00, 0x00, 0x00, // reserved
|
||||
0x53, 0x6f, 0x75, 0x6e,
|
||||
0x64, 0x48, 0x61, 0x6e,
|
||||
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
|
||||
]);
|
||||
|
||||
MP4.HDLR_TYPES = {
|
||||
'video': videoHdlr,
|
||||
'audio': audioHdlr
|
||||
};
|
||||
|
||||
var dref = new Uint8Array([
|
||||
0x00, // version 0
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, 0x00, 0x01, // entry_count
|
||||
0x00, 0x00, 0x00, 0x0c, // entry_size
|
||||
0x75, 0x72, 0x6c, 0x20, // 'url' type
|
||||
0x00, // version 0
|
||||
0x00, 0x00, 0x01 // entry_flags
|
||||
]);
|
||||
|
||||
var stco = new Uint8Array([
|
||||
0x00, // version
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, 0x00, 0x00 // entry_count
|
||||
]);
|
||||
|
||||
MP4.STTS = MP4.STSC = MP4.STCO = stco;
|
||||
|
||||
MP4.STSZ = new Uint8Array([
|
||||
0x00, // version
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, 0x00, 0x00, // sample_size
|
||||
0x00, 0x00, 0x00, 0x00, // sample_count
|
||||
]);
|
||||
MP4.VMHD = new Uint8Array([
|
||||
0x00, // version
|
||||
0x00, 0x00, 0x01, // flags
|
||||
0x00, 0x00, // graphicsmode
|
||||
0x00, 0x00,
|
||||
0x00, 0x00,
|
||||
0x00, 0x00 // opcolor
|
||||
]);
|
||||
MP4.SMHD = new Uint8Array([
|
||||
0x00, // version
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, // balance
|
||||
0x00, 0x00 // reserved
|
||||
]);
|
||||
|
||||
MP4.STSD = new Uint8Array([
|
||||
0x00, // version 0
|
||||
0x00, 0x00, 0x00, // flags
|
||||
0x00, 0x00, 0x00, 0x01]);// entry_count
|
||||
|
||||
var majorBrand = new Uint8Array([105,115,111,109]); // isom
|
||||
var avc1Brand = new Uint8Array([97,118,99,49]); // avc1
|
||||
var minorVersion = new Uint8Array([0, 0, 0, 1]);
|
||||
|
||||
MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand);
|
||||
MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
|
||||
}
|
||||
|
||||
static box(type) {
|
||||
var
|
||||
payload = Array.prototype.slice.call(arguments, 1),
|
||||
size = 8,
|
||||
i = payload.length,
|
||||
len = i,
|
||||
result;
|
||||
// calculate the total size we need to allocate
|
||||
while (i--) {
|
||||
size += payload[i].byteLength;
|
||||
}
|
||||
result = new Uint8Array(size);
|
||||
result[0] = (size >> 24) & 0xff;
|
||||
result[1] = (size >> 16) & 0xff;
|
||||
result[2] = (size >> 8) & 0xff;
|
||||
result[3] = size & 0xff;
|
||||
result.set(type, 4);
|
||||
// copy the payload into the result
for (i = 0, size = 8; i < len; i++) {
// copy payload[i] array @ offset size
result.set(payload[i], size);
size += payload[i].byteLength;
}
return result;
}

static hdlr(type) {
return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
}

static mdat(data) {
return MP4.box(MP4.types.mdat, data);
}

static mdhd(timescale, duration) {
duration *= timescale;
return MP4.box(MP4.types.mdhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x02, // creation_time
0x00, 0x00, 0x00, 0x03, // modification_time
(timescale >> 24) & 0xFF,
(timescale >> 16) & 0xFF,
(timescale >> 8) & 0xFF,
timescale & 0xFF, // timescale
(duration >> 24),
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x55, 0xc4, // 'und' language (undetermined)
0x00, 0x00
]));
}

static mdia(track) {
return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
}

static mfhd(sequenceNumber) {
return MP4.box(MP4.types.mfhd, new Uint8Array([
0x00,
0x00, 0x00, 0x00, // flags
(sequenceNumber >> 24),
(sequenceNumber >> 16) & 0xFF,
(sequenceNumber >> 8) & 0xFF,
sequenceNumber & 0xFF, // sequence_number
]));
}

static minf(track) {
if (track.type === 'audio') {
return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track));
} else {
return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
}
}

static moof(sn, baseMediaDecodeTime, track) {
return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track,baseMediaDecodeTime));
}
/**
* @param tracks... (optional) {array} the tracks associated with this movie
*/
static moov(tracks) {
var
i = tracks.length,
boxes = [];

while (i--) {
boxes[i] = MP4.trak(tracks[i]);
}

return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));
}

static mvex(tracks) {
var
i = tracks.length,
boxes = [];

while (i--) {
boxes[i] = MP4.trex(tracks[i]);
}
return MP4.box.apply(null, [MP4.types.mvex].concat(boxes));
}

static mvhd(timescale,duration) {
duration*=timescale;
var
bytes = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01, // creation_time
0x00, 0x00, 0x00, 0x02, // modification_time
(timescale >> 24) & 0xFF,
(timescale >> 16) & 0xFF,
(timescale >> 8) & 0xFF,
timescale & 0xFF, // timescale
(duration >> 24) & 0xFF,
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x00, 0x01, 0x00, 0x00, // 1.0 rate
0x01, 0x00, // 1.0 volume
0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
0xff, 0xff, 0xff, 0xff // next_track_ID
]);
return MP4.box(MP4.types.mvhd, bytes);
}

static sdtp(track) {
var
samples = track.samples || [],
bytes = new Uint8Array(4 + samples.length),
flags,
i;
// leave the full box header (4 bytes) all zero
// write the sample table
for (i = 0; i < samples.length; i++) {
flags = samples[i].flags;
bytes[i + 4] = (flags.dependsOn << 4) |
(flags.isDependedOn << 2) |
(flags.hasRedundancy);
}

return MP4.box(MP4.types.sdtp, bytes);
}

static stbl(track) {
return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
}

static avc1(track) {
var sps = [], pps = [], i, data, len;
// assemble the SPSs

for (i = 0; i < track.sps.length; i++) {
data = track.sps[i];
len = data.byteLength;
sps.push((len >>> 8) & 0xFF);
sps.push((len & 0xFF));
sps = sps.concat(Array.prototype.slice.call(data)); // SPS
}

// assemble the PPSs
for (i = 0; i < track.pps.length; i++) {
data = track.pps[i];
len = data.byteLength;
pps.push((len >>> 8) & 0xFF);
pps.push((len & 0xFF));
pps = pps.concat(Array.prototype.slice.call(data));
}

var avcc = MP4.box(MP4.types.avcC, new Uint8Array([
0x01, // version
sps[3], // profile
sps[4], // profile compat
sps[5], // level
0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes
0xE0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
].concat(sps).concat([
track.pps.length // numOfPictureParameterSets
]).concat(pps))), // "PPS"
width = track.width,
height = track.height;
//console.log('avcc:' + Hex.hexDump(avcc));
return MP4.box(MP4.types.avc1, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, // pre_defined
0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
(width >> 8) & 0xFF,
width & 0xff, // width
(height >> 8) & 0xFF,
height & 0xff, // height
0x00, 0x48, 0x00, 0x00, // horizresolution
0x00, 0x48, 0x00, 0x00, // vertresolution
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, // frame_count
0x12,
0x64, 0x61, 0x69, 0x6C, //dailymotion/hls.js
0x79, 0x6D, 0x6F, 0x74,
0x69, 0x6F, 0x6E, 0x2F,
0x68, 0x6C, 0x73, 0x2E,
0x6A, 0x73, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, // compressorname
0x00, 0x18, // depth = 24
0x11, 0x11]), // pre_defined = -1
avcc,
MP4.box(MP4.types.btrt, new Uint8Array([
0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
0x00, 0x2d, 0xc6, 0xc0])) // avgBitrate
);
}

static esds(track) {
var configlen = track.config.length;
return new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags

0x03, // descriptor_type
0x17+configlen, // length
0x00, 0x01, //es_id
0x00, // stream_priority

0x04, // descriptor_type
0x0f+configlen, // length
0x40, //codec : mpeg4_audio
0x15, // stream_type
0x00, 0x00, 0x00, // buffer_size
0x00, 0x00, 0x00, 0x00, // maxBitrate
0x00, 0x00, 0x00, 0x00, // avgBitrate

0x05 // descriptor_type
].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
}

static mp4a(track) {
var audiosamplerate = track.audiosamplerate;
return MP4.box(MP4.types.mp4a, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, track.channelCount, // channelcount
0x00, 0x10, // sampleSize:16bits
0x00, 0x00, 0x00, 0x00, // reserved2
(audiosamplerate >> 8) & 0xFF,
audiosamplerate & 0xff, //
0x00, 0x00]),
MP4.box(MP4.types.esds, MP4.esds(track)));
}

static stsd(track) {
if (track.type === 'audio') {
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
} else {
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
}
}

static tkhd(track) {
var id = track.id,
duration = track.duration*track.timescale,
width = track.width,
height = track.height;
return MP4.box(MP4.types.tkhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x07, // flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(id >> 24) & 0xFF,
(id >> 16) & 0xFF,
(id >> 8) & 0xFF,
id & 0xFF, // track_ID
0x00, 0x00, 0x00, 0x00, // reserved
(duration >> 24),
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, // layer
0x00, 0x00, // alternate_group
0x00, 0x00, // non-audio track volume
0x00, 0x00, // reserved
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
(width >> 8) & 0xFF,
width & 0xFF,
0x00, 0x00, // width
(height >> 8) & 0xFF,
height & 0xFF,
0x00, 0x00 // height
]));
}

static traf(track,baseMediaDecodeTime) {
var sampleDependencyTable = MP4.sdtp(track),
id = track.id;
return MP4.box(MP4.types.traf,
MP4.box(MP4.types.tfhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(id >> 24),
(id >> 16) & 0XFF,
(id >> 8) & 0XFF,
(id & 0xFF) // track_ID
])),
MP4.box(MP4.types.tfdt, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(baseMediaDecodeTime >>24),
(baseMediaDecodeTime >> 16) & 0XFF,
(baseMediaDecodeTime >> 8) & 0XFF,
(baseMediaDecodeTime & 0xFF) // baseMediaDecodeTime
])),
MP4.trun(track,
sampleDependencyTable.length +
16 + // tfhd
16 + // tfdt
8 + // traf header
16 + // mfhd
8 + // moof header
8), // mdat header
sampleDependencyTable);
}

/**
* Generate a track box.
* @param track {object} a track definition
* @return {Uint8Array} the track box
*/
static trak(track) {
track.duration = track.duration || 0xffffffff;
return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
}

static trex(track) {
var id = track.id;
return MP4.box(MP4.types.trex, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(id >> 24),
(id >> 16) & 0XFF,
(id >> 8) & 0XFF,
(id & 0xFF), // track_ID
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
0x00, 0x00, 0x00, 0x00, // default_sample_duration
0x00, 0x00, 0x00, 0x00, // default_sample_size
0x00, 0x01, 0x00, 0x01 // default_sample_flags
]));
}

static trun(track, offset) {
var samples= track.samples || [],
len = samples.length,
arraylen = 12 + (16 * len),
array = new Uint8Array(arraylen),
i,sample,duration,size,flags,cts;
offset += 8 + arraylen;
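// Note (illustrative reading of the accounting above): 'offset' arrives as the byte size of every other
// moof sub-box plus the 8-byte mdat header; adding this trun's own size (8 + arraylen) makes the
// data_offset written below point at the first mdat payload byte, counted from the start of the moof box.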
array.set([
0x00, // version 0
0x00, 0x0f, 0x01, // flags
(len >>> 24) & 0xFF,
(len >>> 16) & 0xFF,
(len >>> 8) & 0xFF,
len & 0xFF, // sample_count
(offset >>> 24) & 0xFF,
(offset >>> 16) & 0xFF,
(offset >>> 8) & 0xFF,
offset & 0xFF // data_offset
],0);
for (i = 0; i < len; i++) {
sample = samples[i];
duration = sample.duration;
size = sample.size;
flags = sample.flags;
cts = sample.cts;
array.set([
(duration >>> 24) & 0xFF,
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
duration & 0xFF, // sample_duration
(size >>> 24) & 0xFF,
(size >>> 16) & 0xFF,
(size >>> 8) & 0xFF,
size & 0xFF, // sample_size
(flags.isLeading << 2) | flags.dependsOn,
(flags.isDependedOn << 6) |
(flags.hasRedundancy << 4) |
(flags.paddingValue << 1) |
flags.isNonSync,
flags.degradPrio & 0xF0 << 8,
flags.degradPrio & 0x0F, // sample_flags
(cts >>> 24) & 0xFF,
(cts >>> 16) & 0xFF,
(cts >>> 8) & 0xFF,
cts & 0xFF // sample_composition_time_offset
],12+16*i);
}
return MP4.box(MP4.types.trun, array);
}

static initSegment(tracks) {
if (!MP4.types) {
MP4.init();
}
var movie = MP4.moov(tracks), result;
result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
result.set(MP4.FTYP);
result.set(movie, MP4.FTYP.byteLength);
return result;
}
}

export default MP4;
@ -1,471 +0,0 @@
/**
* fMP4 remuxer
*/


import Event from '../events';
import {logger} from '../utils/logger';
import MP4 from '../remux/mp4-generator';
import {ErrorTypes, ErrorDetails} from '../errors';

class MP4Remuxer {
constructor(observer) {
this.observer = observer;
this.ISGenerated = false;
this.PES2MP4SCALEFACTOR = 4;
this.PES_TIMESCALE = 90000;
this.MP4_TIMESCALE = this.PES_TIMESCALE / this.PES2MP4SCALEFACTOR;
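// e.g.: 90 kHz PES timestamps divided by PES2MP4SCALEFACTOR = 4 give an MP4 timescale of 22500 ticks per second for video.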
}

get passthrough() {
return false;
}

destroy() {
}

insertDiscontinuity() {
this._initPTS = this._initDTS = undefined;
}

switchLevel() {
this.ISGenerated = false;
}

remux(audioTrack,videoTrack,id3Track,textTrack,timeOffset, contiguous) {
// generate Init Segment if needed
if (!this.ISGenerated) {
this.generateIS(audioTrack,videoTrack,timeOffset);
}
if (this.ISGenerated) {
//logger.log('nb AVC samples:' + videoTrack.samples.length);
if (videoTrack.samples.length) {
this.remuxVideo(videoTrack,timeOffset,contiguous);
}
//logger.log('nb AAC samples:' + audioTrack.samples.length);
if (audioTrack.samples.length) {
this.remuxAudio(audioTrack,timeOffset,contiguous);
}
}
//logger.log('nb ID3 samples:' + audioTrack.samples.length);
if (id3Track.samples.length) {
this.remuxID3(id3Track,timeOffset);
}
//logger.log('nb ID3 samples:' + audioTrack.samples.length);
if (textTrack.samples.length) {
this.remuxText(textTrack,timeOffset);
}
//notify end of parsing
this.observer.trigger(Event.FRAG_PARSED);
}

generateIS(audioTrack,videoTrack,timeOffset) {
var observer = this.observer,
audioSamples = audioTrack.samples,
videoSamples = videoTrack.samples,
pesTimeScale = this.PES_TIMESCALE,
tracks = {},
data = { tracks : tracks, unique : false },
computePTSDTS = (this._initPTS === undefined),
initPTS, initDTS;

if (computePTSDTS) {
initPTS = initDTS = Infinity;
}
if (audioTrack.config && audioSamples.length) {
audioTrack.timescale = audioTrack.audiosamplerate;
// MP4 duration (track duration in seconds multiplied by timescale) is coded on 32 bits
// we know that each AAC sample contains 1024 frames....
// in order to avoid overflowing the 32 bit counter for large duration, we use smaller timescale (timescale/gcd)
// we just need to ensure that AAC sample duration will still be an integer (will be 1024/gcd)
if (audioTrack.timescale * audioTrack.duration > Math.pow(2, 32)) {
let greatestCommonDivisor = function(a, b) {
if ( ! b) {
return a;
}
return greatestCommonDivisor(b, a % b);
};
audioTrack.timescale = audioTrack.audiosamplerate / greatestCommonDivisor(audioTrack.audiosamplerate,1024);
}
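// Worked example (illustrative values): for a 44100 Hz AAC track, greatestCommonDivisor(44100, 1024) = 4,
// so the timescale becomes 44100 / 4 = 11025 and each AAC frame keeps an integer duration of 1024 / 4 = 256 ticks.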
logger.log ('audio mp4 timescale :'+ audioTrack.timescale);
tracks.audio = {
container : 'audio/mp4',
codec : audioTrack.codec,
initSegment : MP4.initSegment([audioTrack]),
metadata : {
channelCount : audioTrack.channelCount
}
};
if (computePTSDTS) {
// remember first PTS of this demuxing context. for audio, PTS + DTS ...
initPTS = initDTS = audioSamples[0].pts - pesTimeScale * timeOffset;
}
}

if (videoTrack.sps && videoTrack.pps && videoSamples.length) {
videoTrack.timescale = this.MP4_TIMESCALE;
tracks.video = {
container : 'video/mp4',
codec : videoTrack.codec,
initSegment : MP4.initSegment([videoTrack]),
metadata : {
width : videoTrack.width,
height : videoTrack.height
}
};
if (computePTSDTS) {
initPTS = Math.min(initPTS,videoSamples[0].pts - pesTimeScale * timeOffset);
initDTS = Math.min(initDTS,videoSamples[0].dts - pesTimeScale * timeOffset);
}
}

if(Object.keys(tracks).length) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT,data);
this.ISGenerated = true;
if (computePTSDTS) {
this._initPTS = initPTS;
this._initDTS = initDTS;
}
} else {
observer.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'no audio/video samples found'});
}
}

remuxVideo(track, timeOffset, contiguous) {
var view,
offset = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
avcSample,
mp4Sample,
mp4SampleLength,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
flags,
samples = [];

// handle broken streams with PTS < DTS, tolerance up 200ms (18000 in 90kHz timescale)
let PTSDTSshift = track.samples.reduce( (prev, curr) => Math.max(Math.min(prev,curr.pts-curr.dts),-18000),0);
if (PTSDTSshift < 0) {
logger.warn(`PTS < DTS detected in video samples, shifting DTS by ${Math.round(PTSDTSshift/90)} ms to overcome this issue`);
}
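// Worked example (illustrative values): if the worst sample has pts - dts = -900 (PTS 10 ms earlier than DTS at 90 kHz),
// PTSDTSshift is -900 and every DTS below is moved 10 ms earlier so that PTS >= DTS holds again; the shift is capped at -18000 (-200 ms).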
/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
while (track.samples.length) {
avcSample = track.samples.shift();
mp4SampleLength = 0;
// convert NALU bitstream to MP4 format (prepend NALU with size field)
while (avcSample.units.units.length) {
unit = avcSample.units.units.shift();
view.setUint32(offset, unit.data.byteLength);
offset += 4;
mdat.set(unit.data, offset);
offset += unit.data.byteLength;
mp4SampleLength += 4 + unit.data.byteLength;
}
pts = avcSample.pts - this._initDTS;
// shift dts by PTSDTSshift, to ensure that PTS >= DTS
dts = avcSample.dts - this._initDTS + PTSDTSshift;
// ensure DTS is not bigger than PTS // strap belt !!!
dts = Math.min(pts,dts);
//logger.log(`Video/PTS/DTS/ptsnorm/DTSnorm:${Math.round(avcSample.pts/90)}/${Math.round(avcSample.dts/90)}/${Math.round(pts/90)}/${Math.round(dts/90)}`);
// if not first AVC sample of video track, normalize PTS/DTS with previous sample value
// and ensure that sample duration is positive
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
var sampleDuration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (sampleDuration <= 0) {
logger.log(`invalid sample duration at PTS/DTS: ${avcSample.pts}/${avcSample.dts}:${sampleDuration}`);
sampleDuration = 1;
}
mp4Sample.duration = sampleDuration;
} else {
let nextAvcDts, delta;
if (contiguous) {
nextAvcDts = this.nextAvcDts;
} else {
nextAvcDts = timeOffset*pesTimeScale;
}
// first AVC sample of video track, normalize PTS/DTS
ptsnorm = this._PTSNormalize(pts, nextAvcDts);
dtsnorm = this._PTSNormalize(dts, nextAvcDts);
delta = Math.round((dtsnorm - nextAvcDts) / 90);
// if fragment are contiguous, detect hole/overlapping between fragments
if (contiguous) {
if (delta) {
if (delta > 1) {
logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
} else if (delta < -1) {
logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
}
// set DTS to next DTS
dtsnorm = nextAvcDts;
// offset PTS as well, ensure that PTS is smaller or equal than new DTS
ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
logger.log(`Video/PTS/DTS adjusted: ${ptsnorm}/${dtsnorm},delta:${delta}`);
}
}
// remember first PTS of our avcSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
}
//console.log(`PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)});
mp4Sample = {
size: mp4SampleLength,
duration: 0,
cts: (ptsnorm - dtsnorm) / pes2mp4ScaleFactor,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0
}
};
flags = mp4Sample.flags;
if (avcSample.key === true) {
// the current sample is a key frame
flags.dependsOn = 2;
flags.isNonSync = 0;
} else {
flags.dependsOn = 1;
flags.isNonSync = 1;
}
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
var lastSampleDuration = 0;
if (samples.length >= 2) {
lastSampleDuration = samples[samples.length - 2].duration;
mp4Sample.duration = lastSampleDuration;
}
// next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + lastSampleDuration * pes2mp4ScaleFactor;
let dropped = track.dropped;
track.len = 0;
track.nbNalu = 0;
track.dropped = 0;
if(samples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
flags = samples[0].flags;
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412
flags.dependsOn = 2;
flags.isNonSync = 0;
}
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
data1: moof,
data2: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: this.nextAvcDts / pesTimeScale,
type: 'video',
nb: samples.length,
dropped : dropped
});
}

remuxAudio(track,timeOffset, contiguous) {
var view,
offset = 8,
pesTimeScale = this.PES_TIMESCALE,
mp4timeScale = track.timescale,
pes2mp4ScaleFactor = pesTimeScale/mp4timeScale,
expectedSampleDuration = track.timescale * 1024 / track.audiosamplerate,
aacSample, mp4Sample,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [],
samples0 = [];

track.samples.sort(function(a, b) {
return (a.pts-b.pts);
});
samples0 = track.samples;

while (samples0.length) {
aacSample = samples0.shift();
unit = aacSample.unit;
pts = aacSample.pts - this._initDTS;
dts = aacSample.dts - this._initDTS;
//logger.log(`Audio/PTS:${Math.round(pts/90)}`);
// if not first sample
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
// let's compute sample duration.
// sample Duration should be close to expectedSampleDuration
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if(Math.abs(mp4Sample.duration - expectedSampleDuration) > expectedSampleDuration/10) {
// more than 10% diff between sample duration and expectedSampleDuration .... lets log that
logger.log(`invalid AAC sample duration at PTS ${Math.round(pts/90)},should be 1024,found :${Math.round(mp4Sample.duration*track.audiosamplerate/track.timescale)}`);
}
// always adjust sample duration to avoid av sync issue
mp4Sample.duration = expectedSampleDuration;
ptsnorm = dtsnorm = expectedSampleDuration * pes2mp4ScaleFactor + lastDTS;
} else {
let nextAacPts, delta;
if (contiguous) {
nextAacPts = this.nextAacPts;
} else {
nextAacPts = timeOffset*pesTimeScale;
}
ptsnorm = this._PTSNormalize(pts, nextAacPts);
dtsnorm = this._PTSNormalize(dts, nextAacPts);
delta = Math.round(1000 * (ptsnorm - nextAacPts) / pesTimeScale);
// if fragment are contiguous, detect hole/overlapping between fragments
if (contiguous) {
// log delta
if (delta) {
if (delta > 0) {
logger.log(`${delta} ms hole between AAC samples detected,filling it`);
// if we have frame overlap, overlapping for more than half a frame duration
} else if (delta < -12) {
// drop overlapping audio frames... browser will deal with it
logger.log(`${(-delta)} ms overlapping between AAC samples detected, drop frame`);
track.len -= unit.byteLength;
continue;
}
// set PTS/DTS to next PTS/DTS
ptsnorm = dtsnorm = nextAacPts;
}
}
// remember first PTS of our aacSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
if(track.len > 0) {
/* concatenate the audio data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
} else {
// no audio samples
return;
}
}
mdat.set(unit, offset);
offset += unit.byteLength;
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${aacSample.pts}/${aacSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(aacSample.pts/4294967296).toFixed(3)}');
mp4Sample = {
size: unit.byteLength,
cts: 0,
duration:0,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0,
dependsOn: 1,
}
};
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
var lastSampleDuration = 0;
var nbSamples = samples.length;
//set last sample duration as being identical to previous sample
if (nbSamples >= 2) {
lastSampleDuration = samples[nbSamples - 2].duration;
mp4Sample.duration = lastSampleDuration;
}
if (nbSamples) {
// next aac sample PTS should be equal to last sample PTS + duration
this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * lastSampleDuration;
//logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacDts.toFixed(0));
track.len = 0;
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
data1: moof,
data2: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: this.nextAacPts / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
type: 'audio',
nb: nbSamples
});
}
}

remuxID3(track,timeOffset) {
var length = track.samples.length, sample;
// consume samples
if(length) {
for(var index = 0; index < length; index++) {
sample = track.samples[index];
// setting id3 pts, dts to relative time
// using this._initPTS and this._initDTS to calculate relative time
sample.pts = ((sample.pts - this._initPTS) / this.PES_TIMESCALE);
sample.dts = ((sample.dts - this._initDTS) / this.PES_TIMESCALE);
}
this.observer.trigger(Event.FRAG_PARSING_METADATA, {
samples:track.samples
});
}

track.samples = [];
timeOffset = timeOffset;
}

remuxText(track,timeOffset) {
track.samples.sort(function(a, b) {
return (a.pts-b.pts);
});

var length = track.samples.length, sample;
// consume samples
if(length) {
for(var index = 0; index < length; index++) {
sample = track.samples[index];
// setting text pts, dts to relative time
// using this._initPTS and this._initDTS to calculate relative time
sample.pts = ((sample.pts - this._initPTS) / this.PES_TIMESCALE);
}
this.observer.trigger(Event.FRAG_PARSING_USERDATA, {
samples:track.samples
});
}

track.samples = [];
timeOffset = timeOffset;
}

_PTSNormalize(value, reference) {
var offset;
if (reference === undefined) {
return value;
}
if (reference < value) {
// - 2^33
offset = -8589934592;
} else {
// + 2^33
offset = 8589934592;
}
/* PTS is 33bit (from 0 to 2^33 -1)
if diff between value and reference is bigger than half of the amplitude (2^32) then it means that
PTS looping occurred. fill the gap */
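// Worked example (illustrative values): with reference = 8589930000 (close to the 33-bit wrap) and value = 1000
// (just wrapped), the difference exceeds 2^32, so 2^33 = 8589934592 is added once and value becomes 8589935592,
// only 5592 ticks after the reference.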
while (Math.abs(value - reference) > 4294967296) {
value += offset;
}
return value;
}

}

export default MP4Remuxer;
@ -1,71 +0,0 @@
/**
* passthrough remuxer
*/
import Event from '../events';

class PassThroughRemuxer {
constructor(observer) {
this.observer = observer;
this.ISGenerated = false;
}

get passthrough() {
return true;
}

destroy() {
}

insertDiscontinuity() {
}

switchLevel() {
this.ISGenerated = false;
}

remux(audioTrack,videoTrack,id3Track,textTrack,timeOffset,rawData) {
var observer = this.observer;
// generate Init Segment if needed
if (!this.ISGenerated) {
var tracks = {},
data = { tracks : tracks, unique : true },
track = videoTrack,
codec = track.codec;

if (codec) {
data.tracks.video = {
container : track.container,
codec : codec,
metadata : {
width : track.width,
height : track.height
}
};
}

track = audioTrack;
codec = track.codec;
if (codec) {
data.tracks.audio = {
container : track.container,
codec : codec,
metadata : {
channelCount : track.channelCount
}
};
}
this.ISGenerated = true;
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT,data);
}
observer.trigger(Event.FRAG_PARSING_DATA, {
data1: rawData,
startPTS: timeOffset,
startDTS: timeOffset,
type: 'audiovideo',
nb: 1,
dropped : 0
});
}
}

export default PassThroughRemuxer;
@ -1,83 +0,0 @@

// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
class AttrList {

constructor(attrs) {
if (typeof attrs === 'string') {
attrs = AttrList.parseAttrList(attrs);
}
for(var attr in attrs){
if(attrs.hasOwnProperty(attr)) {
this[attr] = attrs[attr];
}
}
}

decimalInteger(attrName) {
const intValue = parseInt(this[attrName], 10);
if (intValue > Number.MAX_SAFE_INTEGER) {
return Infinity;
}
return intValue;
}

hexadecimalInteger(attrName) {
if(this[attrName]) {
let stringValue = (this[attrName] || '0x').slice(2);
stringValue = ((stringValue.length & 1) ? '0' : '') + stringValue;

const value = new Uint8Array(stringValue.length / 2);
for (let i = 0; i < stringValue.length / 2; i++) {
value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16);
}
return value;
} else {
return null;
}
}

hexadecimalIntegerAsNumber(attrName) {
const intValue = parseInt(this[attrName], 16);
if (intValue > Number.MAX_SAFE_INTEGER) {
return Infinity;
}
return intValue;
}

decimalFloatingPoint(attrName) {
return parseFloat(this[attrName]);
}

enumeratedString(attrName) {
return this[attrName];
}

decimalResolution(attrName) {
const res = /^(\d+)x(\d+)$/.exec(this[attrName]);
if (res === null) {
return undefined;
}
return {
width: parseInt(res[1], 10),
height: parseInt(res[2], 10)
};
}

static parseAttrList(input) {
const re = /\s*(.+?)\s*=((?:\".*?\")|.*?)(?:,|$)/g;
var match, attrs = {};
while ((match = re.exec(input)) !== null) {
var value = match[2], quote = '"';

if (value.indexOf(quote) === 0 &&
value.lastIndexOf(quote) === (value.length-1)) {
value = value.slice(1, -1);
}
attrs[match[1]] = value;
}
return attrs;
}
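// Illustrative usage (hypothetical playlist attributes):
// new AttrList('BANDWIDTH=1280000,RESOLUTION=720x480').decimalInteger('BANDWIDTH') === 1280000,
// and decimalResolution('RESOLUTION') on the same instance returns { width: 720, height: 480 }.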

}

export default AttrList;
@ -1,43 +0,0 @@
var BinarySearch = {
/**
* Searches for an item in an array which matches a certain condition.
* This requires the condition to only match one item in the array,
* and for the array to be ordered.
*
* @param {Array} list The array to search.
* @param {Function} comparisonFunction
* Called and provided a candidate item as the first argument.
* Should return:
* > -1 if the item should be located at a lower index than the provided item.
* > 1 if the item should be located at a higher index than the provided item.
* > 0 if the item is the item you're looking for.
*
* @return {*} The object if it is found or null otherwise.
*/
search: function(list, comparisonFunction) {
var minIndex = 0;
var maxIndex = list.length - 1;
var currentIndex = null;
var currentElement = null;

while (minIndex <= maxIndex) {
currentIndex = (minIndex + maxIndex) / 2 | 0;
currentElement = list[currentIndex];

var comparisonResult = comparisonFunction(currentElement);
if (comparisonResult > 0) {
minIndex = currentIndex + 1;
}
else if (comparisonResult < 0) {
maxIndex = currentIndex - 1;
}
else {
return currentElement;
}
}

return null;
}
};
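// Illustrative usage (hypothetical values): find 5 in a sorted array:
// BinarySearch.search([1, 3, 5, 7], function(x) { return x === 5 ? 0 : (x < 5 ? 1 : -1); }) returns 5.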

module.exports = BinarySearch;
@ -1,381 +0,0 @@
/*
* CEA-708 interpreter
*/

class CEA708Interpreter {

constructor() {
}

attach(media) {
this.media = media;
this.display = [];
this.memory = [];
}

detach()
{
this.clear();
}

destroy() {
}

_createCue()
{
var VTTCue = window.VTTCue || window.TextTrackCue;

var cue = this.cue = new VTTCue(-1, -1, '');
cue.text = '';
cue.pauseOnExit = false;

// make sure it doesn't show up before it's ready
cue.startTime = Number.MAX_VALUE;

// show it 'forever' once we do show it
// (we'll set the end time once we know it later)
cue.endTime = Number.MAX_VALUE;

this.memory.push(cue);
}

clear()
{
var textTrack = this._textTrack;
if (textTrack && textTrack.cues)
{
while (textTrack.cues.length > 0)
{
textTrack.removeCue(textTrack.cues[0]);
}
}
}

push(timestamp, bytes)
{
if (!this.cue)
{
this._createCue();
}

var count = bytes[0] & 31;
var position = 2;
var tmpByte, ccbyte1, ccbyte2, ccValid, ccType;

for (var j=0; j<count; j++)
{
tmpByte = bytes[position++];
ccbyte1 = 0x7F & bytes[position++];
ccbyte2 = 0x7F & bytes[position++];
ccValid = ((4 & tmpByte) === 0 ? false : true);
ccType = (3 & tmpByte);
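// Note (illustrative reading): each iteration consumes one 3-byte cc_data triple, a header byte whose
// bit 2 is cc_valid and bits 0-1 are cc_type, followed by two data bytes masked to 7 bits to drop the parity bit.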

if (ccbyte1 === 0 && ccbyte2 === 0)
{
continue;
}

if (ccValid)
{
if (ccType === 0) // || ccType === 1
{
// Standard Characters
if (0x20 & ccbyte1 || 0x40 & ccbyte1)
{
this.cue.text += this._fromCharCode(ccbyte1) + this._fromCharCode(ccbyte2);
}
// Special Characters
else if ((ccbyte1 === 0x11 || ccbyte1 === 0x19) && ccbyte2 >= 0x30 && ccbyte2 <= 0x3F)
{
// extended chars, e.g. musical note, accents
switch (ccbyte2)
{
case 48:
this.cue.text += '®';
break;
case 49:
this.cue.text += '°';
break;
case 50:
this.cue.text += '½';
break;
case 51:
this.cue.text += '¿';
break;
case 52:
this.cue.text += '™';
break;
case 53:
this.cue.text += '¢';
break;
case 54:
this.cue.text += '';
break;
case 55:
this.cue.text += '£';
break;
case 56:
this.cue.text += '♪';
break;
case 57:
this.cue.text += ' ';
break;
case 58:
this.cue.text += 'è';
break;
case 59:
this.cue.text += 'â';
break;
case 60:
this.cue.text += 'ê';
break;
case 61:
this.cue.text += 'î';
break;
case 62:
this.cue.text += 'ô';
break;
case 63:
this.cue.text += 'û';
break;
}
}
if ((ccbyte1 === 0x11 || ccbyte1 === 0x19) && ccbyte2 >= 0x20 && ccbyte2 <= 0x2F)
{
// Mid-row codes: color/underline
switch (ccbyte2)
{
case 0x20:
// White
break;
case 0x21:
// White Underline
break;
case 0x22:
// Green
break;
case 0x23:
// Green Underline
break;
case 0x24:
// Blue
break;
case 0x25:
// Blue Underline
break;
case 0x26:
// Cyan
break;
case 0x27:
// Cyan Underline
break;
case 0x28:
// Red
break;
case 0x29:
// Red Underline
break;
case 0x2A:
// Yellow
break;
case 0x2B:
// Yellow Underline
break;
case 0x2C:
// Magenta
break;
case 0x2D:
// Magenta Underline
break;
case 0x2E:
// Italics
break;
case 0x2F:
// Italics Underline
break;
}
}
if ((ccbyte1 === 0x14 || ccbyte1 === 0x1C) && ccbyte2 >= 0x20 && ccbyte2 <= 0x2F)
{
// Mid-row codes: color/underline
switch (ccbyte2)
{
case 0x20:
// TODO: shouldn't affect roll-ups...
this._clearActiveCues(timestamp);
// RCL: Resume Caption Loading
// begin pop on
break;
case 0x21:
// BS: Backspace
this.cue.text = this.cue.text.substr(0, this.cue.text.length-1);
break;
case 0x22:
// AOF: reserved (formerly alarm off)
break;
case 0x23:
// AON: reserved (formerly alarm on)
break;
case 0x24:
// DER: Delete to end of row
break;
case 0x25:
// RU2: roll-up 2 rows
//this._rollup(2);
break;
case 0x26:
// RU3: roll-up 3 rows
//this._rollup(3);
break;
case 0x27:
// RU4: roll-up 4 rows
//this._rollup(4);
break;
case 0x28:
// FON: Flash on
break;
case 0x29:
// RDC: Resume direct captioning
this._clearActiveCues(timestamp);
break;
case 0x2A:
// TR: Text Restart
break;
case 0x2B:
// RTD: Resume Text Display
break;
case 0x2C:
// EDM: Erase Displayed Memory
this._clearActiveCues(timestamp);
break;
case 0x2D:
// CR: Carriage Return
// only affects roll-up
//this._rollup(1);
break;
case 0x2E:
// ENM: Erase non-displayed memory
this._text = '';
break;
case 0x2F:
this._flipMemory(timestamp);
// EOC: End of caption
// hide any displayed captions and show any hidden one
break;
}
}
if ((ccbyte1 === 0x17 || ccbyte1 === 0x1F) && ccbyte2 >= 0x21 && ccbyte2 <= 0x23)
{
// Mid-row codes: color/underline
switch (ccbyte2)
{
case 0x21:
// TO1: tab offset 1 column
break;
case 0x22:
// TO2: tab offset 2 columns
break;
case 0x23:
// TO3: tab offset 3 columns
break;
}
}
else {
// Probably a pre-amble address code
}
}
}
}
}

_fromCharCode(tmpByte)
{
switch (tmpByte)
{
case 42:
return 'á';

case 2:
return 'á';

case 2:
return 'é';

case 4:
return 'í';

case 5:
return 'ó';

case 6:
return 'ú';

case 3:
return 'ç';

case 4:
return '÷';

case 5:
return 'Ñ';

case 6:
return 'ñ';

case 7:
return '█';

default:
return String.fromCharCode(tmpByte);
}
}

_flipMemory(timestamp)
{
this._clearActiveCues(timestamp);
this._flushCaptions(timestamp);
}

_flushCaptions(timestamp)
{
if (!this._has708)
{
this._textTrack = this.media.addTextTrack('captions', 'English', 'en');
this._has708 = true;
}

for(let memoryItem of this.memory)
{
memoryItem.startTime = timestamp;
this._textTrack.addCue(memoryItem);
this.display.push(memoryItem);
}

this.memory = [];
this.cue = null;
}

_clearActiveCues(timestamp)
{
for (let displayItem of this.display)
{
displayItem.endTime = timestamp;
}

this.display = [];
}

/* _rollUp(n)
{
// TODO: implement roll-up captions
}
*/
_clearBufferedCues()
{
//remove them all...
}

}

export default CEA708Interpreter;

@ -1,37 +0,0 @@
/*
* compute an Exponential Weighted moving average
* - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
* - heavily inspired from shaka-player
*/

class EWMA {

// About half of the estimated value will be from the last |halfLife| samples by weight.
constructor(halfLife) {
// Larger values of alpha expire historical data more slowly.
this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
this.estimate_ = 0;
this.totalWeight_ = 0;
}

sample(weight,value) {
var adjAlpha = Math.pow(this.alpha_, weight);
this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
this.totalWeight_ += weight;
}

getTotalWeight() {
return this.totalWeight_;
}

getEstimate() {
if (this.alpha_) {
var zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
return this.estimate_ / zeroFactor;
} else {
return this.estimate_;
}
}
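// Worked example (illustrative values): with halfLife = 4, alpha_ = 0.5^(1/4); after a single sample(4, 1000),
// estimate_ is 500 and totalWeight_ is 4, but zeroFactor = 1 - alpha_^4 = 0.5, so getEstimate() returns 1000:
// the zero-bias correction makes a lone sample come back at its own value.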
}

export default EWMA;
@ -1,16 +0,0 @@
class Hex {

static hexDump(array) {
var i, str = '';
for(i = 0; i < array.length; i++) {
var h = array[i].toString(16);
if (h.length < 2) {
h = '0' + h;
}
str += h;
}
return str;
}
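// Illustrative usage: Hex.hexDump(new Uint8Array([0, 255, 16])) returns '00ff10'.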
}

export default Hex;
@ -1,73 +0,0 @@
'use strict';

function noop() {}

const fakeLogger = {
trace: noop,
debug: noop,
log: noop,
warn: noop,
info: noop,
error: noop
};

let exportedLogger = fakeLogger;

//let lastCallTime;
// function formatMsgWithTimeInfo(type, msg) {
//   const now = Date.now();
//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
//   lastCallTime = now;
//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
//   return msg;
// }

function formatMsg(type, msg) {
msg = '[' + type + '] > ' + msg;
return msg;
}

function consolePrintFn(type) {
const func = window.console[type];
if (func) {
return function(...args) {
if(args[0]) {
args[0] = formatMsg(type, args[0]);
}
func.apply(window.console, args);
};
}
return noop;
}

function exportLoggerFunctions(debugConfig, ...functions) {
functions.forEach(function(type) {
exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
});
}

export var enableLogs = function(debugConfig) {
if (debugConfig === true || typeof debugConfig === 'object') {
exportLoggerFunctions(debugConfig,
// Remove out from list here to hard-disable a log-level
//'trace',
'debug',
'log',
'info',
'warn',
'error'
);
// Some browsers don't allow to use bind on console object anyway
// fallback to default if needed
try {
exportedLogger.log();
} catch (e) {
exportedLogger = fakeLogger;
}
}
else {
exportedLogger = fakeLogger;
}
};
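// Illustrative usage: enableLogs(true) routes debug/log/info/warn/error to window.console (prefixed by formatMsg),
// while enableLogs(false) leaves the no-op fakeLogger in place.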

export var logger = exportedLogger;
@ -1,84 +0,0 @@
var URLHelper = {
// build an absolute URL from a relative one using the provided baseURL
// if relativeURL is an absolute URL it will be returned as is.
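// Illustrative usage (hypothetical URLs):
// buildAbsoluteURL('http://example.com/a/b/playlist.m3u8', 'chunk_1.ts') returns 'http://example.com/a/b/chunk_1.ts',
// while an already absolute relativeURL such as 'https://cdn.example.com/x.ts' is returned unchanged.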
buildAbsoluteURL: function(baseURL, relativeURL) {
// remove any remaining space and CRLF
relativeURL = relativeURL.trim();
if (/^[a-z]+:/i.test(relativeURL)) {
// complete url, not relative
return relativeURL;
}

var relativeURLQuery = null;
var relativeURLHash = null;

var relativeURLHashSplit = /^([^#]*)(.*)$/.exec(relativeURL);
if (relativeURLHashSplit) {
relativeURLHash = relativeURLHashSplit[2];
relativeURL = relativeURLHashSplit[1];
}
var relativeURLQuerySplit = /^([^\?]*)(.*)$/.exec(relativeURL);
if (relativeURLQuerySplit) {
relativeURLQuery = relativeURLQuerySplit[2];
relativeURL = relativeURLQuerySplit[1];
}

var baseURLHashSplit = /^([^#]*)(.*)$/.exec(baseURL);
if (baseURLHashSplit) {
baseURL = baseURLHashSplit[1];
}
var baseURLQuerySplit = /^([^\?]*)(.*)$/.exec(baseURL);
if (baseURLQuerySplit) {
baseURL = baseURLQuerySplit[1];
}

var baseURLDomainSplit = /^(([a-z]+:)?\/\/[a-z0-9\.\-_~]+(:[0-9]+)?)?(\/.*)$/i.exec(baseURL);
if (!baseURLDomainSplit) {
throw new Error('Error trying to parse base URL.');
}

// e.g. 'http:', 'https:', ''
var baseURLProtocol = baseURLDomainSplit[2] || '';
// e.g. 'http://example.com', '//example.com', ''
var baseURLProtocolDomain = baseURLDomainSplit[1] || '';
// e.g. '/a/b/c/playlist.m3u8'
var baseURLPath = baseURLDomainSplit[4];

var builtURL = null;
if (/^\/\//.test(relativeURL)) {
// relative url starts with '//' so copy protocol (which may be '' if baseUrl didn't provide one)
builtURL = baseURLProtocol+'//'+URLHelper.buildAbsolutePath('', relativeURL.substring(2));
}
else if (/^\//.test(relativeURL)) {
// relative url starts with '/' so start from root of domain
builtURL = baseURLProtocolDomain+'/'+URLHelper.buildAbsolutePath('', relativeURL.substring(1));
}
else {
builtURL = URLHelper.buildAbsolutePath(baseURLProtocolDomain+baseURLPath, relativeURL);
}

// put the query and hash parts back
if (relativeURLQuery) {
builtURL += relativeURLQuery;
}
if (relativeURLHash) {
builtURL += relativeURLHash;
}
return builtURL;
},

// build an absolute path using the provided basePath
// adapted from https://developer.mozilla.org/en-US/docs/Web/API/document/cookie#Using_relative_URLs_in_the_path_parameter
// this does not handle the case where relativePath is "/" or "//". These cases should be handled outside this.
buildAbsolutePath: function(basePath, relativePath) {
var sRelPath = relativePath;
var nUpLn, sDir = '', sPath = basePath.replace(/[^\/]*$/, sRelPath.replace(/(\/|^)(?:\.?\/+)+/g, '$1'));
for (var nEnd, nStart = 0; nEnd = sPath.indexOf('/../', nStart), nEnd > -1; nStart = nEnd + nUpLn) {
nUpLn = /^\/(?:\.\.\/)*/.exec(sPath.slice(nEnd))[0].length;
sDir = (sDir + sPath.substring(nStart, nEnd)).replace(new RegExp('(?:\\\/+[^\\\/]*){0,' + ((nUpLn - 1) / 3) + '}$'), '/');
}
return sDir + sPath.substr(nStart);
}
};

module.exports = URLHelper;
@ -1,122 +0,0 @@
/**
* XHR based loader
*/

import {logger} from '../utils/logger';

class XhrLoader {

constructor(config) {
if (config && config.xhrSetup) {
this.xhrSetup = config.xhrSetup;
}
}

destroy() {
this.abort();
this.loader = null;
}

abort() {
var loader = this.loader,
timeoutHandle = this.timeoutHandle;
if (loader && loader.readyState !== 4) {
this.stats.aborted = true;
loader.abort();
}
if (timeoutHandle) {
window.clearTimeout(timeoutHandle);
}
}

load(url, responseType, onSuccess, onError, onTimeout, timeout, maxRetry, retryDelay, onProgress = null, frag = null) {
this.url = url;
if (frag && !isNaN(frag.byteRangeStartOffset) && !isNaN(frag.byteRangeEndOffset)) {
this.byteRange = frag.byteRangeStartOffset + '-' + (frag.byteRangeEndOffset-1);
}
this.responseType = responseType;
this.onSuccess = onSuccess;
this.onProgress = onProgress;
this.onTimeout = onTimeout;
this.onError = onError;
this.stats = {trequest: performance.now(), retry: 0};
this.timeout = timeout;
this.maxRetry = maxRetry;
this.retryDelay = retryDelay;
this.loadInternal();
}

loadInternal() {
var xhr;

if (typeof XDomainRequest !== 'undefined') {
xhr = this.loader = new XDomainRequest();
} else {
xhr = this.loader = new XMLHttpRequest();
}

xhr.onloadend = this.loadend.bind(this);
xhr.onprogress = this.loadprogress.bind(this);

xhr.open('GET', this.url, true);
if (this.byteRange) {
xhr.setRequestHeader('Range', 'bytes=' + this.byteRange);
}
xhr.responseType = this.responseType;
let stats = this.stats;
stats.tfirst = 0;
stats.loaded = 0;
if (this.xhrSetup) {
this.xhrSetup(xhr, this.url);
}
this.timeoutHandle = window.setTimeout(this.loadtimeout.bind(this), this.timeout);
xhr.send();
}

loadend(event) {
var xhr = event.currentTarget,
status = xhr.status,
stats = this.stats;
// don't proceed if xhr has been aborted
if (!stats.aborted) {
// http status between 200 to 299 are all successful
if (status >= 200 && status < 300) {
window.clearTimeout(this.timeoutHandle);
stats.tload = Math.max(stats.tfirst,performance.now());
this.onSuccess(event, stats);
} else {
// error ...
if (stats.retry < this.maxRetry) {
logger.warn(`${status} while loading ${this.url}, retrying in ${this.retryDelay}...`);
this.destroy();
window.setTimeout(this.loadInternal.bind(this), this.retryDelay);
// exponential backoff
this.retryDelay = Math.min(2 * this.retryDelay, 64000);
stats.retry++;
} else {
window.clearTimeout(this.timeoutHandle);
logger.error(`${status} while loading ${this.url}` );
this.onError(event);
}
}
}
}

loadtimeout(event) {
logger.warn(`timeout while loading ${this.url}` );
this.onTimeout(event, this.stats);
}

loadprogress(event) {
var stats = this.stats;
if (stats.tfirst === 0) {
stats.tfirst = Math.max(performance.now(), stats.trequest);
}
stats.loaded = event.loaded;
if (this.onProgress) {
this.onProgress(event, stats);
}
}
}

export default XhrLoader;