mirror of https://github.com/jellyfin/jellyfin-web synced 2025-03-30 19:56:21 +00:00

update components

This commit is contained in:
Luke Pulverenti 2016-01-13 16:22:46 -05:00
parent 2c580f4f73
commit 3578670ef6
37 changed files with 1463 additions and 1145 deletions

View file

@ -16,12 +16,12 @@
}, },
"devDependencies": {}, "devDependencies": {},
"ignore": [], "ignore": [],
"version": "1.0.24", "version": "1.0.25",
"_release": "1.0.24", "_release": "1.0.25",
"_resolution": { "_resolution": {
"type": "version", "type": "version",
"tag": "1.0.24", "tag": "1.0.25",
"commit": "4b9b655cad98bd908e78352d5bbed028644505d4" "commit": "f2e83b0e30527b5182ceb043d170ad7188368245"
}, },
"_source": "git://github.com/MediaBrowser/Emby.ApiClient.Javascript.git", "_source": "git://github.com/MediaBrowser/Emby.ApiClient.Javascript.git",
"_target": "~1.0.3", "_target": "~1.0.3",

View file

@ -1,4 +1,4 @@
(function (globalScope, localStorage, sessionStorage) { (function (globalScope) {
function myStore(defaultObject) { function myStore(defaultObject) {
@ -45,7 +45,7 @@
}; };
} }
globalScope.appStorage = new myStore(localStorage); globalScope.appStorage = new myStore(globalScope.localStorage);
globalScope.sessionStore = new myStore(sessionStorage); globalScope.sessionStore = new myStore(globalScope.sessionStorage);
})(window, window.localStorage, window.sessionStorage); })(this);

View file

@ -15,12 +15,12 @@
}, },
"devDependencies": {}, "devDependencies": {},
"ignore": [], "ignore": [],
"version": "1.0.16", "version": "1.0.17",
"_release": "1.0.16", "_release": "1.0.17",
"_resolution": { "_resolution": {
"type": "version", "type": "version",
"tag": "1.0.16", "tag": "1.0.17",
"commit": "8058a1a93ad995fd3b7f56019719c33654698df6" "commit": "f70b1e8c554d92f4a7e3f1e8358ba42971d82492"
}, },
"_source": "git://github.com/MediaBrowser/emby-webcomponents.git", "_source": "git://github.com/MediaBrowser/emby-webcomponents.git",
"_target": "~1.0.0", "_target": "~1.0.0",

View file

@ -171,17 +171,6 @@
} }
}); });
var videoAudioCodecs = [];
if (canPlayMp3) {
videoAudioCodecs.push('mp3');
}
if (canPlayAac) {
videoAudioCodecs.push('aac');
}
if (canPlayAc3) {
videoAudioCodecs.push('ac3');
}
// Can't use mkv on mobile because we have to use the native player controls and they won't be able to seek it // Can't use mkv on mobile because we have to use the native player controls and they won't be able to seek it
if (canPlayMkv && !browser.mobile) { if (canPlayMkv && !browser.mobile) {
profile.TranscodingProfiles.push({ profile.TranscodingProfiles.push({
@ -246,7 +235,7 @@
}] }]
}); });
var videoAudioChannels = browser.safari ? '2' : '6'; var videoAudioChannels = '6';
profile.CodecProfiles.push({ profile.CodecProfiles.push({
Type: 'VideoAudio', Type: 'VideoAudio',

View file

@ -1,6 +1,6 @@
{ {
"name": "hls.js", "name": "hls.js",
"version": "0.3.15", "version": "0.4.5",
"description": "Media Source Extension - HLS library, by/for Dailymotion", "description": "Media Source Extension - HLS library, by/for Dailymotion",
"homepage": "https://github.com/dailymotion/hls.js", "homepage": "https://github.com/dailymotion/hls.js",
"authors": [ "authors": [
@ -15,13 +15,14 @@
"test", "test",
"tests" "tests"
], ],
"_release": "0.3.15", "_release": "0.4.5",
"_resolution": { "_resolution": {
"type": "version", "type": "version",
"tag": "v0.3.15", "tag": "v0.4.5",
"commit": "d3ecf55b89063d7ba3bd70800d5839755b0c7e63" "commit": "908ac4a44a182bdbede9c1830828983c18532ca0"
}, },
"_source": "git://github.com/dailymotion/hls.js.git", "_source": "git://github.com/dailymotion/hls.js.git",
"_target": "~0.3.11", "_target": "~0.4.5",
"_originalSource": "dailymotion/hls.js" "_originalSource": "dailymotion/hls.js",
"_direct": true
} }

View file

@ -184,17 +184,21 @@ configuration parameters could be provided to hls.js upon instantiation of Hls Object
debug : false, debug : false,
autoStartLoad : true, autoStartLoad : true,
maxBufferLength : 30, maxBufferLength : 30,
maxMaxBufferLength : 600,
maxBufferSize : 60*1000*1000, maxBufferSize : 60*1000*1000,
liveSyncDurationCount : 3, liveSyncDurationCount : 3,
liveMaxLatencyDurationCount: 10, liveMaxLatencyDurationCount: 10,
enableWorker : true, enableWorker : true,
enableSoftwareAES: true, enableSoftwareAES: true,
fragLoadingTimeOut : 20000,
fragLoadingMaxRetry : 6,
fragLoadingRetryDelay : 500,
manifestLoadingTimeOut : 10000, manifestLoadingTimeOut : 10000,
manifestLoadingMaxRetry : 6, manifestLoadingMaxRetry : 6,
manifestLoadingRetryDelay : 500, manifestLoadingRetryDelay : 500,
levelLoadingTimeOut : 10000,
levelLoadingMaxRetry : 6,
levelLoadingRetryDelay : 500,
fragLoadingTimeOut : 20000,
fragLoadingMaxRetry : 6,
fragLoadingRetryDelay : 500,
fpsDroppedMonitoringPeriod : 5000, fpsDroppedMonitoringPeriod : 5000,
fpsDroppedMonitoringThreshold : 0.2, fpsDroppedMonitoringThreshold : 0.2,
appendErrorMaxRetry : 3, appendErrorMaxRetry : 3,
@ -209,6 +213,10 @@ configuration parameters could be provided to hls.js upon instantiation of Hls Object
var hls = new Hls(config); var hls = new Hls(config);
``` ```
#### ```Hls.DefaultConfig get/set```
this getter/setter allows retrieving and overriding the Hls default configuration.
this configuration will be applied by default to all instances.
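for illustration, a minimal sketch of using this getter/setter (the option values below are arbitrary, not recommendations):

```
// retrieve the current defaults, tweak them, and write them back;
// every Hls instance created afterwards picks up the overridden values.
var defaults = Hls.DefaultConfig;
defaults.maxBufferLength = 20;        // illustrative value
defaults.maxMaxBufferLength = 300;    // illustrative value
Hls.DefaultConfig = defaults;

var hls = new Hls();                  // uses the overridden defaults
```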
#### ```debug``` #### ```debug```
(default false) (default false)
@ -225,10 +233,24 @@ a logger object could also be provided for custom logging : ```config.debug=cust
(default 30s) (default 30s)
maximum buffer Length in seconds. if buffer length is/become less than this value, a new fragment will be loaded. maximum buffer Length in seconds. if buffer length is/become less than this value, a new fragment will be loaded.
this is the guaranteed buffer length hls.js will try to reach, regardless of maxBufferSize.
#### ```maxBufferSize``` #### ```maxBufferSize```
(default 60 MB) (default 60 MB)
maximum buffer size in bytes. if buffer size upfront is bigger than this value, no fragment will be loaded. 'minimum' maximum buffer size in bytes. if buffer size upfront is bigger than this value, no fragment will be loaded.
#### ```maxMaxBufferLength```
(default 600s)
maximum buffer length in seconds. hls.js will never exceed this value, even if maxBufferSize is not reached yet.
hls.js tries to buffer up to a maximum number of bytes (60 MB by default) rather than up to a maximum number of seconds.
this mimics the browser behaviour (the buffer eviction algorithm starts once the browser detects that the video buffer size has reached a limit in bytes).
config.maxBufferLength is the minimum guaranteed buffer length that hls.js will try to achieve, even if reaching that length requires buffering more than maxBufferSize bytes.
maxMaxBufferLength acts as a capping value: if the bitrate is really low, you could need more than one hour of buffer to fill 60 MB.
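taken together, the three limits can be read as follows. this is an illustrative sketch of the documented behaviour, not the actual hls.js code, and the function name is made up:

```
// simplified decision: should another fragment be loaded?
function shouldLoadNextFragment(bufferedSeconds, bufferedBytes, config) {
  // hard cap in seconds, never exceeded
  if (bufferedSeconds >= config.maxMaxBufferLength) { return false; }
  // guaranteed length, reached regardless of maxBufferSize
  if (bufferedSeconds < config.maxBufferLength) { return true; }
  // between the two length limits, the byte budget decides
  return bufferedBytes < config.maxBufferSize;
}
```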
#### ```liveSyncDurationCount``` #### ```liveSyncDurationCount```
(default 3) (default 3)
@ -255,19 +277,19 @@ enable webworker (if available on browser) for TS demuxing/MP4 remuxing, to impr
enable to use JavaScript version AES decryption for fallback of WebCrypto API. enable to use JavaScript version AES decryption for fallback of WebCrypto API.
#### ```fragLoadingTimeOut```/```manifestLoadingTimeOut``` #### ```fragLoadingTimeOut```/```manifestLoadingTimeOut```/```levelLoadingTimeOut```
(default 60000ms for fragment/10000ms for manifest) (default 60000ms for fragment/10000ms for level and manifest)
URL Loader timeout. URL Loader timeout.
A timeout callback will be triggered if loading duration exceeds this timeout. A timeout callback will be triggered if loading duration exceeds this timeout.
no further action will be done : the load operation will not be cancelled/aborted. no further action will be done : the load operation will not be cancelled/aborted.
It is up to the application to catch this event and treat it as needed. It is up to the application to catch this event and treat it as needed.
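since hls.js only reports the timeout, the application has to react itself; a minimal sketch, using the error details listed elsewhere in this commit:

```
hls.on(Hls.Events.ERROR, function (event, data) {
  if (data.details === Hls.ErrorDetails.FRAG_LOAD_TIMEOUT) {
    // the load operation was not aborted by hls.js;
    // decide here whether to retry, switch level, etc.
    console.warn('fragment load timed out: ' + data.frag.url);
  }
});
```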
#### ```fragLoadingMaxRetry```/```manifestLoadingMaxRetry``` #### ```fragLoadingMaxRetry```/```manifestLoadingMaxRetry```/```levelLoadingMaxRetry```
(default 3) (default 3)
max nb of load retry max nb of load retry
#### ```fragLoadingRetryDelay```/```manifestLoadingRetryDelay``` #### ```fragLoadingRetryDelay```/```manifestLoadingRetryDelay```/```levelLoadingRetryDelay```
(default 500ms) (default 1000ms)
initial delay between XmlHttpRequest error and first load retry (in ms) initial delay between XmlHttpRequest error and first load retry (in ms)
any I/O error will trigger retries every 500ms,1s,2s,4s,8s, ... capped to 64s (exponential backoff) any I/O error will trigger retries every 500ms,1s,2s,4s,8s, ... capped to 64s (exponential backoff)
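the documented backoff boils down to doubling the initial delay on each retry and capping it at 64s; a small sketch (hypothetical helper, not part of hls.js):

```
// 500ms, 1s, 2s, 4s, ... capped at 64s for a 500ms initial delay
function retryDelayMs(retryCount, initialDelayMs) {
  return Math.min(Math.pow(2, retryCount) * initialDelayMs, 64000);
}
```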
@ -485,7 +507,7 @@ full list of Events available below :
- `Hls.Events.FRAG_LOADING` - fired when a fragment loading starts - `Hls.Events.FRAG_LOADING` - fired when a fragment loading starts
- data: { frag : fragment object} - data: { frag : fragment object}
- `Hls.Events.FRAG_LOAD_PROGRESS` - fired when a fragment load is in progress - `Hls.Events.FRAG_LOAD_PROGRESS` - fired when a fragment load is in progress
- data: { frag : fragment object, stats : progress event } - data: { frag : fragment object with frag.loaded=stats.loaded, stats : { trequest, tfirst, loaded} }
- `Hls.Events.FRAG_LOADED` - fired when a fragment loading is completed - `Hls.Events.FRAG_LOADED` - fired when a fragment loading is completed
- data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length}} - data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length}}
- `Hls.Events.FRAG_PARSING_INIT_SEGMENT` - fired when Init Segment has been extracted from fragment - `Hls.Events.FRAG_PARSING_INIT_SEGMENT` - fired when Init Segment has been extracted from fragment
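a minimal sketch of consuming the updated `FRAG_LOAD_PROGRESS` payload shown above (assuming the `frag.sn` sequence-number property from hls.js' fragment model, and that `stats.trequest` is a performance.now() timestamp):

```
hls.on(Hls.Events.FRAG_LOAD_PROGRESS, function (event, data) {
  // stats.trequest is assumed to be the performance.now() value taken at request start
  var elapsedMs = performance.now() - data.stats.trequest;
  console.log('fragment ' + data.frag.sn + ': ' + data.stats.loaded +
              ' bytes after ' + elapsedMs.toFixed(0) + ' ms');
});
```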

View file

@ -1,6 +1,6 @@
{ {
"name": "hls.js", "name": "hls.js",
"version": "0.3.15", "version": "0.4.5",
"description": "Media Source Extension - HLS library, by/for Dailymotion", "description": "Media Source Extension - HLS library, by/for Dailymotion",
"homepage": "https://github.com/dailymotion/hls.js", "homepage": "https://github.com/dailymotion/hls.js",
"authors": [ "authors": [

View file

@ -75,6 +75,7 @@ header {
<label class="innerControls"><input id="enableStreaming" type=checkbox checked/> Enable Streaming</label> <label class="innerControls"><input id="enableStreaming" type=checkbox checked/> Enable Streaming</label>
<label class="innerControls"><input id="autoRecoverError" type=checkbox checked/> Auto-Recover Media Error</label> <label class="innerControls"><input id="autoRecoverError" type=checkbox checked/> Auto-Recover Media Error</label>
<label class="innerControls"><input id="enableWorker" type=checkbox checked/> Enable Worker</label> <label class="innerControls"><input id="enableWorker" type=checkbox checked/> Enable Worker</label>
<label class="innerControls">Level Capping <input id="levelCapping" type=number/></label>
<div id="StreamPermalink" class="innerControls"></div> <div id="StreamPermalink" class="innerControls"></div>
<div> <div>
<select id="videoSize" style="float:left"> <select id="videoSize" style="float:left">
@ -205,9 +206,11 @@ $(document).ready(function() {
$('#enableStreaming').click(function() { enableStreaming = this.checked; loadStream($('#streamURL').val()); }); $('#enableStreaming').click(function() { enableStreaming = this.checked; loadStream($('#streamURL').val()); });
$('#autoRecoverError').click(function() { autoRecoverError = this.checked; updatePermalink();}); $('#autoRecoverError').click(function() { autoRecoverError = this.checked; updatePermalink();});
$('#enableWorker').click(function() { enableWorker = this.checked; updatePermalink();}); $('#enableWorker').click(function() { enableWorker = this.checked; updatePermalink();});
$('#levelCapping').change(function() { levelCapping = this.value; updatePermalink();});
$('#enableStreaming').prop( "checked", enableStreaming ); $('#enableStreaming').prop( "checked", enableStreaming );
$('#autoRecoverError').prop( "checked", autoRecoverError ); $('#autoRecoverError').prop( "checked", autoRecoverError );
$('#enableWorker').prop( "checked", enableWorker ); $('#enableWorker').prop( "checked", enableWorker );
$('#levelCapping').val(levelCapping);
}); });
@ -216,6 +219,7 @@ $(document).ready(function() {
enableStreaming = JSON.parse(getURLParam('enableStreaming',true)) enableStreaming = JSON.parse(getURLParam('enableStreaming',true))
autoRecoverError = JSON.parse(getURLParam('autoRecoverError',true)), autoRecoverError = JSON.parse(getURLParam('autoRecoverError',true)),
enableWorker = JSON.parse(getURLParam('enableWorker',true)); enableWorker = JSON.parse(getURLParam('enableWorker',true));
levelCapping = JSON.parse(getURLParam('levelCapping',-1));
var video = $('#video')[0]; var video = $('#video')[0];
video.volume = 0.05; video.volume = 0.05;
@ -247,6 +251,7 @@ $(document).ready(function() {
hls = new Hls({debug:true, enableWorker : enableWorker}); hls = new Hls({debug:true, enableWorker : enableWorker});
$("#HlsStatus").text('loading manifest and attaching video element...'); $("#HlsStatus").text('loading manifest and attaching video element...');
hls.loadSource(url); hls.loadSource(url);
hls.autoLevelCapping = levelCapping;
hls.attachMedia(video); hls.attachMedia(video);
hls.on(Hls.Events.MEDIA_ATTACHED,function() { hls.on(Hls.Events.MEDIA_ATTACHED,function() {
$("#HlsStatus").text('MediaSource attached...'); $("#HlsStatus").text('MediaSource attached...');
@ -429,17 +434,29 @@ $(document).ready(function() {
$("#HlsStatus").html("cannot Load <a href=\"" + data.url + "\">" + url + "</a><br>Reason:Load " + data.event.type); $("#HlsStatus").html("cannot Load <a href=\"" + data.url + "\">" + url + "</a><br>Reason:Load " + data.event.type);
} }
break; break;
case Hls.ErrorDetails.MANIFEST_LOAD_TIMEOUT:
$("#HlsStatus").text("timeout while loading manifest");
break;
case Hls.ErrorDetails.MANIFEST_PARSING_ERROR:
$("#HlsStatus").text("error while parsing manifest:" + data.reason);
break;
case Hls.ErrorDetails.LEVEL_LOAD_ERROR: case Hls.ErrorDetails.LEVEL_LOAD_ERROR:
$("#HlsStatus").text("error while trying to load level playlist"); $("#HlsStatus").text("error while loading level playlist");
break;
case Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT:
$("#HlsStatus").text("timeout while loading level playlist");
break; break;
case Hls.ErrorDetails.LEVEL_SWITCH_ERROR: case Hls.ErrorDetails.LEVEL_SWITCH_ERROR:
$("#HlsStatus").text("error while trying to switch to level " + data.level); $("#HlsStatus").text("error while trying to switch to level " + data.level);
break; break;
case Hls.ErrorDetails.FRAG_LOAD_ERROR: case Hls.ErrorDetails.FRAG_LOAD_ERROR:
$("#HlsStatus").text("error while trying to load fragment " + data.frag.url); $("#HlsStatus").text("error while loading fragment " + data.frag.url);
break; break;
case Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT: case Hls.ErrorDetails.FRAG_LOAD_TIMEOUT:
$("#HlsStatus").text("timeout while trying to load level playlist"); $("#HlsStatus").text("timeout while loading fragment " + data.frag.url);
break;
case Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR:
$("#HlsStatus").text("Frag Loop Loading Error");
break; break;
case Hls.ErrorDetails.FRAG_DECRYPT_ERROR: case Hls.ErrorDetails.FRAG_DECRYPT_ERROR:
$("#HlsStatus").text("Decrypting Error:" + data.reason); $("#HlsStatus").text("Decrypting Error:" + data.reason);
@ -447,15 +464,18 @@ $(document).ready(function() {
case Hls.ErrorDetails.FRAG_PARSING_ERROR: case Hls.ErrorDetails.FRAG_PARSING_ERROR:
$("#HlsStatus").text("Parsing Error:" + data.reason); $("#HlsStatus").text("Parsing Error:" + data.reason);
break; break;
case Hls.ErrorDetails.KEY_LOAD_ERROR:
$("#HlsStatus").text("error while loading key " + data.frag.decryptdata.uri);
break;
case Hls.ErrorDetails.KEY_LOAD_TIMEOUT:
$("#HlsStatus").text("timeout while loading key " + data.frag.decryptdata.uri);
break;
case Hls.ErrorDetails.BUFFER_APPEND_ERROR: case Hls.ErrorDetails.BUFFER_APPEND_ERROR:
$("#HlsStatus").text("Buffer Append Error"); $("#HlsStatus").text("Buffer Append Error");
break; break;
case Hls.ErrorDetails.BUFFER_APPENDING_ERROR: case Hls.ErrorDetails.BUFFER_APPENDING_ERROR:
$("#HlsStatus").text("Buffer Appending Error"); $("#HlsStatus").text("Buffer Appending Error");
break; break;
case Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR:
$("#HlsStatus").text("Frag Loop Loading Error");
break;
default: default:
break; break;
} }
@ -835,7 +855,7 @@ function timeRangesToString(r) {
} else { } else {
html3 += button_disabled; html3 += button_disabled;
} }
html3 += 'onclick="hls.autoLevelCapping=-1;updateLevelInfo()">auto</button>'; html3 += 'onclick="levelCapping=hls.autoLevelCapping=-1;updateLevelInfo();updatePermalink();">auto</button>';
var html4 = button_template; var html4 = button_template;
if(hls.autoLevelEnabled) { if(hls.autoLevelEnabled) {
@ -872,7 +892,7 @@ function timeRangesToString(r) {
} else { } else {
html3 += button_disabled; html3 += button_disabled;
} }
html3 += 'onclick="hls.autoLevelCapping=' + i + ';updateLevelInfo()">' + levelName + '</button>'; html3 += 'onclick="levelCapping=hls.autoLevelCapping=' + i + ';updateLevelInfo();updatePermalink();">' + levelName + '</button>';
html4 += button_template; html4 += button_template;
if(hls.nextLevel === i) { if(hls.nextLevel === i) {
@ -925,7 +945,11 @@ function timeRangesToString(r) {
function updatePermalink() { function updatePermalink() {
var url = $('#streamURL').val(); var url = $('#streamURL').val();
var hlsLink = document.URL.split('?')[0] + '?src=' + encodeURIComponent(url) + '&enableStreaming=' + enableStreaming + '&autoRecoverError=' + autoRecoverError + '&enableWorker=' + enableWorker; var hlsLink = document.URL.split('?')[0] + '?src=' + encodeURIComponent(url) +
'&enableStreaming=' + enableStreaming +
'&autoRecoverError=' + autoRecoverError +
'&enableWorker=' + enableWorker +
'&levelCapping=' + levelCapping;
var description = 'permalink: ' + "<a href=\"" + hlsLink + "\">" + hlsLink + "</a>"; var description = 'permalink: ' + "<a href=\"" + hlsLink + "\">" + hlsLink + "</a>";
$("#StreamPermalink").html(description); $("#StreamPermalink").html(description);
} }

View file

@ -39,10 +39,17 @@ design idea is pretty simple :
- [src/controller/abr-controller.js][] - [src/controller/abr-controller.js][]
- in charge of determining auto quality level. - in charge of determining auto quality level.
- auto quality switch algorithm is pretty naive and simple ATM and similar to the one that could be found in google [StageFright](https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp) - auto quality switch algorithm is pretty naive and simple ATM and similar to the one that could be found in google [StageFright](https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp)
- [src/crypt/aes.js][]
- AES 128 software decryption routine, low level class handling decryption of 128 bit of data.
- [src/crypt/aes128-decrypter.js][]
- AES 128-CBC software decryption routine; a high-level class handling cipher-block chaining (CBC), which should eventually also handle padding (TODO).
- [src/crypt/decrypter.js][]
- decrypter interface; uses the WebCrypto API if available and enabled, or falls back on the AES 128 software decryption routine.
- [src/demux/aacdemuxer.js][] - [src/demux/aacdemuxer.js][]
- AAC ES demuxer - AAC ES demuxer
- extract ADTS samples from AAC ES - extract ADTS samples from AAC ES
- [src/demux/adts.js][]
- ADTS header parser helper, extract audio config from ADTS header. used by AAC ES and TS demuxer.
- [src/demux/demuxer.js][] - [src/demux/demuxer.js][]
- demuxer abstraction interface, that will either use a [Worker](https://en.wikipedia.org/wiki/Web_worker) to demux or demux inline depending on config/browser capabilities. - demuxer abstraction interface, that will either use a [Worker](https://en.wikipedia.org/wiki/Web_worker) to demux or demux inline depending on config/browser capabilities.
- also handle fragment decryption using WebCrypto API (fragment decryption is performed in main thread) - also handle fragment decryption using WebCrypto API (fragment decryption is performed in main thread)
@ -90,6 +97,8 @@ design idea is pretty simple :
- in charge of converting AVC/AAC samples provided by demuxer into fragmented ISO BMFF boxes, compatible with MediaSource - in charge of converting AVC/AAC samples provided by demuxer into fragmented ISO BMFF boxes, compatible with MediaSource
- this remuxer is able to deal with small gaps between fragments and ensure timestamp continuity. - this remuxer is able to deal with small gaps between fragments and ensure timestamp continuity.
- it notifies remuxing completion using events (```FRAG_PARSING_INIT_SEGMENT```and ```FRAG_PARSING_DATA```) - it notifies remuxing completion using events (```FRAG_PARSING_INIT_SEGMENT```and ```FRAG_PARSING_DATA```)
- [src/utils/attr-list.js][]
- Attribute List parsing helper class, used by playlist-loader
- [src/utils/binary-search.js][] - [src/utils/binary-search.js][]
- binary search helper class - binary search helper class
- [src/utils/hex.js][] - [src/utils/hex.js][]
@ -111,7 +120,11 @@ design idea is pretty simple :
[src/controller/fps-controller.js]: src/controller/fps-controller.js [src/controller/fps-controller.js]: src/controller/fps-controller.js
[src/controller/level-controller.js]: src/controller/level-controller.js [src/controller/level-controller.js]: src/controller/level-controller.js
[src/controller/mse-media-controller.js]: src/controller/mse-media-controller.js [src/controller/mse-media-controller.js]: src/controller/mse-media-controller.js
[src/crypt/aes.js]: src/crypt/aes.js
[src/crypt/aes128-decrypter.js]: src/crypt/aes128-decrypter.js
[src/crypt/decrypter.js]: src/crypt/decrypter.js
[src/demux/aacdemuxer.js]: src/demux/aacdemuxer.js [src/demux/aacdemuxer.js]: src/demux/aacdemuxer.js
[src/demux/adts.js]: src/demux/adts.js
[src/demux/demuxer.js]: src/demux/demuxer.js [src/demux/demuxer.js]: src/demux/demuxer.js
[src/demux/demuxer-inline.js]: src/demux/demuxer-inline.js [src/demux/demuxer-inline.js]: src/demux/demuxer-inline.js
[src/demux/demuxer-worker.js]: src/demux/demuxer-worker.js [src/demux/demuxer-worker.js]: src/demux/demuxer-worker.js
@ -125,6 +138,7 @@ design idea is pretty simple :
[src/remux/dummy-remuxer.js]: src/remux/dummy-remuxer.js [src/remux/dummy-remuxer.js]: src/remux/dummy-remuxer.js
[src/remux/mp4-generator.js]: src/remux/mp4-generator.js [src/remux/mp4-generator.js]: src/remux/mp4-generator.js
[src/remux/mp4-remuxer.js]: src/remux/mp4-remuxer.js [src/remux/mp4-remuxer.js]: src/remux/mp4-remuxer.js
[src/utils/attr-list.js]: src/utils/attr-list.js
[src/utils/binary-search.js]: src/utils/binary-search.js [src/utils/binary-search.js]: src/utils/binary-search.js
[src/utils/hex.js]: src/utils/hex.js [src/utils/hex.js]: src/utils/hex.js
[src/utils/logger.js]: src/utils/logger.js [src/utils/logger.js]: src/utils/logger.js
@ -151,5 +165,5 @@ design idea is pretty simple :
- if frag level is 0 or auto level switch is disabled, this error is marked as fatal and a call to ```hls.startLoad()``` could help recover it. - if frag level is 0 or auto level switch is disabled, this error is marked as fatal and a call to ```hls.startLoad()``` could help recover it.
- ```FRAG_PARSING_ERROR``` is raised by [src/demux/tsdemuxer.js][] upon TS parsing error. this error is not fatal. - ```FRAG_PARSING_ERROR``` is raised by [src/demux/tsdemuxer.js][] upon TS parsing error. this error is not fatal.
- ```FRAG_DECRYPT_ERROR``` is raised by [src/demux/demuxer.js][] upon fragment decrypting error. this error is fatal. - ```FRAG_DECRYPT_ERROR``` is raised by [src/demux/demuxer.js][] upon fragment decrypting error. this error is fatal.
- ```BUFFER_PREPARE_APPEND_ERROR``` is raised by [src/controller/mse-media-controller.js][] when an exception is raised when calling sourceBuffer.appendBuffer(). this error is non fatal and become fatal after config.appendErrorMaxRetry retries. when fatal, a call to ```hls.recoverMediaError()``` could help recover it. - ```BUFFER_APPEND_ERROR``` is raised by [src/controller/mse-media-controller.js][] when an exception is raised when calling sourceBuffer.appendBuffer(). this error is non fatal and become fatal after config.appendErrorMaxRetry retries. when fatal, a call to ```hls.recoverMediaError()``` could help recover it.
- ```BUFFER_APPENDING_ERROR``` is raised by [src/controller/mse-media-controller.js][] after SourceBuffer appending error. this error is fatal and a call to ```hls.recoverMediaError()``` could help recover it. - ```BUFFER_APPENDING_ERROR``` is raised by [src/controller/mse-media-controller.js][] after SourceBuffer appending error. this error is fatal and a call to ```hls.recoverMediaError()``` could help recover it.
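a sketch of the recovery pattern these error descriptions suggest, written from the application side (not part of hls.js itself):

```
hls.on(Hls.Events.ERROR, function (event, data) {
  if (!data.fatal) { return; }
  switch (data.type) {
    case Hls.ErrorTypes.NETWORK_ERROR:
      // e.g. a fatal FRAG_LOOP_LOADING_ERROR: try restarting the load
      hls.startLoad();
      break;
    case Hls.ErrorTypes.MEDIA_ERROR:
      // e.g. a fatal BUFFER_APPEND_ERROR / BUFFER_APPENDING_ERROR
      hls.recoverMediaError();
      break;
    default:
      hls.destroy();
      break;
  }
});
```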

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1,6 +1,6 @@
{ {
"name": "hls.js", "name": "hls.js",
"version": "0.3.15", "version": "0.4.5",
"description": "Media Source Extension - HLS library, by/for Dailymotion", "description": "Media Source Extension - HLS library, by/for Dailymotion",
"homepage": "https://github.com/dailymotion/hls.js", "homepage": "https://github.com/dailymotion/hls.js",
"authors": "Guillaume du Pontavice <guillaume.dupontavice@dailymotion.com>", "authors": "Guillaume du Pontavice <guillaume.dupontavice@dailymotion.com>",
@ -21,7 +21,7 @@
"minify": "uglifyjs dist/hls.js -c sequences=true,dead_code=true,conditionals=true,booleans=true,unused=true,if_return=true,join_vars=true,drop_console=true -m sort --screw-ie8 > dist/hls.min.js", "minify": "uglifyjs dist/hls.js -c sequences=true,dead_code=true,conditionals=true,booleans=true,unused=true,if_return=true,join_vars=true,drop_console=true -m sort --screw-ie8 > dist/hls.min.js",
"watch": "watchify --debug -s Hls src/hls.js -o dist/hls.js", "watch": "watchify --debug -s Hls src/hls.js -o dist/hls.js",
"pretest": "npm run lint", "pretest": "npm run lint",
"test": "mocha --recursive tests/unit", "test": "mocha --compilers js:babel/register --recursive tests/unit",
"lint": "jshint src/", "lint": "jshint src/",
"serve": "http-server -p 8000 .", "serve": "http-server -p 8000 .",
"open": "opener http://localhost:8000/demo/", "open": "opener http://localhost:8000/demo/",
@ -38,7 +38,10 @@
"webworkify": "^1.0.2" "webworkify": "^1.0.2"
}, },
"devDependencies": { "devDependencies": {
"arraybuffer-equal": "^1.0.4",
"babel": "^5.8.34",
"browserify": "^8.1.1", "browserify": "^8.1.1",
"deep-strict-equal": "^0.1.0",
"exorcist": "^0.4.0", "exorcist": "^0.4.0",
"http-server": "^0.7.4", "http-server": "^0.7.4",
"jshint": "^2.5.11", "jshint": "^2.5.11",

View file

@ -15,11 +15,12 @@ const State = {
IDLE : 0, IDLE : 0,
KEY_LOADING : 1, KEY_LOADING : 1,
FRAG_LOADING : 2, FRAG_LOADING : 2,
WAITING_LEVEL : 3, FRAG_LOADING_WAITING_RETRY : 3,
PARSING : 4, WAITING_LEVEL : 4,
PARSED : 5, PARSING : 5,
APPENDING : 6, PARSED : 6,
BUFFER_FLUSHING : 7 APPENDING : 7,
BUFFER_FLUSHING : 8
}; };
class MSEMediaController { class MSEMediaController {
@ -28,6 +29,7 @@ class MSEMediaController {
this.config = hls.config; this.config = hls.config;
this.audioCodecSwap = false; this.audioCodecSwap = false;
this.hls = hls; this.hls = hls;
this.ticks = 0;
// Source Buffer listeners // Source Buffer listeners
this.onsbue = this.onSBUpdateEnd.bind(this); this.onsbue = this.onSBUpdateEnd.bind(this);
this.onsbe = this.onSBUpdateError.bind(this); this.onsbe = this.onSBUpdateError.bind(this);
@ -84,6 +86,7 @@ class MSEMediaController {
this.demuxer = new Demuxer(hls); this.demuxer = new Demuxer(hls);
this.timer = setInterval(this.ontick, 100); this.timer = setInterval(this.ontick, 100);
this.level = -1; this.level = -1;
this.fragLoadError = 0;
hls.on(Event.FRAG_LOADED, this.onfl); hls.on(Event.FRAG_LOADED, this.onfl);
hls.on(Event.FRAG_PARSING_INIT_SEGMENT, this.onis); hls.on(Event.FRAG_PARSING_INIT_SEGMENT, this.onis);
hls.on(Event.FRAG_PARSING_DATA, this.onfpg); hls.on(Event.FRAG_PARSING_DATA, this.onfpg);
@ -136,6 +139,17 @@ class MSEMediaController {
} }
tick() { tick() {
this.ticks++;
if (this.ticks === 1) {
this.doTick();
if (this.ticks > 1) {
setTimeout(this.tick, 1);
}
this.ticks = 0;
}
}
doTick() {
var pos, level, levelDetails, hls = this.hls; var pos, level, levelDetails, hls = this.hls;
switch(this.state) { switch(this.state) {
case State.ERROR: case State.ERROR:
@ -367,6 +381,17 @@ class MSEMediaController {
} }
} }
break; break;
case State.FRAG_LOADING_WAITING_RETRY:
var now = performance.now();
var retryDate = this.retryDate;
var media = this.media;
var isSeeking = media && media.seeking;
// if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
if(!retryDate || (now >= retryDate) || isSeeking) {
logger.log(`mediaController: retryDate reached, switch back to IDLE state`);
this.state = State.IDLE;
}
break;
case State.PARSING: case State.PARSING:
// nothing to do, wait for fragment being parsed // nothing to do, wait for fragment being parsed
break; break;
@ -454,10 +479,10 @@ class MSEMediaController {
default: default:
break; break;
} }
// check/update current fragment
this._checkFragmentChanged();
// check buffer // check buffer
this._checkBuffer(); this._checkBuffer();
// check/update current fragment
this._checkFragmentChanged();
} }
@ -876,8 +901,12 @@ class MSEMediaController {
} }
onMediaMetadata() { onMediaMetadata() {
if (this.media.currentTime !== this.startPosition) { var media = this.media,
this.media.currentTime = this.startPosition; currentTime = media.currentTime;
// only adjust currentTime if not equal to 0
if (!currentTime && currentTime !== this.startPosition) {
logger.log('onMediaMetadata: adjust currentTime to startPosition');
media.currentTime = this.startPosition;
} }
this.loadedmetadata = true; this.loadedmetadata = true;
this.tick(); this.tick();
@ -992,8 +1021,11 @@ class MSEMediaController {
level = fragCurrent.level, level = fragCurrent.level,
sn = fragCurrent.sn, sn = fragCurrent.sn,
audioCodec = currentLevel.audioCodec; audioCodec = currentLevel.audioCodec;
if(audioCodec && this.audioCodecSwap) { if(this.audioCodecSwap) {
logger.log('swapping playlist audio codec'); logger.log('swapping playlist audio codec');
if(audioCodec === undefined) {
audioCodec = this.lastAudioCodec;
}
if(audioCodec.indexOf('mp4a.40.5') !==-1) { if(audioCodec.indexOf('mp4a.40.5') !==-1) {
audioCodec = 'mp4a.40.2'; audioCodec = 'mp4a.40.2';
} else { } else {
@ -1012,6 +1044,7 @@ class MSEMediaController {
// check if codecs have been explicitely defined in the master playlist for this level; // check if codecs have been explicitely defined in the master playlist for this level;
// if yes use these ones instead of the ones parsed from the demux // if yes use these ones instead of the ones parsed from the demux
var audioCodec = this.levels[this.level].audioCodec, videoCodec = this.levels[this.level].videoCodec, sb; var audioCodec = this.levels[this.level].audioCodec, videoCodec = this.levels[this.level].videoCodec, sb;
this.lastAudioCodec = data.audioCodec;
if(audioCodec && this.audioCodecSwap) { if(audioCodec && this.audioCodecSwap) {
logger.log('swapping playlist audio codec'); logger.log('swapping playlist audio codec');
if(audioCodec.indexOf('mp4a.40.5') !==-1) { if(audioCodec.indexOf('mp4a.40.5') !==-1) {
@ -1070,7 +1103,7 @@ class MSEMediaController {
this.tparse2 = Date.now(); this.tparse2 = Date.now();
var level = this.levels[this.level], var level = this.levels[this.level],
frag = this.fragCurrent; frag = this.fragCurrent;
logger.log(`parsed data, type/startPTS/endPTS/startDTS/endDTS/nb:${data.type}/${data.startPTS.toFixed(3)}/${data.endPTS.toFixed(3)}/${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}/${data.nb}`); logger.log(`parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb}`);
var drift = LevelHelper.updateFragPTS(level.details,frag.sn,data.startPTS,data.endPTS); var drift = LevelHelper.updateFragPTS(level.details,frag.sn,data.startPTS,data.endPTS);
this.hls.trigger(Event.LEVEL_PTS_UPDATED, {details: level.details, level: this.level, drift: drift}); this.hls.trigger(Event.LEVEL_PTS_UPDATED, {details: level.details, level: this.level, drift: drift});
@ -1097,9 +1130,9 @@ class MSEMediaController {
onError(event, data) { onError(event, data) {
switch(data.details) { switch(data.details) {
// abort fragment loading on errors
case ErrorDetails.FRAG_LOAD_ERROR: case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT: case ErrorDetails.FRAG_LOAD_TIMEOUT:
if(!data.fatal) {
var loadError = this.fragLoadError; var loadError = this.fragLoadError;
if(loadError) { if(loadError) {
loadError++; loadError++;
@ -1108,8 +1141,14 @@ class MSEMediaController {
} }
if (loadError <= this.config.fragLoadingMaxRetry) { if (loadError <= this.config.fragLoadingMaxRetry) {
this.fragLoadError = loadError; this.fragLoadError = loadError;
// retry loading // reset load counter to avoid frag loop loading error
this.state = State.IDLE; data.frag.loadCounter = 0;
// exponential backoff capped to 64s
var delay = Math.min(Math.pow(2,loadError-1)*this.config.fragLoadingRetryDelay,64000);
logger.warn(`mediaController: frag loading failed, retry in ${delay} ms`);
this.retryDate = performance.now() + delay;
// retry loading state
this.state = State.FRAG_LOADING_WAITING_RETRY;
} else { } else {
logger.error(`mediaController: ${data.details} reaches max retry, redispatch as fatal ...`); logger.error(`mediaController: ${data.details} reaches max retry, redispatch as fatal ...`);
// redispatch same error but with fatal set to true // redispatch same error but with fatal set to true
@ -1117,6 +1156,7 @@ class MSEMediaController {
this.hls.trigger(event, data); this.hls.trigger(event, data);
this.state = State.ERROR; this.state = State.ERROR;
} }
}
break; break;
case ErrorDetails.FRAG_LOOP_LOADING_ERROR: case ErrorDetails.FRAG_LOOP_LOADING_ERROR:
case ErrorDetails.LEVEL_LOAD_ERROR: case ErrorDetails.LEVEL_LOAD_ERROR:
@ -1162,20 +1202,30 @@ _checkBuffer() {
media.currentTime = seekAfterBuffered; media.currentTime = seekAfterBuffered;
this.seekAfterBuffered = undefined; this.seekAfterBuffered = undefined;
} }
} else if(readyState < 3 ) { } else {
// readyState = 1 or 2 var currentTime = media.currentTime,
// HAVE_METADATA (numeric value 1) Enough of the resource has been obtained that the duration of the resource is available. bufferInfo = this.bufferInfo(currentTime,0),
// The API will no longer throw an exception when seeking. isPlaying = !(media.paused || media.ended || media.seeking || readyState < 3),
// HAVE_CURRENT_DATA (numeric value 2) Data for the immediate current playback position is available, jumpThreshold = 0.2;
// but either not enough data is available that the user agent could
// successfully advance the current playback position // check buffer upfront
var currentTime = media.currentTime; // if less than 200ms is buffered, and media is playing but playhead is not moving,
var bufferInfo = this.bufferInfo(currentTime,0); // and we have a new buffer range available upfront, let's seek to that one
// check if current time is buffered or not if(bufferInfo.len <= jumpThreshold) {
if(bufferInfo.len === 0) { if(currentTime > media.playbackRate*this.lastCurrentTime || !isPlaying) {
// no buffer available @ currentTime, check if next buffer is close (in a 300 ms range) // playhead moving or media not playing
var nextBufferStart = bufferInfo.nextStart; jumpThreshold = 0;
if(nextBufferStart && (nextBufferStart - currentTime < 0.3)) { } else {
logger.trace('playback seems stuck');
}
// if we are below threshold, try to jump if next buffer range is close
if(bufferInfo.len <= jumpThreshold) {
// no buffer available @ currentTime, check if next buffer is close (more than 5ms diff but within a 300 ms range)
var nextBufferStart = bufferInfo.nextStart, delta = nextBufferStart-currentTime;
if(nextBufferStart &&
(delta < 0.3) &&
(delta > 0.005) &&
!media.seeking) {
// next buffer is close ! adjust currentTime to nextBufferStart // next buffer is close ! adjust currentTime to nextBufferStart
// this will ensure effective video decoding // this will ensure effective video decoding
logger.log(`adjust currentTime from ${currentTime} to ${nextBufferStart}`); logger.log(`adjust currentTime from ${currentTime} to ${nextBufferStart}`);
@ -1186,6 +1236,7 @@ _checkBuffer() {
} }
} }
} }
}
swapAudioCodec() { swapAudioCodec() {
this.audioCodecSwap = !this.audioCodecSwap; this.audioCodecSwap = !this.audioCodecSwap;

View file

@ -130,7 +130,7 @@ class AES128Decrypter {
return decrypted; return decrypted;
} }
localDecript(encrypted, key, initVector, decrypted) { localDecrypt(encrypted, key, initVector, decrypted) {
var bytes = this.doDecrypt(encrypted, var bytes = this.doDecrypt(encrypted,
key, key,
initVector); initVector);
@ -148,7 +148,7 @@ class AES128Decrypter {
// split up the encryption job and do the individual chunks asynchronously // split up the encryption job and do the individual chunks asynchronously
var key = this.key; var key = this.key;
var initVector = this.iv; var initVector = this.iv;
this.localDecript(encrypted32.subarray(i, i + step), key, initVector, decrypted); this.localDecrypt(encrypted32.subarray(i, i + step), key, initVector, decrypted);
for (i = step; i < encrypted32.length; i += step) { for (i = step; i < encrypted32.length; i += step) {
initVector = new Uint32Array([ initVector = new Uint32Array([
@ -157,7 +157,7 @@ class AES128Decrypter {
this.ntoh(encrypted32[i - 2]), this.ntoh(encrypted32[i - 2]),
this.ntoh(encrypted32[i - 1]) this.ntoh(encrypted32[i - 1])
]); ]);
this.localDecript(encrypted32.subarray(i, i + step), key, initVector, decrypted); this.localDecrypt(encrypted32.subarray(i, i + step), key, initVector, decrypted);
} }
return decrypted; return decrypted;

View file

@ -1,9 +1,9 @@
/** /**
* AAC demuxer * AAC demuxer
*/ */
import ADTS from './adts';
import {logger} from '../utils/logger'; import {logger} from '../utils/logger';
import ID3 from '../demux/id3'; import ID3 from '../demux/id3';
import {ErrorTypes, ErrorDetails} from '../errors';
class AACDemuxer { class AACDemuxer {
@ -32,7 +32,10 @@ import {ErrorTypes, ErrorDetails} from '../errors';
// feed incoming data to the front of the parsing pipeline // feed incoming data to the front of the parsing pipeline
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) { push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
var id3 = new ID3(data), adtsStartOffset,len, track = this._aacTrack, pts = id3.timeStamp, config, nbSamples,adtsFrameSize,adtsHeaderLen,stamp,aacSample; var track = this._aacTrack,
id3 = new ID3(data),
pts = 90*id3.timeStamp,
config, adtsFrameSize, adtsStartOffset, adtsHeaderLen, stamp, nbSamples, len, aacSample;
// look for ADTS header (0xFFFx) // look for ADTS header (0xFFFx)
for (adtsStartOffset = id3.length, len = data.length; adtsStartOffset < len - 1; adtsStartOffset++) { for (adtsStartOffset = id3.length, len = data.length; adtsStartOffset < len - 1; adtsStartOffset++) {
if ((data[adtsStartOffset] === 0xff) && (data[adtsStartOffset+1] & 0xf0) === 0xf0) { if ((data[adtsStartOffset] === 0xff) && (data[adtsStartOffset+1] & 0xf0) === 0xf0) {
@ -41,7 +44,7 @@ import {ErrorTypes, ErrorDetails} from '../errors';
} }
if (!track.audiosamplerate) { if (!track.audiosamplerate) {
config = this._ADTStoAudioConfig(data, adtsStartOffset, audioCodec); config = ADTS.getAudioConfig(this.observer,data, adtsStartOffset, audioCodec);
track.config = config.config; track.config = config.config;
track.audiosamplerate = config.samplerate; track.audiosamplerate = config.samplerate;
track.channelCount = config.channelCount; track.channelCount = config.channelCount;
@ -60,7 +63,7 @@ import {ErrorTypes, ErrorDetails} from '../errors';
adtsFrameSize |= ((data[adtsStartOffset + 5] & 0xE0) >>> 5); adtsFrameSize |= ((data[adtsStartOffset + 5] & 0xE0) >>> 5);
adtsHeaderLen = (!!(data[adtsStartOffset + 1] & 0x01) ? 7 : 9); adtsHeaderLen = (!!(data[adtsStartOffset + 1] & 0x01) ? 7 : 9);
adtsFrameSize -= adtsHeaderLen; adtsFrameSize -= adtsHeaderLen;
stamp = Math.round(90*pts + nbSamples * 1024 * 90000 / track.audiosamplerate); stamp = Math.round(pts + nbSamples * 1024 * 90000 / track.audiosamplerate);
//stamp = pes.pts; //stamp = pes.pts;
//console.log('AAC frame, offset/length/pts:' + (adtsStartOffset+7) + '/' + adtsFrameSize + '/' + stamp.toFixed(0)); //console.log('AAC frame, offset/length/pts:' + (adtsStartOffset+7) + '/' + adtsFrameSize + '/' + stamp.toFixed(0));
if ((adtsFrameSize > 0) && ((adtsStartOffset + adtsHeaderLen + adtsFrameSize) <= len)) { if ((adtsFrameSize > 0) && ((adtsStartOffset + adtsHeaderLen + adtsFrameSize) <= len)) {
@ -82,124 +85,6 @@ import {ErrorTypes, ErrorDetails} from '../errors';
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : [ { pts: pts, dts : pts, unit : id3.payload} ]}, timeOffset); this.remuxer.remux(this._aacTrack,{samples : []}, {samples : [ { pts: pts, dts : pts, unit : id3.payload} ]}, timeOffset);
} }
_ADTStoAudioConfig(data, offset, audioCodec) {
var adtsObjectType, // :int
adtsSampleingIndex, // :int
adtsExtensionSampleingIndex, // :int
adtsChanelConfig, // :int
config,
userAgent = navigator.userAgent.toLowerCase(),
adtsSampleingRates = [
96000, 88200,
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350];
// byte 2
adtsObjectType = ((data[offset + 2] & 0xC0) >>> 6) + 1;
adtsSampleingIndex = ((data[offset + 2] & 0x3C) >>> 2);
if(adtsSampleingIndex > adtsSampleingRates.length-1) {
this.observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSampleingIndex}`});
return;
}
adtsChanelConfig = ((data[offset + 2] & 0x01) << 2);
// byte 3
adtsChanelConfig |= ((data[offset + 3] & 0xC0) >>> 6);
logger.log(`manifest codec:${audioCodec},ADTS data:type:${adtsObjectType},sampleingIndex:${adtsSampleingIndex}[${adtsSampleingRates[adtsSampleingIndex]}Hz],channelConfig:${adtsChanelConfig}`);
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if (userAgent.indexOf('firefox') !== -1) {
if (adtsSampleingIndex >= 6) {
adtsObjectType = 5;
config = new Array(4);
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to substract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
// Android : always use AAC
} else if (userAgent.indexOf('android') !== -1) {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
} else {
/* for other browsers (chrome ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/
adtsObjectType = 5;
config = new Array(4);
// if (manifest codec is HE-AAC) OR (manifest codec not specified AND frequency less than 24kHz)
if ((audioCodec && audioCodec.indexOf('mp4a.40.5') !== -1) || (!audioCodec && adtsSampleingIndex >= 6)) {
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to substract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
// if (manifest codec is AAC) AND (frequency less than 24kHz OR nb channel is 1)
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSampleingIndex >= 6 || adtsChanelConfig === 1)) {
adtsObjectType = 2;
config = new Array(2);
}
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
}
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explictly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specifc Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
*/
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0] = adtsObjectType << 3;
// samplingFrequencyIndex
config[0] |= (adtsSampleingIndex & 0x0E) >> 1;
config[1] |= (adtsSampleingIndex & 0x01) << 7;
// channelConfiguration
config[1] |= adtsChanelConfig << 3;
if (adtsObjectType === 5) {
// adtsExtensionSampleingIndex
config[1] |= (adtsExtensionSampleingIndex & 0x0E) >> 1;
config[2] = (adtsExtensionSampleingIndex & 0x01) << 7;
// adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2] |= 2 << 2;
config[3] = 0;
}
return {config: config, samplerate: adtsSampleingRates[adtsSampleingIndex], channelCount: adtsChanelConfig, codec: ('mp4a.40.' + adtsObjectType)};
}
destroy() { destroy() {
} }

View file

@ -0,0 +1,132 @@
/**
* ADTS parser helper
*/
import {logger} from '../utils/logger';
import {ErrorTypes, ErrorDetails} from '../errors';
class ADTS {
static getAudioConfig(observer, data, offset, audioCodec) {
var adtsObjectType, // :int
adtsSampleingIndex, // :int
adtsExtensionSampleingIndex, // :int
adtsChanelConfig, // :int
config,
userAgent = navigator.userAgent.toLowerCase(),
adtsSampleingRates = [
96000, 88200,
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350];
// byte 2
adtsObjectType = ((data[offset + 2] & 0xC0) >>> 6) + 1;
adtsSampleingIndex = ((data[offset + 2] & 0x3C) >>> 2);
if(adtsSampleingIndex > adtsSampleingRates.length-1) {
observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSampleingIndex}`});
return;
}
adtsChanelConfig = ((data[offset + 2] & 0x01) << 2);
// byte 3
adtsChanelConfig |= ((data[offset + 3] & 0xC0) >>> 6);
logger.log(`manifest codec:${audioCodec},ADTS data:type:${adtsObjectType},sampleingIndex:${adtsSampleingIndex}[${adtsSampleingRates[adtsSampleingIndex]}Hz],channelConfig:${adtsChanelConfig}`);
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if (userAgent.indexOf('firefox') !== -1) {
if (adtsSampleingIndex >= 6) {
adtsObjectType = 5;
config = new Array(4);
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to substract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
// Android : always use AAC
} else if (userAgent.indexOf('android') !== -1) {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
} else {
/* for other browsers (chrome ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/
adtsObjectType = 5;
config = new Array(4);
// if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
if ((audioCodec && ((audioCodec.indexOf('mp4a.40.29') !== -1) ||
(audioCodec.indexOf('mp4a.40.5') !== -1))) ||
(!audioCodec && adtsSampleingIndex >= 6)) {
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to substract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
// if (manifest codec is AAC) AND (frequency less than 24kHz OR nb channel is 1) OR (manifest codec not specified and mono audio)
// Chrome fails to play back with AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSampleingIndex >= 6 || adtsChanelConfig === 1) ||
(!audioCodec && adtsChanelConfig === 1)) {
adtsObjectType = 2;
config = new Array(2);
}
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
}
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explictly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specifc Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
*/
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0] = adtsObjectType << 3;
// samplingFrequencyIndex
config[0] |= (adtsSampleingIndex & 0x0E) >> 1;
config[1] |= (adtsSampleingIndex & 0x01) << 7;
// channelConfiguration
config[1] |= adtsChanelConfig << 3;
if (adtsObjectType === 5) {
// adtsExtensionSampleingIndex
config[1] |= (adtsExtensionSampleingIndex & 0x0E) >> 1;
config[2] = (adtsExtensionSampleingIndex & 0x01) << 7;
// adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2] |= 2 << 2;
config[3] = 0;
}
return {config: config, samplerate: adtsSampleingRates[adtsSampleingIndex], channelCount: adtsChanelConfig, codec: ('mp4a.40.' + adtsObjectType)};
}
}
export default ADTS;

View file

@ -179,7 +179,12 @@ class ExpGolomb {
if (profileIdc === 100 || if (profileIdc === 100 ||
profileIdc === 110 || profileIdc === 110 ||
profileIdc === 122 || profileIdc === 122 ||
profileIdc === 144) { profileIdc === 244 ||
profileIdc === 44 ||
profileIdc === 83 ||
profileIdc === 86 ||
profileIdc === 118 ||
profileIdc === 128) {
var chromaFormatIdc = this.readUEG(); var chromaFormatIdc = this.readUEG();
if (chromaFormatIdc === 3) { if (chromaFormatIdc === 3) {
this.skipBits(1); // separate_colour_plane_flag this.skipBits(1); // separate_colour_plane_flag

View file

@ -9,6 +9,7 @@
* upon discontinuity or level switch detection, it will also notifies the remuxer so that it can reset its state. * upon discontinuity or level switch detection, it will also notifies the remuxer so that it can reset its state.
*/ */
import ADTS from './adts';
import Event from '../events'; import Event from '../events';
import ExpGolomb from './exp-golomb'; import ExpGolomb from './exp-golomb';
// import Hex from '../utils/hex'; // import Hex from '../utils/hex';
@ -21,7 +22,6 @@
this.observer = observer; this.observer = observer;
this.remuxerClass = remuxerClass; this.remuxerClass = remuxerClass;
this.lastCC = 0; this.lastCC = 0;
this.PES_TIMESCALE = 90000;
this.remuxer = new this.remuxerClass(observer); this.remuxer = new this.remuxerClass(observer);
} }
@ -423,16 +423,19 @@
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit. // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
overflow = i - state - 1; overflow = i - state - 1;
if (overflow) { if (overflow) {
var track = this._avcTrack,
samples = track.samples;
//logger.log('first NALU found with overflow:' + overflow); //logger.log('first NALU found with overflow:' + overflow);
if (this._avcTrack.samples.length) { if (samples.length) {
var lastavcSample = this._avcTrack.samples[this._avcTrack.samples.length - 1]; var lastavcSample = samples[samples.length - 1],
var lastUnit = lastavcSample.units.units[lastavcSample.units.units.length - 1]; lastUnits = lastavcSample.units.units,
var tmp = new Uint8Array(lastUnit.data.byteLength + overflow); lastUnit = lastUnits[lastUnits.length - 1],
tmp = new Uint8Array(lastUnit.data.byteLength + overflow);
tmp.set(lastUnit.data, 0); tmp.set(lastUnit.data, 0);
tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength); tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength);
lastUnit.data = tmp; lastUnit.data = tmp;
lastavcSample.units.length += overflow; lastavcSample.units.length += overflow;
this._avcTrack.len += overflow; track.len += overflow;
} }
} }
} }
@ -460,7 +463,13 @@
} }
_parseAACPES(pes) { _parseAACPES(pes) {
var track = this._aacTrack, aacSample, data = pes.data, config, adtsFrameSize, adtsStartOffset, adtsHeaderLen, stamp, nbSamples, len; var track = this._aacTrack,
data = pes.data,
pts = pes.pts,
startOffset = 0,
duration = this._duration,
audioCodec = this.audioCodec,
config, frameLength, frameDuration, frameIndex, offset, headerLength, stamp, len, aacSample;
if (this.aacOverFlow) { if (this.aacOverFlow) {
var tmp = new Uint8Array(this.aacOverFlow.byteLength + data.byteLength); var tmp = new Uint8Array(this.aacOverFlow.byteLength + data.byteLength);
tmp.set(this.aacOverFlow, 0); tmp.set(this.aacOverFlow, 0);
@ -468,16 +477,16 @@
data = tmp; data = tmp;
} }
// look for ADTS header (0xFFFx) // look for ADTS header (0xFFFx)
for (adtsStartOffset = 0, len = data.length; adtsStartOffset < len - 1; adtsStartOffset++) { for (offset = startOffset, len = data.length; offset < len - 1; offset++) {
if ((data[adtsStartOffset] === 0xff) && (data[adtsStartOffset+1] & 0xf0) === 0xf0) { if ((data[offset] === 0xff) && (data[offset+1] & 0xf0) === 0xf0) {
break; break;
} }
} }
// if ADTS header does not start straight from the beginning of the PES payload, raise an error // if ADTS header does not start straight from the beginning of the PES payload, raise an error
if (adtsStartOffset) { if (offset) {
var reason, fatal; var reason, fatal;
if (adtsStartOffset < len - 1) { if (offset < len - 1) {
reason = `AAC PES did not start with ADTS header,offset:${adtsStartOffset}`; reason = `AAC PES did not start with ADTS header,offset:${offset}`;
fatal = false; fatal = false;
} else { } else {
reason = 'no ADTS header found in AAC PES'; reason = 'no ADTS header found in AAC PES';
@ -489,37 +498,38 @@
} }
} }
if (!track.audiosamplerate) { if (!track.audiosamplerate) {
config = this._ADTStoAudioConfig(data, adtsStartOffset, this.audioCodec); config = ADTS.getAudioConfig(this.observer,data, offset, audioCodec);
track.config = config.config; track.config = config.config;
track.audiosamplerate = config.samplerate; track.audiosamplerate = config.samplerate;
track.channelCount = config.channelCount; track.channelCount = config.channelCount;
track.codec = config.codec; track.codec = config.codec;
track.timescale = this.remuxer.timescale; track.timescale = this.remuxer.timescale;
track.duration = this.remuxer.timescale * this._duration; track.duration = track.timescale * duration;
logger.log(`parsed codec:${track.codec},rate:${config.samplerate},nb channel:${config.channelCount}`); logger.log(`parsed codec:${track.codec},rate:${config.samplerate},nb channel:${config.channelCount}`);
} }
nbSamples = 0; frameIndex = 0;
while ((adtsStartOffset + 5) < len) { frameDuration = 1024 * 90000 / track.audiosamplerate;
while ((offset + 5) < len) {
// The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
headerLength = (!!(data[offset + 1] & 0x01) ? 7 : 9);
// retrieve frame size // retrieve frame size
adtsFrameSize = ((data[adtsStartOffset + 3] & 0x03) << 11); frameLength = ((data[offset + 3] & 0x03) << 11) |
// byte 4 (data[offset + 4] << 3) |
adtsFrameSize |= (data[adtsStartOffset + 4] << 3); ((data[offset + 5] & 0xE0) >>> 5);
// byte 5 frameLength -= headerLength;
adtsFrameSize |= ((data[adtsStartOffset + 5] & 0xE0) >>> 5); stamp = Math.round(pts + frameIndex * frameDuration);
adtsHeaderLen = (!!(data[adtsStartOffset + 1] & 0x01) ? 7 : 9);
adtsFrameSize -= adtsHeaderLen;
stamp = Math.round(pes.pts + nbSamples * 1024 * this.PES_TIMESCALE / track.audiosamplerate);
//stamp = pes.pts; //stamp = pes.pts;
//console.log('AAC frame, offset/length/pts:' + (adtsStartOffset+7) + '/' + adtsFrameSize + '/' + stamp.toFixed(0));
if ((adtsFrameSize > 0) && ((adtsStartOffset + adtsHeaderLen + adtsFrameSize) <= len)) { //console.log('AAC frame, offset/length/pts:' + (offset+headerLength) + '/' + frameLength + '/' + stamp.toFixed(0));
aacSample = {unit: data.subarray(adtsStartOffset + adtsHeaderLen, adtsStartOffset + adtsHeaderLen + adtsFrameSize), pts: stamp, dts: stamp}; if ((frameLength > 0) && ((offset + headerLength + frameLength) <= len)) {
this._aacTrack.samples.push(aacSample); aacSample = {unit: data.subarray(offset + headerLength, offset + headerLength + frameLength), pts: stamp, dts: stamp};
this._aacTrack.len += adtsFrameSize; track.samples.push(aacSample);
adtsStartOffset += adtsFrameSize + adtsHeaderLen; track.len += frameLength;
nbSamples++; offset += frameLength + headerLength;
frameIndex++;
// look for ADTS header (0xFFFx) // look for ADTS header (0xFFFx)
for ( ; adtsStartOffset < (len - 1); adtsStartOffset++) { for ( ; offset < (len - 1); offset++) {
if ((data[adtsStartOffset] === 0xff) && ((data[adtsStartOffset + 1] & 0xf0) === 0xf0)) { if ((data[offset] === 0xff) && ((data[offset + 1] & 0xf0) === 0xf0)) {
break; break;
} }
} }
@ -527,135 +537,13 @@
break; break;
} }
} }
if (adtsStartOffset < len) { if (offset < len) {
this.aacOverFlow = data.subarray(adtsStartOffset, len); this.aacOverFlow = data.subarray(offset, len);
} else { } else {
this.aacOverFlow = null; this.aacOverFlow = null;
} }
} }
_ADTStoAudioConfig(data, offset, audioCodec) {
var adtsObjectType, // :int
adtsSampleingIndex, // :int
adtsExtensionSampleingIndex, // :int
adtsChanelConfig, // :int
config,
userAgent = navigator.userAgent.toLowerCase(),
adtsSampleingRates = [
96000, 88200,
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350];
// byte 2
adtsObjectType = ((data[offset + 2] & 0xC0) >>> 6) + 1;
adtsSampleingIndex = ((data[offset + 2] & 0x3C) >>> 2);
if(adtsSampleingIndex > adtsSampleingRates.length-1) {
this.observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSampleingIndex}`});
return;
}
adtsChanelConfig = ((data[offset + 2] & 0x01) << 2);
// byte 3
adtsChanelConfig |= ((data[offset + 3] & 0xC0) >>> 6);
logger.log(`manifest codec:${audioCodec},ADTS data:type:${adtsObjectType},sampleingIndex:${adtsSampleingIndex}[${adtsSampleingRates[adtsSampleingIndex]}Hz],channelConfig:${adtsChanelConfig}`);
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if (userAgent.indexOf('firefox') !== -1) {
if (adtsSampleingIndex >= 6) {
adtsObjectType = 5;
config = new Array(4);
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to substract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
// Android : always use AAC
} else if (userAgent.indexOf('android') !== -1) {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
} else {
/* for other browsers (chrome ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/
adtsObjectType = 5;
config = new Array(4);
// if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
if ((audioCodec && ((audioCodec.indexOf('mp4a.40.29') !== -1) ||
(audioCodec.indexOf('mp4a.40.5') !== -1))) ||
(!audioCodec && adtsSampleingIndex >= 6)) {
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to substract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
// if (manifest codec is AAC) AND (frequency less than 24kHz OR nb channel is 1) OR (manifest codec not specified and mono audio)
// Chrome fails to play back with AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSampleingIndex >= 6 || adtsChanelConfig === 1) ||
(!audioCodec && adtsChanelConfig === 1)) {
adtsObjectType = 2;
config = new Array(2);
}
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
}
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explictly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specifc Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
*/
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0] = adtsObjectType << 3;
// samplingFrequencyIndex
config[0] |= (adtsSampleingIndex & 0x0E) >> 1;
config[1] |= (adtsSampleingIndex & 0x01) << 7;
// channelConfiguration
config[1] |= adtsChanelConfig << 3;
if (adtsObjectType === 5) {
// adtsExtensionSampleingIndex
config[1] |= (adtsExtensionSampleingIndex & 0x0E) >> 1;
config[2] = (adtsExtensionSampleingIndex & 0x01) << 7;
// adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2] |= 2 << 2;
config[3] = 0;
}
return {config: config, samplerate: adtsSampleingRates[adtsSampleingIndex], channelCount: adtsChanelConfig, codec: ('mp4a.40.' + adtsObjectType)};
}
_parseID3PES(pes) { _parseID3PES(pes) {
this._id3Track.samples.push(pes); this._id3Track.samples.push(pes);
} }
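The reworked ADTS loop above takes the header length from the protection bit (7 bytes, or 9 with the two CRC bytes) and the frame length from the 13-bit field spread over header bytes 3-5, then paces timestamps at 1024 samples per frame against the 90 kHz PES clock. A minimal standalone sketch of that header math (the helper name is ours, not part of hls.js):

    // data is a Uint8Array positioned on an ADTS sync word at `offset`
    function readAdtsFrameInfo(data, offset) {
        // protection_absent bit set => 7-byte header, otherwise 9 bytes (2 CRC bytes follow)
        var headerLength = (data[offset + 1] & 0x01) ? 7 : 9;
        // frame_length: 13 bits across bytes 3..5, and it includes the header itself
        var frameLength = ((data[offset + 3] & 0x03) << 11) |
                          (data[offset + 4] << 3) |
                          ((data[offset + 5] & 0xE0) >>> 5);
        return { headerLength: headerLength, payloadLength: frameLength - headerLength };
    }
    // each AAC frame holds 1024 samples, so in 90 kHz PES units:
    // frameDuration = 1024 * 90000 / track.audiosamplerate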


@ -66,8 +66,8 @@ class LevelHelper {
fragments = details.fragments; fragments = details.fragments;
frag = fragments[fragIdx]; frag = fragments[fragIdx];
if(!isNaN(frag.startPTS)) { if(!isNaN(frag.startPTS)) {
startPTS = Math.max(startPTS,frag.startPTS); startPTS = Math.min(startPTS,frag.startPTS);
endPTS = Math.min(endPTS, frag.endPTS); endPTS = Math.max(endPTS, frag.endPTS);
} }
var drift = startPTS - frag.start; var drift = startPTS - frag.start;
@ -99,12 +99,12 @@ class LevelHelper {
if (toIdx > fromIdx) { if (toIdx > fromIdx) {
fragFrom.duration = fragToPTS-fragFrom.start; fragFrom.duration = fragToPTS-fragFrom.start;
if(fragFrom.duration < 0) { if(fragFrom.duration < 0) {
logger.error(`negative duration computed for ${fragFrom}, there should be some duration drift between playlist and fragment!`); logger.error(`negative duration computed for frag ${fragFrom.sn},level ${fragFrom.level}, there should be some duration drift between playlist and fragment!`);
} }
} else { } else {
fragTo.duration = fragFrom.start - fragToPTS; fragTo.duration = fragFrom.start - fragToPTS;
if(fragTo.duration < 0) { if(fragTo.duration < 0) {
logger.error(`negative duration computed for ${fragTo}, there should be some duration drift between playlist and fragment!`); logger.error(`negative duration computed for frag ${fragTo.sn},level ${fragTo.level}, there should be some duration drift between playlist and fragment!`);
} }
} }
} else { } else {


@ -34,8 +34,9 @@ class Hls {
return ErrorDetails; return ErrorDetails;
} }
constructor(config = {}) { static get DefaultConfig() {
var configDefault = { if(!Hls.defaultConfig) {
Hls.defaultConfig = {
autoStartLoad: true, autoStartLoad: true,
debug: false, debug: false,
maxBufferLength: 30, maxBufferLength: 30,
@ -45,13 +46,16 @@ class Hls {
maxMaxBufferLength: 600, maxMaxBufferLength: 600,
enableWorker: true, enableWorker: true,
enableSoftwareAES: true, enableSoftwareAES: true,
manifestLoadingTimeOut: 10000,
manifestLoadingMaxRetry: 1,
manifestLoadingRetryDelay: 1000,
levelLoadingTimeOut: 10000,
levelLoadingMaxRetry: 4,
levelLoadingRetryDelay: 1000,
fragLoadingTimeOut: 20000, fragLoadingTimeOut: 20000,
fragLoadingMaxRetry: 6, fragLoadingMaxRetry: 6,
fragLoadingRetryDelay: 1000, fragLoadingRetryDelay: 1000,
fragLoadingLoopThreshold: 3, fragLoadingLoopThreshold: 3,
manifestLoadingTimeOut: 10000,
manifestLoadingMaxRetry: 1,
manifestLoadingRetryDelay: 1000,
// fpsDroppedMonitoringPeriod: 5000, // fpsDroppedMonitoringPeriod: 5000,
// fpsDroppedMonitoringThreshold: 0.2, // fpsDroppedMonitoringThreshold: 0.2,
appendErrorMaxRetry: 3, appendErrorMaxRetry: 3,
@ -61,9 +65,19 @@ class Hls {
abrController : AbrController, abrController : AbrController,
mediaController: MSEMediaController mediaController: MSEMediaController
}; };
for (var prop in configDefault) { }
return Hls.defaultConfig;
}
static set DefaultConfig(defaultConfig) {
Hls.defaultConfig = defaultConfig;
}
constructor(config = {}) {
var defaultConfig = Hls.DefaultConfig;
for (var prop in defaultConfig) {
if (prop in config) { continue; } if (prop in config) { continue; }
config[prop] = configDefault[prop]; config[prop] = defaultConfig[prop];
} }
if (config.liveMaxLatencyDurationCount !== undefined && config.liveMaxLatencyDurationCount <= config.liveSyncDurationCount) { if (config.liveMaxLatencyDurationCount !== undefined && config.liveMaxLatencyDurationCount <= config.liveSyncDurationCount) {
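The constructor above now pulls its defaults from the static Hls.DefaultConfig property instead of a local table, which also lets an application tune the defaults once for every later instance. A hedged usage sketch (the option values are arbitrary examples):

    var defaults = Hls.DefaultConfig;   // getter builds the shared default table on first access
    defaults.levelLoadingMaxRetry = 2;  // applies to every Hls created afterwards
    defaults.debug = true;
    var hls = new Hls({ maxBufferLength: 60 }); // per-instance options still take precedence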


@ -27,7 +27,7 @@ class FragmentLoader {
this.frag.loaded = 0; this.frag.loaded = 0;
var config = this.hls.config; var config = this.hls.config;
frag.loader = this.loader = typeof(config.fLoader) !== 'undefined' ? new config.fLoader(config) : new config.loader(config); frag.loader = this.loader = typeof(config.fLoader) !== 'undefined' ? new config.fLoader(config) : new config.loader(config);
this.loader.load(frag.url, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, 1, config.fragLoadingRetryDelay, this.loadprogress.bind(this), frag); this.loader.load(frag.url, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, 1, 0, this.loadprogress.bind(this), frag);
} }
loadsuccess(event, stats) { loadsuccess(event, stats) {


@ -5,6 +5,7 @@
import Event from '../events'; import Event from '../events';
import {ErrorTypes, ErrorDetails} from '../errors'; import {ErrorTypes, ErrorDetails} from '../errors';
import URLHelper from '../utils/url'; import URLHelper from '../utils/url';
import AttrList from '../utils/attr-list';
//import {logger} from '../utils/logger'; //import {logger} from '../utils/logger';
class PlaylistLoader { class PlaylistLoader {
@ -36,12 +37,24 @@ class PlaylistLoader {
} }
load(url, id1, id2) { load(url, id1, id2) {
var config = this.hls.config; var config = this.hls.config,
retry,
timeout,
retryDelay;
this.url = url; this.url = url;
this.id = id1; this.id = id1;
this.id2 = id2; this.id2 = id2;
if(this.id === undefined) {
retry = config.manifestLoadingMaxRetry;
timeout = config.manifestLoadingTimeOut;
retryDelay = config.manifestLoadingRetryDelay;
} else {
retry = config.levelLoadingMaxRetry;
timeout = config.levelLoadingTimeOut;
retryDelay = config.levelLoadingRetryDelay;
}
this.loader = typeof(config.pLoader) !== 'undefined' ? new config.pLoader(config) : new config.loader(config); this.loader = typeof(config.pLoader) !== 'undefined' ? new config.pLoader(config) : new config.loader(config);
this.loader.load(url, '', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.manifestLoadingTimeOut, config.manifestLoadingMaxRetry, config.manifestLoadingRetryDelay); this.loader.load(url, '', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), timeout, retry, retryDelay);
} }
resolve(url, baseUrl) { resolve(url, baseUrl) {
@ -49,42 +62,38 @@ class PlaylistLoader {
} }
parseMasterPlaylist(string, baseurl) { parseMasterPlaylist(string, baseurl) {
var levels = [], level = {}, result, codecs, codec; let levels = [], result;
// https://regex101.com is your friend // https://regex101.com is your friend
var re = /#EXT-X-STREAM-INF:([^\n\r]*(BAND)WIDTH=(\d+))?([^\n\r]*(CODECS)=\"([^\"\n\r]*)\",?)?([^\n\r]*(RES)OLUTION=(\d+)x(\d+))?([^\n\r]*(NAME)=\"(.*)\")?[^\n\r]*[\r\n]+([^\r\n]+)/g; const re = /#EXT-X-STREAM-INF:([^\n\r]*)[\r\n]+([^\r\n]+)/g;
while ((result = re.exec(string)) != null){ while ((result = re.exec(string)) != null){
result.shift(); const level = {};
result = result.filter(function(n) { return (n !== undefined); });
level.url = this.resolve(result.pop(), baseurl); var attrs = level.attrs = new AttrList(result[1]);
while (result.length > 0) { level.url = this.resolve(result[2], baseurl);
switch (result.shift()) {
case 'RES': var resolution = attrs.decimalResolution('RESOLUTION');
level.width = parseInt(result.shift()); if(resolution) {
level.height = parseInt(result.shift()); level.width = resolution.width;
break; level.height = resolution.height;
case 'BAND': }
level.bitrate = parseInt(result.shift()); level.bitrate = attrs.decimalInteger('BANDWIDTH');
break; level.name = attrs.NAME;
case 'NAME':
level.name = result.shift(); var codecs = attrs.CODECS;
break; if(codecs) {
case 'CODECS': codecs = codecs.split(',');
codecs = result.shift().split(','); for (let i = 0; i < codecs.length; i++) {
while (codecs.length > 0) { const codec = codecs[i];
codec = codecs.shift();
if (codec.indexOf('avc1') !== -1) { if (codec.indexOf('avc1') !== -1) {
level.videoCodec = this.avc1toavcoti(codec); level.videoCodec = this.avc1toavcoti(codec);
} else { } else {
level.audioCodec = codec; level.audioCodec = codec;
} }
} }
break;
default:
break;
}
} }
levels.push(level); levels.push(level);
level = {};
} }
return levels; return levels;
} }
@ -101,26 +110,24 @@ class PlaylistLoader {
return result; return result;
} }
parseKeyParamsByRegex(string, regexp) {
var result = regexp.exec(string);
if (result) {
result.shift();
result = result.filter(function(n) { return (n !== undefined); });
if (result.length === 2) {
return result[1];
}
}
return null;
}
cloneObj(obj) { cloneObj(obj) {
return JSON.parse(JSON.stringify(obj)); return JSON.parse(JSON.stringify(obj));
} }
parseLevelPlaylist(string, baseurl, id) { parseLevelPlaylist(string, baseurl, id) {
var currentSN = 0, totalduration = 0, level = {url: baseurl, fragments: [], live: true, startSN: 0}, result, regexp, cc = 0, frag, byteRangeEndOffset, byteRangeStartOffset; var currentSN = 0,
var levelkey = {method : null, key : null, iv : null, uri : null}; totalduration = 0,
regexp = /(?:#EXT-X-(MEDIA-SEQUENCE):(\d+))|(?:#EXT-X-(TARGETDURATION):(\d+))|(?:#EXT-X-(KEY):(.*))|(?:#EXT(INF):([\d\.]+)[^\r\n]*([\r\n]+[^#|\r\n]+)?)|(?:#EXT-X-(BYTERANGE):([\d]+[@[\d]*)]*[\r\n]+([^#|\r\n]+)?|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DIS)CONTINUITY))/g; level = {url: baseurl, fragments: [], live: true, startSN: 0},
levelkey = {method : null, key : null, iv : null, uri : null},
cc = 0,
programDateTime = null,
frag = null,
result,
regexp,
byteRangeEndOffset,
byteRangeStartOffset;
regexp = /(?:#EXT-X-(MEDIA-SEQUENCE):(\d+))|(?:#EXT-X-(TARGETDURATION):(\d+))|(?:#EXT-X-(KEY):(.*))|(?:#EXT(INF):([\d\.]+)[^\r\n]*([\r\n]+[^#|\r\n]+)?)|(?:#EXT-X-(BYTERANGE):([\d]+[@[\d]*)]*[\r\n]+([^#|\r\n]+)?|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DIS)CONTINUITY))|(?:#EXT-X-(PROGRAM-DATE-TIME):(.*))/g;
while ((result = regexp.exec(string)) !== null) { while ((result = regexp.exec(string)) !== null) {
result.shift(); result.shift();
result = result.filter(function(n) { return (n !== undefined); }); result = result.filter(function(n) { return (n !== undefined); });
@ -145,7 +152,6 @@ class PlaylistLoader {
byteRangeStartOffset = parseInt(params[1]); byteRangeStartOffset = parseInt(params[1]);
} }
byteRangeEndOffset = parseInt(params[0]) + byteRangeStartOffset; byteRangeEndOffset = parseInt(params[0]) + byteRangeStartOffset;
frag = level.fragments.length ? level.fragments[level.fragments.length - 1] : null;
if (frag && !frag.url) { if (frag && !frag.url) {
frag.byteRangeStartOffset = byteRangeStartOffset; frag.byteRangeStartOffset = byteRangeStartOffset;
frag.byteRangeEndOffset = byteRangeEndOffset; frag.byteRangeEndOffset = byteRangeEndOffset;
@ -167,17 +173,21 @@ class PlaylistLoader {
} else { } else {
fragdecryptdata = levelkey; fragdecryptdata = levelkey;
} }
level.fragments.push({url: result[2] ? this.resolve(result[2], baseurl) : null, duration: duration, start: totalduration, sn: sn, level: id, cc: cc, byteRangeStartOffset: byteRangeStartOffset, byteRangeEndOffset: byteRangeEndOffset, decryptdata : fragdecryptdata}); var url = result[2] ? this.resolve(result[2], baseurl) : null;
frag = {url: url, duration: duration, start: totalduration, sn: sn, level: id, cc: cc, byteRangeStartOffset: byteRangeStartOffset, byteRangeEndOffset: byteRangeEndOffset, decryptdata : fragdecryptdata, programDateTime: programDateTime};
level.fragments.push(frag);
totalduration += duration; totalduration += duration;
byteRangeStartOffset = null; byteRangeStartOffset = null;
programDateTime = null;
} }
break; break;
case 'KEY': case 'KEY':
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-08#section-3.4.4 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-08#section-3.4.4
var decryptparams = result[1]; var decryptparams = result[1];
var decryptmethod = this.parseKeyParamsByRegex(decryptparams, /(METHOD)=([^,]*)/), var keyAttrs = new AttrList(decryptparams);
decrypturi = this.parseKeyParamsByRegex(decryptparams, /(URI)=["]([^,]*)["]/), var decryptmethod = keyAttrs.enumeratedString('METHOD'),
decryptiv = this.parseKeyParamsByRegex(decryptparams, /(IV)=([^,]*)/); decrypturi = keyAttrs.URI,
decryptiv = keyAttrs.hexadecimalInteger('IV');
if (decryptmethod) { if (decryptmethod) {
levelkey = { method: null, key: null, iv: null, uri: null }; levelkey = { method: null, key: null, iv: null, uri: null };
if ((decrypturi) && (decryptmethod === 'AES-128')) { if ((decrypturi) && (decryptmethod === 'AES-128')) {
@ -186,40 +196,42 @@ class PlaylistLoader {
levelkey.uri = this.resolve(decrypturi, baseurl); levelkey.uri = this.resolve(decrypturi, baseurl);
levelkey.key = null; levelkey.key = null;
// Initialization Vector (IV) // Initialization Vector (IV)
if (decryptiv) {
levelkey.iv = decryptiv; levelkey.iv = decryptiv;
if (levelkey.iv.substring(0, 2) === '0x') {
levelkey.iv = levelkey.iv.substring(2);
}
levelkey.iv = levelkey.iv.match(/.{8}/g);
levelkey.iv[0] = parseInt(levelkey.iv[0], 16);
levelkey.iv[1] = parseInt(levelkey.iv[1], 16);
levelkey.iv[2] = parseInt(levelkey.iv[2], 16);
levelkey.iv[3] = parseInt(levelkey.iv[3], 16);
levelkey.iv = new Uint32Array(levelkey.iv);
}
} }
} }
break; break;
case 'PROGRAM-DATE-TIME':
programDateTime = new Date(Date.parse(result[1]));
break;
default: default:
break; break;
} }
} }
//logger.log('found ' + level.fragments.length + ' fragments'); //logger.log('found ' + level.fragments.length + ' fragments');
if(frag && !frag.url) {
level.fragments.pop();
totalduration-=frag.duration;
}
level.totalduration = totalduration; level.totalduration = totalduration;
level.endSN = currentSN - 1; level.endSN = currentSN - 1;
return level; return level;
} }
loadsuccess(event, stats) { loadsuccess(event, stats) {
var string = event.currentTarget.responseText, url = event.currentTarget.responseURL, id = this.id, id2 = this.id2, hls = this.hls, levels; var target = event.currentTarget,
string = target.responseText,
url = target.responseURL,
id = this.id,
id2 = this.id2,
hls = this.hls,
levels;
// responseURL not supported on some browsers (it is used to detect URL redirection) // responseURL not supported on some browsers (it is used to detect URL redirection)
if (url === undefined) { if (url === undefined) {
// fallback to initial URL // fallback to initial URL
url = this.url; url = this.url;
} }
stats.tload = performance.now(); stats.tload = performance.now();
stats.mtime = new Date(event.currentTarget.getResponseHeader('Last-Modified')); stats.mtime = new Date(target.getResponseHeader('Last-Modified'));
if (string.indexOf('#EXTM3U') === 0) { if (string.indexOf('#EXTM3U') === 0) {
if (string.indexOf('#EXTINF:') > 0) { if (string.indexOf('#EXTINF:') > 0) {
// 1 level playlist // 1 level playlist
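parseMasterPlaylist above now captures only the raw #EXT-X-STREAM-INF attribute string with a much simpler regex and leaves field extraction to the new AttrList helper (added in utils/attr-list.js further down). A rough illustration with a made-up variant line:

    var attrs = new AttrList('BANDWIDTH=1280000,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2",NAME="720p"');
    attrs.decimalInteger('BANDWIDTH');     // 1280000
    attrs.decimalResolution('RESOLUTION'); // { width: 1280, height: 720 }
    attrs.CODECS;                          // 'avc1.64001f,mp4a.40.2'
    attrs.NAME;                            // '720p'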


@ -54,23 +54,7 @@ class MP4 {
} }
} }
MP4.MAJOR_BRAND = new Uint8Array([ var videoHdlr = new Uint8Array([
'i'.charCodeAt(0),
's'.charCodeAt(0),
'o'.charCodeAt(0),
'm'.charCodeAt(0)
]);
MP4.AVC1_BRAND = new Uint8Array([
'a'.charCodeAt(0),
'v'.charCodeAt(0),
'c'.charCodeAt(0),
'1'.charCodeAt(0)
]);
MP4.MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
MP4.VIDEO_HDLR = new Uint8Array([
0x00, // version 0 0x00, // version 0
0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // pre_defined 0x00, 0x00, 0x00, 0x00, // pre_defined
@ -83,7 +67,7 @@ class MP4 {
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler' 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
]); ]);
MP4.AUDIO_HDLR = new Uint8Array([ var audioHdlr = new Uint8Array([
0x00, // version 0 0x00, // version 0
0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // pre_defined 0x00, 0x00, 0x00, 0x00, // pre_defined
@ -97,11 +81,11 @@ class MP4 {
]); ]);
MP4.HDLR_TYPES = { MP4.HDLR_TYPES = {
'video': MP4.VIDEO_HDLR, 'video': videoHdlr,
'audio': MP4.AUDIO_HDLR 'audio': audioHdlr
}; };
MP4.DREF = new Uint8Array([ var dref = new Uint8Array([
0x00, // version 0 0x00, // version 0
0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01, // entry_count 0x00, 0x00, 0x00, 0x01, // entry_count
@ -110,13 +94,15 @@ class MP4 {
0x00, // version 0 0x00, // version 0
0x00, 0x00, 0x01 // entry_flags 0x00, 0x00, 0x01 // entry_flags
]); ]);
MP4.STCO = new Uint8Array([
var stco = new Uint8Array([
0x00, // version 0x00, // version
0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00 // entry_count 0x00, 0x00, 0x00, 0x00 // entry_count
]); ]);
MP4.STSC = MP4.STCO;
MP4.STTS = MP4.STCO; MP4.STTS = MP4.STSC = MP4.STCO = stco;
MP4.STSZ = new Uint8Array([ MP4.STSZ = new Uint8Array([
0x00, // version 0x00, // version
0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, // flags
@ -143,28 +129,34 @@ class MP4 {
0x00, 0x00, 0x00, // flags 0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01]);// entry_count 0x00, 0x00, 0x00, 0x01]);// entry_count
MP4.FTYP = MP4.box(MP4.types.ftyp, MP4.MAJOR_BRAND, MP4.MINOR_VERSION, MP4.MAJOR_BRAND, MP4.AVC1_BRAND); var majorBrand = new Uint8Array([105,115,111,109]); // isom
MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, MP4.DREF)); var avc1Brand = new Uint8Array([97,118,99,49]); // avc1
var minorVersion = new Uint8Array([0, 0, 0, 1]);
MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand);
MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
} }
static box(type) { static box(type) {
var var
payload = Array.prototype.slice.call(arguments, 1), payload = Array.prototype.slice.call(arguments, 1),
size = 0, size = 8,
i = payload.length, i = payload.length,
len = i, len = i,
result, result;
view;
// calculate the total size we need to allocate // calculate the total size we need to allocate
while (i--) { while (i--) {
size += payload[i].byteLength; size += payload[i].byteLength;
} }
result = new Uint8Array(size + 8); result = new Uint8Array(size);
view = new DataView(result.buffer); result[0] = (size >> 24) & 0xff;
view.setUint32(0, result.byteLength); result[1] = (size >> 16) & 0xff;
result[2] = (size >> 8) & 0xff;
result[3] = size & 0xff;
result.set(type, 4); result.set(type, 4);
// copy the payload into the result // copy the payload into the result
for (i = 0, size = 8; i < len; i++) { for (i = 0, size = 8; i < len; i++) {
// copy payload[i] array @ offset size
result.set(payload[i], size); result.set(payload[i], size);
size += payload[i].byteLength; size += payload[i].byteLength;
} }
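box() now counts the 8 header bytes up front and writes the 32-bit box size big-endian by hand rather than allocating a DataView; the four shift-and-mask stores are equivalent to setUint32(0, size). The same write as a standalone helper (name is ours):

    // equivalent of new DataView(target.buffer).setUint32(offset, value) for big-endian output
    function writeUint32BE(target, offset, value) {
        target[offset]     = (value >> 24) & 0xff;
        target[offset + 1] = (value >> 16) & 0xff;
        target[offset + 2] = (value >> 8) & 0xff;
        target[offset + 3] = value & 0xff;
    }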
@ -564,7 +556,7 @@ class MP4 {
(size >>> 16) & 0xFF, (size >>> 16) & 0xFF,
(size >>> 8) & 0xFF, (size >>> 8) & 0xFF,
size & 0xFF, // sample_size size & 0xFF, // sample_size
(flags.isLeading << 2) | sample.flags.dependsOn, (flags.isLeading << 2) | flags.dependsOn,
(flags.isDependedOn << 6) | (flags.isDependedOn << 6) |
(flags.hasRedundancy << 4) | (flags.hasRedundancy << 4) |
(flags.paddingValue << 1) | (flags.paddingValue << 1) |


@ -119,7 +119,7 @@ class MP4Remuxer {
remuxVideo(track, timeOffset, contiguous) { remuxVideo(track, timeOffset, contiguous) {
var view, var view,
i = 8, offset = 8,
pesTimeScale = this.PES_TIMESCALE, pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR, pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
avcSample, avcSample,
@ -142,25 +142,28 @@ class MP4Remuxer {
// convert NALU bitstream to MP4 format (prepend NALU with size field) // convert NALU bitstream to MP4 format (prepend NALU with size field)
while (avcSample.units.units.length) { while (avcSample.units.units.length) {
unit = avcSample.units.units.shift(); unit = avcSample.units.units.shift();
view.setUint32(i, unit.data.byteLength); view.setUint32(offset, unit.data.byteLength);
i += 4; offset += 4;
mdat.set(unit.data, i); mdat.set(unit.data, offset);
i += unit.data.byteLength; offset += unit.data.byteLength;
mp4SampleLength += 4 + unit.data.byteLength; mp4SampleLength += 4 + unit.data.byteLength;
} }
pts = avcSample.pts - this._initDTS; pts = avcSample.pts - this._initDTS;
dts = avcSample.dts - this._initDTS; dts = avcSample.dts - this._initDTS;
//logger.log('Video/PTS/DTS:' + pts + '/' + dts); // ensure DTS is not bigger than PTS
dts = Math.min(pts,dts);
//logger.log(`Video/PTS/DTS:${pts}/${dts}`);
// if not first AVC sample of video track, normalize PTS/DTS with previous sample value // if not first AVC sample of video track, normalize PTS/DTS with previous sample value
// and ensure that sample duration is positive // and ensure that sample duration is positive
if (lastDTS !== undefined) { if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS); ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS); dtsnorm = this._PTSNormalize(dts, lastDTS);
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor; var sampleDuration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) { if (sampleDuration <= 0) {
//logger.log('invalid sample duration at PTS/DTS::' + avcSample.pts + '/' + avcSample.dts + ':' + mp4Sample.duration); logger.log(`invalid sample duration at PTS/DTS: ${avcSample.pts}/${avcSample.dts}:${sampleDuration}`);
mp4Sample.duration = 0; sampleDuration = 1;
} }
mp4Sample.duration = sampleDuration;
} else { } else {
var nextAvcDts = this.nextAvcDts,delta; var nextAvcDts = this.nextAvcDts,delta;
// first AVC sample of video track, normalize PTS/DTS // first AVC sample of video track, normalize PTS/DTS
@ -179,7 +182,7 @@ class MP4Remuxer {
dtsnorm = nextAvcDts; dtsnorm = nextAvcDts;
// offset PTS as well, ensure that PTS is smaller or equal than new DTS // offset PTS as well, ensure that PTS is smaller or equal than new DTS
ptsnorm = Math.max(ptsnorm - delta, dtsnorm); ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
logger.log('Video/PTS/DTS adjusted:' + ptsnorm + '/' + dtsnorm); logger.log(`Video/PTS/DTS adjusted: ${ptsnorm}/${dtsnorm},delta:${delta}`);
} }
} }
// remember first PTS of our avcSamples, ensure value is positive // remember first PTS of our avcSamples, ensure value is positive
@ -209,18 +212,21 @@ class MP4Remuxer {
samples.push(mp4Sample); samples.push(mp4Sample);
lastDTS = dtsnorm; lastDTS = dtsnorm;
} }
var lastSampleDuration = 0;
if (samples.length >= 2) { if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration; lastSampleDuration = samples[samples.length - 2].duration;
mp4Sample.duration = lastSampleDuration;
} }
// next AVC sample DTS should be equal to last sample DTS + last sample duration // next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + mp4Sample.duration * pes2mp4ScaleFactor; this.nextAvcDts = dtsnorm + lastSampleDuration * pes2mp4ScaleFactor;
track.len = 0; track.len = 0;
track.nbNalu = 0; track.nbNalu = 0;
if(navigator.userAgent.toLowerCase().indexOf('chrome') > -1) { if(samples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
var flags = samples[0].flags;
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue // chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412 // https://code.google.com/p/chromium/issues/detail?id=229412
samples[0].flags.dependsOn = 2; flags.dependsOn = 2;
samples[0].flags.isNonSync = 0; flags.isNonSync = 0;
} }
track.samples = samples; track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track); moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
@ -229,9 +235,9 @@ class MP4Remuxer {
moof: moof, moof: moof,
mdat: mdat, mdat: mdat,
startPTS: firstPTS / pesTimeScale, startPTS: firstPTS / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale, endPTS: (ptsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale, startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale, endDTS: this.nextAvcDts / pesTimeScale,
type: 'video', type: 'video',
nb: samples.length nb: samples.length
}); });
@ -239,7 +245,7 @@ class MP4Remuxer {
remuxAudio(track,timeOffset, contiguous) { remuxAudio(track,timeOffset, contiguous) {
var view, var view,
i = 8, offset = 8,
pesTimeScale = this.PES_TIMESCALE, pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR, pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
aacSample, mp4Sample, aacSample, mp4Sample,
@ -247,27 +253,32 @@ class MP4Remuxer {
mdat, moof, mdat, moof,
firstPTS, firstDTS, lastDTS, firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm, pts, dts, ptsnorm, dtsnorm,
samples = []; samples = [],
/* concatenate the audio data and construct the mdat in place samples0 = [];
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + 8); track.samples.forEach(aacSample => {
view = new DataView(mdat.buffer); if(pts === undefined || aacSample.pts > pts) {
view.setUint32(0, mdat.byteLength); samples0.push(aacSample);
mdat.set(MP4.types.mdat, 4); pts = aacSample.pts;
while (track.samples.length) { } else {
aacSample = track.samples.shift(); logger.warn('dropping past audio frame');
}
});
while (samples0.length) {
aacSample = samples0.shift();
unit = aacSample.unit; unit = aacSample.unit;
mdat.set(unit, i);
i += unit.byteLength;
pts = aacSample.pts - this._initDTS; pts = aacSample.pts - this._initDTS;
dts = aacSample.dts - this._initDTS; dts = aacSample.dts - this._initDTS;
//logger.log('Audio/PTS:' + aacSample.pts.toFixed(0)); //logger.log(`Audio/PTS:${aacSample.pts.toFixed(0)}`);
// if not first sample
if (lastDTS !== undefined) { if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS); ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS); dtsnorm = this._PTSNormalize(dts, lastDTS);
// we use DTS to compute sample duration, but we use PTS to compute initPTS which is used to sync audio and video // let's compute sample duration
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor; mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) { if (mp4Sample.duration < 0) {
// not expected to happen ...
logger.log(`invalid AAC sample duration at PTS:${aacSample.pts}:${mp4Sample.duration}`); logger.log(`invalid AAC sample duration at PTS:${aacSample.pts}:${mp4Sample.duration}`);
mp4Sample.duration = 0; mp4Sample.duration = 0;
} }
@ -280,11 +291,13 @@ class MP4Remuxer {
if (contiguous || Math.abs(delta) < 600) { if (contiguous || Math.abs(delta) < 600) {
// log delta // log delta
if (delta) { if (delta) {
if (delta > 1) { if (delta > 0) {
logger.log(`${delta} ms hole between AAC samples detected,filling it`); logger.log(`${delta} ms hole between AAC samples detected,filling it`);
// set PTS to next PTS, and ensure PTS is greater or equal than last DTS } else if (delta < 0) {
} else if (delta < -1) { // drop overlapping audio frames... browser will deal with it
logger.log(`${(-delta)} ms overlapping between AAC samples detected`); logger.log(`${(-delta)} ms overlapping between AAC samples detected, drop frame`);
track.len -= unit.byteLength;
continue;
} }
// set DTS to next DTS // set DTS to next DTS
ptsnorm = dtsnorm = nextAacPts; ptsnorm = dtsnorm = nextAacPts;
@ -293,7 +306,15 @@ class MP4Remuxer {
// remember first PTS of our aacSamples, ensure value is positive // remember first PTS of our aacSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm); firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm); firstDTS = Math.max(0, dtsnorm);
/* concatenate the audio data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
} }
mdat.set(unit, offset);
offset += unit.byteLength;
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${aacSample.pts}/${aacSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(aacSample.pts/4294967296).toFixed(3)}'); //console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${aacSample.pts}/${aacSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(aacSample.pts/4294967296).toFixed(3)}');
mp4Sample = { mp4Sample = {
size: unit.byteLength, size: unit.byteLength,
@ -310,12 +331,16 @@ class MP4Remuxer {
samples.push(mp4Sample); samples.push(mp4Sample);
lastDTS = dtsnorm; lastDTS = dtsnorm;
} }
var lastSampleDuration = 0;
var nbSamples = samples.length;
//set last sample duration as being identical to previous sample //set last sample duration as being identical to previous sample
if (samples.length >= 2) { if (nbSamples >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration; lastSampleDuration = samples[nbSamples - 2].duration;
mp4Sample.duration = lastSampleDuration;
} }
if (nbSamples) {
// next aac sample PTS should be equal to last sample PTS + duration // next aac sample PTS should be equal to last sample PTS + duration
this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration; this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * lastSampleDuration;
//logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacDts.toFixed(0)); //logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacDts.toFixed(0));
track.len = 0; track.len = 0;
track.samples = samples; track.samples = samples;
@ -327,11 +352,12 @@ class MP4Remuxer {
startPTS: firstPTS / pesTimeScale, startPTS: firstPTS / pesTimeScale,
endPTS: this.nextAacPts / pesTimeScale, endPTS: this.nextAacPts / pesTimeScale,
startDTS: firstDTS / pesTimeScale, startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale, endDTS: (dtsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
type: 'audio', type: 'audio',
nb: samples.length nb: nbSamples
}); });
} }
}
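Both remux paths above derive each MP4 sample duration from consecutive normalized DTS values divided by the PES-to-MP4 scale factor, and the video path now clamps non-positive results to one tick instead of zero. Compressed into a sketch (helper name is ours):

    // dtsnorm and lastDTS are normalized 90 kHz PES timestamps;
    // pes2mp4ScaleFactor converts the difference into MP4 track timescale units
    function videoSampleDuration(dtsnorm, lastDTS, pes2mp4ScaleFactor) {
        var duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
        return duration > 0 ? duration : 1; // never emit a zero or negative duration
    }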
remuxID3(track,timeOffset) { remuxID3(track,timeOffset) {
var length = track.samples.length, sample; var length = track.samples.length, sample;


@ -0,0 +1,83 @@
// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
class AttrList {
constructor(attrs) {
if (typeof attrs === 'string') {
attrs = AttrList.parseAttrList(attrs);
}
for(var attr in attrs){
if(attrs.hasOwnProperty(attr)) {
this[attr] = attrs[attr];
}
}
}
decimalInteger(attrName) {
const intValue = parseInt(this[attrName], 10);
if (intValue > Number.MAX_SAFE_INTEGER) {
return Infinity;
}
return intValue;
}
hexadecimalInteger(attrName) {
if(this[attrName]) {
let stringValue = (this[attrName] || '0x').slice(2);
stringValue = ((stringValue.length & 1) ? '0' : '') + stringValue;
const value = new Uint8Array(stringValue.length / 2);
for (let i = 0; i < stringValue.length / 2; i++) {
value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16);
}
return value;
} else {
return null;
}
}
hexadecimalIntegerAsNumber(attrName) {
const intValue = parseInt(this[attrName], 16);
if (intValue > Number.MAX_SAFE_INTEGER) {
return Infinity;
}
return intValue;
}
decimalFloatingPoint(attrName) {
return parseFloat(this[attrName]);
}
enumeratedString(attrName) {
return this[attrName];
}
decimalResolution(attrName) {
const res = /^(\d+)x(\d+)$/.exec(this[attrName]);
if (res === null) {
return undefined;
}
return {
width: parseInt(res[1], 10),
height: parseInt(res[2], 10)
};
}
static parseAttrList(input) {
const re = /(.+?)=((?:\".*?\")|.*?)(?:,|$)/g;
var match, attrs = {};
while ((match = re.exec(input)) !== null) {
var value = match[2], quote = '"';
if (value.indexOf(quote) === 0 &&
value.lastIndexOf(quote) === (value.length-1)) {
value = value.slice(1, -1);
}
attrs[match[1]] = value;
}
return attrs;
}
}
export default AttrList;
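This is the helper the playlist loader above also relies on for #EXT-X-KEY lines: hexadecimalInteger() strips the 0x prefix, left-pads odd-length hex strings and returns the bytes as a Uint8Array, replacing the old hand-rolled Uint32Array conversion. A short illustration with a made-up key line:

    var keyAttrs = new AttrList('METHOD=AES-128,URI="key.bin",IV=0x9c7db8778570d05c3f9ae7b412286d63');
    keyAttrs.enumeratedString('METHOD'); // 'AES-128'
    keyAttrs.URI;                        // 'key.bin' (surrounding quotes stripped)
    keyAttrs.hexadecimalInteger('IV');   // Uint8Array(16) holding the raw IV bytes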


@ -18,19 +18,21 @@ class XhrLoader {
} }
abort() { abort() {
if (this.loader && this.loader.readyState !== 4) { var loader = this.loader,
timeoutHandle = this.timeoutHandle;
if (loader && loader.readyState !== 4) {
this.stats.aborted = true; this.stats.aborted = true;
this.loader.abort(); loader.abort();
} }
if (this.timeoutHandle) { if (timeoutHandle) {
window.clearTimeout(this.timeoutHandle); window.clearTimeout(timeoutHandle);
} }
} }
load(url, responseType, onSuccess, onError, onTimeout, timeout, maxRetry, retryDelay, onProgress = null, frag = null) { load(url, responseType, onSuccess, onError, onTimeout, timeout, maxRetry, retryDelay, onProgress = null, frag = null) {
this.url = url; this.url = url;
if (frag && !isNaN(frag.byteRangeStartOffset) && !isNaN(frag.byteRangeEndOffset)) { if (frag && !isNaN(frag.byteRangeStartOffset) && !isNaN(frag.byteRangeEndOffset)) {
this.byteRange = frag.byteRangeStartOffset + '-' + frag.byteRangeEndOffset; this.byteRange = frag.byteRangeStartOffset + '-' + (frag.byteRangeEndOffset-1);
} }
this.responseType = responseType; this.responseType = responseType;
this.onSuccess = onSuccess; this.onSuccess = onSuccess;
@ -47,9 +49,9 @@ class XhrLoader {
loadInternal() { loadInternal() {
var xhr = this.loader = new XMLHttpRequest(); var xhr = this.loader = new XMLHttpRequest();
xhr.onload = this.loadsuccess.bind(this); xhr.onreadystatechange = this.statechange.bind(this);
xhr.onerror = this.loaderror.bind(this);
xhr.onprogress = this.loadprogress.bind(this); xhr.onprogress = this.loadprogress.bind(this);
xhr.open('GET', this.url, true); xhr.open('GET', this.url, true);
if (this.byteRange) { if (this.byteRange) {
xhr.setRequestHeader('Range', 'bytes=' + this.byteRange); xhr.setRequestHeader('Range', 'bytes=' + this.byteRange);
@ -63,26 +65,35 @@ class XhrLoader {
xhr.send(); xhr.send();
} }
loadsuccess(event) { statechange(event) {
var xhr = event.currentTarget,
status = xhr.status,
stats = this.stats;
// don't proceed if xhr has been aborted
// 4 = Response from server has been completely loaded.
if (!stats.aborted && xhr.readyState === 4) {
// http status between 200 to 299 are all successful
if (status >= 200 && status < 300) {
window.clearTimeout(this.timeoutHandle); window.clearTimeout(this.timeoutHandle);
this.stats.tload = performance.now(); stats.tload = performance.now();
this.onSuccess(event, this.stats); this.onSuccess(event, stats);
} } else {
// error ...
loaderror(event) { if (stats.retry < this.maxRetry) {
if (this.stats.retry < this.maxRetry) { logger.warn(`${status} while loading ${this.url}, retrying in ${this.retryDelay}...`);
logger.warn(`${event.type} while loading ${this.url}, retrying in ${this.retryDelay}...`);
this.destroy(); this.destroy();
window.setTimeout(this.loadInternal.bind(this), this.retryDelay); window.setTimeout(this.loadInternal.bind(this), this.retryDelay);
// exponential backoff // exponential backoff
this.retryDelay = Math.min(2 * this.retryDelay, 64000); this.retryDelay = Math.min(2 * this.retryDelay, 64000);
this.stats.retry++; stats.retry++;
} else { } else {
window.clearTimeout(this.timeoutHandle); window.clearTimeout(this.timeoutHandle);
logger.error(`${event.type} while loading ${this.url}` ); logger.error(`${status} while loading ${this.url}` );
this.onError(event); this.onError(event);
} }
} }
}
}
loadtimeout(event) { loadtimeout(event) {
logger.warn(`timeout while loading ${this.url}` ); logger.warn(`timeout while loading ${this.url}` );
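loadsuccess/loaderror above are folded into a single statechange handler that inspects readyState and the HTTP status, retrying failures with an exponentially growing delay capped at 64 s. The decision it makes, condensed into a sketch (helper name is ours):

    // true when a finished, non-aborted request failed but still has retry budget left
    function shouldRetry(xhr, stats, maxRetry) {
        if (stats.aborted || xhr.readyState !== 4) { return false; } // still in flight or aborted
        if (xhr.status >= 200 && xhr.status < 300) { return false; } // 2xx goes to onSuccess
        return stats.retry < maxRetry;
    }
    // each retry then doubles the delay: retryDelay = Math.min(2 * retryDelay, 64000);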


@ -28,14 +28,14 @@
"iron-component-page": "polymerelements/iron-component-page#^1.0.0" "iron-component-page": "polymerelements/iron-component-page#^1.0.0"
}, },
"ignore": [], "ignore": [],
"homepage": "https://github.com/polymerelements/iron-flex-layout", "homepage": "https://github.com/PolymerElements/iron-flex-layout",
"_release": "1.2.2", "_release": "1.2.2",
"_resolution": { "_resolution": {
"type": "version", "type": "version",
"tag": "v1.2.2", "tag": "v1.2.2",
"commit": "41c4f35be1368afb770312b907a258175565dbdf" "commit": "41c4f35be1368afb770312b907a258175565dbdf"
}, },
"_source": "git://github.com/polymerelements/iron-flex-layout.git", "_source": "git://github.com/PolymerElements/iron-flex-layout.git",
"_target": "^1.0.0", "_target": "^1.0.0",
"_originalSource": "polymerelements/iron-flex-layout" "_originalSource": "PolymerElements/iron-flex-layout"
} }


@ -36,7 +36,7 @@
"tag": "v1.0.8", "tag": "v1.0.8",
"commit": "e9a66727f3da0446f04956d4e4f1dcd51cdec2ff" "commit": "e9a66727f3da0446f04956d4e4f1dcd51cdec2ff"
}, },
"_source": "git://github.com/PolymerElements/iron-selector.git", "_source": "git://github.com/polymerelements/iron-selector.git",
"_target": "^1.0.0", "_target": "^1.0.0",
"_originalSource": "PolymerElements/iron-selector" "_originalSource": "polymerelements/iron-selector"
} }


@ -53,7 +53,7 @@
"tag": "v1.1.4", "tag": "v1.1.4",
"commit": "8ca01ac3cafc61abd980d262875ffca0c79640fa" "commit": "8ca01ac3cafc61abd980d262875ffca0c79640fa"
}, },
"_source": "git://github.com/polymerelements/paper-input.git", "_source": "git://github.com/PolymerElements/paper-input.git",
"_target": "^1.0.9", "_target": "^1.0.0",
"_originalSource": "polymerelements/paper-input" "_originalSource": "PolymerElements/paper-input"
} }


@ -1,6 +1,6 @@
{ {
"name": "paper-radio-group", "name": "paper-radio-group",
"version": "1.0.8", "version": "1.0.9",
"description": "A group of material design radio buttons", "description": "A group of material design radio buttons",
"authors": [ "authors": [
"The Polymer Authors" "The Polymer Authors"
@ -30,15 +30,15 @@
"paper-radio-button": "PolymerElements/paper-radio-button#^1.0.0", "paper-radio-button": "PolymerElements/paper-radio-button#^1.0.0",
"paper-styles": "PolymerElements/paper-styles#^1.0.0", "paper-styles": "PolymerElements/paper-styles#^1.0.0",
"test-fixture": "PolymerElements/test-fixture#^1.0.0", "test-fixture": "PolymerElements/test-fixture#^1.0.0",
"web-component-tester": "polymer/web-component-tester#^3.4.0", "web-component-tester": "^4.0.0",
"webcomponentsjs": "webcomponents/webcomponentsjs#^0.7.0" "webcomponentsjs": "webcomponents/webcomponentsjs#^0.7.0"
}, },
"main": "paper-radio-group.html", "main": "paper-radio-group.html",
"_release": "1.0.8", "_release": "1.0.9",
"_resolution": { "_resolution": {
"type": "version", "type": "version",
"tag": "v1.0.8", "tag": "v1.0.9",
"commit": "398bb090d50b1422ba1848ae531cff6e6aff753f" "commit": "27a8447ed1709dd1a9bef514acd7dee218604077"
}, },
"_source": "git://github.com/PolymerElements/paper-radio-group.git", "_source": "git://github.com/PolymerElements/paper-radio-group.git",
"_target": "~1.0.4", "_target": "~1.0.4",


@ -1,22 +1,25 @@
language: node_js language: node_js
sudo: false sudo: false
before_script: before_script:
- npm install web-component-tester - npm install -g bower polylint web-component-tester
- npm install bower
- 'export PATH=$PWD/node_modules/.bin:$PATH'
- bower install - bower install
- polylint
env: env:
global: global:
- secure: fuljRoGGYqpPmvGA/f/nsqHWRmAFSu0BpeXS838DmXDZGe2l6liTQM3WOg/zsbQimo62bzyfaX7RI0LEPv3kcdIAjxF78/0y+Gb092YOlWjBDf8+Kwz4UuxwEHQ9FRv4T0CiZnp81oo/J9DgBNUIuIKNoycVfp/GQNM7VH8NvYM= - secure: fuljRoGGYqpPmvGA/f/nsqHWRmAFSu0BpeXS838DmXDZGe2l6liTQM3WOg/zsbQimo62bzyfaX7RI0LEPv3kcdIAjxF78/0y+Gb092YOlWjBDf8+Kwz4UuxwEHQ9FRv4T0CiZnp81oo/J9DgBNUIuIKNoycVfp/GQNM7VH8NvYM=
- secure: ibwd66QS2gTWqIW57liwNiDZE1GHmS4lJ62T4cJ1lLUV41B6pONB62fzAVtvJhLfAyQeR3cX0+grCFJtc2v94r5DhVoTAdxuM3MrYQuM+p1XNpK/UUjEkRZkSP2GxHIztXDwePe96W5DqX/C9E/KC3G00NF5/rdtTkoEe6sn0wA= - secure: ibwd66QS2gTWqIW57liwNiDZE1GHmS4lJ62T4cJ1lLUV41B6pONB62fzAVtvJhLfAyQeR3cX0+grCFJtc2v94r5DhVoTAdxuM3MrYQuM+p1XNpK/UUjEkRZkSP2GxHIztXDwePe96W5DqX/C9E/KC3G00NF5/rdtTkoEe6sn0wA=
node_js: 4 - CXX=g++-4.8
node_js: stable
addons: addons:
firefox: latest firefox: latest
apt: apt:
sources: sources:
- google-chrome - google-chrome
- ubuntu-toolchain-r-test
packages: packages:
- google-chrome-stable - google-chrome-stable
- g++-4.8
sauce_connect: true
script: script:
- xvfb-run wct - xvfb-run wct
- "if [ \"${TRAVIS_PULL_REQUEST}\" = \"false\" ]; then wct -s 'default'; fi" - "if [ \"${TRAVIS_PULL_REQUEST}\" = \"false\" ]; then wct -s 'default'; fi"


@ -1,6 +1,6 @@
{ {
"name": "paper-radio-group", "name": "paper-radio-group",
"version": "1.0.8", "version": "1.0.9",
"description": "A group of material design radio buttons", "description": "A group of material design radio buttons",
"authors": [ "authors": [
"The Polymer Authors" "The Polymer Authors"
@ -30,7 +30,7 @@
"paper-radio-button": "PolymerElements/paper-radio-button#^1.0.0", "paper-radio-button": "PolymerElements/paper-radio-button#^1.0.0",
"paper-styles": "PolymerElements/paper-styles#^1.0.0", "paper-styles": "PolymerElements/paper-styles#^1.0.0",
"test-fixture": "PolymerElements/test-fixture#^1.0.0", "test-fixture": "PolymerElements/test-fixture#^1.0.0",
"web-component-tester": "polymer/web-component-tester#^3.4.0", "web-component-tester": "^4.0.0",
"webcomponentsjs": "webcomponents/webcomponentsjs#^0.7.0" "webcomponentsjs": "webcomponents/webcomponentsjs#^0.7.0"
}, },
"main": "paper-radio-group.html" "main": "paper-radio-group.html"


@ -138,6 +138,7 @@ information about `paper-radio-button`.
if (this.allowEmptySelection) { if (this.allowEmptySelection) {
value = ''; value = '';
} else { } else {
if (oldItem)
oldItem.checked = true; oldItem.checked = true;
return; return;
} }


@ -1,5 +1,4 @@
<!doctype html> <!DOCTYPE html><!--
<!--
@license @license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved. Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
@ -7,9 +6,7 @@ The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
--> --><html><head>
<html>
<head>
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes"> <meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
<title>paper-radio-group tests</title> <title>paper-radio-group tests</title>
@ -18,8 +15,10 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
<body> <body>
<script> <script>
WCT.loadSuites([ WCT.loadSuites([
'basic.html' 'basic.html',
'basic.html?dom=shadow'
]); ]);
</script> </script>
</body>
</html>
</body></html>


@ -591,7 +591,7 @@
return false; return false;
} }
return self.canAutoPlayVideo() && !browserInfo.mobile; return self.canAutoPlayVideo();
}; };
self.canAutoPlayVideo = function () { self.canAutoPlayVideo = function () {


@ -16,23 +16,42 @@
} }
} }
function unveilElements(elems) { function cancelAll(tokens) {
for (var i = 0, length = tokens.length; i < length; i++) {
if (!elems.length) { tokens[i] = true;
}
}
function unveilElements(images) {
if (!images.length) {
return; return;
} }
var images = elems; var cancellationTokens = [];
function unveilInternal(tokenIndex) {
function unveil() {
var remaining = []; var remaining = [];
var anyFound = false;
var out = false;
// TODO: This out construct assumes left to right, top to bottom
for (var i = 0, length = images.length; i < length; i++) { for (var i = 0, length = images.length; i < length; i++) {
if (cancellationTokens[tokenIndex]) {
return;
}
var img = images[i]; var img = images[i];
if (isVisible(img)) { if (!out && isVisible(img)) {
anyFound = true;
fillImage(img); fillImage(img);
} else { } else {
if (anyFound) {
out = true;
}
remaining.push(img); remaining.push(img);
} }
} }
@ -40,13 +59,27 @@
images = remaining; images = remaining;
if (!images.length) { if (!images.length) {
document.removeEventListener('focus', unveil, true);
document.removeEventListener('scroll', unveil, true); document.removeEventListener('scroll', unveil, true);
document.removeEventListener(wheelEvent, unveil, true); document.removeEventListener(wheelEvent, unveil, true);
window.removeEventListener('resize', unveil, true); window.removeEventListener('resize', unveil, true);
} }
} }
function unveil() {
cancelAll(cancellationTokens);
var index = cancellationTokens.length;
cancellationTokens.length++;
setTimeout(function () {
unveilInternal(index);
}, 1);
}
document.addEventListener('scroll', unveil, true); document.addEventListener('scroll', unveil, true);
document.addEventListener('focus', unveil, true);
document.addEventListener(wheelEvent, unveil, true); document.addEventListener(wheelEvent, unveil, true);
window.addEventListener('resize', unveil, true); window.addEventListener('resize', unveil, true);
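Every scroll, focus, wheel or resize event above now schedules a fresh visibility scan and flips a cancellation token for each scan still pending, so only the newest pass walks the image list. A simplified sketch of the pattern (the real unveilInternal also re-checks its token on every image, so a scan that has already started stops as soon as a newer event fires):

    var cancellationTokens = [];
    function scheduleScan(scan) {
        for (var i = 0; i < cancellationTokens.length; i++) {
            cancellationTokens[i] = true;            // cancel everything already queued
        }
        var index = cancellationTokens.length;
        cancellationTokens.push(false);              // token for the new scan
        setTimeout(function () {
            if (!cancellationTokens[index]) { scan(); }
        }, 1);
    }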
@ -102,14 +135,9 @@
return elem.animate(keyframes, timing); return elem.animate(keyframes, timing);
} }
function simpleImageStore() { window.ImageStore = {
setImageInto: setImageIntoElement
var self = this; };
self.setImageInto = setImageIntoElement;
}
window.ImageStore = new simpleImageStore();
window.ImageLoader = { window.ImageLoader = {
fillImages: fillImages, fillImages: fillImages,