
get api libs from bower

Luke Pulverenti 2015-12-16 00:30:14 -05:00
parent def418714f
commit f36e664503
97 changed files with 16860 additions and 197 deletions

View file

@@ -0,0 +1,29 @@
{
"name": "emby-apiclient",
"main": "apiclient.js",
"authors": [
"The Emby Authors"
],
"repository": {
"type": "git",
"url": "git://github.com/MediaBrowser/Emby.ApiClient.Javascript.git"
},
"license": "https://github.com/MediaBrowser/Emby.ApiClient.Javascript/blob/master/LICENSE",
"homepage": "https://github.com/MediaBrowser/Emby.ApiClient.Javascript",
"dependencies": {
"cryptojslib": "cryptojslib#^3.1.2"
},
"devDependencies": {},
"ignore": [],
"version": "1.0.3",
"_release": "1.0.3",
"_resolution": {
"type": "version",
"tag": "1.0.3",
"commit": "8e6266950e05e2d032d62340cb7307a3d19a23ec"
},
"_source": "git://github.com/MediaBrowser/Emby.ApiClient.Javascript.git",
"_target": "~1.0.3",
"_originalSource": "emby-apiclient",
"_direct": true
}

View file

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2014 Emby
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@@ -0,0 +1,20 @@
{
"name": "emby-apiclient",
"main": "apiclient.js",
"authors": [
"The Emby Authors"
],
"repository": {
"type": "git",
"url": "git://github.com/MediaBrowser/Emby.ApiClient.Javascript.git"
},
"license": "https://github.com/MediaBrowser/Emby.ApiClient.Javascript/blob/master/LICENSE",
"homepage": "https://github.com/MediaBrowser/Emby.ApiClient.Javascript",
"dependencies": {
"cryptojslib": "cryptojslib#^3.1.2"
},
"devDependencies": {
},
"ignore": []
}

View file

@@ -0,0 +1,28 @@
{
"name": "hls.js",
"version": "0.3.11",
"description": "Media Source Extension - HLS library, by/for Dailymotion",
"homepage": "https://github.com/dailymotion/hls.js",
"authors": [
"Guillaume du Pontavice <guillaume.dupontavice@dailymotion.com>"
],
"main": "dist/hls.js",
"private": true,
"ignore": [
"**/.*",
"node_modules",
"bower_components",
"test",
"tests"
],
"_release": "0.3.11",
"_resolution": {
"type": "version",
"tag": "v0.3.11",
"commit": "4e698e4adc4e1e0fa80ef9c8334a5ef382bbe347"
},
"_source": "git://github.com/dailymotion/hls.js.git",
"_target": "~0.3.11",
"_originalSource": "dailymotion/hls.js",
"_direct": true
}

View file

@@ -0,0 +1,609 @@
## Hello hls.js!
### first step: setup and support
first include ```dist/hls.{min}.js``` in your web page and check whether your browser supports [MediaSource Extensions][].
[MediaSource Extensions]: http://w3c.github.io/media-source/
just invoke the following static method: ```Hls.isSupported()```
```js
<script src="dist/hls.{min}.js"></script>
<script>
if(Hls.isSupported()) {
console.log("hello hls.js!");
}
</script>
```
### second step: instantiate hls object and bind it to a ```<video>``` element
let's:
- create a ```<video>``` element
- create a new HLS object
- bind video element to this HLS object
```js
<script src="dist/hls.{min}.js"></script>
<video id="video"></video>
<script>
if(Hls.isSupported()) {
var video = document.getElementById('video');
var hls = new Hls();
// bind them together
hls.attachMedia(video);
// MEDIA_ATTACHED event is fired by hls object once MediaSource is ready
hls.on(Hls.Events.MEDIA_ATTACHED,function() {
console.log("video and hls.js are now bound together !");
});
}
</script>
```
### third step: load a manifest
you need to provide the manifest URL as below:
```js
<script src="dist/hls.{min}.js"></script>
<video id="video"></video>
<script>
if(Hls.isSupported()) {
var video = document.getElementById('video');
var hls = new Hls();
// bind them together
hls.attachMedia(video);
hls.on(Hls.Events.MEDIA_ATTACHED,function() {
console.log("video and hls.js are now bound together !");
hls.loadSource("http://my.streamURL.com/playlist.m3u8");
hls.on(Hls.Events.MANIFEST_PARSED, function(event,data) {
console.log("manifest loaded, found " + data.levels.length + " quality level");
}
});
}
</script>
```
### fourth step: control through the ```<video>``` element
video is controlled through the HTML ```<video>``` element.
HTMLVideoElement controls and events can be used seamlessly.
```js
video.play();
```
### fifth step: error handling
all errors are signalled through a single event.
each error is categorized by:
- its type:
- ```Hls.ErrorTypes.NETWORK_ERROR``` for network related errors
- ```Hls.ErrorTypes.MEDIA_ERROR``` for media/video related errors
- ```Hls.ErrorTypes.OTHER_ERROR``` for all other errors
- its details:
- ```Hls.ErrorDetails.MANIFEST_LOAD_ERROR``` raised when manifest loading fails because of a network error
- ```Hls.ErrorDetails.MANIFEST_LOAD_TIMEOUT``` raised when manifest loading fails because of a timeout
- ```Hls.ErrorDetails.MANIFEST_PARSING_ERROR``` raised when manifest parsing failed to find proper content
- ```Hls.ErrorDetails.LEVEL_LOAD_ERROR``` raised when level loading fails because of a network error
- ```Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT``` raised when level loading fails because of a timeout
- ```Hls.ErrorDetails.LEVEL_SWITCH_ERROR``` raised when level switching fails
- ```Hls.ErrorDetails.FRAG_LOAD_ERROR``` raised when fragment loading fails because of a network error
- ```Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR``` raised upon detection of same fragment being requested in loop
- ```Hls.ErrorDetails.FRAG_LOAD_TIMEOUT``` raised when fragment loading fails because of a timeout
- ```Hls.ErrorDetails.FRAG_DECRYPT_ERROR``` raised when fragment decryption fails
- ```Hls.ErrorDetails.FRAG_PARSING_ERROR``` raised when fragment parsing fails
- ```Hls.ErrorDetails.BUFFER_APPEND_ERROR``` raised when exception is raised while preparing buffer append
- ```Hls.ErrorDetails.BUFFER_APPENDING_ERROR``` raised when exception is raised during buffer appending
- its fatality:
- ```false``` if the error is not fatal, hls.js will try to recover it
- ```true``` if the error is fatal, an action is required to (try to) recover it.
full details are described [below](#errors)
see sample code below to listen to errors:
```js
hls.on(Hls.Events.ERROR,function(event,data) {
var errorType = data.type;
var errorDetails = data.details;
var errorFatal = data.fatal;
switch(data.details) {
case Hls.ErrorDetails.FRAG_LOAD_ERROR:
// ....
break;
default:
break;
}
});
```
#### Fatal Error Recovery
hls.js provides means to 'try to' recover fatal network and media errors, through these 2 methods:
##### ```hls.startLoad()```
should be invoked to recover a network error.
##### ```hls.recoverMediaError()```
should be invoked to recover a media error.
##### error recovery sample code
```js
hls.on(Hls.Events.ERROR,function(event,data) {
if(data.fatal) {
switch(data.type) {
case Hls.ErrorTypes.NETWORK_ERROR:
// try to recover network error
console.log("fatal network error encountered, try to recover");
hls.startLoad();
break;
case Hls.ErrorTypes.MEDIA_ERROR:
console.log("fatal media error encountered, try to recover");
hls.recoverMediaError();
break;
default:
// cannot recover
hls.destroy();
break;
}
}
});
```
##### ```hls.swapAudioCodec()```
If media errors are still raised after calling ```hls.recoverMediaError()```,
calling this method can be useful to work around an audio codec mismatch.
the workflow should be:
on media error: first call ```hls.swapAudioCodec()```, then call ```hls.recoverMediaError()```, as sketched below.
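for illustration, a minimal sketch of this workflow (the ```mediaRecoveryAttempted``` flag is not part of the API, it just avoids recovering in a loop):
```js
var mediaRecoveryAttempted = false;
hls.on(Hls.Events.ERROR, function(event, data) {
  if (data.fatal && data.type === Hls.ErrorTypes.MEDIA_ERROR) {
    if (!mediaRecoveryAttempted) {
      // first fatal media error : try a plain recovery
      mediaRecoveryAttempted = true;
      hls.recoverMediaError();
    } else {
      // media errors keep coming : swap audio codec, then recover again
      hls.swapAudioCodec();
      hls.recoverMediaError();
    }
  }
});
```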
### final step: destroying, switching between streams
```hls.destroy()``` should be called to free used resources and destroy the hls context.
## Fine Tuning
configuration parameters can be provided to hls.js upon instantiation of the Hls object.
```js
var config = {
debug : false,
autoStartLoad : true,
maxBufferLength : 30,
maxBufferSize : 60*1000*1000,
liveSyncDurationCount : 3,
liveMaxLatencyDurationCount: 10,
enableWorker : true,
enableSoftwareAES: true,
fragLoadingTimeOut : 20000,
fragLoadingMaxRetry : 6,
fragLoadingRetryDelay : 500,
manifestLoadingTimeOut : 10000,
manifestLoadingMaxRetry : 6,
manifestLoadingRetryDelay : 500,
fpsDroppedMonitoringPeriod : 5000,
fpsDroppedMonitoringThreshold : 0.2,
appendErrorMaxRetry : 200,
loader : customLoader,
fLoader: customFragmentLoader,
pLoader: customPlaylistLoader,
xhrSetup : XMLHttpRequestSetupCallback,
abrController : customAbrController
};
var hls = new Hls(config);
```
#### ```debug```
(default false)
setting ```config.debug=true``` will turn on debug logs on JS console.
a logger object could also be provided for custom logging : ```config.debug=customLogger```
#### ```autoStartLoad```
(default true)
- if set to true, the start level playlist and first fragments will be loaded automatically, after the ```Hls.Events.MANIFEST_PARSED``` event has been triggered
- if set to false, an explicit API call (```hls.startLoad()```) will be needed to start quality level/fragment loading, as sketched below.
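for illustration, a minimal sketch of the manual-start workflow (URL and element id are placeholders):
```js
var video = document.getElementById('video');
var hls = new Hls({autoStartLoad: false});
hls.attachMedia(video);
hls.loadSource('http://my.streamURL.com/playlist.m3u8');
hls.on(Hls.Events.MANIFEST_PARSED, function(event, data) {
  // nothing beyond the manifest is downloaded until startLoad() is called
  hls.startLoad();
});
```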
#### ```maxBufferLength```
(default 30s)
maximum buffer length in seconds. if the buffer length is/becomes less than this value, a new fragment will be loaded.
#### ```maxBufferSize```
(default 60 MB)
maximum buffer size in bytes. if the buffer size ahead of the current playback position is bigger than this value, no fragment will be loaded.
#### ```liveSyncDurationCount```
(default 3)
edge of live delay, expressed in multiples of ```EXT-X-TARGETDURATION```.
if set to 3, playback will start from fragment N-3, N being the last fragment of the live playlist.
decreasing this value is likely to cause playback stalls.
#### ```liveMaxLatencyDurationCount```
(default Infinity)
maximum delay allowed from the edge of live, expressed in multiples of ```EXT-X-TARGETDURATION```.
if set to 10, the player will seek back to ```liveSyncDurationCount``` whenever the next fragment to be loaded is older than N-10, N being the last fragment of the live playlist.
If set, this value must be strictly greater than ```liveSyncDurationCount```.
a value too close to ```liveSyncDurationCount``` is likely to cause playback stalls.
#### ```enableWorker```
(default true)
enable web worker (if available in the browser) for TS demuxing/MP4 remuxing, to improve performance and avoid lag/frame drops.
#### ```enableSoftwareAES```
(default true)
enable JavaScript AES decryption as a fallback when the WebCrypto API is not available.
#### ```fragLoadingTimeOut```/```manifestLoadingTimeOut```
(default 60000ms for fragment/10000ms for manifest)
URL Loader timeout.
A timeout callback will be triggered if loading duration exceeds this timeout.
no further action will be done : the load operation will not be cancelled/aborted.
It is up to the application to catch this event and treat it as needed.
#### ```fragLoadingMaxRetry```/```manifestLoadingMaxRetry```
(default 3)
max nb of load retry
#### ```fragLoadingRetryDelay```/```manifestLoadingRetryDelay```
(default 500ms)
initial delay between an XMLHttpRequest error and the first load retry (in ms).
any I/O error will trigger retries every 500ms, 1s, 2s, 4s, 8s, ... capped to 64s (exponential backoff).
#### ```appendErrorMaxRetry```
(default 200)
max number of sourceBuffer.appendBuffer() retries upon error.
such errors can happen in a loop with UHD streams, when the internal buffer is full (a QuotaExceededError will be triggered). in that case we need to wait for the browser to evict some data before the buffer can be appended correctly.
#### ```loader```
(default : standard XmlHttpRequest based URL loader)
override standard URL loader by a custom one.
could be useful for P2P or stubbing (testing).
Use this, if you want to overwrite both the fragment and the playlist loader.
Note: If fLoader or pLoader are used, they overwrite loader!
```js
var customLoader = function() {
/* calling load() will start retrieving content at given URL (HTTP GET)
params :
url : URL to load
responseType : xhr response Type (arraybuffer or default response Type for playlist)
onSuccess : callback triggered upon successful loading of URL.
it should return xhr event and load stats object {trequest,tfirst,tload}
onError : callback triggered if any I/O error is met while loading fragment
onTimeOut : callback triggered if loading is still not finished after a certain duration
timeout : timeout after which onTimeOut callback will be triggered(if loading is still not finished after that delay)
maxRetry : max nb of load retry
retryDelay : delay between an I/O error and following connection retry (ms). this to avoid spamming the server.
*/
this.load = function(url,responseType,onSuccess,onError,timeout,maxRetry,retryDelay) {}
/* abort any loading in progress */
this.abort = function() {}
/* destroy loading context */
this.destroy = function() {}
}
```
#### ```fLoader```
(default : undefined)
This enables the manipulation of the fragment loader.
Note: This will overwrite the default loader, as well as your own loader function (see above).
```js
var customFragmentLoader = function() {
//See loader for details
}
```
#### ```pLoader```
(default : undefined)
This enables the manipulation of the playlist loader.
Note: This will overwrite the default loader, as well as your own loader function (see above).
```js
var customPlaylistLoader = function() {
//See loader for details
}
```
#### ```xhrSetup```
(default : none)
XmlHttpRequest customization callback for default XHR based loader.
parameter should be a function with a single argument (of type XMLHttpRequest).
If ```xhrSetup``` is specified, the default loader will invoke it before calling ```xhr.send()```.
This allows the user to easily modify/set up the XHR. see example below.
```js
var config = {
xhrSetup: function(xhr, url) {
xhr.withCredentials = true; // do send cookies
}
}
```
#### ```abrController```
(default : internal ABR controller)
customized Adaptive Bitrate Streaming Controller.
parameter should be a class providing 2 getters/setters and a destroy() method (see the sketch below):
- get/set nextAutoLevel: return/force the next auto-quality level that should be used (currently used for emergency switch down)
- get/set autoLevelCapping: capping/max level value that can be used by the ABR controller
- destroy(): should clean up all used resources
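a minimal sketch of such a class is given below; only the interface listed above comes from this documentation, the constructor argument and the trivial internal logic are assumptions:
```js
var customAbrController = function(hls) {        // hls instance argument is an assumption
  var nextLevel = -1, capping = -1;
  Object.defineProperty(this, 'nextAutoLevel', {
    get: function() { return nextLevel; },       // next auto-quality level to load
    set: function(level) { nextLevel = level; }  // forced level (e.g. emergency switch down)
  });
  Object.defineProperty(this, 'autoLevelCapping', {
    get: function() { return capping; },         // max level usable by the ABR controller
    set: function(level) { capping = level; }
  });
  this.destroy = function() {};                  // clean up any used resources
};
var hls = new Hls({abrController: customAbrController});
```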
## Video Binding/Unbinding API
#### ```hls.attachMedia(videoElement)```
calling this method will :
- bind videoElement and hls instance,
- create MediaSource and set it as video source
- once MediaSource object is successfully created, MEDIA_ATTACHED event will be fired.
#### ```hls.detachMedia()```
calling this method will :
- unbind VideoElement from hls instance,
- signal the end of the stream on MediaSource
- reset video source (```video.src = ''```)
## Quality switch Control API
by default, hls.js handles quality switch automatically, using heuristics based on fragment loading bitrate and quality level bandwidth exposed in the variant manifest.
it is also possible to manually control quality switching using the API below:
#### ```hls.levels```
return array of available quality levels
#### ```hls.currentLevel```
get : return current playback quality level
set : trigger an immediate quality level switch to the new quality level. this will pause the video if it was playing, flush the whole buffer, and fetch the fragment matching the current position and requested quality level, then resume the video if needed once the fetched fragment has been buffered.
set to -1 for automatic level selection
#### ```hls.nextLevel```
get : return next playback quality level (playback quality level for next buffered fragment). return -1 if next fragment not buffered yet.
set : trigger a quality level switch for the next fragment. this may flush the already buffered next fragment.
set to -1 for automatic level selection
#### ```hls.loadLevel```
get : return last loaded fragment quality level.
set : set quality level for next loaded fragment
set to -1 for automatic level selection
#### ```hls.firstLevel```
get : first level index (index of first level appearing in Manifest. it is usually defined as start level hint for player)
#### ```hls.startLevel```
get/set : start level index (level of first fragment that will be played back)
- if not overridden by the user: the first level appearing in the manifest will be used as the start level.
- if -1 : automatic start level selection, playback will start from level matching download bandwidth (determined from download of first segment)
default value is firstLevel
#### ```hls.autoLevelEnabled```
tell whether auto level selection is enabled or not
#### ```hls.autoLevelCapping```
get/set : capping/max level value that could be used by ABR Controller
default value is -1 (no level capping)
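a few illustrative statements combining these properties (the level indices are arbitrary examples):
```js
// list available quality levels
hls.levels.forEach(function(level, i) {
  console.log(i + ': ' + level.name + ' (' + level.bitrate + ' b/s)');
});
// force an immediate switch to the first level (flushes the buffer)
hls.currentLevel = 0;
// or only request a different quality for upcoming fragments
hls.nextLevel = 2;
// or return to automatic selection, capped to level index 3
hls.nextLevel = -1;
hls.autoLevelCapping = 3;
```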
## Network Loading Control API
by default, hls.js will automatically start loading quality level playlists, and fragments after Events.MANIFEST_PARSED event has been triggered (and video element has been attached).
however if ```config.autoStartLoad``` is set to ```false```, the following method needs to be called to manually start playlist and fragments loading:
#### ```hls.startLoad()```
start/restart playlist/fragment loading. this is only effective if MANIFEST_PARSED event has been triggered and video element has been attached to hls object.
## Runtime Events
hls.js fires a number of events, which can be registered as below:
```js
hls.on(Hls.Events.LEVEL_LOADED,function(event,data) {
var level_duration = data.details.totalduration;
});
```
the full list of events is available below (a small example using the loading stats follows the list):
- `Hls.Events.MEDIA_ATTACHING` - fired to attach Media to hls instance.
- data: { video , mediaSource }
- `Hls.Events.MEDIA_ATTACHED` - fired when Media has been successfully attached to hls instance
- data: { video , mediaSource }
- `Hls.Events.MEDIA_DETACHING` - fired before detaching Media from hls instance
- data: { }
- `Hls.Events.MEDIA_DETACHED` - fired when Media has been detached from hls instance
- data: { }
- `Hls.Events.MANIFEST_LOADING` - fired to signal that a manifest loading starts
- data: { url : manifestURL}
- `Hls.Events.MANIFEST_LOADED` - fired after manifest has been loaded
- data: { levels : [available quality levels] , url : manifestURL, stats : { trequest, tfirst, tload, mtime}}
- `Hls.Events.MANIFEST_PARSED` - fired after manifest has been parsed
- data: { levels : [available quality levels] , firstLevel : index of first quality level appearing in Manifest}
- `Hls.Events.LEVEL_LOADING` - fired when a level playlist loading starts
- data: { url : level URL, level : id of level being loaded}
- `Hls.Events.LEVEL_LOADED` - fired when a level playlist loading finishes
- data: { details : levelDetails object, levelId : id of loaded level, stats : { trequest, tfirst, tload, mtime} }
- `Hls.Events.LEVEL_UPDATED` - fired when a level's details have been updated based on previous details, after it has been loaded
- data: { details : levelDetails object, level : id of updated level }
- `Hls.Events.LEVEL_PTS_UPDATED` - fired when a level's PTS information has been updated after parsing a fragment
- data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }
- `Hls.Events.LEVEL_SWITCH` - fired when a level switch is requested
- data: { levelId : id of new level }
- `Hls.Events.KEY_LOADING` - fired when a decryption key loading starts
- data: { frag : fragment object}
- `Hls.Events.KEY_LOADED` - fired when a decryption key loading is completed
- data: { frag : fragment object}
- `Hls.Events.FRAG_LOADING` - fired when a fragment loading starts
- data: { frag : fragment object}
- `Hls.Events.FRAG_LOAD_PROGRESS` - fired when a fragment load is in progress
- data: { frag : fragment object, stats : progress event }
- `Hls.Events.FRAG_LOADED` - fired when a fragment loading is completed
- data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length}}
- `Hls.Events.FRAG_PARSING_INIT_SEGMENT` - fired when Init Segment has been extracted from fragment
- data: { moov : moov MP4 box, codecs : codecs found while parsing fragment}
- `Hls.Events.FRAG_PARSING_METADATA` - fired when parsing id3 is completed
- data: { samples : [ id3 pes - pts and dts timestamp are relative, values are in seconds]}
- `Hls.Events.FRAG_PARSING_DATA` - fired when moof/mdat have been extracted from fragment
- data: { moof : moof MP4 box, mdat : mdat MP4 box, startPTS : PTS of first sample, endPTS : PTS of last sample, startDTS : DTS of first sample, endDTS : DTS of last sample, type : stream type (audio or video), nb : number of samples}
- `Hls.Events.FRAG_PARSED` - fired when fragment parsing is completed
- data: undefined
- `Hls.Events.FRAG_BUFFERED` - fired when fragment remuxed MP4 boxes have all been appended into SourceBuffer
- data: { frag : fragment object, stats : { trequest, tfirst, tload, tparsed, tbuffered, length} }
- `Hls.Events.FRAG_CHANGED` - fired when fragment matching with current video position is changing
- data: { frag : fragment object }
- `Hls.Events.FPS_DROP` - triggered when FPS drop in last monitoring period is higher than given threshold
- data: {currentDropped : nb of dropped frames in last monitoring period, currentDecoded: nb of decoded frames in last monitoring period, totalDropped : total dropped frames on this video element}
- `Hls.Events.ERROR` - Identifier for an error event
- data: { type : error Type, details : error details, fatal : is error fatal or not, other error specific data}
- `Hls.Events.DESTROYING` - fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a video to the instance of hls.js to handle mid-rolls for example.
- data: { }
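as an illustration, the stats objects described above can be used to derive basic timing metrics (field names as documented for FRAG_LOADED):
```js
hls.on(Hls.Events.FRAG_LOADED, function(event, data) {
  var stats = data.stats;
  var latency = stats.tfirst - stats.trequest;        // time to first byte (ms)
  var loadTime = stats.tload - stats.trequest;        // total load time (ms)
  var kbps = Math.round(8 * stats.length / loadTime); // bytes over ms => kbit/s
  console.log('frag ' + data.frag.sn + ' @ level ' + data.frag.level +
              ' latency:' + latency + 'ms load:' + loadTime + 'ms ~' + kbps + 'kbit/s');
});
```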
## Errors
the full list of errors is described below:
- ```Hls.ErrorDetails.MANIFEST_LOAD_ERROR``` - raised when manifest loading fails because of a network error
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.MANIFEST_LOAD_ERROR```, fatal : ```true```,url : manifest URL, response : xhr response, loader : URL loader}
- ```Hls.ErrorDetails.MANIFEST_LOAD_TIMEOUT``` - raised when manifest loading fails because of a timeout
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.MANIFEST_LOAD_TIMEOUT```, fatal : ```true```,url : manifest URL, loader : URL loader}
- ```Hls.ErrorDetails.MANIFEST_PARSING_ERROR``` - raised when manifest parsing failed to find proper content
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.MANIFEST_PARSING_ERROR```, fatal : ```true```,url : manifest URL, reason : parsing error reason}
- ```Hls.ErrorDetails.LEVEL_LOAD_ERROR``` - raised when level loading fails because of a network error
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.LEVEL_LOAD_ERROR```, fatal : ```true```, url : level URL, response : xhr response, loader : URL loader}
- ```Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT``` - raised when level loading fails because of a timeout
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT```, fatal : ```true```, url : level URL, loader : URL loader}
- ```Hls.ErrorDetails.LEVEL_SWITCH_ERROR``` - raised when level switching fails
- data: { type : ```OTHER_ERROR```, details : ```Hls.ErrorDetails.LEVEL_SWITCH_ERROR```, fatal : ```false```, level : failed level index, reason : failure reason}
- ```Hls.ErrorDetails.FRAG_LOAD_ERROR``` - raised when fragment loading fails because of a network error
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.FRAG_LOAD_ERROR```, fatal : ```true/false```, frag : fragment object, response : xhr response}
- ```Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR``` - raised upon detection of same fragment being requested in loop
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR```, fatal : ```true/false```, frag : fragment object}
- ```Hls.ErrorDetails.FRAG_LOAD_TIMEOUT``` - raised when fragment loading fails because of a timeout
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.FRAG_LOAD_TIMEOUT```, fatal : ```true/false```, frag : fragment object}
- ```Hls.ErrorDetails.FRAG_PARSING_ERROR``` - raised when fragment parsing fails
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.FRAG_PARSING_ERROR```, fatal : ```true/false```, reason : failure reason}
- ```Hls.ErrorDetails.BUFFER_APPEND_ERROR``` - raised when exception is raised while calling buffer append
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.BUFFER_APPEND_ERROR```, fatal : ```true```, frag : fragment object}
- ```Hls.ErrorDetails.BUFFER_APPENDING_ERROR``` - raised when exception is raised during buffer appending
- data: { type : ```NETWORK_ERROR```, details : ```Hls.ErrorDetails.BUFFER_APPENDING_ERROR```, fatal : ```false```, frag : fragment object}
## Objects
### Level
a level object represents a given quality level.
it contains quality level related info, retrieved from manifest, such as
* level bitrate
* used codecs
* video width/height
* level name
* level URL
see sample Level object below:
```js
{
url: ['http://levelURL.com','http://levelURLfailover.com'],
bitrate: 246440,
name: "240",
codecs: "mp4a.40.5,avc1.42000d",
width: 320,
height: 136,
}
```
url is an array that might contain several items if failover/redundant streams are found in the manifest.
### Level details
the level details object contains level details retrieved after level playlist parsing; they are specified below:
* start sequence number
* end sequence number
* level total duration
* level fragment target duration
* array of fragments info
* is this level a live playlist or not ?
see sample object below, available after corresponding LEVEL_LOADED event has been fired:
```js
{
startSN: 0,
endSN: 50,
totalduration: 510,
targetduration: 10,
fragments: Array[51],
live: false
}
```
### Fragment
the Fragment object contains fragment related info, such as
* fragment URL
* fragment duration
* fragment sequence number
* fragment start offset
* level Id
see sample object below:
```js
{
duration: 10,
level : 3,
sn: 35,
start : 30,
url: 'http://fragURL.com'
}
```
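for instance, the FRAG_CHANGED event listed earlier delivers such an object, which can be used to log the fragment currently being played:
```js
hls.on(Hls.Events.FRAG_CHANGED, function(event, data) {
  var frag = data.frag;
  console.log('playing fragment ' + frag.sn + ' of level ' + frag.level +
              ' [' + frag.start + ',' + (frag.start + frag.duration) + ']');
});
```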

View file

@@ -0,0 +1,28 @@
Copyright (c) 2015 Dailymotion (http://www.dailymotion.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
src/remux/mp4-generator.js and src/demux/exp-golomb.js implementation in this project
are derived from the HLS library for video.js (https://github.com/videojs/videojs-contrib-hls)
That work is also covered by the Apache 2 License, following copyright:
Copyright (c) 2013-2015 Brightcove
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View file

@@ -0,0 +1,18 @@
{
"name": "hls.js",
"version": "0.3.11",
"description": "Media Source Extension - HLS library, by/for Dailymotion",
"homepage": "https://github.com/dailymotion/hls.js",
"authors": [
"Guillaume du Pontavice <guillaume.dupontavice@dailymotion.com>"
],
"main": "dist/hls.js",
"private": true,
"ignore": [
"**/.*",
"node_modules",
"bower_components",
"test",
"tests"
]
}

View file

@@ -0,0 +1,47 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title></title>
</head>
<body>
<script src="../dist/hls.js"></script>
<video id="video" controls></video>
<script>
/* get stream from query string */
function getParameterByName(name) {
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
results = regex.exec(location.search);
return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
}
var stream = getParameterByName('stream') || 'http://www.streambox.fr/playlists/x36xhzz/url_9/193039199_mp4_h264_aac_fhd_7.m3u8';
</script>
<script>
if(Hls.isSupported()) {
var video = document.getElementById('video');
var hls = new Hls();
hls.loadSource(stream);
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED,function() {
video.play();
});
}
</script>
<script>
var video = document.getElementById('video');
window.onload = function(){
var i=0;
var el = document.getElementById('update');
function foo(){
i++;
el.innerHTML = 'animation:' + i+',decoded:' + video.webkitDecodedFrameCount + ',dropped:' + video.webkitDroppedFrameCount;
window.requestAnimationFrame(foo);
}
foo();
};
</script>
<div id="update"></div>
</body>
</html>

View file

@@ -0,0 +1,574 @@
var eventLeftMargin = 180;
var eventRightMargin = 0;
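// resizes the canvas to fit the load events visible in [minTime,maxTime], draws the legend row, then one timing row per event via canvasDrawLoadEvent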
function canvasLoadEventUpdate(canvas, minTime, maxTime, events) {
var ctx = canvas.getContext('2d');
for (var i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = event.time, end = event.time + event.duration + event.latency;
if((start >= minTime && start <= maxTime)) {
y_offset+=20;
}
}
canvas.height = y_offset;
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;
//draw legend
var x_offset = 5, legend;
ctx.font = "12px Arial";
legend = "load event";
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset = eventLeftMargin+5;
legend = 'start - end';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
legend = '[latency';
ctx.fillStyle = "orange";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
legend = 'loading';
ctx.fillStyle = "green";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
legend = 'parsing';
ctx.fillStyle = "blue";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
legend = 'appending]';
ctx.fillStyle = "red";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
legend = 'size bitrate';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
for (i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = Math.round(event.time), end = Math.round(event.time + event.duration + event.latency);
if((start >= minTime && start <= maxTime)) {
canvasDrawLoadEvent(ctx,y_offset,event,minTime,maxTime);
y_offset+=20;
}
}
}
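// same principle as canvasLoadEventUpdate, but for video events, each row drawn via canvasDrawVideoEvent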
function canvasVideoEventUpdate(canvas, minTime, maxTime, events) {
var ctx = canvas.getContext('2d');
for (var i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = event.time, end = event.time;
if((start >= minTime && start <= maxTime)) {
y_offset+=20;
}
}
canvas.height = y_offset;
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;
//draw legend
var x_offset = 5, legend;
ctx.font = "12px Arial";
legend = 'video event';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset = eventLeftMargin+5;
legend = 'time';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,15);
x_offset += ctx.measureText(legend).width+5;
legend = '[duration]';
ctx.fillStyle = "blue";
ctx.fillText(legend,x_offset,15);
for (i =0, y_offset = 20; i < events.length; i++) {
var event = events[i], start = Math.round(event.time), end = Math.round(event.time);
if((start >= minTime && start <= maxTime)) {
canvasDrawVideoEvent(ctx,y_offset,event,minTime,maxTime);
y_offset+=20;
}
}
}
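// draws the zoomed play position/buffer chart over [minTime,maxTime]; position and buffer at minTime and focusTime are linearly interpolated between samples, and a vertical marker is drawn at focusTime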
function canvasBufferWindowUpdate(canvas, minTime, maxTime, focusTime, events) {
var ctx = canvas.getContext('2d'),
minTimeBuffer, minTimePos,focusTimeBuffer,focusTimePos,
bufferChartStart = eventLeftMargin,
bufferChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin;
ctx.clearRect (0,0,canvas.width, canvas.height);
if(events.length === 0) {
return;
}
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.globalAlpha = 1;
//draw legend
var x_offset = 5, legend;
var y_offset = 0;
ctx.font = "15px Arial";
var maxBuffer = 0, firstEventIdx = -1, focusEventIdx= -1, event;
for (var i =0 ; i < events.length; i++) {
event = events[i];
maxBuffer = Math.max(maxBuffer, event.buffer+event.pos);
if(firstEventIdx === -1 && event.time >= minTime) {
firstEventIdx = Math.max(0,i-1);
}
if(focusEventIdx === -1 && event.time >= focusTime) {
focusEventIdx = Math.max(0,i-1);
}
}
// compute position and buffer length at pos minTime using linear approximation
if((firstEventIdx+1) < events.length) {
minTimePos = events[firstEventIdx].pos + (minTime-events[firstEventIdx].time)*(events[firstEventIdx+1].pos-events[firstEventIdx].pos)/(events[firstEventIdx+1].time-events[firstEventIdx].time);
minTimeBuffer = minTimePos + events[firstEventIdx].buffer + (minTime-events[firstEventIdx].time)*(events[firstEventIdx+1].buffer-events[firstEventIdx].buffer)/(events[firstEventIdx+1].time-events[firstEventIdx].time);
} else {
minTimeBuffer = 0;
minTimePos = 0;
}
// compute position and buffer length at pos focusTime using linear approximation
if((focusEventIdx+1) < events.length) {
focusTimePos = events[focusEventIdx].pos + (focusTime-events[focusEventIdx].time)*(events[focusEventIdx+1].pos-events[focusEventIdx].pos)/(events[focusEventIdx+1].time-events[focusEventIdx].time);
focusTimeBuffer = events[focusEventIdx].buffer + (focusTime-events[focusEventIdx].time)*(events[focusEventIdx+1].buffer-events[focusEventIdx].buffer)/(events[focusEventIdx+1].time-events[focusEventIdx].time);
} else {
focusTimePos = 0;
focusTimeBuffer = 0;
}
maxBuffer*=1.1;
y_offset += 15;
legend = 'play pos/buffer zoomed';
ctx.fillStyle = "black";
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = '[' + minTime + ',' + maxTime + ']';
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = 'focus time:' + focusTime + ' ms';
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = 'focus position:' + Math.round(focusTimePos) + ' ms';
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = 'focus buffer:' + Math.round(focusTimeBuffer) + ' ms';
ctx.fillText(legend,x_offset,y_offset);
ctx.fillStyle = "blue";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
ctx.lineTo(bufferChartStart, ctx.canvas.height*(1 - minTimeBuffer/maxBuffer));
for (var i =firstEventIdx+1 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - (event.buffer+event.pos)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();
ctx.fillStyle = "brown";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
ctx.lineTo(bufferChartStart, ctx.canvas.height*(1 - minTimePos/maxBuffer));
for (var i =firstEventIdx+1 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - (event.pos)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();
ctx.fillStyle = "white";
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;
ctx.fillStyle = "black";
x_offset = bufferChartStart + (bufferChartWidth*(focusTime-minTime))/(maxTime-minTime);
ctx.moveTo(x_offset, ctx.canvas.height);
y_offset = ctx.canvas.height*(1 - (focusTimePos+focusTimeBuffer)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
ctx.stroke();
}
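// draws the overall play position/buffer area chart and greys out the regions outside [windowMinTime,windowMaxTime]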
function canvasBufferTimeRangeUpdate(canvas, minTime, maxTime, windowMinTime, windowMaxTime, events) {
var ctx = canvas.getContext('2d'),
bufferChartStart = eventLeftMargin,
bufferChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin,
x_offset = 0, y_offset = 0, legend,
event;
ctx.clearRect (0,0,canvas.width, canvas.height);
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;
x_offset = 5;
y_offset = 15;
legend = 'play pos/buffer';
ctx.fillStyle = "black";
ctx.font = "15px Arial";
ctx.fillText(legend,x_offset,y_offset);
if(events.length === 0) {
return;
}
var maxBuffer = 0;
for (var i =0 ; i < events.length; i++) {
maxBuffer = Math.max(maxBuffer, events[i].buffer + events[i].pos);
}
y_offset+=15;
legend = 'last pos:' + events[events.length-1].pos + ' ms';
ctx.fillText(legend,x_offset,y_offset);
y_offset+=15;
legend = 'last buffer:' + events[events.length-1].buffer + ' ms';
ctx.fillText(legend,x_offset,y_offset);
y_offset+=15;
legend = 'max buffer:' + maxBuffer + ' ms';
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = 'nb samples:' + events.length;
ctx.fillText(legend,x_offset,y_offset);
maxBuffer*=1.1;
ctx.fillStyle = "blue";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
for (var i =0 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - (event.buffer+event.pos)/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();
ctx.fillStyle = "brown";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
for (var i =0 ; i < events.length; i++) {
event = events[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - event.pos/maxBuffer);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(x_offset, canvas.height);
ctx.fill();
ctx.globalAlpha = 0.7;
ctx.fillStyle = "grey";
var x_start = bufferChartStart;
var x_w = bufferChartWidth*(windowMinTime-minTime)/(maxTime-minTime);
ctx.fillRect(x_start,0,x_w, canvas.height);
var x_start = bufferChartStart+bufferChartWidth*(windowMaxTime-minTime)/(maxTime-minTime);
var x_w = canvas.width-x_start-eventRightMargin;
ctx.fillRect(x_start,0,x_w, canvas.height);
ctx.globalAlpha = 1;
}
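// draws the measured fragment bandwidth (blue line) and the level switches (black step line) over time, with bitrate/level statistics in the left legend and grey overlays outside the zoom window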
function canvasBitrateEventUpdate(canvas, minTime, maxTime, windowMinTime, windowMaxTime, levelEvents, bitrateEvents) {
var ctx = canvas.getContext('2d'),
bufferChartStart = eventLeftMargin,
bufferChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin,
x_offset = 0, y_offset = 0, legend,
event, maxLevel, minLevel, sumLevel, maxBitrate, minBitrate, sumDuration;
ctx.clearRect (0,0,canvas.width, canvas.height);
if(levelEvents.length === 0) {
return;
}
maxBitrate = minBitrate = bitrateEvents[0].bitrate;
sumLevel = sumDuration = 0;
for (var i =0 ; i < bitrateEvents.length; i++) {
sumLevel += bitrateEvents[i].duration*bitrateEvents[i].level;
sumDuration += bitrateEvents[i].duration;
maxBitrate = Math.max(maxBitrate, bitrateEvents[i].bitrate);
minBitrate = Math.min(minBitrate, bitrateEvents[i].bitrate);
}
maxLevel = minLevel = levelEvents[0].id;
for (var i =0 ; i < levelEvents.length; i++) {
maxLevel = Math.max(maxLevel, levelEvents[i].id);
minLevel = Math.min(minLevel, levelEvents[i].id);
}
ctx.fillStyle = "green";
ctx.globalAlpha = 0.5;
ctx.fillRect(0,0,eventLeftMargin, canvas.height);
ctx.fillRect(canvas.width-eventRightMargin,0,eventRightMargin, canvas.height);
ctx.globalAlpha = 1;
x_offset = 5;
y_offset = 0;
ctx.fillStyle = "black";
ctx.font = "15px Arial";
y_offset+=15;
legend = 'last bitrate:' + (bitrateEvents[bitrateEvents.length-1].bitrate/1000).toFixed(2) + "Mb/s";
ctx.fillText(legend,x_offset,y_offset);
y_offset+=15;
legend = 'min bitrate:' + (minBitrate/1000).toFixed(2) + "Mb/s";
ctx.fillText(legend,x_offset,y_offset);
y_offset+=15;
legend = 'max bitrate:' + (maxBitrate/1000).toFixed(2) + "Mb/s";
ctx.fillText(legend,x_offset,y_offset);
y_offset+=15;
legend = 'min/last/max level:' + minLevel + '/' + levelEvents[levelEvents.length-1].id + '/' + maxLevel;
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = 'nb level switch:' + (levelEvents.length-1);
ctx.fillText(legend,x_offset,y_offset);
y_offset += 15;
legend = 'average level:' + (sumLevel/sumDuration).toFixed(2);
ctx.fillText(legend,x_offset,y_offset);
maxBitrate*=1.1;
ctx.strokeStyle = "blue";
ctx.beginPath();
ctx.moveTo(bufferChartStart, ctx.canvas.height);
for (var i =0 ; i < bitrateEvents.length; i++) {
event = bitrateEvents[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
y_offset = ctx.canvas.height*(1 - event.bitrate/maxBitrate);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(bufferChartStart+bufferChartWidth, y_offset);
ctx.stroke();
ctx.strokeStyle = "black";
ctx.beginPath();
x_offset = bufferChartStart;
y_offset = ctx.canvas.height;
ctx.moveTo(x_offset, y_offset);
for (var i =0 ; i < levelEvents.length; i++) {
event = levelEvents[i];
x_offset = bufferChartStart + (bufferChartWidth*(event.time-minTime))/(maxTime-minTime);
ctx.lineTo(x_offset,y_offset);
y_offset = ctx.canvas.height*(1 - event.bitrate/maxBitrate);
ctx.lineTo(x_offset,y_offset);
}
ctx.lineTo(bufferChartStart+bufferChartWidth, y_offset);
ctx.stroke();
ctx.globalAlpha = 0.7;
ctx.fillStyle = "grey";
var x_start = bufferChartStart;
var x_w = bufferChartWidth*(windowMinTime-minTime)/(maxTime-minTime);
ctx.fillRect(x_start,0,x_w, canvas.height);
var x_start = bufferChartStart+bufferChartWidth*(windowMaxTime-minTime)/(maxTime-minTime);
var x_w = canvas.width-x_start-eventRightMargin;
ctx.fillRect(x_start,0,x_w, canvas.height);
ctx.globalAlpha = 1;
}
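// draws a single load event row: event name in the left margin, start time label, latency (orange) / loading (green) / parsing (blue) / appending (red) bars, then end time, timing breakdown and size/bandwidth labels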
function canvasDrawLoadEvent(ctx,yoffset,event,minTime,maxTime) {
var legend,offset,x_start,x_w,
networkChartStart = eventLeftMargin,
networkChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin,
tend = Math.round(event.time + event.duration + event.latency);
//draw start
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = Math.round(event.time);
offset = ctx.measureText(legend).width+5;
x_start = networkChartStart-offset+networkChartWidth*(event.time-minTime)/(maxTime-minTime);
ctx.fillText(legend,x_start,yoffset+12);
//draw latency rectangle
ctx.fillStyle = "orange";
x_start = networkChartStart + networkChartWidth*(event.time-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.latency/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);
//draw download rectangle
ctx.fillStyle = "green";
x_start = networkChartStart + networkChartWidth*(event.time+event.latency-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.load/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);
if(event.parsing) {
//draw parsing rectangle
ctx.fillStyle = "blue";
x_start = networkChartStart + networkChartWidth*(event.time+event.latency+event.load-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.parsing/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);
if(event.buffer) {
//draw buffering rectangle
ctx.fillStyle = "red";
x_start = networkChartStart + networkChartWidth*(event.time+event.latency+event.load+event.parsing-minTime)/(maxTime-minTime);
x_w = networkChartWidth*event.buffer/(maxTime-minTime);
ctx.fillRect(x_start,yoffset,x_w, 15);
}
}
//draw end time
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = tend;
x_start += x_w + 5;
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
legend = "[" + Math.round(event.latency);
ctx.fillStyle = "orange";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
legend = Math.round(event.load);
if(!event.parsing) legend += "]";
ctx.fillStyle = "green";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
if(event.parsing) {
legend = Math.round(event.parsing);
if(!event.buffer) legend +="]";
ctx.fillStyle = "blue";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
if(event.buffer) {
legend = Math.round(event.buffer) + "]";
ctx.fillStyle = "red";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
}
}
if(event.size) {
if(event.size > 1000*1000) {
legend = (event.size/1000000).toFixed(1) + 'MB';
} else {
legend = Math.round(event.size/1000) + 'kB';
}
ctx.fillStyle = "black";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
}
if(event.bw) {
if(event.bw > 1000) {
legend = (event.bw/1000).toFixed(1) + 'Mbps';
} else {
legend = event.bw + ' kbps';
}
ctx.fillStyle = "black";
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
}
// draw event name
ctx.fillStyle = "black";
ctx.font = "15px Arial";
legend = event.type;
if(event.id2 !== undefined) {
legend += ' ' + event.id2;
}
if(event.id !== undefined) {
if(event.type === 'fragment') {
legend += ' @';
}
legend += ' ' + event.id;
}
if(event.start !== undefined) {
legend += ' [' + event.start + ',' + event.end + ']';
}
ctx.fillText(legend,5,yoffset+15);
}
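// draws a single video event row: event name, start time label, a bar sized to the event duration (1 px if none) and, when a duration exists, the end time and duration labels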
function canvasDrawVideoEvent(ctx,yoffset,event,minTime,maxTime) {
var legend,offset,x_start,x_w,
networkChartStart = eventLeftMargin,
networkChartWidth = ctx.canvas.width-eventLeftMargin-eventRightMargin;
// draw event name
ctx.fillStyle = "black";
ctx.font = "15px Arial";
legend = event.type;
if (event.name) legend+= ':' + event.name;
ctx.fillText(legend,5,yoffset+15);
//draw start time
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = Math.round(event.time);
offset = ctx.measureText(legend).width+5;
x_start = networkChartStart-offset+networkChartWidth*(event.time-minTime)/(maxTime-minTime);
ctx.fillText(legend,x_start,yoffset+12);
//draw event rectangle
x_start = networkChartStart + networkChartWidth*(event.time-minTime)/(maxTime-minTime);
if(event.duration) {
x_w = networkChartWidth*event.duration/(maxTime-minTime);
} else {
x_w = 1;
}
ctx.fillRect(x_start,yoffset,x_w, 15);
if(event.duration) {
//draw end time
ctx.fillStyle = "black";
ctx.font = "12px Arial";
legend = Math.round(event.time+event.duration);
x_start += x_w + 5;
ctx.fillText(legend,x_start,yoffset+12);
x_start += ctx.measureText(legend).width+5;
legend = "[" + Math.round(event.duration) + "]";
ctx.fillStyle = "blue";
ctx.fillText(legend,x_start,yoffset+12);
}
}

View file

@@ -0,0 +1,914 @@
<!DOCTYPE html>
<head>
<style>
header {
text-align: center;
}
#controls
{
width: 70%;
min-width: 615px;
padding: 3px;
margin: 0px auto 20px auto;
border: 1px solid #606060;
overflow: hidden;
}
.innerControls
{
display:block;
float: left;
width: 99%;
margin: 3px;
padding-left: 3px;
font-size: 8pt
}
.videoCentered
{
width: 720px;
margin-left: auto;
margin-right: auto;
display: block
}
.center
{
width: 70%;
min-width: 615px;
overflow: hidden;
margin-left: auto;
margin-right: auto;
display: block
}
#customButtons input { width: 25%; display : inline-block; text-align: center; font-size: 8pt;}
#toggleButtons button { width: 24%; display : inline-block; text-align: center; font-size: 8pt; background-color: #A0A0A0 }
</style>
<title>hls.js demo</title>
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-32x32.png" sizes="32x32" />
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-16x16.png" sizes="16x16" />
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap-theme.min.css">
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
</head>
<body>
<div class="header-container">
<header class="wrapper clearfix">
<h1 class="title"><a href="https://github.com/dailymotion/hls.js">hls.js</a> demo page</h1>
</header>
</div>
<div class="main-container">
<header>
<p>
test with your HLS streams below in Chrome, Firefox, IE11 or Safari !
<br>Advanced controls are also available at the bottom of this page.
</p>
</header>
<div id="controls">
<div id="customButtons"></div>
<select id="streamSelect" class="innerControls"><option value="" selected>(Enter custom URL below)</option></select>
<input id="streamURL" class="innerControls" type=text value=""/>
<label class="innerControls"><input id="enableStream" type=checkbox checked/> Enable Streaming</label>
<label class="innerControls"><input id="autoRecoverError" type=checkbox checked/> Auto-Recover Media Error</label>
<div id="StreamPermalink" class="innerControls"></div>
<div>
<select id="videoSize" style="float:left">
<option value="240">player size: tiny (240p)</option>
<option value="384">player size: small (384p)</option>
<option value="480">player size: medium (480p)</option>
<option value="720" selected>player size: large (720p)</option>
<option value="1080">player size: huge (1080p)</option>
</select>
<span id="currentResolution" style="float:right;font-size: 8pt;">-</span>
</div>
</div>
<video id="video" controls autoplay class="videoCentered"></video><br>
<canvas id="buffered_c" height="15" class="videoCentered" onclick="buffered_seek(event);"></canvas><br><br>
<pre id="HlsStatus" class="center"></pre>
<div class="center" id="toggleButtons">
<button type="button" class="btn btn-sm" onclick="$('#PlaybackControl').toggle();">toggle playback controls</button>
<button type="button" class="btn btn-sm" onclick="$('#QualityLevelControl').toggle();">toggle Quality Level controls</button>
<button type="button" class="btn btn-sm" onclick="$('#MetricsDisplay').toggle();toggleMetricsDisplay();">toggle Metrics Display</button>
<button type="button" class="btn btn-sm" onclick="$('#StatsDisplay').toggle();">toggle Stats Display</button>
</div>
<div id='PlaybackControl'>
<h4> Playback Control </h4>
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].play()">play</button>
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].pause()">pause</button>
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].currentTime+=10">currentTime+=10</button>
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].currentTime-=10">currentTime-=10</button>
<button type="button" class="btn btn-sm btn-info" onclick="$('#video')[0].currentTime=$('#seek_pos').val()">seek to </button>
<input type="text" id='seek_pos' size="8" onkeydown="if(window.event.keyCode=='13'){$('#video')[0].currentTime=$('#seek_pos').val();}"><br><br>
<button type="button" class="btn btn-xs btn-warning" onclick="hls.attachMedia($('#video')[0])">attach Video</button>
<button type="button" class="btn btn-xs btn-warning" onclick="hls.detachVideo()">detach Video</button><br>
<button type="button" class="btn btn-xs btn-warning" onclick="hls.startLoad()">recover Network Error</button>
<button type="button" class="btn btn-xs btn-warning" onclick="hls.recoverMediaError()">recover Media Error</button><br>
</div>
<div id='QualityLevelControl'>
<h4> Quality Control </h4>
<table>
<tr>
<td>current level</td>
<td width=10px></td>
<td> <div id="currentLevelControl" style="display: inline;"></div> </td>
</tr>
<tr>
<td> <p>next level</p></td>
<td> </td>
<td> <div id="nextLevelControl" style="display: inline;"></div> </td>
</tr>
<tr>
<td> <p>load level</p></td>
<td> </td>
<td> <div id="loadLevelControl" style="display: inline;"></div> </td>
</tr>
<tr>
<td> <p>cap level</p></td>
<td> </td>
<td> <div id="levelCappingControl" style="display: inline;"></div> </td>
</tr>
</table>
</div>
<div id='MetricsDisplay'>
<h4> Real Time Metrics Display </h4>
<div id="metricsButton">
<button type="button" class="btn btn-xs btn-info" onclick="$('#metricsButtonWindow').toggle();$('#metricsButtonFixed').toggle();windowSliding=!windowSliding; refreshCanvas()">toggle sliding/fixed window</button><br>
<div id="metricsButtonWindow">
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(0)">window ALL</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(2000)">2s</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(5000)">5s</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(10000)">10s</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(20000)">20s</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(30000)">30s</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(60000)">60s</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSetSliding(120000)">120s</button><br>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeZoomIn()">Window Zoom In</button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeZoomOut()">Window Zoom Out</button><br>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSlideLeft()"> <<< Window Slide </button>
<button type="button" class="btn btn-xs btn-info" onclick="timeRangeSlideRight()">Window Slide >>> </button><br>
</div>
<div id="metricsButtonFixed">
<button type="button" class="btn btn-xs btn-info" onclick="windowStart=$('#windowStart').val()">fixed window start(ms)</button>
<input type="text" id='windowStart' defaultValue="0" size="8" onkeydown="if(window.event.keyCode=='13'){windowStart=$('#windowStart').val();}">
<button type="button" class="btn btn-xs btn-info" onclick="windowEnd=$('#windowEnd').val()">fixed window end(ms)</button>
<input type="text" id='windowEnd' defaultValue="10000" size="8" onkeydown="if(window.event.keyCode=='13'){windowEnd=$('#windowEnd').val();}"><br>
</div>
<button type="button" class="btn btn-xs btn-success" onclick="goToMetrics()" style="font-size:18px">metrics link</button>
<button type="button" class="btn btn-xs btn-success" onclick="goToMetricsPermaLink()" style="font-size:18px">metrics permalink</button>
<button type="button" class="btn btn-xs btn-success" onclick="copyMetricsToClipBoard()" style="font-size:18px">copy metrics to clipboard</button>
<canvas id="bufferTimerange_c" width="640" height="100" style="border:1px solid #000000" onmousedown="timeRangeCanvasonMouseDown(event)" onmousemove="timeRangeCanvasonMouseMove(event)" onmouseup="timeRangeCanvasonMouseUp(event)" onmouseout="timeRangeCanvasonMouseOut(event);"></canvas>
<canvas id="bitrateTimerange_c" width="640" height="100" style="border:1px solid #000000;"></canvas>
<canvas id="bufferWindow_c" width="640" height="100" style="border:1px solid #000000" onmousemove="windowCanvasonMouseMove(event);"></canvas>
<canvas id="videoEvent_c" width="640" height="15" style="border:1px solid #000000;"></canvas>
<canvas id="loadEvent_c" width="640" height="15" style="border:1px solid #000000;"></canvas><br>
</div>
</div>
<div id='StatsDisplay'>
<h4> Stats Display </h4>
<pre id='HlsStats'></pre>
<div id="buffered_log"></div>
</div>
</div>
<br><br>
<script src="../../streams.js"></script>
<!-- live-reload script -->
<script src="//localhost:8001"></script>
<script src="../dist/hls.js"></script>
<script src="canvas.js"></script>
<script src="metrics.js"></script>
<script src="jsonpack.js"></script>
<script>
$(document).ready(function() {
$('#streamSelect').change(function() { $('#streamURL').val($('#streamSelect').val());loadStream($('#streamURL').val());});
$('#streamURL').change(function() { loadStream($('#streamURL').val());});
$('#videoSize').change(function() { $('#video').width($('#videoSize').val()); $('#buffered_c').width($('#videoSize').val()); });
$("#PlaybackControl").hide();
$("#QualityLevelControl").hide();
$("#MetricsDisplay").hide();
$("#StatsDisplay").hide();
$('#metricsButtonWindow').toggle(windowSliding);
$('#metricsButtonFixed').toggle(!windowSliding);
$('#enableStream').click(function() { enableStreaming = this.checked; loadStream($('#streamURL').val());});
$('#autoRecoverError').prop( "checked", true );
$('#autoRecoverError').click(function() { autoRecoverError = this.checked; });
});
'use strict';
var hls,events, stats, enableStreaming = true, autoRecoverError = true;
var video = $('#video')[0];
video.volume = 0.05;
var manifest = decodeURIComponent(location.search.split('src=')[1]);
if(manifest === 'undefined') {
manifest = 'http://www.streambox.fr/playlists/x36xhzz/x36xhzz.m3u8';
}
loadStream(manifest);
function loadStream(url) {
hideCanvas();
if(Hls.isSupported()) {
if(hls) {
hls.destroy();
if(hls.bufferTimer) {
clearInterval(hls.bufferTimer);
hls.bufferTimer = undefined;
}
hls = null;
}
$('#streamURL').val(url);
var hlsLink = document.URL.split('?')[0] + '?src=' + encodeURIComponent(url);
var description = 'permalink: ' + "<a href=\"" + hlsLink + "\">" + hlsLink + "</a>";
$("#StreamPermalink").html(description);
if(!enableStreaming) {
$("#HlsStatus").text("Streaming disabled");
return;
}
$("#HlsStatus").text('loading ' + url);
events = { url : url, t0 : performance.now(), load : [], buffer : [], video : [], level : [], bitrate : []};
recoverDecodingErrorDate = recoverSwapAudioCodecDate = null;
hls = new Hls({debug:true});
$("#HlsStatus").text('loading manifest and attaching video element...');
hls.loadSource(url);
hls.attachMedia(video);
hls.on(Hls.Events.MEDIA_ATTACHED,function() {
$("#HlsStatus").text('MediaSource attached...');
bufferingIdx = -1;
events.video.push({time : performance.now() - events.t0, type : "Media attached"});
});
hls.on(Hls.Events.MEDIA_DETACHED,function() {
$("#HlsStatus").text('MediaSource detached...');
bufferingIdx = -1;
events.video.push({time : performance.now() - events.t0, type : "Media detached"});
});
hls.on(Hls.Events.FRAG_PARSING_INIT_SEGMENT,function(event,data) {
showCanvas();
var event = {time : performance.now() - events.t0, type : "init segment"};
events.video.push(event);
});
hls.on(Hls.Events.FRAG_PARSING_METADATA, function(event, data) {
console.log("Id3 samples ", data.samples);
});
hls.on(Hls.Events.LEVEL_SWITCH,function(event,data) {
events.level.push({time : performance.now() - events.t0, id : data.level, bitrate : Math.round(hls.levels[data.level].bitrate/1000)});
updateLevelInfo();
});
hls.on(Hls.Events.MANIFEST_PARSED,function(event,data) {
var event = {
type : "manifest",
name : "",
start : 0,
end : data.levels.length,
time : data.stats.trequest - events.t0,
latency : data.stats.tfirst - data.stats.trequest,
load : data.stats.tload - data.stats.tfirst,
duration : data.stats.tload - data.stats.tfirst,
};
events.load.push(event);
refreshCanvas();
});
hls.on(Hls.Events.MANIFEST_PARSED,function(event,data) {
$("#HlsStatus").text("manifest successfully loaded," + hls.levels.length + " levels found");
stats = {levelNb: data.levels.length};
updateLevelInfo();
});
hls.on(Hls.Events.LEVEL_LOADED,function(event,data) {
events.isLive = data.details.live;
var event = {
type : "level",
id : data.level,
start : data.details.startSN,
end : data.details.endSN,
time : data.stats.trequest - events.t0,
latency : data.stats.tfirst - data.stats.trequest,
load : data.stats.tload - data.stats.tfirst,
parsing : data.stats.tparsed - data.stats.tload,
duration : data.stats.tload - data.stats.tfirst
};
events.load.push(event);
refreshCanvas();
});
hls.on(Hls.Events.FRAG_BUFFERED,function(event,data) {
var event = {
type : "fragment",
id : data.frag.level,
id2 : data.frag.sn,
time : data.stats.trequest - events.t0,
latency : data.stats.tfirst - data.stats.trequest,
load : data.stats.tload - data.stats.tfirst,
parsing : data.stats.tparsed - data.stats.tload,
buffer : data.stats.tbuffered - data.stats.tparsed,
duration : data.stats.tbuffered - data.stats.tfirst,
bw : Math.round(8*data.stats.length/(data.stats.tbuffered - data.stats.tfirst)),
size : data.stats.length
};
events.load.push(event);
events.bitrate.push({time : performance.now() - events.t0, bitrate : event.bw , duration : data.frag.duration, level : event.id});
if(hls.bufferTimer === undefined) {
events.buffer.push({ time : 0, buffer : 0, pos: 0});
hls.bufferTimer = window.setInterval(checkBuffer, 100);
}
refreshCanvas();
updateLevelInfo();
var latency = data.stats.tfirst - data.stats.trequest, process = data.stats.tbuffered - data.stats.trequest, bitrate = Math.round(8 * data.stats.length / (data.stats.tbuffered - data.stats.tfirst));
if (stats.fragBuffered) {
stats.fragMinLatency = Math.min(stats.fragMinLatency, latency);
stats.fragMaxLatency = Math.max(stats.fragMaxLatency, latency);
stats.fragMinProcess = Math.min(stats.fragMinProcess, process);
stats.fragMaxProcess = Math.max(stats.fragMaxProcess, process);
stats.fragMinKbps = Math.min(stats.fragMinKbps, bitrate);
stats.fragMaxKbps = Math.max(stats.fragMaxKbps, bitrate);
stats.autoLevelCappingMin = Math.min(stats.autoLevelCappingMin, hls.autoLevelCapping);
stats.autoLevelCappingMax = Math.max(stats.autoLevelCappingMax, hls.autoLevelCapping);
stats.fragBuffered++;
} else {
stats.fragMinLatency = stats.fragMaxLatency = latency;
stats.fragMinProcess = stats.fragMaxProcess = process;
stats.fragMinKbps = stats.fragMaxKbps = bitrate;
stats.fragBuffered = 1;
stats.fragBufferedBytes = 0;
stats.autoLevelCappingMin = stats.autoLevelCappingMax = hls.autoLevelCapping;
this.sumLatency = 0;
this.sumKbps = 0;
this.sumProcess = 0;
}
stats.fraglastLatency = latency;
this.sumLatency += latency;
stats.fragAvgLatency = Math.round(this.sumLatency / stats.fragBuffered);
stats.fragLastProcess = process;
this.sumProcess += process;
stats.fragAvgProcess = Math.round(this.sumProcess / stats.fragBuffered);
stats.fragLastKbps = bitrate;
this.sumKbps += bitrate;
stats.fragAvgKbps = Math.round(this.sumKbps / stats.fragBuffered);
stats.fragBufferedBytes += data.stats.length;
stats.autoLevelCappingLast = hls.autoLevelCapping;
});
hls.on(Hls.Events.FRAG_CHANGED,function(event,data) {
var event = {time : performance.now() - events.t0, type : 'frag changed', name : data.frag.sn + ' @ ' + data.frag.level };
events.video.push(event);
refreshCanvas();
updateLevelInfo();
var level = data.frag.level, autoLevel = data.frag.autoLevel;
if (stats.levelStart === undefined) {
stats.levelStart = level;
}
if (autoLevel) {
if (stats.fragChangedAuto) {
stats.autoLevelMin = Math.min(stats.autoLevelMin, level);
stats.autoLevelMax = Math.max(stats.autoLevelMax, level);
stats.fragChangedAuto++;
if (this.levelLastAuto && level !== stats.autoLevelLast) {
stats.autoLevelSwitch++;
}
} else {
stats.autoLevelMin = stats.autoLevelMax = level;
stats.autoLevelSwitch = 0;
stats.fragChangedAuto = 1;
this.sumAutoLevel = 0;
}
this.sumAutoLevel += level;
stats.autoLevelAvg = Math.round(1000 * this.sumAutoLevel / stats.fragChangedAuto) / 1000;
stats.autoLevelLast = level;
} else {
if (stats.fragChangedManual) {
stats.manualLevelMin = Math.min(stats.manualLevelMin, level);
stats.manualLevelMax = Math.max(stats.manualLevelMax, level);
stats.fragChangedManual++;
if (!this.levelLastAuto && level !== stats.manualLevelLast) {
stats.manualLevelSwitch++;
}
} else {
stats.manualLevelMin = stats.manualLevelMax = level;
stats.manualLevelSwitch = 0;
stats.fragChangedManual = 1;
}
stats.manualLevelLast = level;
}
this.levelLastAuto = autoLevel;
});
hls.on(Hls.Events.FRAG_LOAD_EMERGENCY_ABORTED,function(event,data) {
if (stats) {
if (stats.fragLoadEmergencyAborted === undefined) {
stats.fragLoadEmergencyAborted = 1;
} else {
stats.fragLoadEmergencyAborted++;
}
}
});
hls.on(Hls.Events.ERROR, function(event,data) {
switch(data.details) {
case Hls.ErrorDetails.MANIFEST_LOAD_ERROR:
try {
$("#HlsStatus").html("cannot Load <a href=\"" + data.url + "\">" + url + "</a><br>HTTP response code:" + data.response.status + "<br>" + data.response.statusText);
if(data.response.status === 0) {
$("#HlsStatus").append("this might be a CORS issue, consider installing <a href=\"https://chrome.google.com/webstore/detail/allow-control-allow-origi/nlfbmbojpeacfghkpbjhddihlkkiljbi\">Allow-Control-Allow-Origin</a> Chrome Extension");
}
} catch(err) {
$("#HlsStatus").html("cannot Load <a href=\"" + data.url + "\">" + url + "</a><br>Reason:Load " + data.event.type);
}
break;
case Hls.ErrorDetails.LEVEL_LOAD_ERROR:
$("#HlsStatus").text("error while trying to load level playlist");
break;
case Hls.ErrorDetails.LEVEL_SWITCH_ERROR:
$("#HlsStatus").text("error while trying to switch to level " + data.level);
break;
case Hls.ErrorDetails.FRAG_LOAD_ERROR:
$("#HlsStatus").text("error while trying to load fragment " + data.frag.url);
break;
case Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT:
$("#HlsStatus").text("timeout while trying to load level playlist");
break;
case Hls.ErrorDetails.FRAG_DECRYPT_ERROR:
$("#HlsStatus").text("Decrypting Error:" + data.reason);
break;
case Hls.ErrorDetails.FRAG_PARSING_ERROR:
$("#HlsStatus").text("Parsing Error:" + data.reason);
break;
case Hls.ErrorDetails.BUFFER_APPEND_ERROR:
$("#HlsStatus").text("Buffer Append Error");
break;
case Hls.ErrorDetails.BUFFER_APPENDING_ERROR:
$("#HlsStatus").text("Buffer Appending Error");
break;
case Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR:
$("#HlsStatus").text("Frag Loop Loading Error");
break;
default:
break;
}
if(data.fatal) {
switch(data.type) {
case Hls.ErrorTypes.MEDIA_ERROR:
handleMediaError();
break;
case Hls.ErrorTypes.NETWORK_ERROR:
$("#HlsStatus").append(",network error ...");
break;
default:
$("#HlsStatus").append(", unrecoverable error");
hls.destroy();
break;
}
}
if(!stats) stats = {};
// track all errors independently
if (stats[data.details] === undefined) {
stats[data.details] = 1;
} else {
stats[data.details] += 1;
}
// track fatal error
if (data.fatal) {
if (stats.fatalError === undefined) {
stats.fatalError = 1;
} else {
stats.fatalError += 1;
}
}
$("#HlsStats").text(JSON.stringify(sortObject(stats),null,"\t"));
});
hls.on(Hls.Events.FPS_DROP,function(event,data) {
var evt = {time : performance.now() - events.t0, type : "frame drop", name : data.currentDropped + "/" + data.currentDecoded};
events.video.push(evt);
if (stats) {
if (stats.fpsDropEvent === undefined) {
stats.fpsDropEvent = 1;
} else {
stats.fpsDropEvent++;
}
stats.fpsTotalDroppedFrames = data.totalDroppedFrames;
}
});
video.addEventListener('resize', handleVideoEvent);
video.addEventListener('seeking', handleVideoEvent);
video.addEventListener('seeked', handleVideoEvent);
video.addEventListener('pause', handleVideoEvent);
video.addEventListener('play', handleVideoEvent);
video.addEventListener('canplay', handleVideoEvent);
video.addEventListener('canplaythrough', handleVideoEvent);
video.addEventListener('ended', handleVideoEvent);
video.addEventListener('playing', handleVideoEvent);
video.addEventListener('error', handleVideoEvent);
video.addEventListener('loadedmetadata', handleVideoEvent);
video.addEventListener('loadeddata', handleVideoEvent);
video.addEventListener('durationchange', handleVideoEvent);
} else {
if(navigator.userAgent.toLowerCase().indexOf('firefox') !== -1) {
$("#HlsStatus").text("you are using Firefox, it looks like MediaSource is not enabled,<br>please ensure the following keys are set appropriately in <b>about:config</b><br>media.mediasource.enabled=true<br>media.mediasource.mp4.enabled=true<br><b>media.mediasource.whitelist=false</b>");
} else {
$("#HlsStatus").text("your Browser does not support MediaSourceExtension / MP4 mediasource");
}
}
}
var lastSeekingIdx, lastStartPosition,lastDuration;
function handleVideoEvent(evt) {
var data = '';
switch(evt.type) {
case 'durationchange':
if(evt.target.duration - lastDuration <= 0.5) {
// some browsers reports several duration change events with almost the same value ... avoid spamming video events
return;
}
lastDuration = evt.target.duration;
data = Math.round(evt.target.duration*1000);
break;
case 'resize':
data = evt.target.videoWidth + '/' + evt.target.videoHeight;
break;
case 'loadedmetadata':
// data = 'duration:' + evt.target.duration + '/videoWidth:' + evt.target.videoWidth + '/videoHeight:' + evt.target.videoHeight;
// break;
case 'loadeddata':
case 'canplay':
case 'canplaythrough':
case 'ended':
case 'seeking':
case 'seeked':
case 'play':
case 'playing':
lastStartPosition = evt.target.currentTime;
case 'pause':
case 'waiting':
case 'stalled':
case 'error':
data = Math.round(evt.target.currentTime*1000);
if(evt.type === 'error') {
var errorTxt,mediaError=evt.currentTarget.error;
switch(mediaError.code) {
case mediaError.MEDIA_ERR_ABORTED:
errorTxt = "You aborted the video playback";
break;
case mediaError.MEDIA_ERR_DECODE:
errorTxt = "The video playback was aborted due to a corruption problem or because the video used features your browser did not support";
handleMediaError();
break;
case mediaError.MEDIA_ERR_NETWORK:
errorTxt = "A network error caused the video download to fail part-way";
break;
case mediaError.MEDIA_ERR_SRC_NOT_SUPPORTED:
errorTxt = "The video could not be loaded, either because the server or network failed or because the format is not supported";
break;
}
$("#HlsStatus").text(errorTxt);
console.error(errorTxt);
}
break;
// case 'progress':
// data = 'currentTime:' + evt.target.currentTime + ',bufferRange:[' + this.video.buffered.start(0) + ',' + this.video.buffered.end(0) + ']';
// break;
default:
break;
}
var event = {time : performance.now() - events.t0, type : evt.type, name : data};
events.video.push(event);
if(evt.type === 'seeking') {
lastSeekingIdx = events.video.length-1;
}
if(evt.type === 'seeked') {
events.video[lastSeekingIdx].duration = event.time - events.video[lastSeekingIdx].time;
}
}
var recoverDecodingErrorDate,recoverSwapAudioCodecDate;
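// try to recover from a fatal media error: first via hls.recoverMediaError(),
// then, if that already failed within the last 3 seconds, by swapping the audio codec before retrying.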
function handleMediaError() {
if(autoRecoverError) {
var now = performance.now();
if(!recoverDecodingErrorDate || (now - recoverDecodingErrorDate) > 3000) {
recoverDecodingErrorDate = performance.now();
$("#HlsStatus").append(",try to recover media Error ...");
hls.recoverMediaError();
} else {
if(!recoverSwapAudioCodecDate || (now - recoverSwapAudioCodecDate) > 3000) {
recoverSwapAudioCodecDate = performance.now();
$("#HlsStatus").append(",try to swap Audio Codec and recover media Error ...");
hls.swapAudioCodec();
hls.recoverMediaError();
} else {
$("#HlsStatus").append(",cannot recover, last media error recovery failed ...");
}
}
}
}
function timeRangesToString(r) {
var log = "";
for (var i=0; i<r.length; i++) {
log += "[" + r.start(i) + "," + r.end(i) + "]";
}
return log;
}
var bufferingIdx = -1;
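// polled every 100ms once the first fragment is buffered: redraws the buffered-ranges canvas,
// detects buffering stalls and records buffer/position samples into the events object.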
function checkBuffer() {
var v = $('#video')[0];
var canvas = $('#buffered_c')[0];
var ctx = canvas.getContext('2d');
var r = v.buffered;
var bufferingDuration;
ctx.fillStyle = "black";
ctx.fillRect(0,0,canvas.width,canvas.height);
ctx.fillStyle = "gray";
if (r) {
if(!canvas.width || canvas.width !== v.clientWidth) {
canvas.width = v.clientWidth;
}
var pos = v.currentTime,bufferLen;
for (var i=0, bufferLen=0; i<r.length; i++) {
var start = r.start(i)/v.duration * canvas.width;
var end = r.end(i)/v.duration * canvas.width;
ctx.fillRect(start, 3, Math.max(2, end-start), 10);
if(pos >= r.start(i) && pos < r.end(i)) {
// play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
bufferLen = r.end(i) - pos;
}
}
// check if we are in buffering / or playback ended state
if(bufferLen <= 0.1 && v.paused === false && (pos-lastStartPosition) > 0.5) {
// don't create buffering event if we are at the end of the playlist, don't report ended for live playlist
if(lastDuration -pos <= 0.5 && events.isLive === false) {
} else {
// we are not at the end of the playlist ... real buffering
if(bufferingIdx !== -1) {
bufferingDuration = performance.now() - events.t0 - events.video[bufferingIdx].time;
events.video[bufferingIdx].duration = bufferingDuration;
events.video[bufferingIdx].name = bufferingDuration;
} else {
events.video.push({ type : 'buffering' , time : performance.now() - events.t0 });
// we are in buffering state
bufferingIdx = events.video.length-1;
}
}
}
if(bufferLen > 0.1 && bufferingIdx !=-1) {
bufferingDuration = performance.now() - events.t0 - events.video[bufferingIdx].time;
events.video[bufferingIdx].duration = bufferingDuration;
events.video[bufferingIdx].name = bufferingDuration;
// we are out of buffering state
bufferingIdx = -1;
}
// update buffer/position for current Time
var event = { time : performance.now() - events.t0, buffer : Math.round(bufferLen*1000), pos: Math.round(pos*1000)};
var bufEvents = events.buffer, bufEventLen = bufEvents.length;
if(bufEventLen > 1) {
var event0 = bufEvents[bufEventLen-2],event1 = bufEvents[bufEventLen-1];
var slopeBuf0 = (event0.buffer - event1.buffer)/(event0.time-event1.time);
var slopeBuf1 = (event1.buffer - event.buffer)/(event1.time-event.time);
var slopePos0 = (event0.pos - event1.pos)/(event0.time-event1.time);
var slopePos1 = (event1.pos - event.pos)/(event1.time-event.time);
// compute slopes. if less than 30% difference, remove event1
if((slopeBuf0 === slopeBuf1 || Math.abs(slopeBuf0/slopeBuf1 -1) <= 0.3) &&
(slopePos0 === slopePos1 || Math.abs(slopePos0/slopePos1 -1) <= 0.3))
{
bufEvents.pop();
}
}
events.buffer.push(event);
refreshCanvas();
var decodedFrames, droppedFrames;
if(navigator.userAgent.toLowerCase().indexOf('firefox') !== -1) {
decodedFrames = v.mozDecodedFrames;
droppedFrames = v.mozParsedFrames-v.mozPresentedFrames;
} else {
decodedFrames = v.webkitDecodedFrameCount;
droppedFrames = v.webkitDroppedFrameCount;
}
var log = "Duration:"
+ v.duration + "<br>"
+ "Buffered:"
+ timeRangesToString(v.buffered) + "<br>"
+ "Seekable:"
+ timeRangesToString(v.seekable) + "<br>"
+ "Played:"
+ timeRangesToString(v.played) + "<br>"
+ "Decoded Frames:"
+ decodedFrames + "<br>"
+ "Dropped Frames:"
+ droppedFrames + "<br>";
$("#buffered_log").html(log);
$("#HlsStats").text(JSON.stringify(sortObject(stats),null,"\t"));
ctx.fillStyle = "blue";
var x = v.currentTime / v.duration * canvas.width;
ctx.fillRect(x, 0, 2, 15);
}
}
function sortObject(obj) {
if(typeof obj !== 'object')
return obj
var temp = {};
var keys = [];
for(var key in obj)
keys.push(key);
keys.sort();
for(var index in keys)
temp[keys[index]] = sortObject(obj[keys[index]]);
return temp;
}
function showCanvas() {
showMetrics();
$("#buffered_log").show();
$("#buffered_c").show();
}
function hideCanvas() {
hideMetrics();
$("#buffered_log").hide();
$("#buffered_c").hide();
}
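// serialize the collected events with jsonpack and base64-encode them,
// so they can be passed to metrics.html via the permalink URL or the clipboard.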
function getMetrics() {
var json = JSON.stringify(events);
var jsonpacked = jsonpack.pack(json);
console.log("packing JSON from " + json.length + " to " + jsonpacked.length + " bytes");
return btoa(jsonpacked);
}
function copyMetricsToClipBoard() {
copyTextToClipboard(getMetrics());
}
function copyTextToClipboard(text) {
var textArea = document.createElement("textarea");
textArea.value = text;
document.body.appendChild(textArea);
textArea.select();
try {
var successful = document.execCommand('copy');
var msg = successful ? 'successful' : 'unsuccessful';
console.log('Copying text command was ' + msg);
} catch (err) {
console.log('Oops, unable to copy');
}
document.body.removeChild(textArea);
}
function goToMetrics() {
var url = document.URL;
url = url.substr(0,url.lastIndexOf("/")+1) + 'metrics.html';
console.log(url);
window.open(url,'_blank');
}
function goToMetricsPermaLink() {
var url = document.URL;
var b64 = getMetrics();
url = url.substr(0,url.lastIndexOf("/")+1) + 'metrics.html?data=' + b64;
console.log(url);
window.open(url,'_blank');
}
function minsecs(ts) {
var m = Math.floor(Math.floor(ts % 3600) / 60);
var s = Math.floor(ts % 60);
return m + ":" + (s < 10 ? "0" : "") + s;
}
function buffered_seek(event) {
var canvas = $("#buffered_c")[0];
var v = $('#video')[0];
var target = (event.clientX - canvas.offsetLeft) / canvas.width * v.duration;
v.currentTime = target;
}
function updateLevelInfo() {
var button_template = '<button type="button" class="btn btn-sm ';
var button_enabled = 'btn-primary" ';
var button_disabled = 'btn-success" ';
var html1 = button_template;
if(hls.autoLevelEnabled) {
html1 += button_enabled;
} else {
html1 += button_disabled;
}
html1 += 'onclick="hls.currentLevel=-1">auto</button>';
var html2 = button_template;
if(hls.autoLevelEnabled) {
html2 += button_enabled;
} else {
html2 += button_disabled;
}
html2 += 'onclick="hls.loadLevel=-1">auto</button>';
var html3 = button_template;
if(hls.autoLevelCapping === -1) {
html3 += button_enabled;
} else {
html3 += button_disabled;
}
html3 += 'onclick="hls.autoLevelCapping=-1;updateLevelInfo()">auto</button>';
var html4 = button_template;
if(hls.autoLevelEnabled) {
html4 += button_enabled;
} else {
html4 += button_disabled;
}
html4 += 'onclick="hls.nextLevel=-1">auto</button>';
for (var i=0; i < hls.levels.length; i++) {
html1 += button_template;
if(hls.currentLevel === i) {
html1 += button_enabled;
} else {
html1 += button_disabled;
}
var levelName = i, label = level2label(i);
if(label) {
levelName += '(' + level2label(i) + ')';
}
html1 += 'onclick="hls.currentLevel=' + i + '">' + levelName + '</button>';
html2 += button_template;
if(hls.loadLevel === i) {
html2 += button_enabled;
} else {
html2 += button_disabled;
}
html2 += 'onclick="hls.loadLevel=' + i + '">' + levelName + '</button>';
html3 += button_template;
if(hls.autoLevelCapping === i) {
html3 += button_enabled;
} else {
html3 += button_disabled;
}
html3 += 'onclick="hls.autoLevelCapping=' + i + ';updateLevelInfo()">' + levelName + '</button>';
html4 += button_template;
if(hls.nextLevel === i) {
html4 += button_enabled;
} else {
html4 += button_disabled;
}
html4 += 'onclick="hls.nextLevel=' + i + '">' + levelName + '</button>';
}
var v = $('#video')[0];
if(v.videoWidth) {
$("#currentResolution").html("video resolution:" + v.videoWidth + 'x' + v.videoHeight);
}
$("#currentLevelControl").html(html1);
$("#loadLevelControl").html(html2);
$("#levelCappingControl").html(html3);
$("#nextLevelControl").html(html4);
}
function level2label(index) {
if(hls && hls.levels.length-1 >= index) {
var level = hls.levels[index];
if (level.name) {
return level.name;
} else {
if (level.height) {
return(level.height + 'p / ' + Math.round(level.bitrate / 1024) + 'kb');
} else {
if(level.bitrate) {
return(Math.round(level.bitrate / 1024) + 'kb');
} else {
return null;
}
}
}
}
}
</script>
</body>
</html>

View file

@ -0,0 +1,578 @@
/*
Copyright (c) 2013, Rodrigo González, Sapienlab All Rights Reserved.
Available via MIT LICENSE. See https://github.com/roro89/jsonpack/blob/master/LICENSE.md for details.
*/
(function(define) {
define([], function() {
var TOKEN_TRUE = -1;
var TOKEN_FALSE = -2;
var TOKEN_NULL = -3;
var TOKEN_EMPTY_STRING = -4;
var TOKEN_UNDEFINED = -5;
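// Packed output layout: "<strings joined by |>^<integers joined by |>^<floats joined by |>^<structure>",
// where the structure references dictionary entries by base-36 index.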
var pack = function(json, options) {
// Canonizes the options
options = options || {};
// A shorthand for debugging
var verbose = options.verbose || false;
verbose && console.log('Normalize the JSON Object');
// JSON as Javascript Object (Not string representation)
json = typeof json === 'string' ? this.JSON.parse(json) : json;
verbose && console.log('Creating a empty dictionary');
// The dictionary
var dictionary = {
strings : [],
integers : [],
floats : []
};
verbose && console.log('Creating the AST');
// The AST
var ast = (function recursiveAstBuilder(item) {
verbose && console.log('Calling recursiveAstBuilder with ' + this.JSON.stringify(item));
// The type of the item
var type = typeof item;
// Case 7: The item is null
if (item === null) {
return {
type : 'null',
index : TOKEN_NULL
};
}
//add undefined
if (typeof item === 'undefined') {
return {
type : 'undefined',
index : TOKEN_UNDEFINED
};
}
// Case 1: The item is Array Object
if ( item instanceof Array) {
// Create a new sub-AST of type Array (@)
var ast = ['@'];
// Add each items
for (var i in item) {
if (!item.hasOwnProperty(i)) continue;
ast.push(recursiveAstBuilder(item[i]));
}
// And return
return ast;
}
// Case 2: The item is Object
if (type === 'object') {
// Create a new sub-AST of type Object ($)
var ast = ['$'];
// Add each items
for (var key in item) {
if (!item.hasOwnProperty(key))
continue;
ast.push(recursiveAstBuilder(key));
ast.push(recursiveAstBuilder(item[key]));
}
// And return
return ast;
}
// Case 3: The item empty string
if (item === '') {
return {
type : 'empty',
index : TOKEN_EMPTY_STRING
};
}
// Case 4: The item is String
if (type === 'string') {
// The index of that word in the dictionary
var index = _indexOf.call(dictionary.strings, item);
// If not, add to the dictionary and actualize the index
if (index == -1) {
dictionary.strings.push(_encode(item));
index = dictionary.strings.length - 1;
}
// Return the token
return {
type : 'strings',
index : index
};
}
// Case 5: The item is integer
if (type === 'number' && item % 1 === 0) {
// The index of that number in the dictionary
var index = _indexOf.call(dictionary.integers, item);
// If not, add to the dictionary and actualize the index
if (index == -1) {
dictionary.integers.push(_base10To36(item));
index = dictionary.integers.length - 1;
}
// Return the token
return {
type : 'integers',
index : index
};
}
// Case 6: The item is float
if (type === 'number') {
// The index of that number in the dictionary
var index = _indexOf.call(dictionary.floats, item);
// If not, add to the dictionary and actualize the index
if (index == -1) {
// Float not use base 36
dictionary.floats.push(item);
index = dictionary.floats.length - 1;
}
// Return the token
return {
type : 'floats',
index : index
};
}
// Case 7: The item is boolean
if (type === 'boolean') {
return {
type : 'boolean',
index : item ? TOKEN_TRUE : TOKEN_FALSE
};
}
// Default
throw new Error('Unexpected argument of type ' + typeof (item));
})(json);
// A set of shorthands proxies for the length of the dictionaries
var stringLength = dictionary.strings.length;
var integerLength = dictionary.integers.length;
var floatLength = dictionary.floats.length;
verbose && console.log('Parsing the dictionary');
// Create a raw dictionary
var packed = dictionary.strings.join('|');
packed += '^' + dictionary.integers.join('|');
packed += '^' + dictionary.floats.join('|');
verbose && console.log('Parsing the structure');
// And add the structure
packed += '^' + (function recursiveParser(item) {
verbose && console.log('Calling a recursiveParser with ' + this.JSON.stringify(item));
// If the item is Array, then is a object of
// type [object Object] or [object Array]
if ( item instanceof Array) {
// The packed resulting
var packed = item.shift();
for (var i in item) {
if (!item.hasOwnProperty(i))
continue;
packed += recursiveParser(item[i]) + '|';
}
return (packed[packed.length - 1] === '|' ? packed.slice(0, -1) : packed) + ']';
}
// A shorthand proxies
var type = item.type, index = item.index;
if (type === 'strings') {
// Just return the base 36 of index
return _base10To36(index);
}
if (type === 'integers') {
// Return a base 36 of index plus stringLength offset
return _base10To36(stringLength + index);
}
if (type === 'floats') {
// Return a base 36 of index plus stringLength and integerLength offset
return _base10To36(stringLength + integerLength + index);
}
if (type === 'boolean') {
return item.index;
}
if (type === 'null') {
return TOKEN_NULL;
}
if (type === 'undefined') {
return TOKEN_UNDEFINED;
}
if (type === 'empty') {
return TOKEN_EMPTY_STRING;
}
throw new TypeError('The item is alien!');
})(ast);
verbose && console.log('Ending parser');
// If debug, return a internal representation of dictionary and stuff
if (options.debug)
return {
dictionary : dictionary,
ast : ast,
packed : packed
};
return packed;
};
var unpack = function(packed, options) {
// Canonizes the options
options = options || {};
// A raw buffer
var rawBuffers = packed.split('^');
// Create a dictionary
options.verbose && console.log('Building dictionary');
var dictionary = [];
// Add the strings values
var buffer = rawBuffers[0];
if (buffer !== '') {
buffer = buffer.split('|');
options.verbose && console.log('Parse the strings dictionary');
for (var i=0, n=buffer.length; i<n; i++){
dictionary.push(_decode(buffer[i]));
}
}
// Add the integers values
buffer = rawBuffers[1];
if (buffer !== '') {
buffer = buffer.split('|');
options.verbose && console.log('Parse the integers dictionary');
for (var i=0, n=buffer.length; i<n; i++){
dictionary.push(_base36To10(buffer[i]));
}
}
// Add the floats values
buffer = rawBuffers[2];
if (buffer !== '') {
buffer = buffer.split('|')
options.verbose && console.log('Parse the floats dictionary');
for (var i=0, n=buffer.length; i<n; i++){
dictionary.push(parseFloat(buffer[i]));
}
}
// Free memory
delete buffer;
options.verbose && console.log('Tokenizing the structure');
// Tokenizer the structure
var number36 = '';
var tokens = [];
var len=rawBuffers[3].length;
for (var i = 0; i < len; i++) {
var symbol = rawBuffers[3].charAt(i);
if (symbol === '|' || symbol === '$' || symbol === '@' || symbol === ']') {
if (number36) {
tokens.push(_base36To10(number36));
number36 = '';
}
symbol !== '|' && tokens.push(symbol);
} else {
number36 += symbol;
}
}
// A shorthand proxy for tokens.length
var tokensLength = tokens.length;
// The index of the next token to read
var tokensIndex = 0;
options.verbose && console.log('Starting recursive parser');
return (function recursiveUnpackerParser() {
// Maybe '$' (object) or '@' (array)
var type = tokens[tokensIndex++];
options.verbose && console.log('Reading collection type ' + (type === '$' ? 'object' : 'Array'));
// Parse an array
if (type === '@') {
var node = [];
for (; tokensIndex < tokensLength; tokensIndex++) {
var value = tokens[tokensIndex];
options.verbose && console.log('Read ' + value + ' symbol');
if (value === ']')
return node;
if (value === '@' || value === '$') {
node.push(recursiveUnpackerParser());
} else {
switch(value) {
case TOKEN_TRUE:
node.push(true);
break;
case TOKEN_FALSE:
node.push(false);
break;
case TOKEN_NULL:
node.push(null);
break;
case TOKEN_UNDEFINED:
node.push(undefined);
break;
case TOKEN_EMPTY_STRING:
node.push('');
break;
default:
node.push(dictionary[value]);
}
}
}
options.verbose && console.log('Parsed ' + this.JSON.stringify(node));
return node;
}
// Parse a object
if (type === '$') {
var node = {};
for (; tokensIndex < tokensLength; tokensIndex++) {
var key = tokens[tokensIndex];
if (key === ']')
return node;
if (key === TOKEN_EMPTY_STRING)
key = '';
else
key = dictionary[key];
var value = tokens[++tokensIndex];
if (value === '@' || value === '$') {
node[key] = recursiveUnpackerParser();
} else {
switch(value) {
case TOKEN_TRUE:
node[key] = true;
break;
case TOKEN_FALSE:
node[key] = false;
break;
case TOKEN_NULL:
node[key] = null;
break;
case TOKEN_UNDEFINED:
node[key] = undefined;
break;
case TOKEN_EMPTY_STRING:
node[key] = '';
break;
default:
node[key] = dictionary[value];
}
}
}
options.verbose && console.log('Parsed ' + this.JSON.stringify(node));
return node;
}
throw new TypeError('Bad token ' + type + ' isn\'t a type');
})();
}
/**
* Get the index value of the dictionary
* @param {Object} dictionary a object that have two array attributes: 'string' and 'number'
* @param {Object} data
*/
var _indexOfDictionary = function(dictionary, value) {
// The type of the value
var type = typeof value;
// If is boolean, return a boolean token
if (type === 'boolean')
return value ? TOKEN_TRUE : TOKEN_FALSE;
// If is null, return a... yes! the null token
if (value === null)
return TOKEN_NULL;
//add undefined
if (typeof value === 'undefined')
return TOKEN_UNDEFINED;
if (value === '') {
return TOKEN_EMPTY_STRING;
}
if (type === 'string') {
value = _encode(value);
var index = _indexOf.call(dictionary.strings, value);
if (index === -1) {
dictionary.strings.push(value);
index = dictionary.strings.length - 1;
}
}
// If has an invalid JSON type (example a function)
if (type !== 'string' && type !== 'number') {
throw new Error('The type is not a JSON type');
};
if (type === 'string') {// string
value = _encode(value);
} else if (value % 1 === 0) {// integer
value = _base10To36(value);
} else {// float
}
// If is number, "serialize" the value
value = type === 'number' ? _base10To36(value) : _encode(value);
// Retrieve the index of that value in the dictionary
var index = _indexOf.call(dictionary[type], value);
// If that value is not in the dictionary
if (index === -1) {
// Push the value
dictionary[type].push(value);
// And return their index
index = dictionary[type].length - 1;
}
// If the type is a number, then add the '+' prefix character
// to differentiate that they is a number index. If not, then
// just return a 36-based representation of the index
return type === 'number' ? '+' + index : index;
};
var _encode = function(str) {
if ( typeof str !== 'string')
return str;
return str.replace(/[\+ \|\^\%]/g, function(a) {
return ({
' ' : '+',
'+' : '%2B',
'|' : '%7C',
'^' : '%5E',
'%' : '%25'
})[a]
});
};
var _decode = function(str) {
if ( typeof str !== 'string')
return str;
return str.replace(/\+|%2B|%7C|%5E|%25/g, function(a) {
return ({
'+' : ' ',
'%2B' : '+',
'%7C' : '|',
'%5E' : '^',
'%25' : '%'
})[a]
})
};
var _base10To36 = function(number) {
return Number.prototype.toString.call(number, 36).toUpperCase();
};
var _base36To10 = function(number) {
return parseInt(number, 36);
};
var _indexOf = Array.prototype.indexOf ||
function(obj, start) {
for (var i = (start || 0), j = this.length; i < j; i++) {
if (this[i] === obj) {
return i;
}
}
return -1;
};
return {
JSON : JSON,
pack : pack,
unpack : unpack
};
});
})( typeof define == 'undefined' || !define.amd ? function(deps, factory) {
var jsonpack = factory();
if ( typeof exports != 'undefined')
for (var key in jsonpack)
exports[key] = jsonpack[key];
else
window.jsonpack = jsonpack;
} : define);

View file

@ -0,0 +1,74 @@
<!DOCTYPE html>
<head>
<title>hls.js metrics page</title>
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-32x32.png" sizes="32x32" />
<link rel="icon" type="image/png" href="http://static1.dmcdn.net/images/favicon-16x16.png" sizes="16x16" />
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap-theme.min.css">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
</head>
<body>
<div class="header-container">
<header class="wrapper clearfix">
<h1 class="title">hls.js metrics page</h1>
</header>
</div>
<pre id='HlsDate'></pre>
<pre id='StreamPermalink'></pre>
<input id="metricsData" class="innerControls" type=text value=""/>
window size
<div id="metricsButton">
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(0)">window ALL</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(2000)">2s</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(5000)">5s</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(10000)">10s</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(20000)">20s</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(30000)">30s</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(60000)">60s</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSetSliding(120000)">120s</button><br>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeZoomIn()">Window Zoom In</button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeZoomOut()">Window Zoom Out</button><br>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSlideLeft()"> <<< Window Slide </button>
<button type="button" class="btn btn-xs btn-primary" onclick="timeRangeSlideRight()">Window Slide >>> </button><br>
<button type="button" class="btn btn-xs btn-primary" onclick="windowStart=$('#windowStart').val()">fixed window start(ms)</button>
<input type="text" id='windowStart' defaultValue="0" size="8" onkeydown="if(window.event.keyCode=='13'){windowStart=$('#windowStart').val();}">
<button type="button" class="btn btn-xs btn-primary" onclick="windowEnd=$('#windowEnd').val()">fixed window end(ms)</button>
<input type="text" id='windowEnd' defaultValue="10000" size="8" onkeydown="if(window.event.keyCode=='13'){windowEnd=$('#windowEnd').val();}"><br>
<canvas id="bufferTimerange_c" width="640" height="100" style="border:1px solid #000000" onmousedown="timeRangeCanvasonMouseDown(event)" onmousemove="timeRangeCanvasonMouseMove(event)" onmouseup="timeRangeCanvasonMouseUp(event)" onmouseout="timeRangeCanvasonMouseOut(event)";></canvas>
<canvas id="bitrateTimerange_c" width="640" height="100" style="border:1px solid #000000";></canvas>
<canvas id="bufferWindow_c" width="640" height="100" style="border:1px solid #000000" onmousemove="windowCanvasonMouseMove(event)";></canvas>
<canvas id="videoEvent_c" width="640" height="15" style="border:1px solid #000000";></canvas>
<canvas id="loadEvent_c" width="640" height="15" style="border:1px solid #000000";></canvas><br>
</div>
<script src="canvas.js"></script>
<script src="metrics.js"></script>
<script src="jsonpack.js"></script>
<script>
$(document).ready(function() {
$('#metricsData').change(function() { events = jsonpack.unpack(atob($('#metricsData').val())); updateMetrics(); });
});
var data = location.search.split('data=')[1],events;
if (data) {
events = jsonpack.unpack(atob(decodeURIComponent(data)));
updateMetrics();
}
function updateMetrics() {
var hlsLink = document.URL.substr(0,document.URL.lastIndexOf("/")+1) + 'index.html?src=' + encodeURIComponent(events.url);
var description = 'playlist: ' + "<a href=\"" + events.url + "\">" + events.url + "</a>" + '<br>replay: ' + "<a href=\"" + hlsLink + "\">" + hlsLink + "</a>";
$("#StreamPermalink").html(description);
$("#HlsDate").text("session Start Date:" + new Date(events.t0));
metricsDisplayed=true;
showMetrics();
refreshCanvas();
}
</script>
</body>

View file

@ -0,0 +1,201 @@
function showMetrics() {
if(metricsDisplayed) {
var width = window.innerWidth-30;
$("#bufferWindow_c")[0].width =
$("#bitrateTimerange_c")[0].width =
$("#bufferTimerange_c")[0].width =
$("#videoEvent_c")[0].width =
$("#metricsButton")[0].width =
$("#loadEvent_c")[0].width = width;
$("#bufferWindow_c").show();
$("#bitrateTimerange_c").show();
$("#bufferTimerange_c").show();
$("#videoEvent_c").show();
$("#metricsButton").show();
$("#loadEvent_c").show();
}
}
function toggleMetricsDisplay() {
metricsDisplayed = !metricsDisplayed;
if(metricsDisplayed) {
showMetrics();
} else {
hideMetrics();
}
}
function hideMetrics() {
if(!metricsDisplayed) {
$("#bufferWindow_c").hide();
$("#bitrateTimerange_c").hide();
$("#bufferTimerange_c").hide();
$("#videoEvent_c").hide();
$("#metricsButton").hide();
$("#loadEvent_c").hide();
}
}
function timeRangeSetSliding(duration) {
windowDuration = duration;
windowSliding = true;
refreshCanvas();
}
var timeRangeMouseDown=false;
function timeRangeCanvasonMouseDown(evt) {
var canvas = evt.currentTarget,
bRect = canvas.getBoundingClientRect(),
mouseX = Math.round((evt.clientX - bRect.left)*(canvas.width/bRect.width));
windowStart = Math.max(0,Math.round((mouseX-eventLeftMargin) * getWindowTimeRange().now / (canvas.width-eventLeftMargin)));
windowEnd = windowStart+500;
timeRangeMouseDown = true;
windowSliding = false;
//console.log('windowStart/windowEnd:' + '/' + windowStart + '/' + windowEnd);
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
refreshCanvas();
}
function timeRangeCanvasonMouseMove(evt) {
if(timeRangeMouseDown) {
var canvas = evt.currentTarget,
bRect = canvas.getBoundingClientRect(),
mouseX = Math.round((evt.clientX - bRect.left)*(canvas.width/bRect.width)),
pos = Math.max(0,Math.round((mouseX-eventLeftMargin) * getWindowTimeRange().now / (canvas.width-eventLeftMargin)));
if(pos < windowStart) {
windowStart = pos;
} else {
windowEnd = pos;
}
if(windowStart === windowEnd) {
// to avoid division by zero ...
windowEnd +=50;
}
//console.log('windowStart/windowEnd:' + '/' + windowStart + '/' + windowEnd);
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
refreshCanvas();
}
}
function timeRangeCanvasonMouseUp(evt) {
timeRangeMouseDown = false;
}
function timeRangeCanvasonMouseOut(evt) {
timeRangeMouseDown = false;
}
function windowCanvasonMouseMove(evt) {
var canvas = evt.currentTarget,
bRect = canvas.getBoundingClientRect(),
mouseX = Math.round((evt.clientX - bRect.left)*(canvas.width/bRect.width)),
timeRange = getWindowTimeRange();
windowFocus = timeRange.min + Math.max(0,Math.round((mouseX-eventLeftMargin) * (timeRange.max - timeRange.min) / (canvas.width-eventLeftMargin)));
//console.log(windowFocus);
refreshCanvas();
}
var windowDuration=20000,windowSliding=true,windowStart=0,windowEnd=10000,windowFocus,metricsDisplayed=false;
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
function refreshCanvas() {
if(metricsDisplayed) {
try {
var windowTime = getWindowTimeRange();
canvasBufferTimeRangeUpdate($("#bufferTimerange_c")[0], 0, windowTime.now, windowTime.min,windowTime.max, events.buffer);
if(windowTime.min !== 0 || windowTime.max !== windowTime.now) {
$("#bufferWindow_c").show();
canvasBufferWindowUpdate($("#bufferWindow_c")[0], windowTime.min,windowTime.max, windowTime.focus, events.buffer);
} else {
$("#bufferWindow_c").hide();
}
canvasBitrateEventUpdate($("#bitrateTimerange_c")[0], 0, windowTime.now, windowTime.min,windowTime.max, events.level, events.bitrate);
canvasVideoEventUpdate($("#videoEvent_c")[0], windowTime.min,windowTime.max, events.video);
canvasLoadEventUpdate($("#loadEvent_c")[0], windowTime.min,windowTime.max, events.load);
} catch(err) {
console.log("refreshCanvas error:" +err.message);
}
}
}
function getWindowTimeRange() {
var tnow,minTime,maxTime;
if(events.buffer.length) {
tnow = events.buffer[events.buffer.length-1].time;
} else {
tnow = 0;
}
if(windowSliding) {
// let's show the requested window
if(windowDuration) {
minTime = Math.max(0, tnow-windowDuration),
maxTime = Math.min(minTime + windowDuration, tnow);
} else {
minTime = 0;
maxTime = tnow;
}
} else {
minTime = windowStart;
maxTime = windowEnd;
}
if(windowFocus === undefined || windowFocus < minTime || windowFocus > maxTime) {
windowFocus = minTime;
}
return { min : minTime, max: maxTime, now : tnow, focus : windowFocus}
}
function timeRangeZoomIn() {
if(windowSliding) {
windowDuration/=2;
} else {
var duration = windowEnd-windowStart;
windowStart+=duration/4;
windowEnd-=duration/4;
if(windowStart === windowEnd) {
windowEnd+=50;
}
}
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
refreshCanvas();
}
function timeRangeZoomOut() {
if(windowSliding) {
windowDuration*=2;
} else {
var duration = windowEnd-windowStart;
windowStart-=duration/2;
windowEnd+=duration/2;
windowStart=Math.max(0,windowStart);
windowEnd=Math.min(events.buffer[events.buffer.length-1].time,windowEnd);
}
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
refreshCanvas();
}
function timeRangeSlideLeft() {
var duration = windowEnd-windowStart;
windowStart-=duration/4;
windowEnd-=duration/4;
windowStart=Math.max(0,windowStart);
windowEnd=Math.min(events.buffer[events.buffer.length-1].time,windowEnd);
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
refreshCanvas();
}
function timeRangeSlideRight() {
var duration = windowEnd-windowStart;
windowStart+=duration/4;
windowEnd+=duration/4;
windowStart=Math.max(0,windowStart);
windowEnd=Math.min(events.buffer[events.buffer.length-1].time,windowEnd);
$("#windowStart").val(windowStart);
$("#windowEnd").val(windowEnd);
refreshCanvas();
}

View file

@ -0,0 +1,155 @@
## design principle
design idea is pretty simple:
- main functionality is split into several subsystems
- each subsystem relies heavily on events for internal/external communication.
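For illustration, this is roughly how external code hooks into those events, mirroring what the demo page added in this commit does; the playlist URL below is just a placeholder.

```js
var video = document.querySelector('video');
var hls = new Hls();
hls.loadSource('http://example.com/playlist.m3u8'); // placeholder URL
hls.attachMedia(video);
// subsystems and external code communicate through Hls.Events
hls.on(Hls.Events.MANIFEST_PARSED, function(event, data) {
  console.log('manifest parsed, ' + data.levels.length + ' quality level(s) found');
});
hls.on(Hls.Events.ERROR, function(event, data) {
  console.log('error type=' + data.type + ' details=' + data.details + ' fatal=' + data.fatal);
});
```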
## code structure
- [src/hls.js][]
- definition of the Hls class. instantiates all subcomponents.
- [src/events.js][]
- definition of Hls.Events
- [src/errors.js][]
- definition of Hls.ErrorTypes and Hls.ErrorDetails
- [src/controller/mse-media-controller.js][]
- in charge of:
- ensuring that buffer is filled as per defined quality selection logic.
- monitoring current playback quality level (buffer controller maintains a map between media position and quality level)
- if buffer is not filled up appropriately (i.e. as per defined maximum buffer size, or as per defined quality level), buffer controller will trigger the following actions:
- retrieve "not buffered" media position greater then current playback position. this is performed by comparing video.buffered and video.currentTime.
- retrieve URL of fragment matching with this media position, and appropriate quality level
- trigger fragment loading
- monitor fragment loading speed:
- "expected time of fragment load completion" is computed using "fragment loading instant bandwidth".
- this time is compared to the "expected time of buffer starvation".
- if we have less than 2 fragments buffered and if "expected time of fragment load completion" is bigger than "expected time of buffer starvation" and also bigger than duration needed to load fragment at next quality level (determined by auto quality switch algorithm), current fragment loading is aborted, and an emergency switch down is triggered.
- trigger fragment parsing (TS demuxing and remuxing in MP4 boxes) upon loading completion
- trigger MP4 boxes appending in [SourceBuffer](http://www.w3.org/TR/media-source/#sourcebuffer) upon fragment parsing completion.
buffer controller actions are scheduled by a tick timer (invoked every 100ms) and actions are controlled by a state machine.
- [src/controller/fps-controller.js][]
- in charge of monitoring the frame rate, and firing an FPS_DROP event in case the FPS drop exceeds the configured threshold. disabled for now.
- [src/controller/level-controller.js][]
- in charge of scheduling playlist (re)loading and monitoring of fragment loading bitrate
- a timer is armed to periodically refresh active live playlist.
- [src/controller/abr-controller.js][]
- in charge of determining auto quality level.
- auto quality switch algorithm is pretty naive and simple ATM and similar to the one that could be found in google [StageFright](https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp)
- [src/demux/aacdemuxer.js][]
- AAC ES demuxer
- extract ADTS samples from AAC ES
- [src/demux/demuxer.js][]
- demuxer abstraction interface, that will either use a [Worker](https://en.wikipedia.org/wiki/Web_worker) to demux or demux inline depending on config/browser capabilities.
- also handles fragment decryption using the WebCrypto API (fragment decryption is performed in the main thread)
- if Workers are disabled, demuxing will be performed in the main thread.
- if Workers are available/enabled,
- demuxer will instantiate a Worker
- post/listen to Worker message,
- and redispatch events as expected by hls.js.
- Fragments are sent as [transferable objects](https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast) in order to minimize message passing overhead.
- [src/demux/demuxer-inline.js][]
- inline demuxer.
- probe fragments and instantiate appropriate demuxer depending on content type (TSDemuxer, AACDemuxer, ...)
- [src/demux/demuxer-worker.js][]
- demuxer web worker.
- listen to worker message, and trigger DemuxerInline upon reception of Fragments.
- provides MP4 Boxes back to main thread using [transferable objects](https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast) in order to minimize message passing overhead.
- [src/demux/exp-golomb.js][]
- utility class to extract Exponential-Golomb coded data. needed by TS demuxer for SPS parsing.
- [src/demux/id3.js][]
- utility class that detects and parses ID3 tags, used by the AAC demuxer
- [src/demux/tsdemuxer.js][]
- highly optimized TS demuxer:
- parse PAT, PMT
- extract PES packet from audio and video PIDs
- extract AVC/H264 NAL units and AAC/ADTS samples from PES packet
- trigger the remuxer upon parsing completion
- it also tries to work around audio codec switches (HE-AAC to AAC and vice versa) as best it can, without having to restart the MediaSource.
- it also controls the remuxing process:
- upon discontinuity or level switch detection, it will also notify the remuxer so that it can reset its state.
- [src/helper/level-helper.js][]
- helper class providing methods dealing with playlist sliding and fragment duration drift computation: after fragment parsing, start/end fragment timestamps are used to adjust potential playlist drift and live playlist sliding.
- [src/loader/fragment-loader.js][]
- in charge of loading fragments; uses xhr-loader if not overridden by user config
- [src/loader/playlist-loader.js][]
- in charge of loading the manifest and level playlists; uses xhr-loader if not overridden by user config.
- [src/loader/key-loader.js][]
- in charge of loading decryption key
- [src/remux/dummy-remuxer.js][]
- example dummy remuxer
- [src/remux/mp4-generator.js][]
- in charge of generating MP4 boxes
- generate Init Segment (moov)
- generate samples Box (moof and mdat)
- [src/remux/mp4-remuxer.js][]
- in charge of converting AVC/AAC samples provided by demuxer into fragmented ISO BMFF boxes, compatible with MediaSource
- this remuxer is able to deal with small gaps between fragments and ensure timestamp continuity.
- it notifies remuxing completion using events (```FRAG_PARSING_INIT_SEGMENT```and ```FRAG_PARSING_DATA```)
- [src/utils/binary-search.js][]
- binary search helper class
- [src/utils/hex.js][]
- Hex dump utils, useful for debug
- [src/utils/logger.js][]
- logging utils, useful for debug
- [src/utils/url.js][]
- convert base+relative URL into absolute URL
- [src/utils/xhr-loader.js][]
- XMLHttpRequest wrapper. it handles standard HTTP GET but also retries and timeouts (a simplified sketch of the retry logic follows this list).
- retries: if the xhr fails, the HTTP GET will be retried after a predetermined delay. this delay increases following an exponential backoff. after a predetermined max number of retries, an error callback is triggered.
- timeout: if the load exceeds the max allowed duration, a timeout callback is triggered. it is up to the callback to decide whether the connection should be cancelled or not.
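The retry behaviour described for [src/utils/xhr-loader.js][] above can be sketched as follows. This is a simplified illustration only, not the actual loader code, and the parameter names (`maxRetries`, `retryDelay`) are made up for the example.

```js
// Illustrative sketch only - not the real src/utils/xhr-loader.js implementation.
function loadWithRetry(url, maxRetries, retryDelay, onSuccess, onError) {
  var xhr = new XMLHttpRequest();
  xhr.open('GET', url, true);
  xhr.onload = function() { onSuccess(xhr.response); };
  xhr.onerror = function() {
    if (maxRetries > 0) {
      // retry after the current delay, then double it (exponential backoff)
      window.setTimeout(function() {
        loadWithRetry(url, maxRetries - 1, retryDelay * 2, onSuccess, onError);
      }, retryDelay);
    } else {
      // give up after the max number of retries
      onError();
    }
  };
  xhr.send();
}
```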
[src/hls.js]: src/hls.js
[src/events.js]: src/events.js
[src/errors.js]: src/errors.js
[src/stats.js]: src/stats.js
[src/controller/abr-controller.js]: src/controller/abr-controller.js
[src/controller/fps-controller.js]: src/controller/fps-controller.js
[src/controller/level-controller.js]: src/controller/level-controller.js
[src/controller/mse-media-controller.js]: src/controller/mse-media-controller.js
[src/demux/aacdemuxer.js]: src/demux/aacdemuxer.js
[src/demux/demuxer.js]: src/demux/demuxer.js
[src/demux/demuxer-inline.js]: src/demux/demuxer-inline.js
[src/demux/demuxer-worker.js]: src/demux/demuxer-worker.js
[src/demux/exp-golomb.js]: src/demux/exp-golomb.js
[src/demux/id3.js]: src/demux/id3.js
[src/demux/tsdemuxer.js]: src/demux/tsdemuxer.js
[src/helper/level-helper.js]: src/helper/level-helper.js
[src/loader/fragment-loader.js]: src/loader/fragment-loader.js
[src/loader/playlist-loader.js]: src/loader/playlist-loader.js
[src/loader/key-loader.js]: src/loader/key-loader.js
[src/remux/dummy-remuxer.js]: src/remux/dummy-remuxer.js
[src/remux/mp4-generator.js]: src/remux/mp4-generator.js
[src/remux/mp4-remuxer.js]: src/remux/mp4-remuxer.js
[src/utils/binary-search.js]: src/utils/binary-search.js
[src/utils/hex.js]: src/utils/hex.js
[src/utils/logger.js]: src/utils/logger.js
[src/utils/url.js]: src/utils/url.js
[src/utils/xhr-loader.js]: src/utils/xhr-loader.js
## Error detection and Handling
- ```MANIFEST_LOAD_ERROR``` is raised by [src/loader/playlist-loader.js][] upon xhr failure detected by [src/utils/xhr-loader.js][]. this error is marked as fatal and will not be recovered automatically. a call to ```hls.startLoad()``` could help recover it.
- ```MANIFEST_LOAD_TIMEOUT``` is raised by [src/loader/playlist-loader.js][] upon xhr timeout detected by [src/utils/xhr-loader.js][]. this error is marked as fatal and will not be recovered automatically. a call to ```hls.startLoad()``` could help recover it.
- ```MANIFEST_PARSING_ERROR``` is raised by [src/loader/playlist-loader.js][] if Manifest parsing fails (no EXTM3U delimiter, no levels found in Manifest, ...)
- ```LEVEL_LOAD_ERROR``` is raised by [src/loader/playlist-loader.js][] upon xhr failure detected by [src/utils/xhr-loader.js][]. this error is marked as fatal and will not be recovered automatically. a call to ```hls.startLoad()``` could help recover it.
- ```LEVEL_LOAD_TIMEOUT``` is raised by [src/loader/playlist-loader.js][] upon xhr timeout detected by [src/utils/xhr-loader.js][]. this error is marked as fatal and will not be recovered automatically. a call to ```hls.startLoad()``` could help recover it.
- ```LEVEL_SWITCH_ERROR``` is raised by [src/controller/level-controller.js][] if user tries to switch to an invalid level (invalid/out of range level id)
- ```FRAG_LOAD_ERROR``` is raised by [src/loader/fragment-loader.js][] upon xhr failure detected by [src/utils/xhr-loader.js][].
- if auto level switch is enabled and loaded frag level is greater than 0, this error is not fatal: in that case [src/controller/level-controller.js][] will trigger an emergency switch down to level 0.
- if frag level is 0 or auto level switch is disabled, this error is marked as fatal and a call to ```hls.startLoad()``` could help recover it.
- ```FRAG_LOOP_LOADING_ERROR``` is raised by [src/controller/mse-media-controller.js][] upon detection of the same fragment being requested in a loop. this could happen with badly formatted fragments.
- if auto level switch is enabled and loaded frag level is greater than 0, this error is not fatal: in that case [src/controller/level-controller.js][] will trigger an emergency switch down to level 0.
- if frag level is 0 or auto level switch is disabled, this error is marked as fatal and a call to ```hls.startLoad()``` could help recover it.
- ```FRAG_LOAD_TIMEOUT``` is raised by [src/loader/fragment-loader.js][] upon xhr timeout detected by [src/utils/xhr-loader.js][].
- if auto level switch is enabled and loaded frag level is greater than 0, this error is not fatal: in that case [src/controller/level-controller.js][] will trigger an emergency switch down to level 0.
- if frag level is 0 or auto level switch is disabled, this error is marked as fatal and a call to ```hls.startLoad()``` could help recover it.
- ```FRAG_PARSING_ERROR``` is raised by [src/demux/tsdemuxer.js][] upon TS parsing error. this error is not fatal.
- ```FRAG_DECRYPT_ERROR``` is raised by [src/demux/demuxer.js][] upon fragment decrypting error. this error is fatal.
- ```BUFFER_PREPARE_APPEND_ERROR``` is raised by [src/controller/mse-media-controller.js][] when an exception is raised while calling sourceBuffer.appendBuffer(). this error is non-fatal and becomes fatal after config.appendErrorMaxRetry retries. when fatal, a call to ```hls.recoverMediaError()``` could help recover it.
- ```BUFFER_APPENDING_ERROR``` is raised by [src/controller/mse-media-controller.js][] after SourceBuffer appending error. this error is fatal and a call to ```hls.recoverMediaError()``` could help recover it.
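As a concrete example, the demo page added in this commit reacts to fatal errors roughly as in the condensed sketch below; it combines the recovery calls mentioned above (```hls.startLoad()``` and ```hls.recoverMediaError()```) with the structure of the demo's error handler.

```js
hls.on(Hls.Events.ERROR, function(event, data) {
  if (data.fatal) {
    switch (data.type) {
      case Hls.ErrorTypes.NETWORK_ERROR:
        // fatal network error: try to restart loading
        hls.startLoad();
        break;
      case Hls.ErrorTypes.MEDIA_ERROR:
        // fatal media error: try to recover, possibly after swapping the audio codec
        hls.recoverMediaError();
        break;
      default:
        // unrecoverable error
        hls.destroy();
        break;
    }
  }
});
```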

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,18 @@
{
"folders":
[
{
"path": ".",
"folder_exclude_patterns": [
".git",
"node_modules",
"dist"
],
"file_exclude_patterns": [
".gitignore",
"hls.js.sublime-project",
"hls.js.sublime-workspace"
]
}
]
}

View file

@ -0,0 +1,688 @@
(function() {
'use strict';
var
DataView = window.DataView,
/**
* Returns the string representation of an ASCII encoded four byte buffer.
* @param buffer {Uint8Array} a four-byte buffer to translate
* @return {string} the corresponding string
*/
parseType = function(buffer) {
var result = '';
result += String.fromCharCode(buffer[0]);
result += String.fromCharCode(buffer[1]);
result += String.fromCharCode(buffer[2]);
result += String.fromCharCode(buffer[3]);
return result;
},
parseMp4Date = function(seconds) {
return new Date(seconds * 1000 - 2082844800000);
},
parseSampleFlags = function(flags) {
return {
isLeading: (flags[0] & 0x0c) >>> 2,
dependsOn: flags[0] & 0x03,
isDependedOn: (flags[1] & 0xc0) >>> 6,
hasRedundancy: (flags[1] & 0x30) >>> 4,
paddingValue: (flags[1] & 0x0e) >>> 1,
isNonSyncSample: flags[1] & 0x01,
degradationPriority: (flags[2] << 8) | flags[3]
};
},
nalParse = function(avcStream) {
var
avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
result = [],
i,
length;
for (i = 0; i < avcStream.length; i += length) {
length = avcView.getUint32(i);
i += 4;
switch(avcStream[i] & 0x1F) {
case 0x01:
result.push('NDR');
break;
case 0x05:
result.push('IDR');
break;
case 0x06:
result.push('SEI');
break;
case 0x07:
result.push('SPS');
break;
case 0x08:
result.push('PPS');
break;
case 0x09:
result.push('AUD');
break;
default:
result.push(avcStream[i] & 0x1F);
break;
}
}
return result;
},
// registry of handlers for individual mp4 box types
parse = {
// codingname, not a first-class box type. stsd entries share the
// same format as real boxes so the parsing infrastructure can be
// shared
avc1: function(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
dataReferenceIndex: view.getUint16(6),
width: view.getUint16(24),
height: view.getUint16(26),
horizresolution: view.getUint16(28) + (view.getUint16(30) / 16),
vertresolution: view.getUint16(32) + (view.getUint16(34) / 16),
frameCount: view.getUint16(40),
depth: view.getUint16(74),
config: mp4toJSON(data.subarray(78, data.byteLength))
};
},
avcC: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
configurationVersion: data[0],
avcProfileIndication: data[1],
profileCompatibility: data[2],
avcLevelIndication: data[3],
lengthSizeMinusOne: data[4] & 0x03,
sps: [],
pps: []
},
numOfSequenceParameterSets = data[5] & 0x1f,
numOfPictureParameterSets,
nalSize,
offset,
i;
// iterate past any SPSs
offset = 6;
for (i = 0; i < numOfSequenceParameterSets; i++) {
nalSize = view.getUint16(offset);
offset += 2;
result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
offset += nalSize;
}
// iterate past any PPSs
numOfPictureParameterSets = data[offset];
offset++;
for (i = 0; i < numOfPictureParameterSets; i++) {
nalSize = view.getUint16(offset);
offset += 2;
result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
offset += nalSize;
}
return result;
},
btrt: function(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
bufferSizeDB: view.getUint32(0),
maxBitrate: view.getUint32(4),
avgBitrate: view.getUint32(8)
};
},
ftyp: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
majorBrand: parseType(data.subarray(0, 4)),
minorVersion: view.getUint32(4),
compatibleBrands: []
},
i = 8;
while (i < data.byteLength) {
result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
i += 4;
}
return result;
},
dinf: function(data) {
return {
boxes: mp4toJSON(data)
};
},
dref: function(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
dataReferences: mp4toJSON(data.subarray(8))
};
},
hdlr: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
handlerType: parseType(data.subarray(8, 12)),
name: ''
},
i = 8;
// parse out the name field
for (i = 24; i < data.byteLength; i++) {
if (data[i] === 0x00) {
// the name field is null-terminated
i++;
break;
}
result.name += String.fromCharCode(data[i]);
}
// decode UTF-8 to javascript's internal representation
// see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
result.name = window.decodeURIComponent(window.escape(result.name));
return result;
},
mdat: function(data) {
return {
byteLength: data.byteLength,
nals: nalParse(data)
};
},
mdhd: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
i = 4,
language,
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
language: ''
};
if (result.version === 1) {
i += 4;
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 8;
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 4;
result.timescale = view.getUint32(i);
i += 8;
result.duration = view.getUint32(i); // truncating top 4 bytes
} else {
result.creationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.modificationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.timescale = view.getUint32(i);
i += 4;
result.duration = view.getUint32(i);
}
i += 4;
// language is stored as an ISO-639-2/T code in an array of three 5-bit fields
// each field is the packed difference between its ASCII value and 0x60
language = view.getUint16(i);
result.language += String.fromCharCode((language >> 10) + 0x60);
result.language += String.fromCharCode(((language & 0x03c0) >> 5) + 0x60);
result.language += String.fromCharCode((language & 0x1f) + 0x60);
return result;
},
mdia: function(data) {
return {
boxes: mp4toJSON(data)
};
},
mfhd: function(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sequenceNumber: (data[4] << 24) |
(data[5] << 16) |
(data[6] << 8) |
(data[7])
};
},
minf: function(data) {
return {
boxes: mp4toJSON(data)
};
},
moof: function(data) {
return {
boxes: mp4toJSON(data)
};
},
moov: function(data) {
return {
boxes: mp4toJSON(data)
};
},
mvex: function(data) {
return {
boxes: mp4toJSON(data)
};
},
mvhd: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
i = 4,
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4))
};
if (result.version === 1) {
i += 4;
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 8;
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 4;
result.timescale = view.getUint32(i);
i += 8;
result.duration = view.getUint32(i); // truncating top 4 bytes
} else {
result.creationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.modificationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.timescale = view.getUint32(i);
i += 4;
result.duration = view.getUint32(i);
}
i += 4;
// convert fixed-point, base 16 back to a number
result.rate = view.getUint16(i) + (view.getUint16(i + 2) / 16);
i += 4;
result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
i += 2;
i += 2;
i += 2 * 4;
result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
i += 9 * 4;
i += 6 * 4;
result.nextTrackId = view.getUint32(i);
return result;
},
pdin: function(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
rate: view.getUint32(4),
initialDelay: view.getUint32(8)
};
},
sdtp: function(data) {
var
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
samples: []
}, i;
for (i = 4; i < data.byteLength; i++) {
result.samples.push({
dependsOn: (data[i] & 0x30) >> 4,
isDependedOn: (data[i] & 0x0c) >> 2,
hasRedundancy: data[i] & 0x03
});
}
return result;
},
sidx: function(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
references: [],
referenceId: view.getUint32(4),
timescale: view.getUint32(8),
earliestPresentationTime: view.getUint32(12),
firstOffset: view.getUint32(16)
},
referenceCount = view.getUint16(22),
i;
for (i = 24; referenceCount; i += 12, referenceCount-- ) {
result.references.push({
referenceType: (data[i] & 0x80) >>> 7,
referencedSize: view.getUint32(i) & 0x7FFFFFFF,
subsegmentDuration: view.getUint32(i + 4),
startsWithSap: !!(data[i + 8] & 0x80),
sapType: (data[i + 8] & 0x70) >>> 4,
sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
});
}
return result;
},
stbl: function(data) {
return {
boxes: mp4toJSON(data)
};
},
stco: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
chunkOffsets: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; i += 4, entryCount--) {
result.chunkOffsets.push(view.getUint32(i));
}
return result;
},
stsc: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
entryCount = view.getUint32(4),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sampleToChunks: []
},
i;
for (i = 8; entryCount; i += 12, entryCount--) {
result.sampleToChunks.push({
firstChunk: view.getUint32(i),
samplesPerChunk: view.getUint32(i + 4),
sampleDescriptionIndex: view.getUint32(i + 8)
});
}
return result;
},
stsd: function(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sampleDescriptions: mp4toJSON(data.subarray(8))
};
},
stsz: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sampleSize: view.getUint32(4),
entries: []
},
i;
for (i = 12; i < data.byteLength; i += 4) {
result.entries.push(view.getUint32(i));
}
return result;
},
stts: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
timeToSamples: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; i += 8, entryCount--) {
result.timeToSamples.push({
sampleCount: view.getUint32(i),
sampleDelta: view.getUint32(i + 4)
});
}
return result;
},
styp: function(data) {
return parse.ftyp(data);
},
tfdt: function(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
baseMediaDecodeTime: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
};
},
tfhd: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
trackId: view.getUint32(4)
},
baseDataOffsetPresent = result.flags[2] & 0x01,
sampleDescriptionIndexPresent = result.flags[2] & 0x02,
defaultSampleDurationPresent = result.flags[2] & 0x08,
defaultSampleSizePresent = result.flags[2] & 0x10,
defaultSampleFlagsPresent = result.flags[2] & 0x20,
i;
i = 8;
if (baseDataOffsetPresent) {
i += 4; // truncate top 4 bytes
result.baseDataOffset = view.getUint32(12);
i += 4;
}
if (sampleDescriptionIndexPresent) {
result.sampleDescriptionIndex = view.getUint32(i);
i += 4;
}
if (defaultSampleDurationPresent) {
result.defaultSampleDuration = view.getUint32(i);
i += 4;
}
if (defaultSampleSizePresent) {
result.defaultSampleSize = view.getUint32(i);
i += 4;
}
if (defaultSampleFlagsPresent) {
result.defaultSampleFlags = view.getUint32(i);
}
return result;
},
tkhd: function(data) {
var
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
i = 4,
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
};
if (result.version === 1) {
i += 4;
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 8;
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 4;
result.trackId = view.getUint32(i);
i += 4;
i += 8;
result.duration = view.getUint32(i); // truncating top 4 bytes
} else {
result.creationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.modificationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.trackId = view.getUint32(i);
i += 4;
i += 4;
result.duration = view.getUint32(i);
}
i += 4;
i += 2 * 4;
result.layer = view.getUint16(i);
i += 2;
result.alternateGroup = view.getUint16(i);
i += 2;
// convert fixed-point, base 16 back to a number
result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
i += 2;
i += 2;
result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
i += 9 * 4;
result.width = view.getUint16(i) + (view.getUint16(i + 2) / 16);
i += 4;
result.height = view.getUint16(i) + (view.getUint16(i + 2) / 16);
return result;
},
traf: function(data) {
return {
boxes: mp4toJSON(data)
};
},
trak: function(data) {
return {
boxes: mp4toJSON(data)
};
},
trex: function(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
trackId: view.getUint32(4),
defaultSampleDescriptionIndex: view.getUint32(8),
defaultSampleDuration: view.getUint32(12),
defaultSampleSize: view.getUint32(16),
sampleDependsOn: data[20] & 0x03,
sampleIsDependedOn: (data[21] & 0xc0) >> 6,
sampleHasRedundancy: (data[21] & 0x30) >> 4,
samplePaddingValue: (data[21] & 0x0e) >> 1,
sampleIsDifferenceSample: !!(data[21] & 0x01),
sampleDegradationPriority: view.getUint16(22)
};
},
trun: function(data) {
var
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
samples: []
},
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
dataOffsetPresent = result.flags[2] & 0x01,
firstSampleFlagsPresent = result.flags[2] & 0x04,
sampleDurationPresent = result.flags[1] & 0x01,
sampleSizePresent = result.flags[1] & 0x02,
sampleFlagsPresent = result.flags[1] & 0x04,
sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
sampleCount = view.getUint32(4),
offset = 8,
sample;
if (dataOffsetPresent) {
result.dataOffset = view.getUint32(offset);
offset += 4;
}
if (firstSampleFlagsPresent && sampleCount) {
sample = {
flags: parseSampleFlags(data.subarray(offset, offset + 4))
};
offset += 4;
if (sampleDurationPresent) {
sample.duration = view.getUint32(offset);
offset += 4;
}
if (sampleSizePresent) {
sample.size = view.getUint32(offset);
offset += 4;
}
if (sampleCompositionTimeOffsetPresent) {
sample.compositionTimeOffset = view.getUint32(offset);
offset += 4;
}
result.samples.push(sample);
sampleCount--;
}
while (sampleCount--) {
sample = {};
if (sampleDurationPresent) {
sample.duration = view.getUint32(offset);
offset += 4;
}
if (sampleSizePresent) {
sample.size = view.getUint32(offset);
offset += 4;
}
if (sampleFlagsPresent) {
sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
offset += 4;
}
if (sampleCompositionTimeOffsetPresent) {
sample.compositionTimeOffset = view.getUint32(offset);
offset += 4;
}
result.samples.push(sample);
}
return result;
},
'url ': function(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4))
};
},
vmhd: function(data) {
//var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
//graphicsmode: view.getUint16(4),
//opcolor: new Uint16Array([view.getUint16(6),
// view.getUint16(8),
// view.getUint16(10)])
};
}
};
/**
* Return a javascript array of box objects parsed from an ISO base
* media file.
* @param data {Uint8Array} the binary data of the media to be inspected
* @return {array} a javascript array of potentially nested box objects
*/
var mp4toJSON = function(data) {
var
i = 0,
result = [],
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
size,
type,
end,
box;
while (i < data.byteLength) {
// parse box data
size = view.getUint32(i),
type = parseType(data.subarray(i + 4, i + 8));
end = size > 1 ? i + size : data.byteLength;
// parse type-specific data
box = (parse[type] || function(data) {
return {
data: data
};
})(data.subarray(i + 8, end));
box.size = size;
box.type = type;
// store this box and move to the next
result.push(box);
i = end;
}
return result;
};
let MP4Inspect = {
mp4toJSON: mp4toJSON
};
export default MP4Inspect;
})();

View file

@@ -0,0 +1,5 @@
#!/bin/sh
git checkout gh-pages
git rebase master
git push origin gh-pages --force
git checkout master

View file

@@ -0,0 +1,52 @@
{
"name": "hls.js",
"version": "0.3.11",
"description": "Media Source Extension - HLS library, by/for Dailymotion",
"homepage": "https://github.com/dailymotion/hls.js",
"authors": "Guillaume du Pontavice <guillaume.dupontavice@dailymotion.com>",
"repository": {
"type": "git",
"url": "https://github.com/dailymotion/hls.js"
},
"bugs": {
"url": "https://github.com/dailymotion/hls.js/issues"
},
"main": "src/hls.js",
"private": true,
"scripts": {
"clean": "find dist -mindepth 1 -delete",
"prebuild": "npm run clean & npm run test",
"build": "browserify -s Hls src/hls.js --debug | exorcist dist/hls.js.map -b . > dist/hls.js",
"postbuild": "npm run minify",
"minify": "uglifyjs dist/hls.js -c sequences=true,dead_code=true,conditionals=true,booleans=true,unused=true,if_return=true,join_vars=true,drop_console=true -m sort --screw-ie8 > dist/hls.min.js",
"watch": "watchify --debug -s Hls src/hls.js -o dist/hls.js",
"pretest": "npm run lint",
"test": "mocha --recursive tests/unit",
"lint": "jshint src/",
"serve": "http-server -p 8000 .",
"open": "opener http://localhost:8000/demo/",
"live-reload": "live-reload --port 8001 dist/",
"dev": "npm run open -s & parallelshell 'npm run live-reload -s' 'npm run serve -s' 'npm run watch -s'"
},
"browserify": {
"transform": [
"babelify"
]
},
"dependencies": {
"babelify": "^6.1.2",
"webworkify": "^1.0.2"
},
"devDependencies": {
"browserify": "^8.1.1",
"exorcist": "^0.4.0",
"http-server": "^0.7.4",
"jshint": "^2.5.11",
"live-reload": "^0.2.0",
"mocha": "^2.1.0",
"opener": "^1.4.0",
"parallelshell": "^1.0.3",
"uglify-js": "^2.4.23",
"watchify": "^2.2.1"
}
}

View file

@@ -0,0 +1,84 @@
/*
* simple ABR Controller
*/
import Event from '../events';
class AbrController {
constructor(hls) {
this.hls = hls;
this.lastfetchlevel = 0;
this._autoLevelCapping = -1;
this._nextAutoLevel = -1;
this.onflp = this.onFragmentLoadProgress.bind(this);
hls.on(Event.FRAG_LOAD_PROGRESS, this.onflp);
}
destroy() {
this.hls.off(Event.FRAG_LOAD_PROGRESS, this.onflp);
}
onFragmentLoadProgress(event, data) {
var stats = data.stats;
if (stats.aborted === undefined) {
this.lastfetchduration = (performance.now() - stats.trequest) / 1000;
this.lastfetchlevel = data.frag.level;
this.lastbw = (stats.loaded * 8) / this.lastfetchduration;
//console.log(`fetchDuration:${this.lastfetchduration},bw:${(this.lastbw/1000).toFixed(0)}/${stats.aborted}`);
}
}
/** Return the capping/max level value that could be used by automatic level selection algorithm **/
get autoLevelCapping() {
return this._autoLevelCapping;
}
/** set the capping/max level value that could be used by automatic level selection algorithm **/
set autoLevelCapping(newLevel) {
this._autoLevelCapping = newLevel;
}
get nextAutoLevel() {
var lastbw = this.lastbw, hls = this.hls,adjustedbw, i, maxAutoLevel;
if (this._autoLevelCapping === -1) {
maxAutoLevel = hls.levels.length - 1;
} else {
maxAutoLevel = this._autoLevelCapping;
}
if (this._nextAutoLevel !== -1) {
var nextLevel = Math.min(this._nextAutoLevel,maxAutoLevel);
if (nextLevel === this.lastfetchlevel) {
this._nextAutoLevel = -1;
} else {
return nextLevel;
}
}
// follow algorithm captured from stagefright :
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
// Pick the highest bandwidth stream below or equal to estimated bandwidth.
for (i = 0; i <= maxAutoLevel; i++) {
// consider only 80% of the available bandwidth, but if we are switching up,
// be even more conservative (70%) to avoid overestimating and immediately
// switching back.
if (i <= this.lastfetchlevel) {
adjustedbw = 0.8 * lastbw;
} else {
adjustedbw = 0.7 * lastbw;
}
if (adjustedbw < hls.levels[i].bitrate) {
return Math.max(0, i - 1);
}
}
return i - 1;
}
set nextAutoLevel(nextLevel) {
this._nextAutoLevel = nextLevel;
}
}
export default AbrController;

View file

@@ -0,0 +1,49 @@
/*
* FPS Controller
*/
import Event from '../events';
import {logger} from '../utils/logger';
class FPSController {
constructor(hls) {
this.hls = hls;
this.timer = setInterval(this.checkFPS, hls.config.fpsDroppedMonitoringPeriod);
}
destroy() {
if (this.timer) {
clearInterval(this.timer);
}
}
checkFPS() {
var v = this.hls.video;
if (v) {
var decodedFrames = v.webkitDecodedFrameCount, droppedFrames = v.webkitDroppedFrameCount, currentTime = new Date();
if (decodedFrames) {
if (this.lastTime) {
var currentPeriod = currentTime - this.lastTime;
var currentDropped = droppedFrames - this.lastDroppedFrames;
var currentDecoded = decodedFrames - this.lastDecodedFrames;
var decodedFPS = 1000 * currentDecoded / currentPeriod;
var droppedFPS = 1000 * currentDropped / currentPeriod;
if (droppedFPS > 0) {
logger.log(`checkFPS : droppedFPS/decodedFPS:${droppedFPS.toFixed(1)}/${decodedFPS.toFixed(1)}`);
if (currentDropped > this.hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
logger.warn('drop FPS ratio greater than max allowed value');
this.hls.trigger(Event.FPS_DROP, {currentDropped: currentDropped, currentDecoded: currentDecoded, totalDroppedFrames: droppedFrames});
}
}
}
this.lastTime = currentTime;
this.lastDroppedFrames = droppedFrames;
this.lastDecodedFrames = decodedFrames;
}
}
}
}
export default FPSController;

View file

@@ -0,0 +1,254 @@
/*
* Level Controller
*/
import Event from '../events';
import {logger} from '../utils/logger';
import {ErrorTypes, ErrorDetails} from '../errors';
class LevelController {
constructor(hls) {
this.hls = hls;
this.onml = this.onManifestLoaded.bind(this);
this.onll = this.onLevelLoaded.bind(this);
this.onerr = this.onError.bind(this);
this.ontick = this.tick.bind(this);
hls.on(Event.MANIFEST_LOADED, this.onml);
hls.on(Event.LEVEL_LOADED, this.onll);
hls.on(Event.ERROR, this.onerr);
this._manualLevel = this._autoLevelCapping = -1;
}
destroy() {
var hls = this.hls;
hls.off(Event.MANIFEST_LOADED, this.onml);
hls.off(Event.LEVEL_LOADED, this.onll);
hls.off(Event.ERROR, this.onerr);
if (this.timer) {
clearInterval(this.timer);
}
this._manualLevel = -1;
}
onManifestLoaded(event, data) {
var levels0 = [], levels = [], bitrateStart, i, bitrateSet = {}, videoCodecFound = false, audioCodecFound = false;
// regroup redundant level together
data.levels.forEach(level => {
if(level.videoCodec) {
videoCodecFound = true;
}
if(level.audioCodec) {
audioCodecFound = true;
}
var redundantLevelId = bitrateSet[level.bitrate];
if (redundantLevelId === undefined) {
bitrateSet[level.bitrate] = levels0.length;
level.url = [level.url];
level.urlId = 0;
levels0.push(level);
} else {
levels0[redundantLevelId].url.push(level.url);
}
});
// remove audio-only level if we also have levels with audio+video codecs signalled
if(videoCodecFound && audioCodecFound) {
levels0.forEach(level => {
if(level.videoCodec) {
levels.push(level);
}
});
} else {
levels = levels0;
}
// only keep level with supported audio/video codecs
levels0 = levels0.filter(function(level) {
var checkSupported = function(codec) { return MediaSource.isTypeSupported(`video/mp4;codecs=${codec}`);};
var audioCodec = level.audioCodec, videoCodec = level.videoCodec;
return ((audioCodec && checkSupported(audioCodec)) || !audioCodec) &&
((videoCodec && checkSupported(videoCodec)) || !videoCodec);
});
// start bitrate is the first bitrate of the manifest
bitrateStart = levels[0].bitrate;
// sort level on bitrate
levels.sort(function (a, b) {
return a.bitrate - b.bitrate;
});
this._levels = levels;
// find index of first level in sorted levels
for (i = 0; i < levels.length; i++) {
if (levels[i].bitrate === bitrateStart) {
this._firstLevel = i;
logger.log(`manifest loaded,${levels.length} level(s) found, first bitrate:${bitrateStart}`);
break;
}
}
this.hls.trigger(Event.MANIFEST_PARSED, {levels: this._levels, firstLevel: this._firstLevel, stats: data.stats});
return;
}
get levels() {
return this._levels;
}
get level() {
return this._level;
}
set level(newLevel) {
if (this._level !== newLevel || this._levels[newLevel].details === undefined) {
this.setLevelInternal(newLevel);
}
}
setLevelInternal(newLevel) {
// check if level idx is valid
if (newLevel >= 0 && newLevel < this._levels.length) {
// stopping live reloading timer if any
if (this.timer) {
clearInterval(this.timer);
this.timer = null;
}
this._level = newLevel;
logger.log(`switching to level ${newLevel}`);
this.hls.trigger(Event.LEVEL_SWITCH, {level: newLevel});
var level = this._levels[newLevel];
// check if we need to load playlist for this level
if (level.details === undefined || level.details.live === true) {
// level not retrieved yet, or live playlist we need to (re)load it
logger.log(`(re)loading playlist for level ${newLevel}`);
var urlId = level.urlId;
this.hls.trigger(Event.LEVEL_LOADING, {url: level.url[urlId], level: newLevel, id: urlId});
}
} else {
// invalid level id given, trigger error
this.hls.trigger(Event.ERROR, {type : ErrorTypes.OTHER_ERROR, details: ErrorDetails.LEVEL_SWITCH_ERROR, level: newLevel, fatal: false, reason: 'invalid level idx'});
}
}
get manualLevel() {
return this._manualLevel;
}
set manualLevel(newLevel) {
this._manualLevel = newLevel;
if (newLevel !== -1) {
this.level = newLevel;
}
}
get firstLevel() {
return this._firstLevel;
}
set firstLevel(newLevel) {
this._firstLevel = newLevel;
}
get startLevel() {
if (this._startLevel === undefined) {
return this._firstLevel;
} else {
return this._startLevel;
}
}
set startLevel(newLevel) {
this._startLevel = newLevel;
}
onError(event, data) {
if(data.fatal) {
return;
}
var details = data.details, hls = this.hls, levelId, level;
// try to recover not fatal errors
switch(details) {
case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT:
case ErrorDetails.FRAG_LOOP_LOADING_ERROR:
case ErrorDetails.KEY_LOAD_ERROR:
case ErrorDetails.KEY_LOAD_TIMEOUT:
levelId = data.frag.level;
break;
case ErrorDetails.LEVEL_LOAD_ERROR:
case ErrorDetails.LEVEL_LOAD_TIMEOUT:
levelId = data.level;
break;
default:
break;
}
/* try to switch to a redundant stream if any available.
* if no redundant stream available, emergency switch down (if in auto mode and current level not 0)
* otherwise, we cannot recover this network error ....
*/
if (levelId !== undefined) {
level = this._levels[levelId];
if (level.urlId < (level.url.length - 1)) {
level.urlId++;
level.details = undefined;
logger.warn(`level controller,${details} for level ${levelId}: switching to redundant stream id ${level.urlId}`);
} else {
// we could try to recover if in auto mode and current level not lowest level (0)
let recoverable = ((this._manualLevel === -1) && levelId);
if (recoverable) {
logger.warn(`level controller,${details}: emergency switch-down for next fragment`);
hls.abrController.nextAutoLevel = 0;
} else if(level && level.details && level.details.live) {
logger.warn(`level controller,${details} on live stream, discard`);
} else {
logger.error(`cannot recover ${details} error`);
this._level = undefined;
// stopping live reloading timer if any
if (this.timer) {
clearInterval(this.timer);
this.timer = null;
}
// redispatch same error but with fatal set to true
data.fatal = true;
hls.trigger(event, data);
}
}
}
}
onLevelLoaded(event, data) {
// check if current playlist is a live playlist
if (data.details.live && !this.timer) {
// if live playlist we will have to reload it periodically
// set reload period to playlist target duration
this.timer = setInterval(this.ontick, 1000 * data.details.targetduration);
}
if (!data.details.live && this.timer) {
// playlist is not live and timer is armed : stopping it
clearInterval(this.timer);
this.timer = null;
}
}
tick() {
var levelId = this._level;
if (levelId !== undefined) {
var level = this._levels[levelId], urlId = level.urlId;
this.hls.trigger(Event.LEVEL_LOADING, {url: level.url[urlId], level: levelId, id: urlId});
}
}
nextLoadLevel() {
if (this._manualLevel !== -1) {
return this._manualLevel;
} else {
return this.hls.abrController.nextAutoLevel;
}
}
}
export default LevelController;

File diff suppressed because it is too large

View file

@@ -0,0 +1,205 @@
/*
*
* This file contains an adaptation of the AES decryption algorithm
* from the Standford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
class AES {
/**
* Schedule out an AES key for both encryption and decryption. This
* is a low-level class. Use a cipher mode to do bulk encryption.
*
* @constructor
* @param key {Array} The key as an array of 4, 6 or 8 words.
*/
constructor(key) {
/**
* The expanded S-box and inverse S-box tables. These will be computed
* on the client so that we don't have to send them down the wire.
*
* There are two tables, _tables[0] is for encryption and
* _tables[1] is for decryption.
*
* The first 4 sub-tables are the expanded S-box with MixColumns. The
* last (_tables[01][4]) is the S-box itself.
*
* @private
*/
this._tables = [[[],[],[],[],[]],[[],[],[],[],[]]];
this._precompute();
var i, j, tmp,
encKey, decKey,
sbox = this._tables[0][4], decTable = this._tables[1],
keyLen = key.length, rcon = 1;
if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
throw new Error('Invalid aes key size=' + keyLen);
}
encKey = key.slice(0);
decKey = [];
this._key = [encKey, decKey];
// schedule encryption keys
for (i = keyLen; i < 4 * keyLen + 28; i++) {
tmp = encKey[i-1];
// apply sbox
if (i%keyLen === 0 || (keyLen === 8 && i%keyLen === 4)) {
tmp = sbox[tmp>>>24]<<24 ^ sbox[tmp>>16&255]<<16 ^ sbox[tmp>>8&255]<<8 ^ sbox[tmp&255];
// shift rows and add rcon
if (i%keyLen === 0) {
tmp = tmp<<8 ^ tmp>>>24 ^ rcon<<24;
rcon = rcon<<1 ^ (rcon>>7)*283;
}
}
encKey[i] = encKey[i-keyLen] ^ tmp;
}
// schedule decryption keys
for (j = 0; i; j++, i--) {
tmp = encKey[j&3 ? i : i - 4];
if (i<=4 || j<4) {
decKey[j] = tmp;
} else {
decKey[j] = decTable[0][sbox[tmp>>>24 ]] ^
decTable[1][sbox[tmp>>16 & 255]] ^
decTable[2][sbox[tmp>>8 & 255]] ^
decTable[3][sbox[tmp & 255]];
}
}
}
/**
* Expand the S-box tables.
*
* @private
*/
_precompute() {
var encTable = this._tables[0], decTable = this._tables[1],
sbox = encTable[4], sboxInv = decTable[4],
i, x, xInv, d=[], th=[], x2, x4, x8, s, tEnc, tDec;
// Compute double and third tables
for (i = 0; i < 256; i++) {
th[( d[i] = i<<1 ^ (i>>7)*283 )^i]=i;
}
for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
// Compute sbox
s = xInv ^ xInv<<1 ^ xInv<<2 ^ xInv<<3 ^ xInv<<4;
s = s>>8 ^ s&255 ^ 99;
sbox[x] = s;
sboxInv[s] = x;
// Compute MixColumns
x8 = d[x4 = d[x2 = d[x]]];
tDec = x8*0x1010101 ^ x4*0x10001 ^ x2*0x101 ^ x*0x1010100;
tEnc = d[s]*0x101 ^ s*0x1010100;
for (i = 0; i < 4; i++) {
encTable[i][x] = tEnc = tEnc<<24 ^ tEnc>>>8;
decTable[i][s] = tDec = tDec<<24 ^ tDec>>>8;
}
}
// Compactify. Considerable speedup on Firefox.
for (i = 0; i < 5; i++) {
encTable[i] = encTable[i].slice(0);
decTable[i] = decTable[i].slice(0);
}
}
/**
* Decrypt 16 bytes, specified as four 32-bit words.
* @param encrypted0 {number} the first word to decrypt
* @param encrypted1 {number} the second word to decrypt
* @param encrypted2 {number} the third word to decrypt
* @param encrypted3 {number} the fourth word to decrypt
* @param out {Int32Array} the array to write the decrypted words
* into
* @param offset {number} the offset into the output array to start
* writing results
* @return {Array} The plaintext.
*/
decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
var key = this._key[1],
// state variables a,b,c,d are loaded with pre-whitened data
a = encrypted0 ^ key[0],
b = encrypted3 ^ key[1],
c = encrypted2 ^ key[2],
d = encrypted1 ^ key[3],
a2, b2, c2,
nInnerRounds = key.length / 4 - 2, // key.length === 2 ?
i,
kIndex = 4,
table = this._tables[1],
// load up the tables
table0 = table[0],
table1 = table[1],
table2 = table[2],
table3 = table[3],
sbox = table[4];
// Inner rounds. Cribbed from OpenSSL.
for (i = 0; i < nInnerRounds; i++) {
a2 = table0[a>>>24] ^ table1[b>>16 & 255] ^ table2[c>>8 & 255] ^ table3[d & 255] ^ key[kIndex];
b2 = table0[b>>>24] ^ table1[c>>16 & 255] ^ table2[d>>8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
c2 = table0[c>>>24] ^ table1[d>>16 & 255] ^ table2[a>>8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
d = table0[d>>>24] ^ table1[a>>16 & 255] ^ table2[b>>8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
kIndex += 4;
a=a2; b=b2; c=c2;
}
// Last round.
for (i = 0; i < 4; i++) {
out[(3 & -i) + offset] =
sbox[a>>>24 ]<<24 ^
sbox[b>>16 & 255]<<16 ^
sbox[c>>8 & 255]<<8 ^
sbox[d & 255] ^
key[kIndex++];
a2=a; a=b; b=c; c=d; d=a2;
}
}
}
export default AES;

View file

@@ -0,0 +1,167 @@
/*
*
* This file contains an adaptation of the AES decryption algorithm
* from the Standford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
import AES from './aes';
class AES128Decrypter {
constructor(key, initVector) {
this.key = key;
this.iv = initVector;
}
/**
* Convert network-order (big-endian) bytes into their little-endian
* representation.
*/
ntoh(word) {
return (word << 24) |
((word & 0xff00) << 8) |
((word & 0xff0000) >> 8) |
(word >>> 24);
}
/**
* Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
* @param encrypted {Uint8Array} the encrypted bytes
* @param key {Uint32Array} the bytes of the decryption key
* @param initVector {Uint32Array} the initialization vector (IV) to
* use for the first round of CBC.
* @return {Uint8Array} the decrypted bytes
*
* @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
* @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
* @see https://tools.ietf.org/html/rfc2315
*/
doDecrypt(encrypted, key, initVector) {
var
// word-level access to the encrypted bytes
encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2),
decipher = new AES(Array.prototype.slice.call(key)),
// byte and word-level access for the decrypted output
decrypted = new Uint8Array(encrypted.byteLength),
decrypted32 = new Int32Array(decrypted.buffer),
// temporary variables for working with the IV, encrypted, and
// decrypted data
init0, init1, init2, init3,
encrypted0, encrypted1, encrypted2, encrypted3,
// iteration variable
wordIx;
// pull out the words of the IV to ensure we don't modify the
// passed-in reference and easier access
init0 = ~~initVector[0];
init1 = ~~initVector[1];
init2 = ~~initVector[2];
init3 = ~~initVector[3];
// decrypt four word sequences, applying cipher-block chaining (CBC)
// to each decrypted block
for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
// convert big-endian (network order) words into little-endian
// (javascript order)
encrypted0 = ~~this.ntoh(encrypted32[wordIx]);
encrypted1 = ~~this.ntoh(encrypted32[wordIx + 1]);
encrypted2 = ~~this.ntoh(encrypted32[wordIx + 2]);
encrypted3 = ~~this.ntoh(encrypted32[wordIx + 3]);
// decrypt the block
decipher.decrypt(encrypted0,
encrypted1,
encrypted2,
encrypted3,
decrypted32,
wordIx);
// XOR with the IV, and restore network byte-order to obtain the
// plaintext
decrypted32[wordIx] = this.ntoh(decrypted32[wordIx] ^ init0);
decrypted32[wordIx + 1] = this.ntoh(decrypted32[wordIx + 1] ^ init1);
decrypted32[wordIx + 2] = this.ntoh(decrypted32[wordIx + 2] ^ init2);
decrypted32[wordIx + 3] = this.ntoh(decrypted32[wordIx + 3] ^ init3);
// setup the IV for the next round
init0 = encrypted0;
init1 = encrypted1;
init2 = encrypted2;
init3 = encrypted3;
}
return decrypted;
}
localDecript(encrypted, key, initVector, decrypted) {
var bytes = this.doDecrypt(encrypted,
key,
initVector);
decrypted.set(bytes, encrypted.byteOffset);
}
decrypt(encrypted) {
var
step = 4 * 8000,
//encrypted32 = new Int32Array(encrypted.buffer),
encrypted32 = new Int32Array(encrypted),
decrypted = new Uint8Array(encrypted.byteLength),
i = 0;
// split up the encryption job and do the individual chunks asynchronously
var key = this.key;
var initVector = this.iv;
this.localDecript(encrypted32.subarray(i, i + step), key, initVector, decrypted);
for (i = step; i < encrypted32.length; i += step) {
initVector = new Uint32Array([
this.ntoh(encrypted32[i - 4]),
this.ntoh(encrypted32[i - 3]),
this.ntoh(encrypted32[i - 2]),
this.ntoh(encrypted32[i - 1])
]);
this.localDecript(encrypted32.subarray(i, i + step), key, initVector, decrypted);
}
return decrypted;
}
}
export default AES128Decrypter;

View file

@@ -0,0 +1,86 @@
/*
* AES128 decryption.
*/
import AES128Decrypter from './aes128-decrypter';
import {ErrorTypes, ErrorDetails} from '../errors';
import {logger} from '../utils/logger';
import Event from '../events';
class Decrypter {
constructor(hls) {
this.hls = hls;
try {
const browserCrypto = window ? window.crypto : crypto;
this.subtle = browserCrypto.subtle || browserCrypto.webkitSubtle;
this.disableWebCrypto = !this.subtle;
} catch (e) {
this.disableWebCrypto = true;
}
}
destroy() {
}
decrypt(data, key, iv, callback) {
if (this.disableWebCrypto && this.hls.config.enableSoftwareAES) {
this.decryptBySoftware(data, key, iv, callback);
} else {
this.decryptByWebCrypto(data, key, iv, callback);
}
}
decryptByWebCrypto(data, key, iv, callback) {
logger.log('decrypting by WebCrypto API');
this.subtle.importKey('raw', key, { name : 'AES-CBC', length : 128 }, false, ['decrypt']).
then((importedKey) => {
this.subtle.decrypt({ name : 'AES-CBC', iv : iv.buffer }, importedKey, data).
then(callback).
catch ((err) => {
this.onWebCryptoError(err, data, key, iv, callback);
});
}).
catch ((err) => {
this.onWebCryptoError(err, data, key, iv, callback);
});
}
decryptBySoftware(data, key8, iv8, callback) {
logger.log('decrypting by JavaScript Implementation');
var view = new DataView(key8.buffer);
var key = new Uint32Array([
view.getUint32(0),
view.getUint32(4),
view.getUint32(8),
view.getUint32(12)
]);
view = new DataView(iv8.buffer);
var iv = new Uint32Array([
view.getUint32(0),
view.getUint32(4),
view.getUint32(8),
view.getUint32(12)
]);
var decrypter = new AES128Decrypter(key, iv);
callback(decrypter.decrypt(data).buffer);
}
onWebCryptoError(err, data, key, iv, callback) {
if (this.hls.config.enableSoftwareAES) {
logger.log('disabling to use WebCrypto API');
this.disableWebCrypto = true;
this.decryptBySoftware(data, key, iv, callback);
}
else {
logger.error(`decrypting error : ${err.message}`);
this.hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details : ErrorDetails.FRAG_DECRYPT_ERROR, fatal : true, reason : err.message});
}
}
}
export default Decrypter;

View file

@@ -0,0 +1,208 @@
/**
* AAC demuxer
*/
import {logger} from '../utils/logger';
import ID3 from '../demux/id3';
import {ErrorTypes, ErrorDetails} from '../errors';
import Event from '../events';
class AACDemuxer {
constructor(observer,remuxerClass) {
this.observer = observer;
this.remuxerClass = remuxerClass;
this.remuxer = new this.remuxerClass(observer);
this._aacTrack = {type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
}
static probe(data) {
// check if data contains an ID3 timestamp and an ADTS sync word
var id3 = new ID3(data), adtsStartOffset,len;
if(id3.hasTimeStamp) {
// look for ADTS header (0xFFFx)
for (adtsStartOffset = id3.length, len = data.length; adtsStartOffset < len - 1; adtsStartOffset++) {
if ((data[adtsStartOffset] === 0xff) && (data[adtsStartOffset+1] & 0xf0) === 0xf0) {
//logger.log('ADTS sync word found !');
return true;
}
}
}
return false;
}
// feed incoming data to the front of the parsing pipeline
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
var id3 = new ID3(data), adtsStartOffset,len, track = this._aacTrack, pts = id3.timeStamp, config, nbSamples,adtsFrameSize,adtsHeaderLen,stamp,aacSample;
// look for ADTS header (0xFFFx)
for (adtsStartOffset = id3.length, len = data.length; adtsStartOffset < len - 1; adtsStartOffset++) {
if ((data[adtsStartOffset] === 0xff) && (data[adtsStartOffset+1] & 0xf0) === 0xf0) {
break;
}
}
if (!track.audiosamplerate) {
config = this._ADTStoAudioConfig(data, adtsStartOffset, audioCodec);
track.config = config.config;
track.audiosamplerate = config.samplerate;
track.channelCount = config.channelCount;
track.codec = config.codec;
track.timescale = this.remuxer.timescale;
track.duration = this.remuxer.timescale * duration;
logger.log(`parsed codec:${track.codec},rate:${config.samplerate},nb channel:${config.channelCount}`);
}
nbSamples = 0;
while ((adtsStartOffset + 5) < len) {
// retrieve frame size
adtsFrameSize = ((data[adtsStartOffset + 3] & 0x03) << 11);
// byte 4
adtsFrameSize |= (data[adtsStartOffset + 4] << 3);
// byte 5
adtsFrameSize |= ((data[adtsStartOffset + 5] & 0xE0) >>> 5);
adtsHeaderLen = (!!(data[adtsStartOffset + 1] & 0x01) ? 7 : 9);
adtsFrameSize -= adtsHeaderLen;
stamp = Math.round(90*pts + nbSamples * 1024 * 90000 / track.audiosamplerate);
//stamp = pes.pts;
//console.log('AAC frame, offset/length/pts:' + (adtsStartOffset+7) + '/' + adtsFrameSize + '/' + stamp.toFixed(0));
if ((adtsFrameSize > 0) && ((adtsStartOffset + adtsHeaderLen + adtsFrameSize) <= len)) {
aacSample = {unit: data.subarray(adtsStartOffset + adtsHeaderLen, adtsStartOffset + adtsHeaderLen + adtsFrameSize), pts: stamp, dts: stamp};
track.samples.push(aacSample);
track.len += adtsFrameSize;
adtsStartOffset += adtsFrameSize + adtsHeaderLen;
nbSamples++;
// look for ADTS header (0xFFFx)
for ( ; adtsStartOffset < (len - 1); adtsStartOffset++) {
if ((data[adtsStartOffset] === 0xff) && ((data[adtsStartOffset + 1] & 0xf0) === 0xf0)) {
break;
}
}
} else {
break;
}
}
this.remuxer.remux(this._aacTrack,{samples : []}, {samples : []}, timeOffset);
}
_ADTStoAudioConfig(data, offset, audioCodec) {
var adtsObjectType, // :int
adtsSampleingIndex, // :int
adtsExtensionSampleingIndex, // :int
adtsChanelConfig, // :int
config,
userAgent = navigator.userAgent.toLowerCase(),
adtsSampleingRates = [
96000, 88200,
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350];
// byte 2
adtsObjectType = ((data[offset + 2] & 0xC0) >>> 6) + 1;
adtsSampleingIndex = ((data[offset + 2] & 0x3C) >>> 2);
if(adtsSampleingIndex > adtsSampleingRates.length-1) {
this.observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSampleingIndex}`});
return;
}
adtsChanelConfig = ((data[offset + 2] & 0x01) << 2);
// byte 3
adtsChanelConfig |= ((data[offset + 3] & 0xC0) >>> 6);
logger.log(`manifest codec:${audioCodec},ADTS data:type:${adtsObjectType},sampleingIndex:${adtsSampleingIndex}[${adtsSampleingRates[adtsSampleingIndex]}Hz],channelConfig:${adtsChanelConfig}`);
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if (userAgent.indexOf('firefox') !== -1) {
if (adtsSampleingIndex >= 6) {
adtsObjectType = 5;
config = new Array(4);
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to subtract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
// Android : always use AAC
} else if (userAgent.indexOf('android') !== -1) {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
} else {
/* for other browsers (chrome ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/
adtsObjectType = 5;
config = new Array(4);
// if (manifest codec is HE-AAC) OR (manifest codec not specified AND frequency less than 24kHz)
if ((audioCodec && audioCodec.indexOf('mp4a.40.5') !== -1) || (!audioCodec && adtsSampleingIndex >= 6)) {
// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to subtract 3)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
// if (manifest codec is AAC) AND (frequency less than 24kHz OR nb channel is 1)
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSampleingIndex >= 6 || adtsChanelConfig === 1)) {
adtsObjectType = 2;
config = new Array(2);
}
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
}
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explicitly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specifc Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
*/
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0] = adtsObjectType << 3;
// samplingFrequencyIndex
config[0] |= (adtsSampleingIndex & 0x0E) >> 1;
config[1] |= (adtsSampleingIndex & 0x01) << 7;
// channelConfiguration
config[1] |= adtsChanelConfig << 3;
if (adtsObjectType === 5) {
// adtsExtensionSampleingIndex
config[1] |= (adtsExtensionSampleingIndex & 0x0E) >> 1;
config[2] = (adtsExtensionSampleingIndex & 0x01) << 7;
// adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2] |= 2 << 2;
config[3] = 0;
}
return {config: config, samplerate: adtsSampleingRates[adtsSampleingIndex], channelCount: adtsChanelConfig, codec: ('mp4a.40.' + adtsObjectType)};
}
destroy() {
}
}
export default AACDemuxer;

View file

@@ -0,0 +1,41 @@
/* inline demuxer.
* probe fragments and instantiate appropriate demuxer depending on content type (TSDemuxer, AACDemuxer, ...)
*/
import Event from '../events';
import {ErrorTypes, ErrorDetails} from '../errors';
import AACDemuxer from '../demux/aacdemuxer';
import TSDemuxer from '../demux/tsdemuxer';
class DemuxerInline {
constructor(hls,remuxer) {
this.hls = hls;
this.remuxer = remuxer;
}
destroy() {
var demuxer = this.demuxer;
if (demuxer) {
demuxer.destroy();
}
}
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
var demuxer = this.demuxer;
if (!demuxer) {
// probe for content type
if (TSDemuxer.probe(data)) {
demuxer = this.demuxer = new TSDemuxer(this.hls,this.remuxer);
} else if(AACDemuxer.probe(data)) {
demuxer = this.demuxer = new AACDemuxer(this.hls,this.remuxer);
} else {
this.hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: 'no demux matching with content found'});
return;
}
}
demuxer.push(data,audioCodec,videoCodec,timeOffset,cc,level,sn,duration);
}
}
export default DemuxerInline;

View file

@@ -0,0 +1,78 @@
/* demuxer web worker.
* - listen to worker message, and trigger DemuxerInline upon reception of Fragments.
* - provides MP4 Boxes back to main thread using [transferable objects](https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast) in order to minimize message passing overhead.
*/
import DemuxerInline from '../demux/demuxer-inline';
import Event from '../events';
import EventEmitter from 'events';
import MP4Remuxer from '../remux/mp4-remuxer';
var DemuxerWorker = function (self) {
// observer setup
var observer = new EventEmitter();
observer.trigger = function trigger (event, ...data) {
observer.emit(event, event, ...data);
};
observer.off = function off (event, ...data) {
observer.removeListener(event, ...data);
};
self.addEventListener('message', function (ev) {
//console.log('demuxer cmd:' + ev.data.cmd);
switch (ev.data.cmd) {
case 'init':
self.demuxer = new DemuxerInline(observer,MP4Remuxer);
break;
case 'demux':
var data = ev.data;
self.demuxer.push(new Uint8Array(data.data), data.audioCodec, data.videoCodec, data.timeOffset, data.cc, data.level, data.sn, data.duration);
break;
default:
break;
}
});
// listen to events triggered by TS Demuxer
observer.on(Event.FRAG_PARSING_INIT_SEGMENT, function(ev, data) {
var objData = {event: ev};
var objTransferable = [];
if (data.audioCodec) {
objData.audioCodec = data.audioCodec;
objData.audioMoov = data.audioMoov.buffer;
objData.audioChannelCount = data.audioChannelCount;
objTransferable.push(objData.audioMoov);
}
if (data.videoCodec) {
objData.videoCodec = data.videoCodec;
objData.videoMoov = data.videoMoov.buffer;
objData.videoWidth = data.videoWidth;
objData.videoHeight = data.videoHeight;
objTransferable.push(objData.videoMoov);
}
// pass moov as transferable object (no copy)
self.postMessage(objData,objTransferable);
});
observer.on(Event.FRAG_PARSING_DATA, function(ev, data) {
var objData = {event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, moof: data.moof.buffer, mdat: data.mdat.buffer, nb: data.nb};
// pass moof/mdat data as transferable object (no copy)
self.postMessage(objData, [objData.moof, objData.mdat]);
});
observer.on(Event.FRAG_PARSED, function(event) {
self.postMessage({event: event});
});
observer.on(Event.ERROR, function(event, data) {
self.postMessage({event: event, data: data});
});
observer.on(Event.FRAG_PARSING_METADATA, function(event, data) {
var objData = {event: event, samples: data.samples};
self.postMessage(objData);
});
};
export default DemuxerWorker;

View file

@@ -0,0 +1,112 @@
import Event from '../events';
import DemuxerInline from '../demux/demuxer-inline';
import DemuxerWorker from '../demux/demuxer-worker';
import {logger} from '../utils/logger';
import MP4Remuxer from '../remux/mp4-remuxer';
import Decrypter from '../crypt/decrypter';
class Demuxer {
constructor(hls) {
this.hls = hls;
if (hls.config.enableWorker && (typeof(Worker) !== 'undefined')) {
logger.log('demuxing in webworker');
try {
var work = require('webworkify');
this.w = work(DemuxerWorker);
this.onwmsg = this.onWorkerMessage.bind(this);
this.w.addEventListener('message', this.onwmsg);
this.w.postMessage({cmd: 'init'});
} catch(err) {
logger.error('error while initializing DemuxerWorker, fallback on DemuxerInline');
this.demuxer = new DemuxerInline(hls,MP4Remuxer);
}
} else {
this.demuxer = new DemuxerInline(hls,MP4Remuxer);
}
this.demuxInitialized = true;
}
destroy() {
if (this.w) {
this.w.removeEventListener('message', this.onwmsg);
this.w.terminate();
this.w = null;
} else {
this.demuxer.destroy();
this.demuxer = null;
}
if (this.decrypter) {
this.decrypter.destroy();
this.decrypter = null;
}
}
pushDecrypted(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
if (this.w) {
// post fragment payload as transferable objects (no copy)
this.w.postMessage({cmd: 'demux', data: data, audioCodec: audioCodec, videoCodec: videoCodec, timeOffset: timeOffset, cc: cc, level: level, sn : sn, duration: duration}, [data]);
} else {
this.demuxer.push(new Uint8Array(data), audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
}
}
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration, decryptdata) {
if ((data.byteLength > 0) && (decryptdata != null) && (decryptdata.key != null) && (decryptdata.method === 'AES-128')) {
if (this.decrypter == null) {
this.decrypter = new Decrypter(this.hls);
}
var localthis = this;
this.decrypter.decrypt(data, decryptdata.key, decryptdata.iv, function(decryptedData){
localthis.pushDecrypted(decryptedData, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
});
} else {
this.pushDecrypted(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);
}
}
onWorkerMessage(ev) {
//console.log('onWorkerMessage:' + ev.data.event);
switch(ev.data.event) {
case Event.FRAG_PARSING_INIT_SEGMENT:
var obj = {};
if (ev.data.audioMoov) {
obj.audioMoov = new Uint8Array(ev.data.audioMoov);
obj.audioCodec = ev.data.audioCodec;
obj.audioChannelCount = ev.data.audioChannelCount;
}
if (ev.data.videoMoov) {
obj.videoMoov = new Uint8Array(ev.data.videoMoov);
obj.videoCodec = ev.data.videoCodec;
obj.videoWidth = ev.data.videoWidth;
obj.videoHeight = ev.data.videoHeight;
}
this.hls.trigger(Event.FRAG_PARSING_INIT_SEGMENT, obj);
break;
case Event.FRAG_PARSING_DATA:
this.hls.trigger(Event.FRAG_PARSING_DATA,{
moof: new Uint8Array(ev.data.moof),
mdat: new Uint8Array(ev.data.mdat),
startPTS: ev.data.startPTS,
endPTS: ev.data.endPTS,
startDTS: ev.data.startDTS,
endDTS: ev.data.endDTS,
type: ev.data.type,
nb: ev.data.nb
});
break;
case Event.FRAG_PARSING_METADATA:
this.hls.trigger(Event.FRAG_PARSING_METADATA, {
samples: ev.data.samples
});
break;
default:
this.hls.trigger(ev.data.event, ev.data.data);
break;
}
}
}
export default Demuxer;

View file

@@ -0,0 +1,280 @@
/**
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
*/
import {logger} from '../utils/logger';
class ExpGolomb {
constructor(data) {
this.data = data;
// the number of bytes left to examine in this.data
this.bytesAvailable = this.data.byteLength;
// the current word being examined
this.word = 0; // :uint
// the number of bits left to examine in the current word
this.bitsAvailable = 0; // :uint
}
// ():void
loadWord() {
var
position = this.data.byteLength - this.bytesAvailable,
workingBytes = new Uint8Array(4),
availableBytes = Math.min(4, this.bytesAvailable);
if (availableBytes === 0) {
throw new Error('no bytes available');
}
workingBytes.set(this.data.subarray(position, position + availableBytes));
this.word = new DataView(workingBytes.buffer).getUint32(0);
// track the amount of this.data that has been processed
this.bitsAvailable = availableBytes * 8;
this.bytesAvailable -= availableBytes;
}
// (count:int):void
skipBits(count) {
var skipBytes; // :int
if (this.bitsAvailable > count) {
this.word <<= count;
this.bitsAvailable -= count;
} else {
count -= this.bitsAvailable;
skipBytes = count >> 3;
count -= (skipBytes >> 3);
this.bytesAvailable -= skipBytes;
this.loadWord();
this.word <<= count;
this.bitsAvailable -= count;
}
}
// (size:int):uint
readBits(size) {
var
bits = Math.min(this.bitsAvailable, size), // :uint
valu = this.word >>> (32 - bits); // :uint
if (size > 32) {
logger.error('Cannot read more than 32 bits at a time');
}
this.bitsAvailable -= bits;
if (this.bitsAvailable > 0) {
this.word <<= bits;
} else if (this.bytesAvailable > 0) {
this.loadWord();
}
bits = size - bits;
if (bits > 0) {
return valu << bits | this.readBits(bits);
} else {
return valu;
}
}
// ():uint
skipLZ() {
var leadingZeroCount; // :uint
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
if (0 !== (this.word & (0x80000000 >>> leadingZeroCount))) {
// the first bit of working word is 1
this.word <<= leadingZeroCount;
this.bitsAvailable -= leadingZeroCount;
return leadingZeroCount;
}
}
// we exhausted word and still have not found a 1
this.loadWord();
return leadingZeroCount + this.skipLZ();
}
// ():void
skipUEG() {
this.skipBits(1 + this.skipLZ());
}
// ():void
skipEG() {
this.skipBits(1 + this.skipLZ());
}
// ():uint
readUEG() {
var clz = this.skipLZ(); // :uint
return this.readBits(clz + 1) - 1;
}
// ():int
readEG() {
var valu = this.readUEG(); // :int
if (0x01 & valu) {
// the number is odd if the low order bit is set
return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
} else {
return -1 * (valu >>> 1); // divide by two then make it negative
}
}
// Some convenience functions
// :Boolean
readBoolean() {
return 1 === this.readBits(1);
}
// ():int
readUByte() {
return this.readBits(8);
}
/**
* Advance the ExpGolomb decoder past a scaling list. The scaling
* list is optionally transmitted as part of a sequence parameter
* set and is not relevant to transmuxing.
* @param count {number} the number of entries in this scaling list
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
*/
skipScalingList(count) {
var
lastScale = 8,
nextScale = 8,
j,
deltaScale;
for (j = 0; j < count; j++) {
if (nextScale !== 0) {
deltaScale = this.readEG();
nextScale = (lastScale + deltaScale + 256) % 256;
}
lastScale = (nextScale === 0) ? lastScale : nextScale;
}
}
/**
* Read a sequence parameter set and return some interesting video
* properties. A sequence parameter set is the H264 metadata that
* describes the properties of upcoming video frames.
* @param data {Uint8Array} the bytes of a sequence parameter set
* @return {object} an object with configuration parsed from the
* sequence parameter set, including the dimensions of the
* associated video frames.
*/
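// e.g. a plain 1280x720 stream typically carries pic_width_in_mbs_minus1 = 79,
// pic_height_in_map_units_minus1 = 44, frame_mbs_only_flag = 1 and no cropping,
// giving width = (79 + 1) * 16 = 1280 and height = (2 - 1) * (44 + 1) * 16 = 720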
readSPS() {
var
frameCropLeftOffset = 0,
frameCropRightOffset = 0,
frameCropTopOffset = 0,
frameCropBottomOffset = 0,
sarScale = 1,
profileIdc,profileCompat,levelIdc,
numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
picHeightInMapUnitsMinus1,
frameMbsOnlyFlag,
scalingListCount,
i;
this.readUByte();
profileIdc = this.readUByte(); // profile_idc
profileCompat = this.readBits(5); // constraint_set[0-4]_flag, u(5)
this.skipBits(3); // reserved_zero_3bits u(3),
levelIdc = this.readUByte(); //level_idc u(8)
this.skipUEG(); // seq_parameter_set_id
// some profiles have more optional data we don't need
if (profileIdc === 100 ||
profileIdc === 110 ||
profileIdc === 122 ||
profileIdc === 144) {
var chromaFormatIdc = this.readUEG();
if (chromaFormatIdc === 3) {
this.skipBits(1); // separate_colour_plane_flag
}
this.skipUEG(); // bit_depth_luma_minus8
this.skipUEG(); // bit_depth_chroma_minus8
this.skipBits(1); // qpprime_y_zero_transform_bypass_flag
if (this.readBoolean()) { // seq_scaling_matrix_present_flag
scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
for (i = 0; i < scalingListCount; i++) {
if (this.readBoolean()) { // seq_scaling_list_present_flag[ i ]
if (i < 6) {
this.skipScalingList(16);
} else {
this.skipScalingList(64);
}
}
}
}
}
this.skipUEG(); // log2_max_frame_num_minus4
var picOrderCntType = this.readUEG();
if (picOrderCntType === 0) {
this.readUEG(); //log2_max_pic_order_cnt_lsb_minus4
} else if (picOrderCntType === 1) {
this.skipBits(1); // delta_pic_order_always_zero_flag
this.skipEG(); // offset_for_non_ref_pic
this.skipEG(); // offset_for_top_to_bottom_field
numRefFramesInPicOrderCntCycle = this.readUEG();
for(i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
this.skipEG(); // offset_for_ref_frame[ i ]
}
}
this.skipUEG(); // max_num_ref_frames
this.skipBits(1); // gaps_in_frame_num_value_allowed_flag
picWidthInMbsMinus1 = this.readUEG();
picHeightInMapUnitsMinus1 = this.readUEG();
frameMbsOnlyFlag = this.readBits(1);
if (frameMbsOnlyFlag === 0) {
this.skipBits(1); // mb_adaptive_frame_field_flag
}
this.skipBits(1); // direct_8x8_inference_flag
if (this.readBoolean()) { // frame_cropping_flag
frameCropLeftOffset = this.readUEG();
frameCropRightOffset = this.readUEG();
frameCropTopOffset = this.readUEG();
frameCropBottomOffset = this.readUEG();
}
if (this.readBoolean()) {
// vui_parameters_present_flag
if (this.readBoolean()) {
// aspect_ratio_info_present_flag
let sarRatio;
const aspectRatioIdc = this.readUByte();
switch (aspectRatioIdc) {
//case 1: sarRatio = [1,1]; break;
case 2: sarRatio = [12,11]; break;
case 3: sarRatio = [10,11]; break;
case 4: sarRatio = [16,11]; break;
case 5: sarRatio = [40,33]; break;
case 6: sarRatio = [24,11]; break;
case 7: sarRatio = [20,11]; break;
case 8: sarRatio = [32,11]; break;
case 9: sarRatio = [80,33]; break;
case 10: sarRatio = [18,11]; break;
case 11: sarRatio = [15,11]; break;
case 12: sarRatio = [64,33]; break;
case 13: sarRatio = [160,99]; break;
case 14: sarRatio = [4,3]; break;
case 15: sarRatio = [3,2]; break;
case 16: sarRatio = [2,1]; break;
case 255: {
sarRatio = [this.readUByte() << 8 | this.readUByte(), this.readUByte() << 8 | this.readUByte()];
break;
}
}
if (sarRatio) {
sarScale = sarRatio[0] / sarRatio[1];
}
}
}
return {
width: (((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale,
height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - ((frameMbsOnlyFlag? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset))
};
}
readSliceType() {
// skip NALu type
this.readUByte();
// discard first_mb_in_slice
this.readUEG();
// return slice_type
return this.readUEG();
}
}
export default ExpGolomb;

View file

@ -0,0 +1,123 @@
/**
* ID3 parser
*/
import {logger} from '../utils/logger';
//import Hex from '../utils/hex';
class ID3 {
constructor(data) {
this._hasTimeStamp = false;
var offset = 0, byte1,byte2,byte3,byte4,tagSize,endPos,header,len;
do {
header = this.readUTF(data,offset,3);
offset+=3;
// first check for ID3 header
if (header === 'ID3') {
// skip 24 bits
offset += 3;
// retrieve tag(s) length
byte1 = data[offset++] & 0x7f;
byte2 = data[offset++] & 0x7f;
byte3 = data[offset++] & 0x7f;
byte4 = data[offset++] & 0x7f;
tagSize = (byte1 << 21) + (byte2 << 14) + (byte3 << 7) + byte4;
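// (ID3 sizes are "syncsafe": four 7-bit bytes, e.g. 0x00 0x00 0x02 0x01 -> (2 << 7) + 1 = 257 bytes)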
endPos = offset + tagSize;
//logger.log(`ID3 tag found, size/end: ${tagSize}/${endPos}`);
// read ID3 tags
this._parseID3Frames(data, offset,endPos);
offset = endPos;
} else if (header === '3DI') {
// http://id3.org/id3v2.4.0-structure chapter 3.4. ID3v2 footer
offset += 7;
logger.log(`3DI footer found, end: ${offset}`);
} else {
offset -= 3;
len = offset;
if (len) {
//logger.log(`ID3 len: ${len}`);
if (!this.hasTimeStamp) {
logger.warn('ID3 tag found, but no timestamp');
}
this._length = len;
}
return;
}
} while (true);
}
readUTF(data,start,len) {
var result = '',offset = start, end = start + len;
do {
result += String.fromCharCode(data[offset++]);
} while(offset < end);
return result;
}
_parseID3Frames(data,offset,endPos) {
var tagId,tagLen,tagStart,tagFlags,timestamp;
while(offset + 8 <= endPos) {
tagId = this.readUTF(data,offset,4);
offset +=4;
tagLen = (data[offset++] << 24) +
(data[offset++] << 16) +
(data[offset++] << 8) +
data[offset++];
tagFlags = (data[offset++] << 8) +
data[offset++];
tagStart = offset;
//logger.log("ID3 tag id:" + tagId);
switch(tagId) {
case 'PRIV':
//logger.log('parse frame:' + Hex.hexDump(data.subarray(offset,endPos)));
// owner should be "com.apple.streaming.transportStreamTimestamp"
if (this.readUTF(data,offset,44) === 'com.apple.streaming.transportStreamTimestamp') {
offset+=44;
// smelling even better ! we found the right descriptor
// skip null character (string end) + 3 first bytes
offset+= 4;
// timestamp is 33 bit expressed as a big-endian eight-octet number, with the upper 31 bits set to zero.
var pts33Bit = data[offset++] & 0x1;
this._hasTimeStamp = true;
timestamp = ((data[offset++] << 23) +
(data[offset++] << 15) +
(data[offset++] << 7) +
data[offset++]) /45;
if (pts33Bit) {
timestamp += 47721858.84; // 2^32 / 90
}
timestamp = Math.round(timestamp);
logger.trace(`ID3 timestamp found: ${timestamp}`);
this._timeStamp = timestamp;
}
break;
default:
break;
}
}
}
get hasTimeStamp() {
return this._hasTimeStamp;
}
get timeStamp() {
return this._timeStamp;
}
get length() {
return this._length;
}
}
export default ID3;

View file

@ -0,0 +1,665 @@
/**
* highly optimized TS demuxer:
* parse PAT, PMT
* extract PES packet from audio and video PIDs
* extract AVC/H264 NAL units and AAC/ADTS samples from PES packet
* trigger the remuxer upon parsing completion
* it also tries to work around audio codec switches (HE-AAC to AAC and vice versa) as best it can, without having to restart the MediaSource.
* it also controls the remuxing process:
* upon discontinuity or level switch detection, it also notifies the remuxer so that it can reset its state.
*/
import Event from '../events';
import ExpGolomb from './exp-golomb';
// import Hex from '../utils/hex';
import {logger} from '../utils/logger';
import {ErrorTypes, ErrorDetails} from '../errors';
class TSDemuxer {
constructor(observer,remuxerClass) {
this.observer = observer;
this.remuxerClass = remuxerClass;
this.lastCC = 0;
this.PES_TIMESCALE = 90000;
this.remuxer = new this.remuxerClass(observer);
}
static probe(data) {
// a TS fragment should contain at least 3 TS packets, a PAT, a PMT, and one PID, each starting with 0x47
if (data.length >= 3*188 && data[0] === 0x47 && data[188] === 0x47 && data[2*188] === 0x47) {
return true;
} else {
return false;
}
}
switchLevel() {
this.pmtParsed = false;
this._pmtId = -1;
this._avcTrack = {type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0};
this._aacTrack = {type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};
this._id3Track = {type: 'id3', id :-1, sequenceNumber: 0, samples : [], len : 0};
this.remuxer.switchLevel();
}
insertDiscontinuity() {
this.switchLevel();
this.remuxer.insertDiscontinuity();
}
// feed incoming data to the front of the parsing pipeline
push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {
var avcData, aacData, id3Data,
start, len = data.length, stt, pid, atf, offset;
this.audioCodec = audioCodec;
this.videoCodec = videoCodec;
this.timeOffset = timeOffset;
this._duration = duration;
this.contiguous = false;
if (cc !== this.lastCC) {
logger.log('discontinuity detected');
this.insertDiscontinuity();
this.lastCC = cc;
} else if (level !== this.lastLevel) {
logger.log('level switch detected');
this.switchLevel();
this.lastLevel = level;
} else if (sn === (this.lastSN+1)) {
this.contiguous = true;
}
this.lastSN = sn;
if(!this.contiguous) {
// flush any partial content
this.aacOverFlow = null;
}
var pmtParsed = this.pmtParsed,
avcId = this._avcTrack.id,
aacId = this._aacTrack.id,
id3Id = this._id3Track.id;
// loop through TS packets
for (start = 0; start < len; start += 188) {
if (data[start] === 0x47) {
stt = !!(data[start + 1] & 0x40);
// pid is a 13-bit field starting at the last bit of TS[1]
pid = ((data[start + 1] & 0x1f) << 8) + data[start + 2];
atf = (data[start + 3] & 0x30) >> 4;
// if an adaptation field is present, its length is specified by the fifth byte of the TS packet header.
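// e.g. the header bytes 0x47 0x40 0x11 0x10 decode to sync=0x47, stt=true,
// pid=0x011 and atf=1 (payload only, no adaptation field)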
if (atf > 1) {
offset = start + 5 + data[start + 4];
// continue if there is only adaptation field
if (offset === (start + 188)) {
continue;
}
} else {
offset = start + 4;
}
if (pmtParsed) {
if (pid === avcId) {
if (stt) {
if (avcData) {
this._parseAVCPES(this._parsePES(avcData));
}
avcData = {data: [], size: 0};
}
if (avcData) {
avcData.data.push(data.subarray(offset, start + 188));
avcData.size += start + 188 - offset;
}
} else if (pid === aacId) {
if (stt) {
if (aacData) {
this._parseAACPES(this._parsePES(aacData));
}
aacData = {data: [], size: 0};
}
if (aacData) {
aacData.data.push(data.subarray(offset, start + 188));
aacData.size += start + 188 - offset;
}
} else if (pid === id3Id) {
if (stt) {
if (id3Data) {
this._parseID3PES(this._parsePES(id3Data));
}
id3Data = {data: [], size: 0};
}
if (id3Data) {
id3Data.data.push(data.subarray(offset, start + 188));
id3Data.size += start + 188 - offset;
}
}
} else {
if (stt) {
offset += data[offset] + 1;
}
if (pid === 0) {
this._parsePAT(data, offset);
} else if (pid === this._pmtId) {
this._parsePMT(data, offset);
pmtParsed = this.pmtParsed = true;
avcId = this._avcTrack.id;
aacId = this._aacTrack.id;
id3Id = this._id3Track.id;
}
}
} else {
this.observer.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'TS packet did not start with 0x47'});
}
}
// parse last PES packet
if (avcData) {
this._parseAVCPES(this._parsePES(avcData));
}
if (aacData) {
this._parseAACPES(this._parsePES(aacData));
}
if (id3Data) {
this._parseID3PES(this._parsePES(id3Data));
}
this.remux();
}
remux() {
this.remuxer.remux(this._aacTrack,this._avcTrack, this._id3Track, this.timeOffset, this.contiguous);
}
destroy() {
this.switchLevel();
this._initPTS = this._initDTS = undefined;
this._duration = 0;
}
_parsePAT(data, offset) {
// skip the PSI header and parse the first PMT entry
this._pmtId = (data[offset + 10] & 0x1F) << 8 | data[offset + 11];
//logger.log('PMT PID:' + this._pmtId);
}
_parsePMT(data, offset) {
var sectionLength, tableEnd, programInfoLength, pid;
sectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2];
tableEnd = offset + 3 + sectionLength - 4;
// to determine where the table is, we have to figure out how
// long the program info descriptors are
programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11];
// advance the offset to the first entry in the mapping table
offset += 12 + programInfoLength;
while (offset < tableEnd) {
pid = (data[offset + 1] & 0x1F) << 8 | data[offset + 2];
switch(data[offset]) {
// ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio)
case 0x0f:
//logger.log('AAC PID:' + pid);
this._aacTrack.id = pid;
break;
// Packetized metadata (ID3)
case 0x15:
//logger.log('ID3 PID:' + pid);
this._id3Track.id = pid;
break;
// ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video)
case 0x1b:
//logger.log('AVC PID:' + pid);
this._avcTrack.id = pid;
break;
default:
logger.log('unknown stream type:' + data[offset]);
break;
}
// move to the next table entry
// skip past the elementary stream descriptors, if present
offset += ((data[offset + 3] & 0x0F) << 8 | data[offset + 4]) + 5;
}
}
_parsePES(stream) {
var i = 0, frag, pesFlags, pesPrefix, pesLen, pesHdrLen, pesData, pesPts, pesDts, payloadStartOffset;
//retrieve PTS/DTS from first fragment
frag = stream.data[0];
pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2];
if (pesPrefix === 1) {
pesLen = (frag[4] << 8) + frag[5];
pesFlags = frag[7];
if (pesFlags & 0xC0) {
/* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
as PTS / DTS is 33 bit we cannot use bitwise operator in JS,
as Bitwise operators treat their operands as a sequence of 32 bits */
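// e.g. (frag[9] & 0x0E) * 536870912 is the overflow-safe equivalent of << 29:
// a 33-bit PTS can reach 2^33 - 1 (~8.6e9), which still fits exactly in a double,
// whereas a 32-bit shift would silently wrap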
pesPts = (frag[9] & 0x0E) * 536870912 +// 1 << 29
(frag[10] & 0xFF) * 4194304 +// 1 << 22
(frag[11] & 0xFE) * 16384 +// 1 << 14
(frag[12] & 0xFF) * 128 +// 1 << 7
(frag[13] & 0xFE) / 2;
// check if greater than 2^32 -1
if (pesPts > 4294967295) {
// decrement 2^33
pesPts -= 8589934592;
}
if (pesFlags & 0x40) {
pesDts = (frag[14] & 0x0E ) * 536870912 +// 1 << 29
(frag[15] & 0xFF ) * 4194304 +// 1 << 22
(frag[16] & 0xFE ) * 16384 +// 1 << 14
(frag[17] & 0xFF ) * 128 +// 1 << 7
(frag[18] & 0xFE ) / 2;
// check if greater than 2^32 -1
if (pesDts > 4294967295) {
// decrement 2^33
pesDts -= 8589934592;
}
} else {
pesDts = pesPts;
}
}
pesHdrLen = frag[8];
payloadStartOffset = pesHdrLen + 9;
// trim PES header
stream.data[0] = stream.data[0].subarray(payloadStartOffset);
stream.size -= payloadStartOffset;
//reassemble PES packet
pesData = new Uint8Array(stream.size);
// reassemble the packet
while (stream.data.length) {
frag = stream.data.shift();
pesData.set(frag, i);
i += frag.byteLength;
}
return {data: pesData, pts: pesPts, dts: pesDts, len: pesLen};
} else {
return null;
}
}
_parseAVCPES(pes) {
var track = this._avcTrack,
samples = track.samples,
units = this._parseAVCNALu(pes.data),
units2 = [],
debug = false,
key = false,
length = 0,
avcSample,
push;
// no NALu found
if (units.length === 0 && samples.length > 0) {
// append pes.data to previous NAL unit
var lastavcSample = samples[samples.length - 1];
var lastUnit = lastavcSample.units.units[lastavcSample.units.units.length - 1];
var tmp = new Uint8Array(lastUnit.data.byteLength + pes.data.byteLength);
tmp.set(lastUnit.data, 0);
tmp.set(pes.data, lastUnit.data.byteLength);
lastUnit.data = tmp;
lastavcSample.units.length += pes.data.byteLength;
track.len += pes.data.byteLength;
}
//free pes.data to save up some memory
pes.data = null;
var debugString = '';
units.forEach(unit => {
switch(unit.type) {
//NDR
case 1:
push = true;
if(debug) {
debugString += 'NDR ';
}
break;
//IDR
case 5:
push = true;
if(debug) {
debugString += 'IDR ';
}
key = true;
break;
case 6:
push = true;
if(debug) {
debugString += 'SEI ';
}
break;
//SPS
case 7:
push = true;
if(debug) {
debugString += 'SPS ';
}
if(!track.sps) {
var expGolombDecoder = new ExpGolomb(unit.data);
var config = expGolombDecoder.readSPS();
track.width = config.width;
track.height = config.height;
track.sps = [unit.data];
track.timescale = this.remuxer.timescale;
track.duration = this.remuxer.timescale * this._duration;
var codecarray = unit.data.subarray(1, 4);
var codecstring = 'avc1.';
for (var i = 0; i < 3; i++) {
var h = codecarray[i].toString(16);
if (h.length < 2) {
h = '0' + h;
}
codecstring += h;
}
track.codec = codecstring;
}
break;
//PPS
case 8:
push = true;
if(debug) {
debugString += 'PPS ';
}
if (!track.pps) {
track.pps = [unit.data];
}
break;
case 9:
push = true;
if(debug) {
debugString += 'AUD ';
}
break;
default:
push = false;
debugString += 'unknown NAL ' + unit.type + ' ';
break;
}
if(push) {
units2.push(unit);
length+=unit.data.byteLength;
}
});
if(debug || debugString.length) {
logger.log(debugString);
}
//build sample from PES
// Annex B to MP4 conversion to be done
if (units2.length) {
// only push AVC sample if keyframe already found. browsers expect a keyframe at first to start decoding
if (key === true || track.sps ) {
avcSample = {units: { units : units2, length : length}, pts: pes.pts, dts: pes.dts, key: key};
samples.push(avcSample);
track.len += length;
track.nbNalu += units2.length;
}
}
}
_parseAVCNALu(array) {
var i = 0, len = array.byteLength, value, overflow, state = 0;
var units = [], unit, unitType, lastUnitStart, lastUnitType;
//logger.log('PES:' + Hex.hexDump(array));
while (i < len) {
value = array[i++];
// finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01)
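// e.g. the bytes 00 00 00 01 67 ... walk states 0 -> 1 -> 2 -> 3; the 0x01 seen
// in state 3 marks a NAL unit start, and 0x67 & 0x1f = 7 identifies an SPS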
switch (state) {
case 0:
if (value === 0) {
state = 1;
}
break;
case 1:
if( value === 0) {
state = 2;
} else {
state = 0;
}
break;
case 2:
case 3:
if( value === 0) {
state = 3;
} else if (value === 1) {
unitType = array[i] & 0x1f;
//logger.log('find NALU @ offset:' + i + ',type:' + unitType);
if (lastUnitStart) {
unit = {data: array.subarray(lastUnitStart, i - state - 1), type: lastUnitType};
//logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
units.push(unit);
} else {
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
overflow = i - state - 1;
if (overflow) {
//logger.log('first NALU found with overflow:' + overflow);
if (this._avcTrack.samples.length) {
var lastavcSample = this._avcTrack.samples[this._avcTrack.samples.length - 1];
var lastUnit = lastavcSample.units.units[lastavcSample.units.units.length - 1];
var tmp = new Uint8Array(lastUnit.data.byteLength + overflow);
tmp.set(lastUnit.data, 0);
tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength);
lastUnit.data = tmp;
lastavcSample.units.length += overflow;
this._avcTrack.len += overflow;
}
}
}
lastUnitStart = i;
lastUnitType = unitType;
if (unitType === 1 || unitType === 5) {
// OPTI !!! if IDR/NDR unit, consider it is last NALu
i = len;
}
state = 0;
} else {
state = 0;
}
break;
default:
break;
}
}
if (lastUnitStart) {
unit = {data: array.subarray(lastUnitStart, len), type: lastUnitType};
units.push(unit);
//logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
}
return units;
}
_parseAACPES(pes) {
var track = this._aacTrack, aacSample, data = pes.data, config, adtsFrameSize, adtsStartOffset, adtsHeaderLen, stamp, nbSamples, len;
if (this.aacOverFlow) {
var tmp = new Uint8Array(this.aacOverFlow.byteLength + data.byteLength);
tmp.set(this.aacOverFlow, 0);
tmp.set(data, this.aacOverFlow.byteLength);
data = tmp;
}
// look for ADTS header (0xFFFx)
for (adtsStartOffset = 0, len = data.length; adtsStartOffset < len - 1; adtsStartOffset++) {
if ((data[adtsStartOffset] === 0xff) && (data[adtsStartOffset+1] & 0xf0) === 0xf0) {
break;
}
}
// if ADTS header does not start straight from the beginning of the PES payload, raise an error
if (adtsStartOffset) {
var reason, fatal;
if (adtsStartOffset < len - 1) {
reason = `AAC PES did not start with ADTS header,offset:${adtsStartOffset}`;
fatal = false;
} else {
reason = 'no ADTS header found in AAC PES';
fatal = true;
}
this.observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: fatal, reason: reason});
if (fatal) {
return;
}
}
if (!track.audiosamplerate) {
config = this._ADTStoAudioConfig(data, adtsStartOffset, this.audioCodec);
track.config = config.config;
track.audiosamplerate = config.samplerate;
track.channelCount = config.channelCount;
track.codec = config.codec;
track.timescale = this.remuxer.timescale;
track.duration = this.remuxer.timescale * this._duration;
logger.log(`parsed codec:${track.codec},rate:${config.samplerate},nb channel:${config.channelCount}`);
}
nbSamples = 0;
while ((adtsStartOffset + 5) < len) {
// retrieve frame size
adtsFrameSize = ((data[adtsStartOffset + 3] & 0x03) << 11);
// byte 4
adtsFrameSize |= (data[adtsStartOffset + 4] << 3);
// byte 5
adtsFrameSize |= ((data[adtsStartOffset + 5] & 0xE0) >>> 5);
adtsHeaderLen = (!!(data[adtsStartOffset + 1] & 0x01) ? 7 : 9);
adtsFrameSize -= adtsHeaderLen;
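// e.g. a header starting FF F1 50 80 1C BF has protection_absent = 1 (7-byte header)
// and a raw frame length of ((0x80 & 3) << 11) | (0x1C << 3) | ((0xBF & 0xE0) >>> 5) = 229,
// i.e. 222 bytes of AAC payload once the header is subtracted above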
stamp = Math.round(pes.pts + nbSamples * 1024 * this.PES_TIMESCALE / track.audiosamplerate);
//stamp = pes.pts;
//console.log('AAC frame, offset/length/pts:' + (adtsStartOffset+7) + '/' + adtsFrameSize + '/' + stamp.toFixed(0));
if ((adtsFrameSize > 0) && ((adtsStartOffset + adtsHeaderLen + adtsFrameSize) <= len)) {
aacSample = {unit: data.subarray(adtsStartOffset + adtsHeaderLen, adtsStartOffset + adtsHeaderLen + adtsFrameSize), pts: stamp, dts: stamp};
this._aacTrack.samples.push(aacSample);
this._aacTrack.len += adtsFrameSize;
adtsStartOffset += adtsFrameSize + adtsHeaderLen;
nbSamples++;
// look for ADTS header (0xFFFx)
for ( ; adtsStartOffset < (len - 1); adtsStartOffset++) {
if ((data[adtsStartOffset] === 0xff) && ((data[adtsStartOffset + 1] & 0xf0) === 0xf0)) {
break;
}
}
} else {
break;
}
}
if (adtsStartOffset < len) {
this.aacOverFlow = data.subarray(adtsStartOffset, len);
} else {
this.aacOverFlow = null;
}
}
_ADTStoAudioConfig(data, offset, audioCodec) {
var adtsObjectType, // :int
adtsSampleingIndex, // :int
adtsExtensionSampleingIndex, // :int
adtsChanelConfig, // :int
config,
userAgent = navigator.userAgent.toLowerCase(),
adtsSampleingRates = [
96000, 88200,
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350];
// byte 2
adtsObjectType = ((data[offset + 2] & 0xC0) >>> 6) + 1;
adtsSampleingIndex = ((data[offset + 2] & 0x3C) >>> 2);
if(adtsSampleingIndex > adtsSampleingRates.length-1) {
this.observer.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: `invalid ADTS sampling index:${adtsSampleingIndex}`});
return;
}
adtsChanelConfig = ((data[offset + 2] & 0x01) << 2);
// byte 3
adtsChanelConfig |= ((data[offset + 3] & 0xC0) >>> 6);
logger.log(`manifest codec:${audioCodec},ADTS data:type:${adtsObjectType},sampleingIndex:${adtsSampleingIndex}[${adtsSampleingRates[adtsSampleingIndex]}Hz],channelConfig:${adtsChanelConfig}`);
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if (userAgent.indexOf('firefox') !== -1) {
if (adtsSampleingIndex >= 6) {
adtsObjectType = 5;
config = new Array(4);
// HE-AAC uses SBR (Spectral Band Replication): high frequencies are reconstructed from low frequencies,
// so there is a factor of 2 between the frame sample rate and the output sample rate
// multiply the frequency by 2 (see table below, equivalent to subtracting 3 from the sampling index)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
// Android : always use AAC
} else if (userAgent.indexOf('android') !== -1) {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSampleingIndex = adtsSampleingIndex;
} else {
/* for other browsers (chrome ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/
adtsObjectType = 5;
config = new Array(4);
// if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
if ((audioCodec && ((audioCodec.indexOf('mp4a.40.29') !== -1) ||
(audioCodec.indexOf('mp4a.40.5') !== -1))) ||
(!audioCodec && adtsSampleingIndex >= 6)) {
// HE-AAC uses SBR (Spectral Band Replication): high frequencies are reconstructed from low frequencies,
// so there is a factor of 2 between the frame sample rate and the output sample rate
// multiply the frequency by 2 (see table below, equivalent to subtracting 3 from the sampling index)
adtsExtensionSampleingIndex = adtsSampleingIndex - 3;
} else {
// if (manifest codec is AAC) AND (frequency less than 24kHz OR nb channel is 1) OR (manifest codec not specified and mono audio)
// Chrome fails to play back with AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSampleingIndex >= 6 || adtsChanelConfig === 1) ||
(!audioCodec && adtsChanelConfig === 1)) {
adtsObjectType = 2;
config = new Array(2);
}
adtsExtensionSampleingIndex = adtsSampleingIndex;
}
}
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explicitly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specific Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
*/
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0] = adtsObjectType << 3;
// samplingFrequencyIndex
config[0] |= (adtsSampleingIndex & 0x0E) >> 1;
config[1] |= (adtsSampleingIndex & 0x01) << 7;
// channelConfiguration
config[1] |= adtsChanelConfig << 3;
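// e.g. AAC-LC (object type 2), 44.1 kHz (sampling index 4) and stereo (channel config 2)
// pack to config = [0x12, 0x10], the usual two-byte AudioSpecificConfig for such a stream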
if (adtsObjectType === 5) {
// adtsExtensionSampleingIndex
config[1] |= (adtsExtensionSampleingIndex & 0x0E) >> 1;
config[2] = (adtsExtensionSampleingIndex & 0x01) << 7;
// adtsObjectType (forced to 2, as Chrome checks that the object type is less than 5)
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2] |= 2 << 2;
config[3] = 0;
}
return {config: config, samplerate: adtsSampleingRates[adtsSampleingIndex], channelCount: adtsChanelConfig, codec: ('mp4a.40.' + adtsObjectType)};
}
_parseID3PES(pes) {
this._id3Track.samples.push(pes);
}
}
export default TSDemuxer;

View file

@ -0,0 +1,41 @@
export const ErrorTypes = {
// Identifier for a network error (loading error / timeout ...)
NETWORK_ERROR: 'hlsNetworkError',
// Identifier for a media Error (video/parsing/mediasource error)
MEDIA_ERROR: 'hlsMediaError',
// Identifier for all other errors
OTHER_ERROR: 'hlsOtherError'
};
export const ErrorDetails = {
// Identifier for a manifest load error - data: { url : faulty URL, response : XHR response}
MANIFEST_LOAD_ERROR: 'manifestLoadError',
// Identifier for a manifest load timeout - data: { url : faulty URL, response : XHR response}
MANIFEST_LOAD_TIMEOUT: 'manifestLoadTimeOut',
// Identifier for a manifest parsing error - data: { url : faulty URL, reason : error reason}
MANIFEST_PARSING_ERROR: 'manifestParsingError',
// Identifier for playlist load error - data: { url : faulty URL, response : XHR response}
LEVEL_LOAD_ERROR: 'levelLoadError',
// Identifier for playlist load timeout - data: { url : faulty URL, response : XHR response}
LEVEL_LOAD_TIMEOUT: 'levelLoadTimeOut',
// Identifier for a level switch error - data: { level : faulty level Id, event : error description}
LEVEL_SWITCH_ERROR: 'levelSwitchError',
// Identifier for fragment load error - data: { frag : fragment object, response : XHR response}
FRAG_LOAD_ERROR: 'fragLoadError',
// Identifier for fragment loop loading error - data: { frag : fragment object}
FRAG_LOOP_LOADING_ERROR: 'fragLoopLoadingError',
// Identifier for fragment load timeout error - data: { frag : fragment object}
FRAG_LOAD_TIMEOUT: 'fragLoadTimeOut',
// Identifier for a fragment decryption error event - data: parsing error description
FRAG_DECRYPT_ERROR: 'fragDecryptError',
// Identifier for a fragment parsing error event - data: parsing error description
FRAG_PARSING_ERROR: 'fragParsingError',
// Identifier for decrypt key load error - data: { frag : fragment object, response : XHR response}
KEY_LOAD_ERROR: 'keyLoadError',
// Identifier for decrypt key load timeout error - data: { frag : fragment object}
KEY_LOAD_TIMEOUT: 'keyLoadTimeOut',
// Identifier for a buffer append error - data: append error description
BUFFER_APPEND_ERROR: 'bufferAppendError',
// Identifier for a buffer appending error event - data: appending error description
BUFFER_APPENDING_ERROR: 'bufferAppendingError'
};

View file

@ -0,0 +1,56 @@
export default {
// fired before MediaSource is attaching to media element - data: { media }
MEDIA_ATTACHING: 'hlsMediaAttaching',
// fired when MediaSource has been successfully attached to media element - data: { }
MEDIA_ATTACHED: 'hlsMediaAttached',
// fired before detaching MediaSource from media element - data: { }
MEDIA_DETACHING: 'hlsMediaDetaching',
// fired when MediaSource has been detached from media element - data: { }
MEDIA_DETACHED: 'hlsMediaDetached',
// fired to signal that a manifest loading starts - data: { url : manifestURL}
MANIFEST_LOADING: 'hlsManifestLoading',
// fired after manifest has been loaded - data: { levels : [available quality levels] , url : manifestURL, stats : { trequest, tfirst, tload, mtime}}
MANIFEST_LOADED: 'hlsManifestLoaded',
// fired after manifest has been parsed - data: { levels : [available quality levels] , firstLevel : index of first quality level appearing in Manifest}
MANIFEST_PARSED: 'hlsManifestParsed',
// fired when a level playlist loading starts - data: { url : level URL level : id of level being loaded}
LEVEL_LOADING: 'hlsLevelLoading',
// fired when a level playlist loading finishes - data: { details : levelDetails object, level : id of loaded level, stats : { trequest, tfirst, tload, mtime} }
LEVEL_LOADED: 'hlsLevelLoaded',
// fired when a level's details have been updated based on previous details, after it has been loaded. - data: { details : levelDetails object, level : id of updated level }
LEVEL_UPDATED: 'hlsLevelUpdated',
// fired when a level's PTS information has been updated after parsing a fragment - data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }
LEVEL_PTS_UPDATED: 'hlsPTSUpdated',
// fired when a level switch is requested - data: { level : id of new level }
LEVEL_SWITCH: 'hlsLevelSwitch',
// fired when a fragment loading starts - data: { frag : fragment object}
FRAG_LOADING: 'hlsFragLoading',
// fired when a fragment loading is progressing - data: { frag : fragment object, { trequest, tfirst, loaded}}
FRAG_LOAD_PROGRESS: 'hlsFragLoadProgress',
// Identifier for fragment load aborting for emergency switch down - data: {frag : fragment object}
FRAG_LOAD_EMERGENCY_ABORTED: 'hlsFragLoadEmergencyAborted',
// fired when a fragment loading is completed - data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length}}
FRAG_LOADED: 'hlsFragLoaded',
// fired when Init Segment has been extracted from fragment - data: { moov : moov MP4 box, codecs : codecs found while parsing fragment}
FRAG_PARSING_INIT_SEGMENT: 'hlsFragParsingInitSegment',
// fired when parsing id3 is completed - data: { samples : [ id3 samples pes ] }
FRAG_PARSING_METADATA: 'hlsFragParsingMetadata',
// fired when moof/mdat have been extracted from fragment - data: { moof : moof MP4 box, mdat : mdat MP4 box}
FRAG_PARSING_DATA: 'hlsFragParsingData',
// fired when fragment parsing is completed - data: undefined
FRAG_PARSED: 'hlsFragParsed',
// fired when fragment remuxed MP4 boxes have all been appended into SourceBuffer - data: { frag : fragment object, stats : { trequest, tfirst, tload, tparsed, tbuffered, length} }
FRAG_BUFFERED: 'hlsFragBuffered',
// fired when fragment matching with current media position is changing - data : { frag : fragment object }
FRAG_CHANGED: 'hlsFragChanged',
// Identifier for an FPS drop event - data: {currentDropped, currentDecoded, totalDroppedFrames}
FPS_DROP: 'hlsFPSDrop',
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover,other error specific data}
ERROR: 'hlsError',
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example
DESTROYING: 'hlsDestroying',
// fired when a decrypt key loading starts - data: { frag : fragment object}
KEY_LOADING: 'hlsKeyLoading',
// fired when a decrypt key loading is completed - data: { frag : fragment object, payload : key payload, stats : { trequest, tfirst, tload, length}}
KEY_LOADED: 'hlsKeyLoaded',
};

View file

@ -0,0 +1,121 @@
/**
* Level Helper class, providing methods dealing with playlist sliding and drift
*/
import {logger} from '../utils/logger';
class LevelHelper {
static mergeDetails(oldDetails,newDetails) {
var start = Math.max(oldDetails.startSN,newDetails.startSN)-newDetails.startSN,
end = Math.min(oldDetails.endSN,newDetails.endSN)-newDetails.startSN,
delta = newDetails.startSN - oldDetails.startSN,
oldfragments = oldDetails.fragments,
newfragments = newDetails.fragments,
ccOffset =0,
PTSFrag;
// check if old/new playlists have fragments in common
if ( end < start) {
newDetails.PTSKnown = false;
return;
}
// loop through overlapping SN and update startPTS , cc, and duration if any found
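// e.g. an old playlist covering SN 10..14 and a new one covering SN 12..16
// overlap on SN 12..14: start=0, end=2, delta=2, so new fragments 0..2 inherit
// the PTS/cc info already computed for old fragments 2..4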
for(var i = start ; i <= end ; i++) {
var oldFrag = oldfragments[delta+i],
newFrag = newfragments[i];
ccOffset = oldFrag.cc - newFrag.cc;
if (!isNaN(oldFrag.startPTS)) {
newFrag.start = newFrag.startPTS = oldFrag.startPTS;
newFrag.endPTS = oldFrag.endPTS;
newFrag.duration = oldFrag.duration;
PTSFrag = newFrag;
}
}
if(ccOffset) {
logger.log(`discontinuity sliding from playlist, take drift into account`);
for(i = 0 ; i < newfragments.length ; i++) {
newfragments[i].cc += ccOffset;
}
}
// if at least one fragment contains PTS info, recompute PTS information for all fragments
if(PTSFrag) {
LevelHelper.updateFragPTS(newDetails,PTSFrag.sn,PTSFrag.startPTS,PTSFrag.endPTS);
} else {
// adjust start by sliding offset
var sliding = oldfragments[delta].start;
for(i = 0 ; i < newfragments.length ; i++) {
newfragments[i].start += sliding;
}
}
// if we are here, it means we have fragments overlapping between
// old and new level. reliable PTS info is thus relying on old level
newDetails.PTSKnown = oldDetails.PTSKnown;
return;
}
static updateFragPTS(details,sn,startPTS,endPTS) {
var fragIdx, fragments, frag, i;
// exit if sn out of range
if (sn < details.startSN || sn > details.endSN) {
return 0;
}
fragIdx = sn - details.startSN;
fragments = details.fragments;
frag = fragments[fragIdx];
if(!isNaN(frag.startPTS)) {
startPTS = Math.max(startPTS,frag.startPTS);
endPTS = Math.min(endPTS, frag.endPTS);
}
var drift = startPTS - frag.start;
frag.start = frag.startPTS = startPTS;
frag.endPTS = endPTS;
frag.duration = endPTS - startPTS;
// adjust fragment PTS/duration from seqnum-1 to frag 0
for(i = fragIdx ; i > 0 ; i--) {
LevelHelper.updatePTS(fragments,i,i-1);
}
// adjust fragment PTS/duration from seqnum to last frag
for(i = fragIdx ; i < fragments.length - 1 ; i++) {
LevelHelper.updatePTS(fragments,i,i+1);
}
details.PTSKnown = true;
//logger.log(` frag start/end:${startPTS.toFixed(3)}/${endPTS.toFixed(3)}`);
return drift;
}
static updatePTS(fragments,fromIdx, toIdx) {
var fragFrom = fragments[fromIdx],fragTo = fragments[toIdx], fragToPTS = fragTo.startPTS;
// if we know startPTS[toIdx]
if(!isNaN(fragToPTS)) {
// update fragment duration.
// it helps to fix drifts between playlist reported duration and fragment real duration
if (toIdx > fromIdx) {
fragFrom.duration = fragToPTS-fragFrom.start;
if(fragFrom.duration < 0) {
logger.error(`negative duration computed for ${fragFrom}, there should be some duration drift between playlist and fragment!`);
}
} else {
fragTo.duration = fragFrom.start - fragToPTS;
if(fragTo.duration < 0) {
logger.error(`negative duration computed for ${fragTo}, there should be some duration drift between playlist and fragment!`);
}
}
} else {
// we dont know startPTS[toIdx]
if (toIdx > fromIdx) {
fragTo.start = fragFrom.start + fragFrom.duration;
} else {
fragTo.start = fragFrom.start - fragTo.duration;
}
}
}
}
export default LevelHelper;

View file

@ -0,0 +1,248 @@
/**
* HLS interface
*/
'use strict';
import Event from './events';
import {ErrorTypes, ErrorDetails} from './errors';
import PlaylistLoader from './loader/playlist-loader';
import FragmentLoader from './loader/fragment-loader';
import AbrController from './controller/abr-controller';
import MSEMediaController from './controller/mse-media-controller';
import LevelController from './controller/level-controller';
//import FPSController from './controller/fps-controller';
import {logger, enableLogs} from './utils/logger';
import XhrLoader from './utils/xhr-loader';
import EventEmitter from 'events';
import KeyLoader from './loader/key-loader';
class Hls {
static isSupported() {
return (window.MediaSource && window.MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'));
}
static get Events() {
return Event;
}
static get ErrorTypes() {
return ErrorTypes;
}
static get ErrorDetails() {
return ErrorDetails;
}
constructor(config = {}) {
var configDefault = {
autoStartLoad: true,
debug: false,
maxBufferLength: 30,
maxBufferSize: 60 * 1000 * 1000,
liveSyncDurationCount:3,
liveMaxLatencyDurationCount: Infinity,
maxMaxBufferLength: 600,
enableWorker: true,
enableSoftwareAES: true,
fragLoadingTimeOut: 20000,
fragLoadingMaxRetry: 1,
fragLoadingRetryDelay: 1000,
fragLoadingLoopThreshold: 3,
manifestLoadingTimeOut: 10000,
manifestLoadingMaxRetry: 1,
manifestLoadingRetryDelay: 1000,
// fpsDroppedMonitoringPeriod: 5000,
// fpsDroppedMonitoringThreshold: 0.2,
appendErrorMaxRetry: 200,
loader: XhrLoader,
fLoader: undefined,
pLoader: undefined,
abrController : AbrController,
mediaController: MSEMediaController
};
for (var prop in configDefault) {
if (prop in config) { continue; }
config[prop] = configDefault[prop];
}
if (config.liveMaxLatencyDurationCount !== undefined && config.liveMaxLatencyDurationCount <= config.liveSyncDurationCount) {
throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"');
}
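// e.g. new Hls({debug: true, liveSyncDurationCount: 3, liveMaxLatencyDurationCount: 10})
// keeps every other default above, while passing liveMaxLatencyDurationCount: 3 would throw here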
enableLogs(config.debug);
this.config = config;
// observer setup
var observer = this.observer = new EventEmitter();
observer.trigger = function trigger (event, ...data) {
observer.emit(event, event, ...data);
};
observer.off = function off (event, ...data) {
observer.removeListener(event, ...data);
};
this.on = observer.on.bind(observer);
this.off = observer.off.bind(observer);
this.trigger = observer.trigger.bind(observer);
this.playlistLoader = new PlaylistLoader(this);
this.fragmentLoader = new FragmentLoader(this);
this.levelController = new LevelController(this);
this.abrController = new config.abrController(this);
this.mediaController = new config.mediaController(this);
this.keyLoader = new KeyLoader(this);
//this.fpsController = new FPSController(this);
}
destroy() {
logger.log('destroy');
this.trigger(Event.DESTROYING);
this.detachMedia();
this.playlistLoader.destroy();
this.fragmentLoader.destroy();
this.levelController.destroy();
this.mediaController.destroy();
this.keyLoader.destroy();
//this.fpsController.destroy();
this.url = null;
this.observer.removeAllListeners();
}
attachMedia(media) {
logger.log('attachMedia');
this.media = media;
this.trigger(Event.MEDIA_ATTACHING, {media: media});
}
detachMedia() {
logger.log('detachMedia');
this.trigger(Event.MEDIA_DETACHING);
this.media = null;
}
loadSource(url) {
logger.log(`loadSource:${url}`);
this.url = url;
// when attaching to a source URL, trigger a playlist load
this.trigger(Event.MANIFEST_LOADING, {url: url});
}
startLoad() {
logger.log('startLoad');
this.mediaController.startLoad();
}
swapAudioCodec() {
logger.log('swapAudioCodec');
this.mediaController.swapAudioCodec();
}
recoverMediaError() {
logger.log('recoverMediaError');
var media = this.media;
this.detachMedia();
this.attachMedia(media);
}
/** Return all quality levels **/
get levels() {
return this.levelController.levels;
}
/** Return current playback quality level **/
get currentLevel() {
return this.mediaController.currentLevel;
}
/* set quality level immediately (-1 for automatic level selection) */
set currentLevel(newLevel) {
logger.log(`set currentLevel:${newLevel}`);
this.loadLevel = newLevel;
this.mediaController.immediateLevelSwitch();
}
/** Return next playback quality level (quality level of next fragment) **/
get nextLevel() {
return this.mediaController.nextLevel;
}
/* set quality level for next fragment (-1 for automatic level selection) */
set nextLevel(newLevel) {
logger.log(`set nextLevel:${newLevel}`);
this.levelController.manualLevel = newLevel;
this.mediaController.nextLevelSwitch();
}
/** Return the quality level of current/last loaded fragment **/
get loadLevel() {
return this.levelController.level;
}
/* set quality level for current/next loaded fragment (-1 for automatic level selection) */
set loadLevel(newLevel) {
logger.log(`set loadLevel:${newLevel}`);
this.levelController.manualLevel = newLevel;
}
/** Return the quality level of next loaded fragment **/
get nextLoadLevel() {
return this.levelController.nextLoadLevel();
}
/** set quality level of next loaded fragment **/
set nextLoadLevel(level) {
this.levelController.level = level;
}
/** Return first level (index of first level referenced in manifest)
**/
get firstLevel() {
return this.levelController.firstLevel;
}
/** set first level (index of first level referenced in manifest)
**/
set firstLevel(newLevel) {
logger.log(`set firstLevel:${newLevel}`);
this.levelController.firstLevel = newLevel;
}
/** Return start level (level of first fragment that will be played back)
if not overridden by the user, the first level appearing in the manifest will be used as the start level
if -1 : automatic start level selection, playback will start from the level matching the download bandwidth (determined from the download of the first segment)
**/
get startLevel() {
return this.levelController.startLevel;
}
/** set start level (level of first fragment that will be played back)
if not overridden by the user, the first level appearing in the manifest will be used as the start level
if -1 : automatic start level selection, playback will start from the level matching the download bandwidth (determined from the download of the first segment)
**/
set startLevel(newLevel) {
logger.log(`set startLevel:${newLevel}`);
this.levelController.startLevel = newLevel;
}
/** Return the capping/max level value that could be used by automatic level selection algorithm **/
get autoLevelCapping() {
return this.abrController.autoLevelCapping;
}
/** set the capping/max level value that could be used by automatic level selection algorithm **/
set autoLevelCapping(newLevel) {
logger.log(`set autoLevelCapping:${newLevel}`);
this.abrController.autoLevelCapping = newLevel;
}
/* check if we are in automatic level selection mode */
get autoLevelEnabled() {
return (this.levelController.manualLevel === -1);
}
/* return manual level */
get manualLevel() {
return this.levelController.manualLevel;
}
}
export default Hls;

View file

@ -0,0 +1,57 @@
/*
* Fragment Loader
*/
import Event from '../events';
import {ErrorTypes, ErrorDetails} from '../errors';
class FragmentLoader {
constructor(hls) {
this.hls = hls;
this.onfl = this.onFragLoading.bind(this);
hls.on(Event.FRAG_LOADING, this.onfl);
}
destroy() {
if (this.loader) {
this.loader.destroy();
this.loader = null;
}
this.hls.off(Event.FRAG_LOADING, this.onfl);
}
onFragLoading(event, data) {
var frag = data.frag;
this.frag = frag;
this.frag.loaded = 0;
var config = this.hls.config;
frag.loader = this.loader = typeof(config.fLoader) !== 'undefined' ? new config.fLoader(config) : new config.loader(config);
this.loader.load(frag.url, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, config.fragLoadingMaxRetry, config.fragLoadingRetryDelay, this.loadprogress.bind(this), frag);
}
loadsuccess(event, stats) {
var payload = event.currentTarget.response;
stats.length = payload.byteLength;
// detach fragment loader on load success
this.frag.loader = undefined;
this.hls.trigger(Event.FRAG_LOADED, {payload: payload, frag: this.frag, stats: stats});
}
loaderror(event) {
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_ERROR, fatal: false, frag: this.frag, response: event});
}
loadtimeout() {
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.FRAG_LOAD_TIMEOUT, fatal: false, frag: this.frag});
}
loadprogress(event, stats) {
this.frag.loaded = stats.loaded;
this.hls.trigger(Event.FRAG_LOAD_PROGRESS, {frag: this.frag, stats: stats});
}
}
export default FragmentLoader;

View file

@ -0,0 +1,67 @@
/*
* Decrypt key Loader
*/
import Event from '../events';
import {ErrorTypes, ErrorDetails} from '../errors';
class KeyLoader {
constructor(hls) {
this.hls = hls;
this.decryptkey = null;
this.decrypturl = null;
this.ondkl = this.onDecryptKeyLoading.bind(this);
hls.on(Event.KEY_LOADING, this.ondkl);
}
destroy() {
if (this.loader) {
this.loader.destroy();
this.loader = null;
}
this.hls.off(Event.KEY_LOADING, this.ondkl);
}
onDecryptKeyLoading(event, data) {
var frag = this.frag = data.frag,
decryptdata = frag.decryptdata,
uri = decryptdata.uri;
// if uri is different from previous one or if decrypt key not retrieved yet
if (uri !== this.decrypturl || this.decryptkey === null) {
var config = this.hls.config;
frag.loader = this.loader = new config.loader(config);
this.decrypturl = uri;
this.decryptkey = null;
frag.loader.load(uri, 'arraybuffer', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.fragLoadingTimeOut, config.fragLoadingMaxRetry, config.fragLoadingRetryDelay, this.loadprogress.bind(this), frag);
} else if (this.decryptkey) {
// we already loaded this key, return it
decryptdata.key = this.decryptkey;
this.hls.trigger(Event.KEY_LOADED, {frag: frag});
}
}
loadsuccess(event) {
var frag = this.frag;
this.decryptkey = frag.decryptdata.key = new Uint8Array(event.currentTarget.response);
// detach fragment loader on load success
frag.loader = undefined;
this.hls.trigger(Event.KEY_LOADED, {frag: frag});
}
loaderror(event) {
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.KEY_LOAD_ERROR, fatal: false, frag: this.frag, response: event});
}
loadtimeout() {
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.KEY_LOAD_TIMEOUT, fatal: false, frag: this.frag});
}
loadprogress() {
}
}
export default KeyLoader;

View file

@ -0,0 +1,276 @@
/**
* Playlist Loader
*/
import Event from '../events';
import {ErrorTypes, ErrorDetails} from '../errors';
import URLHelper from '../utils/url';
//import {logger} from '../utils/logger';
class PlaylistLoader {
constructor(hls) {
this.hls = hls;
this.onml = this.onManifestLoading.bind(this);
this.onll = this.onLevelLoading.bind(this);
hls.on(Event.MANIFEST_LOADING, this.onml);
hls.on(Event.LEVEL_LOADING, this.onll);
}
destroy() {
if (this.loader) {
this.loader.destroy();
this.loader = null;
}
this.url = this.id = null;
this.hls.off(Event.MANIFEST_LOADING, this.onml);
this.hls.off(Event.LEVEL_LOADING, this.onll);
}
onManifestLoading(event, data) {
this.load(data.url, null);
}
onLevelLoading(event, data) {
this.load(data.url, data.level, data.id);
}
load(url, id1, id2) {
var config = this.hls.config;
this.url = url;
this.id = id1;
this.id2 = id2;
this.loader = typeof(config.pLoader) !== 'undefined' ? new config.pLoader(config) : new config.loader(config);
this.loader.load(url, '', this.loadsuccess.bind(this), this.loaderror.bind(this), this.loadtimeout.bind(this), config.manifestLoadingTimeOut, config.manifestLoadingMaxRetry, config.manifestLoadingRetryDelay);
}
resolve(url, baseUrl) {
return URLHelper.buildAbsoluteURL(baseUrl, url);
}
parseMasterPlaylist(string, baseurl) {
var levels = [], level = {}, result, codecs, codec;
// https://regex101.com is your friend
var re = /#EXT-X-STREAM-INF:([^\n\r]*(BAND)WIDTH=(\d+))?([^\n\r]*(CODECS)=\"([^\"\n\r]*)\",?)?([^\n\r]*(RES)OLUTION=(\d+)x(\d+))?([^\n\r]*(NAME)=\"(.*)\")?[^\n\r]*[\r\n]+([^\r\n]+)/g;
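// e.g. '#EXT-X-STREAM-INF:BANDWIDTH=246440,CODECS="mp4a.40.5,avc1.42000d",RESOLUTION=320x184,NAME="240"'
// followed by its URI line yields the BAND/CODECS/RES/NAME markers and values
// consumed by the switch below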
while ((result = re.exec(string)) != null){
result.shift();
result = result.filter(function(n) { return (n !== undefined); });
level.url = this.resolve(result.pop(), baseurl);
while (result.length > 0) {
switch (result.shift()) {
case 'RES':
level.width = parseInt(result.shift());
level.height = parseInt(result.shift());
break;
case 'BAND':
level.bitrate = parseInt(result.shift());
break;
case 'NAME':
level.name = result.shift();
break;
case 'CODECS':
codecs = result.shift().split(',');
while (codecs.length > 0) {
codec = codecs.shift();
if (codec.indexOf('avc1') !== -1) {
level.videoCodec = this.avc1toavcoti(codec);
} else {
level.audioCodec = codec;
}
}
break;
default:
break;
}
}
levels.push(level);
level = {};
}
return levels;
}
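// convert a legacy 'avc1.<profile>.<level>' codec string (decimal fields) into the
// hex 'avc1.PPCCLL' form, e.g. 'avc1.66.30' -> 'avc1.42001e'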
avc1toavcoti(codec) {
var result, avcdata = codec.split('.');
if (avcdata.length > 2) {
result = avcdata.shift() + '.';
result += parseInt(avcdata.shift()).toString(16);
result += ('00' + parseInt(avcdata.shift()).toString(16)).substr(-4);
} else {
result = codec;
}
return result;
}
parseKeyParamsByRegex(string, regexp) {
var result = regexp.exec(string);
if (result) {
result.shift();
result = result.filter(function(n) { return (n !== undefined); });
if (result.length === 2) {
return result[1];
}
}
return null;
}
cloneObj(obj) {
return JSON.parse(JSON.stringify(obj));
}
parseLevelPlaylist(string, baseurl, id) {
var currentSN = 0, totalduration = 0, level = {url: baseurl, fragments: [], live: true, startSN: 0}, result, regexp, cc = 0, frag, byteRangeEndOffset, byteRangeStartOffset;
var levelkey = {method : null, key : null, iv : null, uri : null};
regexp = /(?:#EXT-X-(MEDIA-SEQUENCE):(\d+))|(?:#EXT-X-(TARGETDURATION):(\d+))|(?:#EXT-X-(KEY):(.*))|(?:#EXT(INF):([\d\.]+)[^\r\n]*([\r\n]+[^#|\r\n]+)?)|(?:#EXT-X-(BYTERANGE):([\d]+[@[\d]*)]*[\r\n]+([^#|\r\n]+)?|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DIS)CONTINUITY))/g;
while ((result = regexp.exec(string)) !== null) {
result.shift();
result = result.filter(function(n) { return (n !== undefined); });
switch (result[0]) {
case 'MEDIA-SEQUENCE':
currentSN = level.startSN = parseInt(result[1]);
break;
case 'TARGETDURATION':
level.targetduration = parseFloat(result[1]);
break;
case 'ENDLIST':
level.live = false;
break;
case 'DIS':
cc++;
break;
case 'BYTERANGE':
var params = result[1].split('@');
if (params.length === 1) {
byteRangeStartOffset = byteRangeEndOffset;
} else {
byteRangeStartOffset = parseInt(params[1]);
}
byteRangeEndOffset = parseInt(params[0]) + byteRangeStartOffset;
frag = level.fragments.length ? level.fragments[level.fragments.length - 1] : null;
if (frag && !frag.url) {
frag.byteRangeStartOffset = byteRangeStartOffset;
frag.byteRangeEndOffset = byteRangeEndOffset;
frag.url = this.resolve(result[2], baseurl);
}
break;
case 'INF':
var duration = parseFloat(result[1]);
if (!isNaN(duration)) {
var fragdecryptdata,
sn = currentSN++;
if (levelkey.method && levelkey.uri && !levelkey.iv) {
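// the EXT-X-KEY tag carried no IV attribute, so (per the HLS spec) derive the IV
// from the media sequence number as a 128-bit big-endian value,
// e.g. sn = 7 -> 0x00000000000000000000000000000007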
fragdecryptdata = this.cloneObj(levelkey);
var uint8View = new Uint8Array(16);
for (var i = 12; i < 16; i++) {
uint8View[i] = (sn >> 8*(15-i)) & 0xff;
}
fragdecryptdata.iv = uint8View;
} else {
fragdecryptdata = levelkey;
}
level.fragments.push({url: result[2] ? this.resolve(result[2], baseurl) : null, duration: duration, start: totalduration, sn: sn, level: id, cc: cc, byteRangeStartOffset: byteRangeStartOffset, byteRangeEndOffset: byteRangeEndOffset, decryptdata : fragdecryptdata});
totalduration += duration;
byteRangeStartOffset = null;
}
break;
case 'KEY':
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-08#section-3.4.4
var decryptparams = result[1];
var decryptmethod = this.parseKeyParamsByRegex(decryptparams, /(METHOD)=([^,]*)/),
decrypturi = this.parseKeyParamsByRegex(decryptparams, /(URI)=["]([^,]*)["]/),
decryptiv = this.parseKeyParamsByRegex(decryptparams, /(IV)=([^,]*)/);
if (decryptmethod) {
levelkey = { method: null, key: null, iv: null, uri: null };
if ((decrypturi) && (decryptmethod === 'AES-128')) {
levelkey.method = decryptmethod;
// URI to get the key
levelkey.uri = this.resolve(decrypturi, baseurl);
levelkey.key = null;
// Initialization Vector (IV)
if (decryptiv) {
levelkey.iv = decryptiv;
if (levelkey.iv.substring(0, 2) === '0x') {
levelkey.iv = levelkey.iv.substring(2);
}
levelkey.iv = levelkey.iv.match(/.{8}/g);
levelkey.iv[0] = parseInt(levelkey.iv[0], 16);
levelkey.iv[1] = parseInt(levelkey.iv[1], 16);
levelkey.iv[2] = parseInt(levelkey.iv[2], 16);
levelkey.iv[3] = parseInt(levelkey.iv[3], 16);
levelkey.iv = new Uint32Array(levelkey.iv);
}
}
}
break;
default:
break;
}
}
//logger.log('found ' + level.fragments.length + ' fragments');
level.totalduration = totalduration;
level.endSN = currentSN - 1;
return level;
}
loadsuccess(event, stats) {
var string = event.currentTarget.responseText, url = event.currentTarget.responseURL, id = this.id, id2 = this.id2, hls = this.hls, levels;
// responseURL not supported on some browsers (it is used to detect URL redirection)
if (url === undefined) {
// fallback to initial URL
url = this.url;
}
stats.tload = performance.now();
stats.mtime = new Date(event.currentTarget.getResponseHeader('Last-Modified'));
if (string.indexOf('#EXTM3U') === 0) {
if (string.indexOf('#EXTINF:') > 0) {
// 1 level playlist
// if first request, fire manifest loaded event, level will be reloaded afterwards
// (this is to have a uniform logic for 1 level/multilevel playlists)
if (this.id === null) {
hls.trigger(Event.MANIFEST_LOADED, {levels: [{url: url}], url: url, stats: stats});
} else {
var levelDetails = this.parseLevelPlaylist(string, url, id);
stats.tparsed = performance.now();
hls.trigger(Event.LEVEL_LOADED, {details: levelDetails, level: id, id: id2, stats: stats});
}
} else {
levels = this.parseMasterPlaylist(string, url);
// multi level playlist, parse level info
if (levels.length) {
hls.trigger(Event.MANIFEST_LOADED, {levels: levels, url: url, stats: stats});
} else {
hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.MANIFEST_PARSING_ERROR, fatal: true, url: url, reason: 'no level found in manifest'});
}
}
} else {
hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: ErrorDetails.MANIFEST_PARSING_ERROR, fatal: true, url: url, reason: 'no EXTM3U delimiter'});
}
}
loaderror(event) {
var details, fatal;
if (this.id === null) {
details = ErrorDetails.MANIFEST_LOAD_ERROR;
fatal = true;
} else {
details = ErrorDetails.LEVEL_LOAD_ERROR;
fatal = false;
}
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: details, fatal: fatal, url: this.url, loader: this.loader, response: event.currentTarget, level: this.id, id: this.id2});
}
loadtimeout() {
var details, fatal;
if (this.id === null) {
details = ErrorDetails.MANIFEST_LOAD_TIMEOUT;
fatal = true;
} else {
details = ErrorDetails.LEVEL_LOAD_TIMEOUT;
fatal = false;
}
this.loader.abort();
this.hls.trigger(Event.ERROR, {type: ErrorTypes.NETWORK_ERROR, details: details, fatal: fatal, url: this.url, loader: this.loader, level: this.id, id: this.id2});
}
}
export default PlaylistLoader;
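
As an aside, the EXT-X-KEY handling above reduces a hex IV attribute to a `Uint32Array` of four 32-bit words. A minimal standalone sketch of that conversion (not part of this file; the sample IV value is invented for illustration):

```js
// Illustrative only - mirrors the IV handling in the KEY branch of parseLevelPlaylist above.
function parseKeyIV(ivAttribute) {
  var hex = ivAttribute;
  // strip an optional 0x prefix
  if (hex.substring(0, 2) === '0x') {
    hex = hex.substring(2);
  }
  // split the 128 bit value into four 8-character hex words and parse each one
  var words = hex.match(/.{8}/g).map(function (word) {
    return parseInt(word, 16);
  });
  return new Uint32Array(words);
}

parseKeyIV('0x00000000000000000000000000000001');
// -> Uint32Array [ 0, 0, 0, 1 ]
```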

View file

@ -0,0 +1,65 @@
/**
* dummy remuxer
*/
class DummyRemuxer {
constructor(observer) {
this.PES_TIMESCALE = 90000;
this.observer = observer;
}
get timescale() {
return this.PES_TIMESCALE;
}
destroy() {
}
insertDiscontinuity() {
}
remux(audioTrack,videoTrack,id3Track,timeOffset) {
this._remuxAACSamples(audioTrack,timeOffset);
this._remuxAVCSamples(videoTrack,timeOffset);
this._remuxID3Samples(id3Track,timeOffset);
}
_remuxAVCSamples(track, timeOffset) {
var avcSample, unit;
// loop through track.samples
while (track.samples.length) {
avcSample = track.samples.shift();
// loop through AVC sample NALUs
while (avcSample.units.units.length) {
unit = avcSample.units.units.shift();
}
}
//please lint
timeOffset = timeOffset;
}
_remuxAACSamples(track,timeOffset) {
var aacSample,unit;
// loop through track.samples
while (track.samples.length) {
aacSample = track.samples.shift();
unit = aacSample.unit;
}
//please lint
timeOffset = timeOffset;
}
_remuxID3Samples(track,timeOffset) {
var id3Sample,unit;
// loop through track.samples
while (track.samples.length) {
id3Sample = track.samples.shift();
unit = id3Sample.unit;
}
//please lint
timeOffset = timeOffset;
}
}
export default DummyRemuxer;

View file

@ -0,0 +1,578 @@
/**
* Generate MP4 Box
*/
//import Hex from '../utils/hex';
class MP4 {
static init() {
MP4.types = {
avc1: [], // codingname
avcC: [],
btrt: [],
dinf: [],
dref: [],
esds: [],
ftyp: [],
hdlr: [],
mdat: [],
mdhd: [],
mdia: [],
mfhd: [],
minf: [],
moof: [],
moov: [],
mp4a: [],
mvex: [],
mvhd: [],
sdtp: [],
stbl: [],
stco: [],
stsc: [],
stsd: [],
stsz: [],
stts: [],
tfdt: [],
tfhd: [],
traf: [],
trak: [],
trun: [],
trex: [],
tkhd: [],
vmhd: [],
smhd: []
};
var i;
for (i in MP4.types) {
if (MP4.types.hasOwnProperty(i)) {
MP4.types[i] = [
i.charCodeAt(0),
i.charCodeAt(1),
i.charCodeAt(2),
i.charCodeAt(3)
];
}
}
MP4.MAJOR_BRAND = new Uint8Array([
'i'.charCodeAt(0),
's'.charCodeAt(0),
'o'.charCodeAt(0),
'm'.charCodeAt(0)
]);
MP4.AVC1_BRAND = new Uint8Array([
'a'.charCodeAt(0),
'v'.charCodeAt(0),
'c'.charCodeAt(0),
'1'.charCodeAt(0)
]);
MP4.MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
MP4.VIDEO_HDLR = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x56, 0x69, 0x64, 0x65,
0x6f, 0x48, 0x61, 0x6e,
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
]);
MP4.AUDIO_HDLR = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x53, 0x6f, 0x75, 0x6e,
0x64, 0x48, 0x61, 0x6e,
0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
]);
MP4.HDLR_TYPES = {
'video': MP4.VIDEO_HDLR,
'audio': MP4.AUDIO_HDLR
};
MP4.DREF = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01, // entry_count
0x00, 0x00, 0x00, 0x0c, // entry_size
0x75, 0x72, 0x6c, 0x20, // 'url' type
0x00, // version 0
0x00, 0x00, 0x01 // entry_flags
]);
MP4.STCO = new Uint8Array([
0x00, // version
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00 // entry_count
]);
MP4.STSC = MP4.STCO;
MP4.STTS = MP4.STCO;
MP4.STSZ = new Uint8Array([
0x00, // version
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x00, // sample_size
0x00, 0x00, 0x00, 0x00, // sample_count
]);
MP4.VMHD = new Uint8Array([
0x00, // version
0x00, 0x00, 0x01, // flags
0x00, 0x00, // graphicsmode
0x00, 0x00,
0x00, 0x00,
0x00, 0x00 // opcolor
]);
MP4.SMHD = new Uint8Array([
0x00, // version
0x00, 0x00, 0x00, // flags
0x00, 0x00, // balance
0x00, 0x00 // reserved
]);
MP4.STSD = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01]);// entry_count
MP4.FTYP = MP4.box(MP4.types.ftyp, MP4.MAJOR_BRAND, MP4.MINOR_VERSION, MP4.MAJOR_BRAND, MP4.AVC1_BRAND);
MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, MP4.DREF));
}
static box(type) {
var
payload = Array.prototype.slice.call(arguments, 1),
size = 0,
i = payload.length,
result,
view;
// calculate the total size we need to allocate
while (i--) {
size += payload[i].byteLength;
}
result = new Uint8Array(size + 8);
view = new DataView(result.buffer);
view.setUint32(0, result.byteLength);
result.set(type, 4);
// copy the payload into the result
for (i = 0, size = 8; i < payload.length; i++) {
result.set(payload[i], size);
size += payload[i].byteLength;
}
return result;
}
static hdlr(type) {
return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
}
static mdat(data) {
return MP4.box(MP4.types.mdat, data);
}
static mdhd(timescale, duration) {
return MP4.box(MP4.types.mdhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x02, // creation_time
0x00, 0x00, 0x00, 0x03, // modification_time
(timescale >> 24) & 0xFF,
(timescale >> 16) & 0xFF,
(timescale >> 8) & 0xFF,
timescale & 0xFF, // timescale
(duration >> 24),
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x55, 0xc4, // 'und' language (undetermined)
0x00, 0x00
]));
}
static mdia(track) {
return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
}
static mfhd(sequenceNumber) {
return MP4.box(MP4.types.mfhd, new Uint8Array([
0x00,
0x00, 0x00, 0x00, // flags
(sequenceNumber >> 24),
(sequenceNumber >> 16) & 0xFF,
(sequenceNumber >> 8) & 0xFF,
sequenceNumber & 0xFF, // sequence_number
]));
}
static minf(track) {
if (track.type === 'audio') {
return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track));
} else {
return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
}
}
static moof(sn, baseMediaDecodeTime, track) {
return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track,baseMediaDecodeTime));
}
/**
* @param tracks... (optional) {array} the tracks associated with this movie
*/
static moov(tracks) {
var
i = tracks.length,
boxes = [];
while (i--) {
boxes[i] = MP4.trak(tracks[i]);
}
return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));
}
static mvex(tracks) {
var
i = tracks.length,
boxes = [];
while (i--) {
boxes[i] = MP4.trex(tracks[i]);
}
return MP4.box.apply(null, [MP4.types.mvex].concat(boxes));
}
static mvhd(timescale,duration) {
var
bytes = new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x00, 0x00, 0x00, 0x01, // creation_time
0x00, 0x00, 0x00, 0x02, // modification_time
(timescale >> 24) & 0xFF,
(timescale >> 16) & 0xFF,
(timescale >> 8) & 0xFF,
timescale & 0xFF, // timescale
(duration >> 24) & 0xFF,
(duration >> 16) & 0xFF,
(duration >> 8) & 0xFF,
duration & 0xFF, // duration
0x00, 0x01, 0x00, 0x00, // 1.0 rate
0x01, 0x00, // 1.0 volume
0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
0xff, 0xff, 0xff, 0xff // next_track_ID
]);
return MP4.box(MP4.types.mvhd, bytes);
}
static sdtp(track) {
var
samples = track.samples || [],
bytes = new Uint8Array(4 + samples.length),
flags,
i;
// leave the full box header (4 bytes) all zero
// write the sample table
for (i = 0; i < samples.length; i++) {
flags = samples[i].flags;
bytes[i + 4] = (flags.dependsOn << 4) |
(flags.isDependedOn << 2) |
(flags.hasRedundancy);
}
return MP4.box(MP4.types.sdtp, bytes);
}
static stbl(track) {
return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
}
static avc1(track) {
var sps = [], pps = [], i, data, len;
// assemble the SPSs
for (i = 0; i < track.sps.length; i++) {
data = track.sps[i];
len = data.byteLength;
sps.push((len >>> 8) & 0xFF);
sps.push((len & 0xFF));
sps = sps.concat(Array.prototype.slice.call(data)); // SPS
}
// assemble the PPSs
for (i = 0; i < track.pps.length; i++) {
data = track.pps[i];
len = data.byteLength;
pps.push((len >>> 8) & 0xFF);
pps.push((len & 0xFF));
pps = pps.concat(Array.prototype.slice.call(data));
}
var avcc = MP4.box(MP4.types.avcC, new Uint8Array([
0x01, // version
sps[3], // profile
sps[4], // profile compat
sps[5], // level
0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes
0xE0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
].concat(sps).concat([
track.pps.length // numOfPictureParameterSets
]).concat(pps))); // "PPS"
//console.log('avcc:' + Hex.hexDump(avcc));
return MP4.box(MP4.types.avc1, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, // pre_defined
0x00, 0x00, // reserved
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // pre_defined
(track.width >> 8) & 0xFF,
track.width & 0xff, // width
(track.height >> 8) & 0xFF,
track.height & 0xff, // height
0x00, 0x48, 0x00, 0x00, // horizresolution
0x00, 0x48, 0x00, 0x00, // vertresolution
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, // frame_count
0x13,
0x76, 0x69, 0x64, 0x65,
0x6f, 0x6a, 0x73, 0x2d,
0x63, 0x6f, 0x6e, 0x74,
0x72, 0x69, 0x62, 0x2d,
0x68, 0x6c, 0x73, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, // compressorname
0x00, 0x18, // depth = 24
0x11, 0x11]), // pre_defined = -1
avcc,
MP4.box(MP4.types.btrt, new Uint8Array([
0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
0x00, 0x2d, 0xc6, 0xc0])) // avgBitrate
);
}
static esds(track) {
return new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
0x03, // descriptor_type
0x17+track.config.length, // length
0x00, 0x01, //es_id
0x00, // stream_priority
0x04, // descriptor_type
0x0f+track.config.length, // length
0x40, //codec : mpeg4_audio
0x15, // stream_type
0x00, 0x00, 0x00, // buffer_size
0x00, 0x00, 0x00, 0x00, // maxBitrate
0x00, 0x00, 0x00, 0x00, // avgBitrate
0x05 // descriptor_type
].concat([track.config.length]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
}
static mp4a(track) {
return MP4.box(MP4.types.mp4a, new Uint8Array([
0x00, 0x00, 0x00, // reserved
0x00, 0x00, 0x00, // reserved
0x00, 0x01, // data_reference_index
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, track.channelCount, // channelcount
0x00, 0x10, // sampleSize:16bits
0x00, 0x00, 0x00, 0x00, // reserved2
(track.audiosamplerate >> 8) & 0xFF,
track.audiosamplerate & 0xff, //
0x00, 0x00]),
MP4.box(MP4.types.esds, MP4.esds(track)));
}
static stsd(track) {
if (track.type === 'audio') {
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
} else {
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
}
}
static tkhd(track) {
return MP4.box(MP4.types.tkhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x07, // flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(track.id >> 24) & 0xFF,
(track.id >> 16) & 0xFF,
(track.id >> 8) & 0xFF,
track.id & 0xFF, // track_ID
0x00, 0x00, 0x00, 0x00, // reserved
(track.duration >> 24),
(track.duration >> 16) & 0xFF,
(track.duration >> 8) & 0xFF,
track.duration & 0xFF, // duration
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x00, // layer
0x00, 0x00, // alternate_group
0x00, 0x00, // non-audio track volume
0x00, 0x00, // reserved
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
(track.width >> 8) & 0xFF,
track.width & 0xFF,
0x00, 0x00, // width
(track.height >> 8) & 0xFF,
track.height & 0xFF,
0x00, 0x00 // height
]));
}
static traf(track,baseMediaDecodeTime) {
var sampleDependencyTable = MP4.sdtp(track);
return MP4.box(MP4.types.traf,
MP4.box(MP4.types.tfhd, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(track.id >> 24),
(track.id >> 16) & 0XFF,
(track.id >> 8) & 0XFF,
(track.id & 0xFF) // track_ID
])),
MP4.box(MP4.types.tfdt, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(baseMediaDecodeTime >>24),
(baseMediaDecodeTime >> 16) & 0XFF,
(baseMediaDecodeTime >> 8) & 0XFF,
(baseMediaDecodeTime & 0xFF) // baseMediaDecodeTime
])),
MP4.trun(track,
sampleDependencyTable.length +
16 + // tfhd
16 + // tfdt
8 + // traf header
16 + // mfhd
8 + // moof header
8), // mdat header
sampleDependencyTable);
}
/**
* Generate a track box.
* @param track {object} a track definition
* @return {Uint8Array} the track box
*/
static trak(track) {
track.duration = track.duration || 0xffffffff;
return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
}
static trex(track) {
return MP4.box(MP4.types.trex, new Uint8Array([
0x00, // version 0
0x00, 0x00, 0x00, // flags
(track.id >> 24),
(track.id >> 16) & 0XFF,
(track.id >> 8) & 0XFF,
(track.id & 0xFF), // track_ID
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
0x00, 0x00, 0x00, 0x00, // default_sample_duration
0x00, 0x00, 0x00, 0x00, // default_sample_size
0x00, 0x01, 0x00, 0x01 // default_sample_flags
]));
}
static trun(track, offset) {
var samples, sample, i, array;
samples = track.samples || [];
array = new Uint8Array(12 + (16 * samples.length));
offset += 8 + array.byteLength;
array.set([
0x00, // version 0
0x00, 0x0f, 0x01, // flags
(samples.length >>> 24) & 0xFF,
(samples.length >>> 16) & 0xFF,
(samples.length >>> 8) & 0xFF,
samples.length & 0xFF, // sample_count
(offset >>> 24) & 0xFF,
(offset >>> 16) & 0xFF,
(offset >>> 8) & 0xFF,
offset & 0xFF // data_offset
],0);
for (i = 0; i < samples.length; i++) {
sample = samples[i];
array.set([
(sample.duration >>> 24) & 0xFF,
(sample.duration >>> 16) & 0xFF,
(sample.duration >>> 8) & 0xFF,
sample.duration & 0xFF, // sample_duration
(sample.size >>> 24) & 0xFF,
(sample.size >>> 16) & 0xFF,
(sample.size >>> 8) & 0xFF,
sample.size & 0xFF, // sample_size
(sample.flags.isLeading << 2) | sample.flags.dependsOn,
(sample.flags.isDependedOn << 6) |
(sample.flags.hasRedundancy << 4) |
(sample.flags.paddingValue << 1) |
sample.flags.isNonSync,
sample.flags.degradPrio & 0xF0 << 8,
sample.flags.degradPrio & 0x0F, // sample_flags
(sample.cts >>> 24) & 0xFF,
(sample.cts >>> 16) & 0xFF,
(sample.cts >>> 8) & 0xFF,
sample.cts & 0xFF // sample_composition_time_offset
],12+16*i);
}
return MP4.box(MP4.types.trun, array);
}
static initSegment(tracks) {
if (!MP4.types) {
MP4.init();
}
var movie = MP4.moov(tracks), result;
result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
result.set(MP4.FTYP);
result.set(movie, MP4.FTYP.byteLength);
return result;
}
}
export default MP4;
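
The `box` helper above is the core of this generator: a 32-bit big-endian size, the 4-byte box type, then the concatenated payloads. A quick sanity-check sketch of that layout (the payload bytes are arbitrary, not from this commit):

```js
// Illustrative only - demonstrates the [size][type][payload] layout built by MP4.box.
MP4.init();
var payload = new Uint8Array([1, 2, 3, 4]);
var box = MP4.box(MP4.types.ftyp, payload);

new DataView(box.buffer).getUint32(0);               // 12 (8 byte header + 4 byte payload)
String.fromCharCode.apply(null, box.subarray(4, 8)); // 'ftyp'
box.subarray(8);                                     // Uint8Array [ 1, 2, 3, 4 ]
```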

View file

@ -0,0 +1,379 @@
/**
* fMP4 remuxer
*/
import Event from '../events';
import {logger} from '../utils/logger';
import MP4 from '../remux/mp4-generator';
import {ErrorTypes, ErrorDetails} from '../errors';
class MP4Remuxer {
constructor(observer) {
this.observer = observer;
this.ISGenerated = false;
this.PES2MP4SCALEFACTOR = 4;
this.PES_TIMESCALE = 90000;
this.MP4_TIMESCALE = this.PES_TIMESCALE / this.PES2MP4SCALEFACTOR;
}
get timescale() {
return this.MP4_TIMESCALE;
}
destroy() {
}
insertDiscontinuity() {
this._initPTS = this._initDTS = this.nextAacPts = this.nextAvcDts = undefined;
}
switchLevel() {
this.ISGenerated = false;
}
remux(audioTrack,videoTrack,id3Track,timeOffset, contiguous) {
// generate Init Segment if needed
if (!this.ISGenerated) {
this.generateIS(audioTrack,videoTrack,timeOffset);
}
//logger.log('nb AVC samples:' + videoTrack.samples.length);
if (videoTrack.samples.length) {
this.remuxVideo(videoTrack,timeOffset,contiguous);
}
//logger.log('nb AAC samples:' + audioTrack.samples.length);
if (audioTrack.samples.length) {
this.remuxAudio(audioTrack,timeOffset,contiguous);
}
//logger.log('nb ID3 samples:' + audioTrack.samples.length);
if (id3Track.samples.length) {
this.remuxID3(id3Track,timeOffset);
}
//notify end of parsing
this.observer.trigger(Event.FRAG_PARSED);
}
generateIS(audioTrack,videoTrack,timeOffset) {
var observer = this.observer,
audioSamples = audioTrack.samples,
videoSamples = videoTrack.samples,
nbAudio = audioSamples.length,
nbVideo = videoSamples.length,
pesTimeScale = this.PES_TIMESCALE;
if(nbAudio === 0 && nbVideo === 0) {
observer.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'no audio/video samples found'});
} else if (nbVideo === 0) {
//audio only
if (audioTrack.config) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
audioMoov: MP4.initSegment([audioTrack]),
audioCodec : audioTrack.codec,
audioChannelCount : audioTrack.channelCount
});
this.ISGenerated = true;
}
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = audioSamples[0].pts - pesTimeScale * timeOffset;
this._initDTS = audioSamples[0].dts - pesTimeScale * timeOffset;
}
} else
if (nbAudio === 0) {
//video only
if (videoTrack.sps && videoTrack.pps) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
videoMoov: MP4.initSegment([videoTrack]),
videoCodec: videoTrack.codec,
videoWidth: videoTrack.width,
videoHeight: videoTrack.height
});
this.ISGenerated = true;
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = videoSamples[0].pts - pesTimeScale * timeOffset;
this._initDTS = videoSamples[0].dts - pesTimeScale * timeOffset;
}
}
} else {
//audio and video
if (audioTrack.config && videoTrack.sps && videoTrack.pps) {
observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, {
audioMoov: MP4.initSegment([audioTrack]),
audioCodec: audioTrack.codec,
audioChannelCount: audioTrack.channelCount,
videoMoov: MP4.initSegment([videoTrack]),
videoCodec: videoTrack.codec,
videoWidth: videoTrack.width,
videoHeight: videoTrack.height
});
this.ISGenerated = true;
if (this._initPTS === undefined) {
// remember first PTS of this demuxing context
this._initPTS = Math.min(videoSamples[0].pts, audioSamples[0].pts) - pesTimeScale * timeOffset;
this._initDTS = Math.min(videoSamples[0].dts, audioSamples[0].dts) - pesTimeScale * timeOffset;
}
}
}
}
remuxVideo(track, timeOffset, contiguous) {
var view,
i = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
avcSample,
mp4Sample,
mp4SampleLength,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [];
/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
while (track.samples.length) {
avcSample = track.samples.shift();
mp4SampleLength = 0;
// convert NALU bitstream to MP4 format (prepend NALU with size field)
while (avcSample.units.units.length) {
unit = avcSample.units.units.shift();
view.setUint32(i, unit.data.byteLength);
i += 4;
mdat.set(unit.data, i);
i += unit.data.byteLength;
mp4SampleLength += 4 + unit.data.byteLength;
}
pts = avcSample.pts - this._initDTS;
dts = avcSample.dts - this._initDTS;
//logger.log('Video/PTS/DTS:' + pts + '/' + dts);
// if not first AVC sample of video track, normalize PTS/DTS with previous sample value
// and ensure that sample duration is positive
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) {
//logger.log('invalid sample duration at PTS/DTS::' + avcSample.pts + '/' + avcSample.dts + ':' + mp4Sample.duration);
mp4Sample.duration = 0;
}
} else {
var nextAvcDts = this.nextAvcDts,delta;
// first AVC sample of video track, normalize PTS/DTS
ptsnorm = this._PTSNormalize(pts, nextAvcDts);
dtsnorm = this._PTSNormalize(dts, nextAvcDts);
delta = Math.round((dtsnorm - nextAvcDts) / 90);
// if fragments are contiguous, or delta is less than 600 ms, ensure there is no overlap/hole between fragments
if (contiguous || Math.abs(delta) < 600) {
if (delta) {
if (delta > 1) {
logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
} else if (delta < -1) {
logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
}
// set DTS to next DTS
dtsnorm = nextAvcDts;
// offset PTS as well, ensure that PTS is smaller or equal than new DTS
ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
logger.log('Video/PTS/DTS adjusted:' + ptsnorm + '/' + dtsnorm);
}
}
// remember first PTS of our avcSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
}
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${avcSample.pts}/${avcSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(avcSample.pts/4294967296).toFixed(3)}');
mp4Sample = {
size: mp4SampleLength,
duration: 0,
cts: (ptsnorm - dtsnorm) / pes2mp4ScaleFactor,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0
}
};
if (avcSample.key === true) {
// the current sample is a key frame
mp4Sample.flags.dependsOn = 2;
mp4Sample.flags.isNonSync = 0;
} else {
mp4Sample.flags.dependsOn = 1;
mp4Sample.flags.isNonSync = 1;
}
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration;
}
// next AVC sample DTS should be equal to last sample DTS + last sample duration
this.nextAvcDts = dtsnorm + mp4Sample.duration * pes2mp4ScaleFactor;
track.len = 0;
track.nbNalu = 0;
if(navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
// chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412
samples[0].flags.dependsOn = 2;
samples[0].flags.isNonSync = 0;
}
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: (ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
type: 'video',
nb: samples.length
});
}
remuxAudio(track,timeOffset, contiguous) {
var view,
i = 8,
pesTimeScale = this.PES_TIMESCALE,
pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
aacSample, mp4Sample,
unit,
mdat, moof,
firstPTS, firstDTS, lastDTS,
pts, dts, ptsnorm, dtsnorm,
samples = [];
/* concatenate the audio data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */
mdat = new Uint8Array(track.len + 8);
view = new DataView(mdat.buffer);
view.setUint32(0, mdat.byteLength);
mdat.set(MP4.types.mdat, 4);
while (track.samples.length) {
aacSample = track.samples.shift();
unit = aacSample.unit;
mdat.set(unit, i);
i += unit.byteLength;
pts = aacSample.pts - this._initDTS;
dts = aacSample.dts - this._initDTS;
//logger.log('Audio/PTS:' + aacSample.pts.toFixed(0));
if (lastDTS !== undefined) {
ptsnorm = this._PTSNormalize(pts, lastDTS);
dtsnorm = this._PTSNormalize(dts, lastDTS);
// we use DTS to compute sample duration, but we use PTS to compute initPTS which is used to sync audio and video
mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
if (mp4Sample.duration < 0) {
logger.log(`invalid AAC sample duration at PTS:${aacSample.pts}:${mp4Sample.duration}`);
mp4Sample.duration = 0;
}
} else {
var nextAacPts = this.nextAacPts,delta;
ptsnorm = this._PTSNormalize(pts, nextAacPts);
dtsnorm = this._PTSNormalize(dts, nextAacPts);
delta = Math.round(1000 * (ptsnorm - nextAacPts) / pesTimeScale);
// if fragments are contiguous, or delta is less than 600 ms, ensure there is no overlap/hole between fragments
if (contiguous || Math.abs(delta) < 600) {
// log delta
if (delta) {
if (delta > 1) {
logger.log(`${delta} ms hole between AAC samples detected,filling it`);
// set PTS to next PTS, and ensure PTS is greater or equal than last DTS
} else if (delta < -1) {
logger.log(`${(-delta)} ms overlapping between AAC samples detected`);
}
// set DTS to next DTS
ptsnorm = dtsnorm = nextAacPts;
}
}
// remember first PTS of our aacSamples, ensure value is positive
firstPTS = Math.max(0, ptsnorm);
firstDTS = Math.max(0, dtsnorm);
}
//console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${aacSample.pts}/${aacSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(aacSample.pts/4294967296).toFixed(3)}');
mp4Sample = {
size: unit.byteLength,
cts: 0,
duration:0,
flags: {
isLeading: 0,
isDependedOn: 0,
hasRedundancy: 0,
degradPrio: 0,
dependsOn: 1,
}
};
samples.push(mp4Sample);
lastDTS = dtsnorm;
}
//set last sample duration as being identical to previous sample
if (samples.length >= 2) {
mp4Sample.duration = samples[samples.length - 2].duration;
}
// next aac sample PTS should be equal to last sample PTS + duration
this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * mp4Sample.duration;
//logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacDts.toFixed(0));
track.len = 0;
track.samples = samples;
moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
track.samples = [];
this.observer.trigger(Event.FRAG_PARSING_DATA, {
moof: moof,
mdat: mdat,
startPTS: firstPTS / pesTimeScale,
endPTS: this.nextAacPts / pesTimeScale,
startDTS: firstDTS / pesTimeScale,
endDTS: (dtsnorm + pes2mp4ScaleFactor * mp4Sample.duration) / pesTimeScale,
type: 'audio',
nb: samples.length
});
}
remuxID3(track,timeOffset) {
var length = track.samples.length, sample;
// consume samples
if(length) {
for(var index = 0; index < length; index++) {
sample = track.samples[index];
// setting id3 pts, dts to relative time
// using this._initPTS and this._initDTS to calculate relative time
sample.pts = ((sample.pts - this._initPTS) / this.PES_TIMESCALE);
sample.dts = ((sample.dts - this._initDTS) / this.PES_TIMESCALE);
}
this.observer.trigger(Event.FRAG_PARSING_METADATA, {
samples:track.samples
});
}
track.samples = [];
timeOffset = timeOffset;
}
_PTSNormalize(value, reference) {
var offset;
if (reference === undefined) {
return value;
}
if (reference < value) {
// - 2^33
offset = -8589934592;
} else {
// + 2^33
offset = 8589934592;
}
/* PTS is 33bit (from 0 to 2^33 -1)
if diff between value and reference is bigger than half of the amplitude (2^32) then it means that
PTS looping occurred. fill the gap */
while (Math.abs(value - reference) > 4294967296) {
value += offset;
}
return value;
}
}
export default MP4Remuxer;
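
To make the 33-bit rollover handling in `_PTSNormalize` concrete, here is a hedged worked example (the observer stub and the PTS values are invented for illustration):

```js
// Illustrative only - shows how a PTS that wrapped past 2^33 is shifted back into range.
var remuxer = new MP4Remuxer({ trigger: function () {} });

var reference = 8589930000; // last seen PTS, just below 2^33 (8589934592)
var wrapped = 1000;         // the 33 bit counter has wrapped back to ~0

remuxer._PTSNormalize(wrapped, reference);
// -> 8589935592, i.e. 1000 + 2^33, so the sample stays contiguous with the reference
```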

View file

@ -0,0 +1,43 @@
var BinarySearch = {
/**
* Searches for an item in an array which matches a certain condition.
* This requires the condition to only match one item in the array,
* and for the array to be ordered.
*
* @param {Array} list The array to search.
* @param {Function} comparisonFunction
* Called and provided a candidate item as the first argument.
* Should return:
* > -1 if the item should be located at a lower index than the provided item.
* > 1 if the item should be located at a higher index than the provided item.
* > 0 if the item is the item you're looking for.
*
* @return {*} The object if it is found or null otherwise.
*/
search: function(list, comparisonFunction) {
var minIndex = 0;
var maxIndex = list.length - 1;
var currentIndex = null;
var currentElement = null;
while (minIndex <= maxIndex) {
currentIndex = (minIndex + maxIndex) / 2 | 0;
currentElement = list[currentIndex];
var comparisonResult = comparisonFunction(currentElement);
if (comparisonResult > 0) {
minIndex = currentIndex + 1;
}
else if (comparisonResult < 0) {
maxIndex = currentIndex - 1;
}
else {
return currentElement;
}
}
return null;
}
};
module.exports = BinarySearch;
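
A hypothetical usage example of `BinarySearch.search` (not part of this file), locating the fragment whose time range contains a playback position:

```js
// Illustrative only - the fragment list and position are made up.
var fragments = [
  { sn: 0, start: 0,  duration: 10 },
  { sn: 1, start: 10, duration: 10 },
  { sn: 2, start: 20, duration: 10 }
];
var position = 12;

var match = BinarySearch.search(fragments, function (frag) {
  if (position < frag.start) {
    return -1; // the wanted fragment is at a lower index
  } else if (position >= frag.start + frag.duration) {
    return 1;  // the wanted fragment is at a higher index
  }
  return 0;    // position falls inside this fragment
});
// match.sn === 1
```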

View file

@ -0,0 +1,16 @@
class Hex {
static hexDump(array) {
var i, str = '';
for(i = 0; i < array.length; i++) {
var h = array[i].toString(16);
if (h.length < 2) {
h = '0' + h;
}
str += h;
}
return str;
}
}
export default Hex;

View file

@ -0,0 +1,73 @@
'use strict';
function noop() {}
const fakeLogger = {
trace: noop,
debug: noop,
log: noop,
warn: noop,
info: noop,
error: noop
};
let exportedLogger = fakeLogger;
//let lastCallTime;
// function formatMsgWithTimeInfo(type, msg) {
// const now = Date.now();
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
// lastCallTime = now;
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
// return msg;
// }
function formatMsg(type, msg) {
msg = '[' + type + '] > ' + msg;
return msg;
}
function consolePrintFn(type) {
const func = window.console[type];
if (func) {
return function(...args) {
if(args[0]) {
args[0] = formatMsg(type, args[0]);
}
func.apply(window.console, args);
};
}
return noop;
}
function exportLoggerFunctions(debugConfig, ...functions) {
functions.forEach(function(type) {
exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
});
}
export var enableLogs = function(debugConfig) {
if (debugConfig === true || typeof debugConfig === 'object') {
exportLoggerFunctions(debugConfig,
// Remove from this list to hard-disable a log level
//'trace',
'debug',
'log',
'info',
'warn',
'error'
);
// Some browsers don't allow using bind on the console object,
// so fall back to the default logger if needed
try {
exportedLogger.log();
} catch (e) {
exportedLogger = fakeLogger;
}
}
else {
exportedLogger = fakeLogger;
}
};
export var logger = exportedLogger;
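
A short usage sketch of this logger module (the import path matches how the other files above reference it; the log messages are invented):

```js
// Illustrative only.
import {enableLogs, logger} from '../utils/logger';

enableLogs(true);              // bind debug/log/info/warn/error to window.console
logger.log('manifest loaded'); // prints "[log] > manifest loaded"
logger.warn('level load failed, retrying'); // prints "[warn] > level load failed, retrying"
```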

View file

@ -0,0 +1,77 @@
var URLHelper = {
// build an absolute URL from a relative one using the provided baseURL
// if relativeURL is an absolute URL it will be returned as is.
buildAbsoluteURL: function(baseURL, relativeURL) {
// remove any remaining space and CRLF
relativeURL = relativeURL.trim();
if (/^[a-z]+:/i.test(relativeURL)) {
// complete url, not relative
return relativeURL;
}
var relativeURLQuery = null;
var relativeURLHash = null;
var relativeURLHashSplit = /^([^#]*)(.*)$/.exec(relativeURL);
if (relativeURLHashSplit) {
relativeURLHash = relativeURLHashSplit[2];
relativeURL = relativeURLHashSplit[1];
}
var relativeURLQuerySplit = /^([^\?]*)(.*)$/.exec(relativeURL);
if (relativeURLQuerySplit) {
relativeURLQuery = relativeURLQuerySplit[2];
relativeURL = relativeURLQuerySplit[1];
}
var baseURLHashSplit = /^([^#]*)(.*)$/.exec(baseURL);
if (baseURLHashSplit) {
baseURL = baseURLHashSplit[1];
}
var baseURLQuerySplit = /^([^\?]*)(.*)$/.exec(baseURL);
if (baseURLQuerySplit) {
baseURL = baseURLQuerySplit[1];
}
var baseURLDomainSplit = /^((([a-z]+):)?\/\/[a-z0-9\.-]+(:[0-9]+)?\/)(.*)$/i.exec(baseURL);
var baseURLProtocol = baseURLDomainSplit[3];
var baseURLDomain = baseURLDomainSplit[1];
var baseURLPath = baseURLDomainSplit[5];
var builtURL = null;
if (/^\/\//.test(relativeURL)) {
builtURL = baseURLProtocol+'://'+URLHelper.buildAbsolutePath('', relativeURL.substring(2));
}
else if (/^\//.test(relativeURL)) {
builtURL = baseURLDomain+URLHelper.buildAbsolutePath('', relativeURL.substring(1));
}
else {
var newPath = URLHelper.buildAbsolutePath(baseURLPath, relativeURL);
builtURL = baseURLDomain + newPath;
}
// put the query and hash parts back
if (relativeURLQuery) {
builtURL += relativeURLQuery;
}
if (relativeURLHash) {
builtURL += relativeURLHash;
}
return builtURL;
},
// build an absolute path using the provided basePath
// adapted from https://developer.mozilla.org/en-US/docs/Web/API/document/cookie#Using_relative_URLs_in_the_path_parameter
// this does not handle the case where relativePath is "/" or "//". These cases should be handled outside this.
buildAbsolutePath: function(basePath, relativePath) {
var sRelPath = relativePath;
var nUpLn, sDir = '', sPath = basePath.replace(/[^\/]*$/, sRelPath.replace(/(\/|^)(?:\.?\/+)+/g, '$1'));
for (var nEnd, nStart = 0; nEnd = sPath.indexOf('/../', nStart), nEnd > -1; nStart = nEnd + nUpLn) {
nUpLn = /^\/(?:\.\.\/)*/.exec(sPath.slice(nEnd))[0].length;
sDir = (sDir + sPath.substring(nStart, nEnd)).replace(new RegExp('(?:\\\/+[^\\\/]*){0,' + ((nUpLn - 1) / 3) + '}$'), '/');
}
return sDir + sPath.substr(nStart);
}
};
module.exports = URLHelper;
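
A few hedged examples of what `buildAbsoluteURL` produces for the usual playlist layouts (the URLs are invented, and the `./url` require path assumes the module is saved as `url.js`):

```js
// Illustrative only.
var URLHelper = require('./url');

URLHelper.buildAbsoluteURL('http://example.com/hls/live/index.m3u8', 'chunk_001.ts');
// -> 'http://example.com/hls/live/chunk_001.ts'

URLHelper.buildAbsoluteURL('http://example.com/hls/live/index.m3u8', '/other/chunk.ts?token=abc');
// -> 'http://example.com/other/chunk.ts?token=abc'

URLHelper.buildAbsoluteURL('http://example.com/hls/live/index.m3u8', '//cdn.example.com/seg.ts');
// -> 'http://cdn.example.com/seg.ts'
```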

View file

@ -0,0 +1,104 @@
/**
* XHR based loader
*/
import {logger} from '../utils/logger';
class XhrLoader {
constructor(config) {
if (config && config.xhrSetup) {
this.xhrSetup = config.xhrSetup;
}
}
destroy() {
this.abort();
this.loader = null;
}
abort() {
if (this.loader && this.loader.readyState !== 4) {
this.stats.aborted = true;
this.loader.abort();
}
if (this.timeoutHandle) {
window.clearTimeout(this.timeoutHandle);
}
}
load(url, responseType, onSuccess, onError, onTimeout, timeout, maxRetry, retryDelay, onProgress = null, frag = null) {
this.url = url;
if (frag && !isNaN(frag.byteRangeStartOffset) && !isNaN(frag.byteRangeEndOffset)) {
this.byteRange = frag.byteRangeStartOffset + '-' + frag.byteRangeEndOffset;
}
this.responseType = responseType;
this.onSuccess = onSuccess;
this.onProgress = onProgress;
this.onTimeout = onTimeout;
this.onError = onError;
this.stats = {trequest: performance.now(), retry: 0};
this.timeout = timeout;
this.maxRetry = maxRetry;
this.retryDelay = retryDelay;
this.timeoutHandle = window.setTimeout(this.loadtimeout.bind(this), timeout);
this.loadInternal();
}
loadInternal() {
var xhr = this.loader = new XMLHttpRequest();
xhr.onload = this.loadsuccess.bind(this);
xhr.onerror = this.loaderror.bind(this);
xhr.onprogress = this.loadprogress.bind(this);
xhr.open('GET', this.url, true);
if (this.byteRange) {
xhr.setRequestHeader('Range', 'bytes=' + this.byteRange);
}
xhr.responseType = this.responseType;
this.stats.tfirst = null;
this.stats.loaded = 0;
if (this.xhrSetup) {
this.xhrSetup(xhr, this.url);
}
xhr.send();
}
loadsuccess(event) {
window.clearTimeout(this.timeoutHandle);
this.stats.tload = performance.now();
this.onSuccess(event, this.stats);
}
loaderror(event) {
if (this.stats.retry < this.maxRetry) {
logger.warn(`${event.type} while loading ${this.url}, retrying in ${this.retryDelay}...`);
this.destroy();
window.setTimeout(this.loadInternal.bind(this), this.retryDelay);
// exponential backoff
this.retryDelay = Math.min(2 * this.retryDelay, 64000);
this.stats.retry++;
} else {
window.clearTimeout(this.timeoutHandle);
logger.error(`${event.type} while loading ${this.url}` );
this.onError(event);
}
}
loadtimeout(event) {
logger.warn(`timeout while loading ${this.url}` );
this.onTimeout(event, this.stats);
}
loadprogress(event) {
var stats = this.stats;
if (stats.tfirst === null) {
stats.tfirst = performance.now();
}
stats.loaded = event.loaded;
if (this.onProgress) {
this.onProgress(event, stats);
}
}
}
export default XhrLoader;
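
A sketch of how a caller might drive `XhrLoader.load` (the URL, callbacks and timing values are assumptions, not taken from this commit):

```js
// Illustrative only.
var loader = new XhrLoader({
  xhrSetup: function (xhr, url) {
    // hook invoked before send(), e.g. to set headers or credentials
    xhr.withCredentials = false;
  }
});

loader.load(
  'http://example.com/hls/live/index.m3u8', // url
  'text',                                   // responseType
  function onSuccess(event, stats) {
    console.log('loaded in ' + (stats.tload - stats.trequest) + ' ms');
  },
  function onError(event) {
    console.log('load failed');
  },
  function onTimeout(event, stats) {
    console.log('load timed out');
  },
  10000, // timeout in ms
  3,     // maxRetry
  500    // retryDelay in ms, doubled after each retry (capped at 64000)
);
```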

View file

@ -29,14 +29,14 @@
"webcomponentsjs": "webcomponents/webcomponentsjs#^0.7.0"
},
"ignore": [],
"homepage": "https://github.com/polymerelements/iron-behaviors",
"homepage": "https://github.com/PolymerElements/iron-behaviors",
"_release": "1.0.12",
"_resolution": {
"type": "version",
"tag": "v1.0.12",
"commit": "657f526a2382a659cdf4e13be87ecc89261588a3"
},
"_source": "git://github.com/polymerelements/iron-behaviors.git",
"_source": "git://github.com/PolymerElements/iron-behaviors.git",
"_target": "^1.0.0",
"_originalSource": "polymerelements/iron-behaviors"
"_originalSource": "PolymerElements/iron-behaviors"
}

View file

@ -32,14 +32,14 @@
"iron-test-helpers": "PolymerElements/iron-test-helpers#^1.0.0"
},
"ignore": [],
"homepage": "https://github.com/PolymerElements/paper-ripple",
"homepage": "https://github.com/polymerelements/paper-ripple",
"_release": "1.0.5",
"_resolution": {
"type": "version",
"tag": "v1.0.5",
"commit": "d72e7a9a8ab518b901ed18dde492df3b87a93be5"
},
"_source": "git://github.com/PolymerElements/paper-ripple.git",
"_source": "git://github.com/polymerelements/paper-ripple.git",
"_target": "^1.0.0",
"_originalSource": "PolymerElements/paper-ripple"
"_originalSource": "polymerelements/paper-ripple"
}

View file

@ -1,6 +1,6 @@
{
"name": "paper-styles",
"version": "1.0.13",
"version": "1.1.0",
"description": "Common (global) styles for Material Design elements.",
"authors": [
"The Polymer Authors"
@ -29,11 +29,11 @@
"webcomponentsjs": "webcomponents/webcomponentsjs#^0.7.0",
"iron-component-page": "polymerelements/iron-component-page#^1.0.0"
},
"_release": "1.0.13",
"_release": "1.1.0",
"_resolution": {
"type": "version",
"tag": "v1.0.13",
"commit": "dd998025a0fc01bfe157dd72d5b91c7b5a70b909"
"tag": "1.1.0",
"commit": "4104485003ccbcf2c3c9eb542156930974294525"
},
"_source": "git://github.com/PolymerElements/paper-styles.git",
"_target": "^1.0.0",

View file

@ -1,6 +1,6 @@
{
"name": "paper-styles",
"version": "1.0.13",
"version": "1.1.0",
"description": "Common (global) styles for Material Design elements.",
"authors": [
"The Polymer Authors"

View file

@ -162,7 +162,7 @@ styles. All other styles should exist as single lines."
.paper-font-code1 {
font-size: 14px;
font-weight: 700;
font-weight: 500;
line-height: 20px;
}

View file

@ -10,30 +10,51 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
<link rel="import" href="../polymer/polymer.html">
<!-- Taken from https://www.google.com/design/spec/style/color.html#color-ui-color-application -->
<style is="custom-style">
:root {
/*
* Light background theme
*/
--light-theme-background-color: #ffffff;
--light-theme-base-color: #000000;
--light-theme-text-color: #212121;
--light-theme-secondary-color: #737373; /* for secondary text and icons */
--light-theme-disabled-color: #9b9b9b; /* disabled/hint text */
--light-theme-divider-color: #dbdbdb;
--dark-primary-color: #303f9f;
/*
* Dark background theme
*/
--dark-theme-background-color: #212121;
--dark-theme-base-color: #ffffff;
--dark-theme-text-color: #ffffff;
--dark-theme-secondary-color: #bcbcbc; /* for secondary text and icons */
--dark-theme-disabled-color: #646464; /* disabled/hint text */
--dark-theme-divider-color: #3c3c3c;
--default-primary-color: #3f51b5;
/*
* Primary and accent colors. Also see color.html for more colors.
*/
--light-primary-color: #c5cae9; /* --paper-indigo-100 */
--default-primary-color: #3f51b5; /* --paper-indigo-500 */
--dark-primary-color: #303f9f; /* --paper-indigo-700 */
--light-primary-color: #c5cae9;
--text-primary-color: #ffffff;
--accent-color: #ff4081;
--primary-background-color: #ffffff;
--primary-text-color: #212121;
--secondary-text-color: #757575;
--disabled-text-color: #bdbdbd;
--divider-color: #e0e0e0;
--accent-color: #ff4081; /* --paper-pink-a200 */
--light-accent-color: #ff80ab; /* --paper-pink-a100 */
--dark-accent-color: #f50057; /* --paper-pink-a400 */
/*
* Deprecated values because of their confusing names.
*/
--primary-text-color: var(--light-theme-text-color);
--text-primary-color: var(--dark-theme-text-color);
--primary-background-color: var(--light-theme-background-color);
--secondary-text-color: var(--light-theme-secondary-color);
--disabled-text-color:var(--light-theme-disabled-color);
--divider-color: var(--light-theme-divider-color);
}
</style>

View file

@ -19,7 +19,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
<style is="custom-style">
body {
font-family: 'Roboto', 'Noto', sans-serif;
@apply(--paper-font-common-base);
font-size: 14px;
margin: 0;
padding: 24px;

View file

@ -16,11 +16,6 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
:root {
/* Shared Styles */
/*
Unfortunately, we can't use nested rules
See https://github.com/Polymer/polymer/issues/1399
*/
--paper-font-common-base: {
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@ -44,14 +39,8 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
/* Material Font Styles */
--paper-font-display4: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
/* @apply(--paper-font-common-nowrap); */
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@apply(--paper-font-common-base);
@apply(--paper-font-common-nowrap);
font-size: 112px;
font-weight: 300;
@ -60,14 +49,8 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-display3: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
/* @apply(--paper-font-common-nowrap); */
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@apply(--paper-font-common-base);
@apply(--paper-font-common-nowrap);
font-size: 56px;
font-weight: 400;
@ -76,10 +59,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-display2: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-base);
font-size: 45px;
font-weight: 400;
@ -88,10 +68,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-display1: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-base);
font-size: 34px;
font-weight: 400;
@ -100,10 +77,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-headline: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-base);
font-size: 24px;
font-weight: 400;
@ -112,14 +86,8 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-title: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
/* @apply(--paper-font-common-nowrap); */
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@apply(--paper-font-common-base);
@apply(--paper-font-common-nowrap);
font-size: 20px;
font-weight: 500;
@ -127,10 +95,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-subhead: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-base);
font-size: 16px;
font-weight: 400;
@ -138,9 +103,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-body2: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-base);
font-size: 14px;
font-weight: 500;
@ -148,9 +111,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-body1: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-base);
font-size: 14px;
font-weight: 400;
@ -158,13 +119,8 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-caption: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
/* @apply(--paper-font-common-nowrap); */
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@apply(--paper-font-common-base);
@apply(--paper-font-common-nowrap);
font-size: 12px;
font-weight: 400;
@ -173,14 +129,8 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-menu: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
/* @apply(--paper-font-common-nowrap); */
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@apply(--paper-font-common-base);
@apply(--paper-font-common-nowrap);
font-size: 13px;
font-weight: 500;
@ -188,14 +138,8 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-button: {
/* @apply(--paper-font-common-base) */
font-family: 'Roboto', 'Noto', sans-serif;
-webkit-font-smoothing: antialiased;
/* @apply(--paper-font-common-nowrap); */
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@apply(--paper-font-common-base);
@apply(--paper-font-common-nowrap);
font-size: 14px;
font-weight: 500;
@ -205,9 +149,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-code2: {
/* @apply(--paper-font-common-code); */
font-family: 'Roboto Mono', 'Consolas', 'Menlo', monospace;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-code);
font-size: 14px;
font-weight: 700;
@ -215,9 +157,7 @@ subject to an additional IP rights grant found at http://polymer.github.io/PATEN
};
--paper-font-code1: {
/* @apply(--paper-font-common-code); */
font-family: 'Roboto Mono', 'Consolas', 'Menlo', monospace;
-webkit-font-smoothing: antialiased;
@apply(--paper-font-common-code);
font-size: 14px;
font-weight: 500;

View file

@ -1,7 +1,7 @@
{
"name": "webcomponentsjs",
"main": "webcomponents.js",
"version": "0.7.19",
"version": "0.7.20",
"homepage": "http://webcomponents.org",
"authors": [
"The Polymer Authors"
@ -16,13 +16,13 @@
"license": "BSD",
"ignore": [],
"devDependencies": {
"web-component-tester": "~3.3.10"
"web-component-tester": "^4.0.1"
},
"_release": "0.7.19",
"_release": "0.7.20",
"_resolution": {
"type": "version",
"tag": "v0.7.19",
"commit": "d2b2329a8453e93b0d0c4949bb83e7166088adaf"
"tag": "v0.7.20",
"commit": "ce9c8597696ec4bafc772c2126a3b1c7b0e948c0"
},
"_source": "git://github.com/Polymer/webcomponentsjs.git",
"_target": "^0.7.18",

View file

@ -7,7 +7,7 @@
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
// @version 0.7.19
// @version 0.7.20
if (typeof WeakMap === "undefined") {
(function() {
var defineProperty = Object.defineProperty;

File diff suppressed because one or more lines are too long

View file

@ -7,7 +7,7 @@
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
// @version 0.7.19
// @version 0.7.20
if (typeof WeakMap === "undefined") {
(function() {
var defineProperty = Object.defineProperty;
@ -496,6 +496,7 @@ window.HTMLImports = window.HTMLImports || {
if (importCount) {
for (var i = 0, imp; i < importCount && (imp = imports[i]); i++) {
if (isImportLoaded(imp)) {
newImports.push(this);
parsedCount++;
checkDone();
} else {

File diff suppressed because one or more lines are too long

View file

@ -7,7 +7,7 @@
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
// @version 0.7.19
// @version 0.7.20
if (typeof WeakMap === "undefined") {
(function() {
var defineProperty = Object.defineProperty;

File diff suppressed because one or more lines are too long

View file

@ -7,7 +7,7 @@
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
// @version 0.7.19
// @version 0.7.20
if (typeof WeakMap === "undefined") {
(function() {
var defineProperty = Object.defineProperty;
@ -306,6 +306,7 @@ window.ShadowDOMPolyfill = {};
});
});
}
scope.addForwardingProperties = addForwardingProperties;
scope.assert = assert;
scope.constructorTable = constructorTable;
scope.defineGetter = defineGetter;
@ -3295,6 +3296,7 @@ window.ShadowDOMPolyfill = {};
(function(scope) {
"use strict";
var addForwardingProperties = scope.addForwardingProperties;
var mixin = scope.mixin;
var registerWrapper = scope.registerWrapper;
var setWrapper = scope.setWrapper;
@ -3319,6 +3321,10 @@ window.ShadowDOMPolyfill = {};
unsafeUnwrap(this).texSubImage2D.apply(unsafeUnwrap(this), arguments);
}
});
var OriginalWebGLRenderingContextBase = Object.getPrototypeOf(OriginalWebGLRenderingContext.prototype);
if (OriginalWebGLRenderingContextBase !== Object.prototype) {
addForwardingProperties(OriginalWebGLRenderingContextBase, WebGLRenderingContext.prototype);
}
var instanceProperties = /WebKit/.test(navigator.userAgent) ? {
drawingBufferHeight: null,
drawingBufferWidth: null
@ -3405,7 +3411,10 @@ window.ShadowDOMPolyfill = {};
var unwrappedActiveElement = unwrap(this).ownerDocument.activeElement;
if (!unwrappedActiveElement || !unwrappedActiveElement.nodeType) return null;
var activeElement = wrap(unwrappedActiveElement);
while (!this.contains(activeElement)) {
if (activeElement === this.host) {
return null;
}
while (!this.contains(activeElement) && !this.host.contains(activeElement)) {
while (activeElement.parentNode) {
activeElement = activeElement.parentNode;
}
@ -4109,7 +4118,6 @@ window.ShadowDOMPolyfill = {};
if (!unwrappedActiveElement || !unwrappedActiveElement.nodeType) return null;
var activeElement = wrap(unwrappedActiveElement);
while (!this.contains(activeElement)) {
var lastHost = activeElement;
while (activeElement.parentNode) {
activeElement = activeElement.parentNode;
}

File diff suppressed because one or more lines are too long

View file

@ -1,7 +1,7 @@
{
"name": "webcomponentsjs",
"main": "webcomponents.js",
"version": "0.7.19",
"version": "0.7.20",
"homepage": "http://webcomponents.org",
"authors": [
"The Polymer Authors"
@ -16,6 +16,6 @@
"license": "BSD",
"ignore": [],
"devDependencies": {
"web-component-tester": "~3.3.10"
"web-component-tester": "^4.0.1"
}
}

View file

@ -1,6 +1,6 @@
{
"name": "webcomponents.js",
"version": "0.7.19",
"version": "0.7.20",
"description": "webcomponents.js",
"main": "webcomponents.js",
"directories": {
@ -15,6 +15,9 @@
"bugs": {
"url": "https://github.com/webcomponents/webcomponentsjs/issues"
},
"scripts": {
"test": "wct"
},
"homepage": "http://webcomponents.org",
"devDependencies": {
"gulp": "^3.8.8",
@ -23,6 +26,6 @@
"gulp-header": "^1.1.1",
"gulp-uglify": "^1.0.1",
"run-sequence": "^1.0.1",
"web-component-tester": "^3"
"web-component-tester": "^4.0.1"
}
}

View file

@ -7,7 +7,7 @@
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
// @version 0.7.19
// @version 0.7.20
(function() {
window.WebComponents = window.WebComponents || {
flags: {}
@ -1143,6 +1143,7 @@ window.HTMLImports = window.HTMLImports || {
if (importCount) {
for (var i = 0, imp; i < importCount && (imp = imports[i]); i++) {
if (isImportLoaded(imp)) {
newImports.push(this);
parsedCount++;
checkDone();
} else {

File diff suppressed because one or more lines are too long

View file

@ -7,7 +7,7 @@
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
// @version 0.7.19
// @version 0.7.20
(function() {
window.WebComponents = window.WebComponents || {
flags: {}
@ -352,6 +352,7 @@ if (WebComponents.flags.shadow) {
});
});
}
scope.addForwardingProperties = addForwardingProperties;
scope.assert = assert;
scope.constructorTable = constructorTable;
scope.defineGetter = defineGetter;
@ -3306,6 +3307,7 @@ if (WebComponents.flags.shadow) {
})(window.ShadowDOMPolyfill);
(function(scope) {
"use strict";
var addForwardingProperties = scope.addForwardingProperties;
var mixin = scope.mixin;
var registerWrapper = scope.registerWrapper;
var setWrapper = scope.setWrapper;
@ -3330,6 +3332,10 @@ if (WebComponents.flags.shadow) {
unsafeUnwrap(this).texSubImage2D.apply(unsafeUnwrap(this), arguments);
}
});
var OriginalWebGLRenderingContextBase = Object.getPrototypeOf(OriginalWebGLRenderingContext.prototype);
if (OriginalWebGLRenderingContextBase !== Object.prototype) {
addForwardingProperties(OriginalWebGLRenderingContextBase, WebGLRenderingContext.prototype);
}
var instanceProperties = /WebKit/.test(navigator.userAgent) ? {
drawingBufferHeight: null,
drawingBufferWidth: null
@ -3414,7 +3420,10 @@ if (WebComponents.flags.shadow) {
var unwrappedActiveElement = unwrap(this).ownerDocument.activeElement;
if (!unwrappedActiveElement || !unwrappedActiveElement.nodeType) return null;
var activeElement = wrap(unwrappedActiveElement);
while (!this.contains(activeElement)) {
if (activeElement === this.host) {
return null;
}
while (!this.contains(activeElement) && !this.host.contains(activeElement)) {
while (activeElement.parentNode) {
activeElement = activeElement.parentNode;
}
@ -4112,7 +4121,6 @@ if (WebComponents.flags.shadow) {
if (!unwrappedActiveElement || !unwrappedActiveElement.nodeType) return null;
var activeElement = wrap(unwrappedActiveElement);
while (!this.contains(activeElement)) {
var lastHost = activeElement;
while (activeElement.parentNode) {
activeElement = activeElement.parentNode;
}
@ -5908,6 +5916,7 @@ window.HTMLImports = window.HTMLImports || {
if (importCount) {
for (var i = 0, imp; i < importCount && (imp = imports[i]); i++) {
if (isImportLoaded(imp)) {
newImports.push(this);
parsedCount++;
checkDone();
} else {

File diff suppressed because one or more lines are too long

View file

@ -232,6 +232,16 @@
function renderRecording(context, defaultTimer, program) {
$('.itemName', context).html(program.Name);
$('.itemEpisodeName', context).html(program.EpisodeTitle || '');
$('.itemMiscInfo', context).html(LibraryBrowser.getMiscInfoHtml(program));
$('.itemMiscInfo a').each(function() {
$(this).replaceWith(this.innerHTML);
});
$('#chkNewOnly', context).checked(defaultTimer.RecordNewOnly);
$('#chkAllChannels', context).checked(defaultTimer.RecordAnyChannel);
$('#chkAnyTime', context).checked(defaultTimer.RecordAnyTime);

View file

@ -10,6 +10,12 @@
<div style="text-align: right;">
<paper-toggle-button class="chkAdvanced">${ButtonAdvanced}</paper-toggle-button>
</div>
<p><span class="itemName inlineItemName"></span></p>
<p class="itemEpisodeName"></p>
<p class="itemMiscInfo"></p>
<br />
<div style="display: none;" id="eligibleForSeriesFields">
<div>
<paper-checkbox id="chkRecordSeries">${OptionRecordSeries}</paper-checkbox>

View file

@ -339,13 +339,18 @@
var template = this.response;
ApiClient.getItem(Dashboard.getCurrentUserId(), itemId).then(function (item) {
var dlg = paperDialogHelper.createDialog();
var dlg = paperDialogHelper.createDialog({
size: 'small',
removeOnClose: true
});
var html = '';
html += '<h2 class="dialogHeader">';
html += '<paper-fab icon="arrow-back" mini class="btnCloseDialog"></paper-fab>';
html += '<div style="display:inline-block;margin-left:.6em;vertical-align:middle;">' + item.Name + '</div>';
html += '</h2>';
html += '<div class="dialogHeader">';
html += '<paper-icon-button icon="arrow-back" class="btnCancel"></paper-icon-button>';
html += '<div class="dialogHeaderTitle">';
html += item.Name;
html += '</div>';
html += '</div>';
html += '<div class="editorContent">';
html += Globalize.translateDocument(template);
@ -356,9 +361,6 @@
$('.subtitleSearchForm', dlg).off('submit', onSearchSubmit).on('submit', onSearchSubmit);
// Has to be assigned a z-index after the call to .open()
$(dlg).on('iron-overlay-closed', onDialogClosed);
paperDialogHelper.open(dlg);
var editorContent = dlg.querySelector('.editorContent');
@ -369,7 +371,7 @@
fillLanguages(editorContent, languages);
});
$('.btnCloseDialog', dlg).on('click', function () {
$('.btnCancel', dlg).on('click', function () {
paperDialogHelper.close(dlg);
});
@ -379,12 +381,6 @@
xhr.send();
}
function onDialogClosed() {
$(this).remove();
Dashboard.hideLoadingMsg();
}
return {
show: showEditor
};

View file

@ -94,7 +94,7 @@
}
function requireHlsPlayer(callback) {
require(['thirdparty/hls.min.js'], function (hls) {
require(['hlsjs'], function (hls) {
window.Hls = hls;
callback();
});

View file

@ -622,8 +622,8 @@
return false;
}
var overlayTarget = parentWithClass(e.target, 'cardOverlayTarget');
if (overlayTarget) {
var button = parentWithClass(e.target, 'btnUserItemRating');
if (button) {
e.stopPropagation();
e.preventDefault();
return false;

View file

@ -1756,6 +1756,7 @@ var AppInfo = {};
var urlArgs = "v=" + (window.dashboardVersion || new Date().getDate());
var bowerPath = "bower_components";
var apiClientBowerPath = "bower_components/emby-apiclient";
// Put the version into the bower path since we can't easily put a query string param on html imports
// Emby server will handle this
@@ -1781,6 +1782,8 @@ var AppInfo = {};
fastclick: bowerPath + '/fastclick/lib/fastclick'
};
paths.hlsjs = bowerPath + "/hls.js/dist/hls.min";
if (Dashboard.isRunningInCordova()) {
paths.dialog = "cordova/dialog";
paths.prompt = "cordova/prompt";
@@ -1791,8 +1794,8 @@ var AppInfo = {};
paths.dialog = "components/dialog";
paths.prompt = "components/prompt";
paths.sharingwidget = "components/sharingwidget";
paths.serverdiscovery = "apiclient/serverdiscovery";
paths.wakeonlan = "apiclient/wakeonlan";
paths.serverdiscovery = apiClientBowerPath + "/serverdiscovery";
paths.wakeonlan = apiClientBowerPath + "/wakeonlan";
}
var sha1Path = bowerPath + "/cryptojslib/components/sha1-min";
@@ -1894,6 +1897,29 @@ var AppInfo = {};
} else {
define('registrationservices', ['scripts/registrationservices']);
}
if (Dashboard.isRunningInCordova()) {
define("localassetmanager", ["cordova/localassetmanager"]);
define("fileupload", ["cordova/fileupload"]);
} else {
define("localassetmanager", [apiClientBowerPath + "/localassetmanager"]);
define("fileupload", [apiClientBowerPath + "/fileupload"]);
}
define("connectservice", [apiClientBowerPath + "/connectservice"]);
define("apiclient-store", [apiClientBowerPath + "/store"]);
define("apiclient-events", [apiClientBowerPath + "/events"]);
define("apiclient-logger", [apiClientBowerPath + "/logger"]);
define("apiclient-credentials", [apiClientBowerPath + "/credentials"]);
define("apiclient-deferred", [apiClientBowerPath + "/deferred"]);
define("apiclient", [apiClientBowerPath + "/apiclient"]);
define("connectionmanager", [apiClientBowerPath + "/connectionmanager"]);
define("contentuploader", [apiClientBowerPath + "/sync/contentuploader"]);
define("serversync", [apiClientBowerPath + "/sync/serversync"]);
define("multiserversync", [apiClientBowerPath + "/sync/multiserversync"]);
define("offlineusersync", [apiClientBowerPath + "/sync/offlineusersync"]);
define("mediasync", [apiClientBowerPath + "/sync/mediasync"]);
}
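The block above replaces the old `apiclient/...` paths with short module ids pointing into `bower_components/emby-apiclient`. Each `define(id, [file])` call lists the bower file as its only dependency, so requiring the short id loads that file. A condensed sketch of the pattern, showing just two of the ids plus an illustrative consumer:

```js
var apiClientBowerPath = 'bower_components/emby-apiclient';

// Alias the short ids onto the bower-installed files.
define('apiclient-events', [apiClientBowerPath + '/events']);
define('apiclient', [apiClientBowerPath + '/apiclient']);

// Callers depend on the alias rather than a hard-coded folder,
// matching the deps.push('apiclient-events') changes further down.
require(['apiclient-events'], function () {
    // events.js has been fetched and evaluated at this point.
});
```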
function init(hostingAppInfo) {
@@ -1906,12 +1932,6 @@ var AppInfo = {};
});
}
if (Dashboard.isRunningInCordova()) {
define("localassetmanager", ["cordova/localassetmanager"]);
} else {
define("localassetmanager", ["apiclient/localassetmanager"]);
}
if (Dashboard.isRunningInCordova() && browserInfo.android) {
define("nativedirectorychooser", ["cordova/android/nativedirectorychooser"]);
}
@@ -1936,8 +1956,6 @@ var AppInfo = {};
define("localsync", ["scripts/localsync"]);
}
define("connectservice", ["apiclient/connectservice"]);
define("livetvcss", [], function () {
Dashboard.importCss('css/livetv.css');
return {};
@@ -1962,18 +1980,6 @@ var AppInfo = {};
define("searchmenu", ["scripts/searchmenu"]);
}
define("contentuploader", ["apiclient/sync/contentuploader"]);
define("serversync", ["apiclient/sync/serversync"]);
define("multiserversync", ["apiclient/sync/multiserversync"]);
define("offlineusersync", ["apiclient/sync/offlineusersync"]);
define("mediasync", ["apiclient/sync/mediasync"]);
if (Dashboard.isRunningInCordova()) {
define("fileupload", ["cordova/fileupload"]);
} else {
define("fileupload", ["apiclient/fileupload"]);
}
define("buttonenabled", ["legacy/buttonenabled"]);
var deps = [];
@@ -1984,7 +1990,7 @@ var AppInfo = {};
deps.push('scripts/mediacontroller');
deps.push('scripts/globalize');
deps.push('apiclient/events');
deps.push('apiclient-events');
deps.push('jQuery');
@@ -2034,9 +2040,9 @@ var AppInfo = {};
deps.push('appstorage');
deps.push('scripts/mediaplayer');
deps.push('scripts/appsettings');
deps.push('apiclient/apiclient');
deps.push('apiclient/connectionmanager');
deps.push('apiclient/credentials');
deps.push('apiclient');
deps.push('connectionmanager');
deps.push('apiclient-credentials');
require(deps, function () {
@@ -2185,7 +2191,7 @@ var AppInfo = {};
deps.push('scripts/sync');
deps.push('scripts/backdrops');
deps.push('scripts/librarymenu');
deps.push('apiclient/deferred');
deps.push('apiclient-deferred');
deps.push('css!css/card.css');
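The `css!` prefix in the dependency above is a RequireJS loader-plugin id: a module named `css` whose `load` function fetches the stylesheet. A minimal sketch of such a plugin, illustrative only; the project ships its own css loader:

```js
// Minimal RequireJS loader plugin: require(['css!css/card.css']) appends a
// <link> tag and resolves the dependency once the stylesheet has loaded.
define('css', {
    load: function (name, parentRequire, onload, config) {
        var link = document.createElement('link');
        link.rel = 'stylesheet';
        // The resource name already carries the .css extension in this codebase.
        link.href = parentRequire.toUrl(name);
        link.onload = function () {
            onload(link);
        };
        document.head.appendChild(link);
    }
});
```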
@@ -2411,8 +2417,8 @@ var AppInfo = {};
var initialDependencies = [];
initialDependencies.push('isMobile');
initialDependencies.push('apiclient/logger');
initialDependencies.push('apiclient/store');
initialDependencies.push('apiclient-logger');
initialDependencies.push('apiclient-store');
initialDependencies.push('scripts/extensions');
var supportsNativeWebComponents = 'registerElement' in document && 'content' in document.createElement('template');
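`supportsNativeWebComponents` feature-tests `document.registerElement` and `<template>` support before the startup dependencies are required. An illustrative use of that flag; the polyfill path below is an assumption, not taken from this diff:

```js
if (!supportsNativeWebComponents) {
    // Hypothetical path: pull in a web components polyfill bundle from bower
    // before the rest of startup runs.
    initialDependencies.push('bower_components/webcomponentsjs/webcomponents-lite.min');
}

require(initialDependencies, function () {
    // Startup continues once the polyfill (if any) is in place.
});
```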

File diff suppressed because one or more lines are too long