1
0
Fork 0
mirror of https://github.com/DanielnetoDotCom/YouPHPTube synced 2025-10-03 01:39:24 +02:00

Update node modules

This commit is contained in:
Daniel Neto 2024-10-27 13:39:07 -03:00
parent d429e6f7d8
commit c05a371397
1993 changed files with 9729 additions and 44041 deletions

135
node_modules/.package-lock.json generated vendored
View file

@ -213,15 +213,15 @@
}
},
"node_modules/@videojs/http-streaming": {
"version": "3.13.3",
"resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-3.13.3.tgz",
"integrity": "sha512-L7H+iTeqHeZ5PylzOx+pT3CVyzn4TALWYTJKkIc1pDaV/cTVfNGtG+9/vXPAydD+wR/xH1M9/t2JH8tn/DCT4w==",
"version": "3.14.2",
"resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-3.14.2.tgz",
"integrity": "sha512-c+sg+rrrSrRekBZxd+sNpzjRteIcOEQRJllqCBcz6MrgSaGJGDzV1xhGSAFnxX8E/xfqQeF060us5474WwYi3Q==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"@videojs/vhs-utils": "4.0.0",
"aes-decrypter": "4.0.1",
"@videojs/vhs-utils": "^4.1.1",
"aes-decrypter": "^4.0.2",
"global": "^4.4.0",
"m3u8-parser": "^7.1.0",
"m3u8-parser": "^7.2.0",
"mpd-parser": "^1.3.0",
"mux.js": "7.0.3",
"video.js": "^7 || ^8"
@ -234,39 +234,13 @@
"video.js": "^8.14.0"
}
},
"node_modules/@videojs/http-streaming/node_modules/aes-decrypter": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/aes-decrypter/-/aes-decrypter-4.0.1.tgz",
"integrity": "sha512-H1nh/P9VZXUf17AA5NQfJML88CFjVBDuGkp5zDHa7oEhYN9TTpNLJknRY1ie0iSKWlDf6JRnJKaZVDSQdPy6Cg==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"@videojs/vhs-utils": "^3.0.5",
"global": "^4.4.0",
"pkcs7": "^1.0.4"
}
},
"node_modules/@videojs/http-streaming/node_modules/aes-decrypter/node_modules/@videojs/vhs-utils": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-3.0.5.tgz",
"integrity": "sha512-PKVgdo8/GReqdx512F+ombhS+Bzogiofy1LgAj4tN8PfdBx3HSS7V5WfJotKTqtOWGwVfSWsrYN/t09/DSryrw==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"global": "^4.4.0",
"url-toolkit": "^2.2.1"
},
"engines": {
"node": ">=8",
"npm": ">=5"
}
},
"node_modules/@videojs/vhs-utils": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-4.0.0.tgz",
"integrity": "sha512-xJp7Yd4jMLwje2vHCUmi8MOUU76nxiwII3z4Eg3Ucb+6rrkFVGosrXlMgGnaLjq724j3wzNElRZ71D/CKrTtxg==",
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-4.1.1.tgz",
"integrity": "sha512-5iLX6sR2ownbv4Mtejw6Ax+naosGvoT9kY+gcuHzANyUZZ+4NpeNdKMUhb6ag0acYej1Y7cmr/F2+4PrggMiVA==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"global": "^4.4.0",
"url-toolkit": "^2.2.1"
"global": "^4.4.0"
},
"engines": {
"node": ">=8",
@ -317,19 +291,6 @@
"pkcs7": "^1.0.4"
}
},
"node_modules/aes-decrypter/node_modules/@videojs/vhs-utils": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-4.1.1.tgz",
"integrity": "sha512-5iLX6sR2ownbv4Mtejw6Ax+naosGvoT9kY+gcuHzANyUZZ+4NpeNdKMUhb6ag0acYej1Y7cmr/F2+4PrggMiVA==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"global": "^4.4.0"
},
"engines": {
"node": ">=8",
"npm": ">=5"
}
},
"node_modules/animate.css": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/animate.css/-/animate.css-4.1.1.tgz",
@ -444,9 +405,9 @@
}
},
"node_modules/chart.js": {
"version": "4.4.4",
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.4.tgz",
"integrity": "sha512-emICKGBABnxhMjUjlYRR12PmOXhJ2eJjEHL2/dZlWjxRAZT1D8xplLFq5M0tMQK8ja+wBS/tuVEJB5C6r7VxJA==",
"version": "4.4.5",
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.5.tgz",
"integrity": "sha512-CVVjg1RYTJV9OCC8WeJPMx8gsV8K6WIyIEQUE3ui4AR9Hfgls9URri6Ja3hyMVBbTF8Q2KFa19PE815gWcWhng==",
"dependencies": {
"@kurkle/color": "^0.3.0"
},
@ -633,9 +594,9 @@
}
},
"node_modules/hls.js": {
"version": "1.5.15",
"resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.5.15.tgz",
"integrity": "sha512-6cD7xN6bycBHaXz2WyPIaHn/iXFizE5au2yvY5q9aC4wfihxAr16C9fUy4nxh2a3wOw0fEgLRa9dN6wsYjlpNg=="
"version": "1.5.17",
"resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.5.17.tgz",
"integrity": "sha512-wA66nnYFvQa1o4DO/BFgLNRKnBTVXpNeldGRBJ2Y0SvFtdwvFKCbqa9zhHoZLoxHhZ+jYsj3aIBkWQQCPNOhMw=="
},
"node_modules/ieee754": {
"version": "1.2.1",
@ -1036,19 +997,6 @@
"global": "^4.4.0"
}
},
"node_modules/m3u8-parser/node_modules/@videojs/vhs-utils": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-4.1.1.tgz",
"integrity": "sha512-5iLX6sR2ownbv4Mtejw6Ax+naosGvoT9kY+gcuHzANyUZZ+4NpeNdKMUhb6ag0acYej1Y7cmr/F2+4PrggMiVA==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"global": "^4.4.0"
},
"engines": {
"node": ">=8",
"npm": ">=5"
}
},
"node_modules/min-document": {
"version": "2.19.0",
"resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz",
@ -1066,9 +1014,9 @@
}
},
"node_modules/moment-timezone": {
"version": "0.5.45",
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.45.tgz",
"integrity": "sha512-HIWmqA86KcmCAhnMAN0wuDOARV/525R2+lOLotuGFzn4HO+FH+/645z2wx0Dt3iDv6/p61SIvKnDstISainhLQ==",
"version": "0.5.46",
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.46.tgz",
"integrity": "sha512-ZXm9b36esbe7OmdABqIWJuBBiLLwAjrN7CE+7sYdCCx82Nabt1wHDj8TVseS59QIlfFPbOoiBPm6ca9BioG4hw==",
"dependencies": {
"moment": "^2.29.4"
},
@ -1370,9 +1318,9 @@
}
},
"node_modules/tinymce": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/tinymce/-/tinymce-7.3.0.tgz",
"integrity": "sha512-Ls4PgYlpk73XAxBSBqbVmSl8Mb3DuNfgF01GZ0lY6/MOEVRl3IL+VxC1Oe6165e8WqbqVsxO3Qj/PmoYNvQKGQ=="
"version": "7.4.1",
"resolved": "https://registry.npmjs.org/tinymce/-/tinymce-7.4.1.tgz",
"integrity": "sha512-g1Ieaio5YU+jLEQZkQyxTT8EY/im+TC/CFBPlqDBCNdsF8YQOeLMot+K6vmFOAXhNc85KhP1rC9Dn2X+iBFDGg=="
},
"node_modules/tinymce-langs": {
"version": "1.0.0",
@ -1442,17 +1390,17 @@
}
},
"node_modules/video.js": {
"version": "8.17.4",
"resolved": "https://registry.npmjs.org/video.js/-/video.js-8.17.4.tgz",
"integrity": "sha512-AECieAxKMKB/QgYK36ci50phfpWys6bFT6+pGMpSafeFYSoZaQ2Vpl83T9Qqcesv4TO7oNtiycnVeaBnrva2oA==",
"version": "8.18.1",
"resolved": "https://registry.npmjs.org/video.js/-/video.js-8.18.1.tgz",
"integrity": "sha512-oQ4M/HD2fFgEPHfmVMWxGykRFIpOmVhK0XZ4PSsPTgN2jH6E6+92f/RI2mDXDb0yu+Fxv9fxMUm0M7Z2K3Zo9w==",
"dependencies": {
"@babel/runtime": "^7.12.5",
"@videojs/http-streaming": "3.13.3",
"@videojs/vhs-utils": "^4.0.0",
"@videojs/http-streaming": "^3.14.2",
"@videojs/vhs-utils": "^4.1.1",
"@videojs/xhr": "2.7.0",
"aes-decrypter": "^4.0.1",
"aes-decrypter": "^4.0.2",
"global": "4.4.0",
"m3u8-parser": "^7.1.0",
"m3u8-parser": "^7.2.0",
"mpd-parser": "^1.2.2",
"mux.js": "^7.0.1",
"videojs-contrib-quality-levels": "4.1.0",
@ -1495,10 +1443,23 @@
"resolved": "https://registry.npmjs.org/videojs-font/-/videojs-font-4.2.0.tgz",
"integrity": "sha512-YPq+wiKoGy2/M7ccjmlvwi58z2xsykkkfNMyIg4xb7EZQQNwB71hcSsB3o75CqQV7/y5lXkXhI/rsGAS7jfEmQ=="
},
"node_modules/videojs-hls-quality-selector": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/videojs-hls-quality-selector/-/videojs-hls-quality-selector-2.0.0.tgz",
"integrity": "sha512-x0AQKGwryDdD94s1it+Jolb6j1mg4Q+c7g1PlCIG6dXBdipVPaZmg71fxaFZJgx1k326DFnRaWrLxQ72/TKd2A==",
"dependencies": {
"global": "^4.4.0",
"video.js": "^8"
},
"engines": {
"node": ">=14",
"npm": ">=6"
}
},
"node_modules/videojs-hotkeys": {
"version": "0.2.28",
"resolved": "https://registry.npmjs.org/videojs-hotkeys/-/videojs-hotkeys-0.2.28.tgz",
"integrity": "sha512-M8rlD5OSB3EDRdbS4MRNlGKFpA2sSIStmUPvy5zfl/NigzWaN6r4wnb32rEN0v97GiQwmUfXSmqrPNrXhiFQmQ=="
"version": "0.2.30",
"resolved": "https://registry.npmjs.org/videojs-hotkeys/-/videojs-hotkeys-0.2.30.tgz",
"integrity": "sha512-G8kEQZPapoWDoEajh2Nroy4bCN1qVEul5AuzZqBS7ZCG45K7hqTYKgf1+fmYvG8m8u84sZmVMUvSWZBjaFW66Q=="
},
"node_modules/videojs-ima": {
"version": "2.3.0",
@ -1693,9 +1654,9 @@
}
},
"node_modules/videojs-playlist": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/videojs-playlist/-/videojs-playlist-5.1.2.tgz",
"integrity": "sha512-8YgNq/iL17RLTXpfWAkuhM0Sq4w/x5YPVaNbUycjfqqGL/bp3Nrmc2W0qkPfh0ryB7r4cHfJbtHYP7zlW3ZkdQ==",
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/videojs-playlist/-/videojs-playlist-5.2.0.tgz",
"integrity": "sha512-Kyx6C5r7zmj6y97RrIlyji8JUEt0kUEfVyB4P6VMyEFVyCGlOlzlgPw2verznBp4uDfjVPPuAJKvNJ7x9O5NJw==",
"dependencies": {
"global": "^4.3.2",
"video.js": "^6 || ^7 || ^8"

View file

@ -1,3 +1,60 @@
<a name="3.14.2"></a>
## [3.14.2](https://github.com/videojs/http-streaming/compare/v3.14.1...v3.14.2) (2024-09-17)
### Bug Fixes
* audio segment on incorrect timeline HLS ([#1539](https://github.com/videojs/http-streaming/issues/1539)) ([e4e0c2d](https://github.com/videojs/http-streaming/commit/e4e0c2d))
<a name="3.14.1"></a>
## [3.14.1](https://github.com/videojs/http-streaming/compare/v3.14.0...v3.14.1) (2024-09-09)
### Bug Fixes
* allow vtt rollover with MPEGTS of 0 ([#1537](https://github.com/videojs/http-streaming/issues/1537)) ([2125ecf](https://github.com/videojs/http-streaming/commit/2125ecf))
<a name="3.14.0"></a>
# [3.14.0](https://github.com/videojs/http-streaming/compare/v3.11.0...v3.14.0) (2024-08-23)
### Features
* Add experimental support for ManagedMediaSource ([#1453](https://github.com/videojs/http-streaming/issues/1453)) ([247047a](https://github.com/videojs/http-streaming/commit/247047a))
* Custom Pixel Ratio ([#1497](https://github.com/videojs/http-streaming/issues/1497)) ([0e9d9d8](https://github.com/videojs/http-streaming/commit/0e9d9d8))
* streaming events and errors ([#1508](https://github.com/videojs/http-streaming/issues/1508)) ([c94a230](https://github.com/videojs/http-streaming/commit/c94a230))
### Bug Fixes
* audio segment on incorrect timeline ([#1530](https://github.com/videojs/http-streaming/issues/1530)) ([876ed8c](https://github.com/videojs/http-streaming/commit/876ed8c))
* bad timeline changes ([#1526](https://github.com/videojs/http-streaming/issues/1526)) ([7c63f4e](https://github.com/videojs/http-streaming/commit/7c63f4e))
* changeType on full codec change only ([#1474](https://github.com/videojs/http-streaming/issues/1474)) ([4e51778](https://github.com/videojs/http-streaming/commit/4e51778))
* enableFunction not passing playlist to fastQualityChange ([#1502](https://github.com/videojs/http-streaming/issues/1502)) ([e50ecb1](https://github.com/videojs/http-streaming/commit/e50ecb1))
* fastQualitySwitch stability ([#1525](https://github.com/videojs/http-streaming/issues/1525)) ([28cb9fd](https://github.com/videojs/http-streaming/commit/28cb9fd))
* fix repeated segments ([#1489](https://github.com/videojs/http-streaming/issues/1489)) ([ed8f6bd](https://github.com/videojs/http-streaming/commit/ed8f6bd))
* llHLS does not need forcedTimestampOffset ([#1501](https://github.com/videojs/http-streaming/issues/1501)) ([f5d1209](https://github.com/videojs/http-streaming/commit/f5d1209))
* remove extra abort call ([#1528](https://github.com/videojs/http-streaming/issues/1528)) ([7ec606f](https://github.com/videojs/http-streaming/commit/7ec606f))
* requestId init tag ([#1518](https://github.com/videojs/http-streaming/issues/1518)) ([a542ec8](https://github.com/videojs/http-streaming/commit/a542ec8))
* Resolve issue where live dash manifests without audio would hang ([#1524](https://github.com/videojs/http-streaming/issues/1524)) ([1ecf115](https://github.com/videojs/http-streaming/commit/1ecf115))
* use parent media sequence sync for audio and vtt, since they are opt-in features and can be enabled after main init ([#1505](https://github.com/videojs/http-streaming/issues/1505)) ([bdfe0e0](https://github.com/videojs/http-streaming/commit/bdfe0e0))
* videoTimestampOffset in sourceUpdater ([#1519](https://github.com/videojs/http-streaming/issues/1519)) ([d6851cc](https://github.com/videojs/http-streaming/commit/d6851cc))
### Chores
* Add log export to the demo page ([#1522](https://github.com/videojs/http-streaming/issues/1522)) ([0b4da7c](https://github.com/videojs/http-streaming/commit/0b4da7c))
* **demo:** Remove error on iOS on demo page ([#1493](https://github.com/videojs/http-streaming/issues/1493)) ([c50ba7e](https://github.com/videojs/http-streaming/commit/c50ba7e))
* Replace old quality selector ([#1482](https://github.com/videojs/http-streaming/issues/1482)) ([64376db](https://github.com/videojs/http-streaming/commit/64376db))
* Switch to our quality selector ([#1527](https://github.com/videojs/http-streaming/issues/1527)) ([e3d1c42](https://github.com/videojs/http-streaming/commit/e3d1c42))
* Update codecov action ([#1523](https://github.com/videojs/http-streaming/issues/1523)) ([bb9133c](https://github.com/videojs/http-streaming/commit/bb9133c))
* update contrib-eme to v5.3.1 ([#1512](https://github.com/videojs/http-streaming/issues/1512)) ([e46ba74](https://github.com/videojs/http-streaming/commit/e46ba74))
* update m3u8-parser, vhs-utils and aes-decrypter ([#1535](https://github.com/videojs/http-streaming/issues/1535)) ([dba1b79](https://github.com/videojs/http-streaming/commit/dba1b79))
* update mux.js to v7.0.3 ([#1498](https://github.com/videojs/http-streaming/issues/1498)) ([bebcafd](https://github.com/videojs/http-streaming/commit/bebcafd))
### Documentation
* removing duplicated step ([#1476](https://github.com/videojs/http-streaming/issues/1476)) ([e4acc57](https://github.com/videojs/http-streaming/commit/e4acc57))
### Reverts
* "fix: fix repeated segments issue during bandwidth update ([#1477](https://github.com/videojs/http-streaming/issues/1477))" ([#1488](https://github.com/videojs/http-streaming/issues/1488)) ([75f7b1a](https://github.com/videojs/http-streaming/commit/75f7b1a))
<a name="3.13.3"></a>
## [3.13.3](https://github.com/videojs/http-streaming/compare/v3.13.2...v3.13.3) (2024-08-12)

View file

@ -41,6 +41,7 @@ Video.js Compatibility: 7.x, 8.x
- [useCueTags](#usecuetags)
- [parse708captions](#parse708captions)
- [overrideNative](#overridenative)
- [experimentalUseMMS](#experimentalusemms)
- [playlistExclusionDuration](#playlistexclusionduration)
- [maxPlaylistRetries](#maxplaylistretries)
- [bandwidth](#bandwidth)
@ -349,6 +350,14 @@ var player = videojs('playerId', {
Since MSE playback may be desirable on all browsers with some native support other than Safari, `overrideNative: !videojs.browser.IS_SAFARI` could be used.
##### experimentalUseMMS
* Type: `boolean`
* can be used as an initialization option
Use ManagedMediaSource when available. If both ManagedMediaSource and MediaSource are present, ManagedMediaSource will be used. This will only be effective if `overrideNative` is true, because currently the only browsers that implement ManagedMediaSource also have native support. Safari on iPhone 17.1 has ManagedMediaSource, as does Safari 17 on desktop and iPad.
Currently, using this option will disable AirPlay.
##### playlistExclusionDuration
* Type: `number`
* can be used as an initialization option

View file

@ -1,4 +1,4 @@
/*! @name @videojs/http-streaming @version 3.13.3 @license Apache-2.0 */
/*! @name @videojs/http-streaming @version 3.14.2 @license Apache-2.0 */
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
@ -27,181 +27,7 @@
return _extends.apply(this, arguments);
}
var urlToolkit = {exports: {}};
(function (module, exports) {
// see https://tools.ietf.org/html/rfc1808
(function (root) {
var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
var URLToolkit = {
// If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
// E.g
// With opts.alwaysNormalize = false (default, spec compliant)
// http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
// With opts.alwaysNormalize = true (not spec compliant)
// http://a.com/b/cd + /e/f/../g => http://a.com/e/g
buildAbsoluteURL: function (baseURL, relativeURL, opts) {
opts = opts || {}; // remove any remaining space and CRLF
baseURL = baseURL.trim();
relativeURL = relativeURL.trim();
if (!relativeURL) {
// 2a) If the embedded URL is entirely empty, it inherits the
// entire base URL (i.e., is set equal to the base URL)
// and we are done.
if (!opts.alwaysNormalize) {
return baseURL;
}
var basePartsForNormalise = URLToolkit.parseURL(baseURL);
if (!basePartsForNormalise) {
throw new Error('Error trying to parse base URL.');
}
basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
return URLToolkit.buildURLFromParts(basePartsForNormalise);
}
var relativeParts = URLToolkit.parseURL(relativeURL);
if (!relativeParts) {
throw new Error('Error trying to parse relative URL.');
}
if (relativeParts.scheme) {
// 2b) If the embedded URL starts with a scheme name, it is
// interpreted as an absolute URL and we are done.
if (!opts.alwaysNormalize) {
return relativeURL;
}
relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
return URLToolkit.buildURLFromParts(relativeParts);
}
var baseParts = URLToolkit.parseURL(baseURL);
if (!baseParts) {
throw new Error('Error trying to parse base URL.');
}
if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
// If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
// This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
baseParts.netLoc = pathParts[1];
baseParts.path = pathParts[2];
}
if (baseParts.netLoc && !baseParts.path) {
baseParts.path = '/';
}
var builtParts = {
// 2c) Otherwise, the embedded URL inherits the scheme of
// the base URL.
scheme: baseParts.scheme,
netLoc: relativeParts.netLoc,
path: null,
params: relativeParts.params,
query: relativeParts.query,
fragment: relativeParts.fragment
};
if (!relativeParts.netLoc) {
// 3) If the embedded URL's <net_loc> is non-empty, we skip to
// Step 7. Otherwise, the embedded URL inherits the <net_loc>
// (if any) of the base URL.
builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
// path is not relative and we skip to Step 7.
if (relativeParts.path[0] !== '/') {
if (!relativeParts.path) {
// 5) If the embedded URL path is empty (and not preceded by a
// slash), then the embedded URL inherits the base URL path
builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
// step 7; otherwise, it inherits the <params> of the base
// URL (if any) and
if (!relativeParts.params) {
builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
// step 7; otherwise, it inherits the <query> of the base
// URL (if any) and we skip to step 7.
if (!relativeParts.query) {
builtParts.query = baseParts.query;
}
}
} else {
// 6) The last segment of the base URL's path (anything
// following the rightmost slash "/", or the entire path if no
// slash is present) is removed and the embedded URL's path is
// appended in its place.
var baseURLPath = baseParts.path;
var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
builtParts.path = URLToolkit.normalizePath(newPath);
}
}
}
if (builtParts.path === null) {
builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
}
return URLToolkit.buildURLFromParts(builtParts);
},
parseURL: function (url) {
var parts = URL_REGEX.exec(url);
if (!parts) {
return null;
}
return {
scheme: parts[1] || '',
netLoc: parts[2] || '',
path: parts[3] || '',
params: parts[4] || '',
query: parts[5] || '',
fragment: parts[6] || ''
};
},
normalizePath: function (path) {
// The following operations are
// then applied, in order, to the new path:
// 6a) All occurrences of "./", where "." is a complete path
// segment, are removed.
// 6b) If the path ends with "." as a complete path segment,
// that "." is removed.
path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
// complete path segment not equal to "..", are removed.
// Removal of these path segments is performed iteratively,
// removing the leftmost matching pattern on each iteration,
// until no matching pattern remains.
// 6d) If the path ends with "<segment>/..", where <segment> is a
// complete path segment not equal to "..", that
// "<segment>/.." is removed.
while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}
return path.split('').reverse().join('');
},
buildURLFromParts: function (parts) {
return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
}
};
module.exports = URLToolkit;
})();
})(urlToolkit);
var URLToolkit = urlToolkit.exports;
var DEFAULT_LOCATION = 'http://example.com';
var DEFAULT_LOCATION = 'https://example.com';
var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
// return early if we don't need to resolve
@ -212,37 +38,25 @@
if (/^data:/.test(baseUrl)) {
baseUrl = window.location && window.location.href || '';
} // IE11 supports URL but not the URL constructor
// feature detect the behavior we want
}
var nativeURL = typeof window.URL === 'function';
var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
// and if baseUrl isn't an absolute url
var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
if (nativeURL) {
baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
} else if (!/\/\//i.test(baseUrl)) {
baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
if (nativeURL) {
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
return newUrl.href;
}
return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
return newUrl.href;
};
/**
@ -415,7 +229,7 @@
return array;
}
/*! @name m3u8-parser @version 7.1.0 @license Apache-2.0 */
/*! @name m3u8-parser @version 7.2.0 @license Apache-2.0 */
/**
* @file m3u8/line-stream.js
*/
@ -522,6 +336,30 @@
return result;
};
/**
* Converts a string into a resolution object
*
* @param {string} resolution a string such as 3840x2160
*
* @return {Object} An object representing the resolution
*
*/
const parseResolution = resolution => {
const split = resolution.split('x');
const result = {};
if (split[0]) {
result.width = parseInt(split[0], 10);
}
if (split[1]) {
result.height = parseInt(split[1], 10);
}
return result;
};
/**
* A line-level M3U8 parser event stream. It expects to receive input one
* line at a time and performs a context-free parse of its contents. A stream
@ -784,18 +622,7 @@
event.attributes = parseAttributes$1(match[1]);
if (event.attributes.RESOLUTION) {
const split = event.attributes.RESOLUTION.split('x');
const resolution = {};
if (split[0]) {
resolution.width = parseInt(split[0], 10);
}
if (split[1]) {
resolution.height = parseInt(split[1], 10);
}
event.attributes.RESOLUTION = resolution;
event.attributes.RESOLUTION = parseResolution(event.attributes.RESOLUTION);
}
if (event.attributes.BANDWIDTH) {
@ -951,7 +778,7 @@
return;
}
match = /^#EXT-X-CUE-IN:(.*)?$/.exec(newLine);
match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);
if (match) {
event = {
@ -1154,6 +981,16 @@
return;
}
match = /^#EXT-X-I-FRAMES-ONLY/.exec(newLine);
if (match) {
this.trigger('data', {
type: 'tag',
tagType: 'i-frames-only'
});
return;
}
match = /^#EXT-X-CONTENT-STEERING:(.*)$/.exec(newLine);
if (match) {
@ -1164,6 +1001,51 @@
event.attributes = parseAttributes$1(match[1]);
this.trigger('data', event);
return;
}
match = /^#EXT-X-I-FRAME-STREAM-INF:(.*)$/.exec(newLine);
if (match) {
event = {
type: 'tag',
tagType: 'i-frame-playlist'
};
event.attributes = parseAttributes$1(match[1]);
if (event.attributes.URI) {
event.uri = event.attributes.URI;
}
if (event.attributes.BANDWIDTH) {
event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
}
if (event.attributes.RESOLUTION) {
event.attributes.RESOLUTION = parseResolution(event.attributes.RESOLUTION);
}
if (event.attributes['AVERAGE-BANDWIDTH']) {
event.attributes['AVERAGE-BANDWIDTH'] = parseInt(event.attributes['AVERAGE-BANDWIDTH'], 10);
}
if (event.attributes['FRAME-RATE']) {
event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);
}
this.trigger('data', event);
return;
}
match = /^#EXT-X-DEFINE:(.*)$/.exec(newLine);
if (match) {
event = {
type: 'tag',
tagType: 'define'
};
event.attributes = parseAttributes$1(match[1]);
this.trigger('data', event);
return;
} // unknown tag type
@ -1314,16 +1196,21 @@
* requires some property of the manifest object to be defaulted.
*
* @class Parser
* @param {Object} [opts] Options for the constructor, needed for substitutions
* @param {string} [opts.uri] URL to check for query params
* @param {Object} [opts.mainDefinitions] Definitions on main playlist that can be imported
* @extends Stream
*/
class Parser extends Stream {
constructor() {
constructor(opts = {}) {
super();
this.lineStream = new LineStream();
this.parseStream = new ParseStream();
this.lineStream.pipe(this.parseStream);
this.mainDefinitions = opts.mainDefinitions || {};
this.params = new URL(opts.uri, 'https://a.com').searchParams;
this.lastProgramDateTime = null;
/* eslint-disable consistent-this */
@ -1356,6 +1243,7 @@
allowCache: true,
discontinuityStarts: [],
dateRanges: [],
iFramePlaylists: [],
segments: []
}; // keep track of the last seen segment's byte range end, as segments are not required
// to provide the offset, in which case it defaults to the next byte after the
@ -1389,7 +1277,24 @@
this.parseStream.on('data', function (entry) {
let mediaGroup;
let rendition;
let rendition; // Replace variables in uris and attributes as defined in #EXT-X-DEFINE tags
if (self.manifest.definitions) {
for (const def in self.manifest.definitions) {
if (entry.uri) {
entry.uri = entry.uri.replace(`{$${def}}`, self.manifest.definitions[def]);
}
if (entry.attributes) {
for (const attr in entry.attributes) {
if (typeof entry.attributes[attr] === 'string') {
entry.attributes[attr] = entry.attributes[attr].replace(`{$${def}}`, self.manifest.definitions[def]);
}
}
}
}
}
({
tag() {
// switch based on the tag type
@ -1962,9 +1867,121 @@
this.manifest.independentSegments = true;
},
'i-frames-only'() {
this.manifest.iFramesOnly = true;
this.requiredCompatibilityversion(this.manifest.version, 4);
},
'content-steering'() {
this.manifest.contentSteering = camelCaseKeys(entry.attributes);
this.warnOnMissingAttributes_('#EXT-X-CONTENT-STEERING', entry.attributes, ['SERVER-URI']);
},
/** @this {Parser} */
define() {
this.manifest.definitions = this.manifest.definitions || {};
const addDef = (n, v) => {
if (n in this.manifest.definitions) {
// An EXT-X-DEFINE tag MUST NOT specify the same Variable Name as any other
// EXT-X-DEFINE tag in the same Playlist. Parsers that encounter duplicate
// Variable Name declarations MUST fail to parse the Playlist.
this.trigger('error', {
message: `EXT-X-DEFINE: Duplicate name ${n}`
});
return;
}
this.manifest.definitions[n] = v;
};
if ('QUERYPARAM' in entry.attributes) {
if ('NAME' in entry.attributes || 'IMPORT' in entry.attributes) {
// An EXT-X-DEFINE tag MUST contain either a NAME, an IMPORT, or a
// QUERYPARAM attribute, but only one of the three. Otherwise, the
// client MUST fail to parse the Playlist.
this.trigger('error', {
message: 'EXT-X-DEFINE: Invalid attributes'
});
return;
}
const val = this.params.get(entry.attributes.QUERYPARAM);
if (!val) {
// If the QUERYPARAM attribute value does not match any query parameter in
// the URI or the matching parameter has no associated value, the parser
// MUST fail to parse the Playlist. If more than one parameter matches,
// any of the associated values MAY be used.
this.trigger('error', {
message: `EXT-X-DEFINE: No query param ${entry.attributes.QUERYPARAM}`
});
return;
}
addDef(entry.attributes.QUERYPARAM, decodeURIComponent(val));
return;
}
if ('NAME' in entry.attributes) {
if ('IMPORT' in entry.attributes) {
// An EXT-X-DEFINE tag MUST contain either a NAME, an IMPORT, or a
// QUERYPARAM attribute, but only one of the three. Otherwise, the
// client MUST fail to parse the Playlist.
this.trigger('error', {
message: 'EXT-X-DEFINE: Invalid attributes'
});
return;
}
if (!('VALUE' in entry.attributes) || typeof entry.attributes.VALUE !== 'string') {
// This attribute is REQUIRED if the EXT-X-DEFINE tag has a NAME attribute.
// The quoted-string MAY be empty.
this.trigger('error', {
message: `EXT-X-DEFINE: No value for ${entry.attributes.NAME}`
});
return;
}
addDef(entry.attributes.NAME, entry.attributes.VALUE);
return;
}
if ('IMPORT' in entry.attributes) {
if (!this.mainDefinitions[entry.attributes.IMPORT]) {
// Covers two conditions, as mainDefinitions will always be empty on main
//
// EXT-X-DEFINE tags containing the IMPORT attribute MUST NOT occur in
// Multivariant Playlists; they are only allowed in Media Playlists.
//
// If the IMPORT attribute value does not match any Variable Name in the
// Multivariant Playlist, or if the Media Playlist loaded from a
// Multivariant Playlist, the parser MUST fail the Playlist.
this.trigger('error', {
message: `EXT-X-DEFINE: No value ${entry.attributes.IMPORT} to import, or IMPORT used on main playlist`
});
return;
}
addDef(entry.attributes.IMPORT, this.mainDefinitions[entry.attributes.IMPORT]);
return;
} // An EXT-X-DEFINE tag MUST contain either a NAME, an IMPORT, or a QUERYPARAM
// attribute, but only one of the three. Otherwise, the client MUST fail to
// parse the Playlist.
this.trigger('error', {
message: 'EXT-X-DEFINE: No attribute'
});
},
'i-frame-playlist'() {
this.manifest.iFramePlaylists.push({
attributes: entry.attributes,
uri: entry.uri,
timeline: currentTimeline
});
this.warnOnMissingAttributes_('#EXT-X-I-FRAME-STREAM-INF', entry.attributes, ['BANDWIDTH', 'URI']);
}
})[entry.tagType] || noop).call(self);
@ -2022,6 +2039,14 @@
});
}
requiredCompatibilityversion(currentVersion, targetVersion) {
if (currentVersion < targetVersion || !currentVersion) {
this.trigger('warn', {
message: `manifest must be at least version ${targetVersion}`
});
}
}
warnOnMissingAttributes_(identifier, attributes, required) {
const missing = [];
required.forEach(function (key) {
@ -2282,12 +2307,27 @@
return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
var browserSupportsCodec = function browserSupportsCodec(codecString) {
/**
* Tests whether the codec is supported by MediaSource. Optionally also tests ManagedMediaSource.
*
* @param {string} codecString
* Codec to test
* @param {boolean} [withMMS]
* Whether to check if ManagedMediaSource supports it
* @return {boolean}
* Codec is supported
*/
var browserSupportsCodec = function browserSupportsCodec(codecString, withMMS) {
if (codecString === void 0) {
codecString = '';
}
return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
if (withMMS === void 0) {
withMMS = false;
}
return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || withMMS && window.ManagedMediaSource && window.ManagedMediaSource.isTypeSupported && window.ManagedMediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
};
var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
if (codecString === void 0) {
@ -9813,7 +9853,7 @@
const removeOldMediaGroupLabels = (update, newMain) => {
forEachMediaGroup$1(update, (properties, type, group, label) => {
if (!(label in newMain.mediaGroups[type][group])) {
if (!newMain.mediaGroups[type][group] || !(label in newMain.mediaGroups[type][group])) {
delete update.mediaGroups[type][group][label];
}
});
@ -23406,8 +23446,7 @@
});
if (waitingForTimelineChange && shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
// Audio being behind should only happen on DASH sources.
if (segmentLoader.sourceType_ === 'dash' && isAudioTimelineBehind(segmentLoader)) {
if (isAudioTimelineBehind(segmentLoader)) {
segmentLoader.timelineChangeController_.trigger('audioTimelineBehind');
return;
}
@ -27798,7 +27837,7 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach(cue => {
const duration = cue.endTime - cue.startTime;
const startTime = MPEGTS === 0 ? cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);
const startTime = this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);
});
@ -29087,7 +29126,7 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
function unpad(padded) {
return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
}
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
/*! @name aes-decrypter @version 4.0.2 @license Apache-2.0 */
/**
* @file aes.js
@ -31163,7 +31202,8 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
cacheEncryptionKeys,
bufferBasedABR,
leastPixelDiffSelector,
captionServices
captionServices,
experimentalUseMMS
} = options;
if (!src) {
@ -31203,7 +31243,16 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
};
this.on('error', this.pauseLoading);
this.mediaTypes_ = createMediaTypes();
this.mediaSource = new window.MediaSource();
if (experimentalUseMMS && window.ManagedMediaSource) {
// Airplay source not yet implemented. Remote playback must be disabled.
this.tech_.el_.disableRemotePlayback = true;
this.mediaSource = new window.ManagedMediaSource();
videojs__default["default"].log('Using ManagedMediaSource');
} else if (window.MediaSource) {
this.mediaSource = new window.MediaSource();
}
this.handleDurationChange_ = this.handleDurationChange_.bind(this);
this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);
@ -31909,28 +31958,25 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
this.mainSegmentLoader_.on('ended', () => {
this.logger_('main segment loader ended');
this.onEndOfStream();
}); // In DASH, there is the possibility of the video segment and the audio segment
}); // There is the possibility of the video segment and the audio segment
// at a current time to be on different timelines. When this occurs, the player
// forwards playback to a point where these two segment types are back on the same
// timeline. This time will be just after the end of the audio segment that is on
// a previous timeline.
if (this.sourceType_ === 'dash') {
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
}
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
this.mainSegmentLoader_.on('earlyabort', event => {
// never try to early abort with the new ABR algorithm
if (this.bufferBasedABR) {
@ -34369,15 +34415,15 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
initPlugin(this, options);
};
var version$4 = "3.13.3";
var version$4 = "3.14.2";
var version$3 = "7.0.3";
var version$2 = "1.3.0";
var version$1 = "7.1.0";
var version$1 = "7.2.0";
var version = "4.0.1";
var version = "4.0.2";
const Vhs = {
PlaylistLoader,
@ -35637,7 +35683,12 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
VERSION: version$4,
canHandleSource(srcObj, options = {}) {
const localOptions = merge$1(videojs__default["default"].options, options);
const localOptions = merge$1(videojs__default["default"].options, options); // If not opting to experimentalUseMMS, and playback is only supported with MediaSource, cannot handle source
if (!localOptions.vhs.experimentalUseMMS && !browserSupportsCodec('avc1.4d400d,mp4a.40.2', false)) {
return false;
}
return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
},
@ -35676,14 +35727,15 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
};
/**
* Check to see if the native MediaSource object exists and supports
* an MP4 container with both H.264 video and AAC-LC audio.
* Check to see if either the native MediaSource or ManagedMediaSource
* objectx exist and support an MP4 container with both H.264 video
* and AAC-LC audio.
*
* @return {boolean} if native media sources are supported
*/
const supportsNativeMediaSources = () => {
return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
return browserSupportsCodec('avc1.4d400d,mp4a.40.2', true);
}; // register source handlers with the appropriate techs

View file

@ -1,4 +1,4 @@
/*! @name @videojs/http-streaming @version 3.13.3 @license Apache-2.0 */
/*! @name @videojs/http-streaming @version 3.14.2 @license Apache-2.0 */
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
@ -3856,7 +3856,7 @@ const parseMainXml = ({
const removeOldMediaGroupLabels = (update, newMain) => {
forEachMediaGroup(update, (properties, type, group, label) => {
if (!(label in newMain.mediaGroups[type][group])) {
if (!newMain.mediaGroups[type][group] || !(label in newMain.mediaGroups[type][group])) {
delete update.mediaGroups[type][group][label];
}
});
@ -17339,8 +17339,7 @@ const checkAndFixTimelines = segmentLoader => {
});
if (waitingForTimelineChange && shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
// Audio being behind should only happen on DASH sources.
if (segmentLoader.sourceType_ === 'dash' && isAudioTimelineBehind(segmentLoader)) {
if (isAudioTimelineBehind(segmentLoader)) {
segmentLoader.timelineChangeController_.trigger('audioTimelineBehind');
return;
}
@ -21731,7 +21730,7 @@ class VTTSegmentLoader extends SegmentLoader {
const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach(cue => {
const duration = cue.endTime - cue.startTime;
const startTime = MPEGTS === 0 ? cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);
const startTime = this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);
});
@ -23020,7 +23019,7 @@ const workerCode = transform(getWorkerString(function () {
function unpad(padded) {
return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
}
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
/*! @name aes-decrypter @version 4.0.2 @license Apache-2.0 */
/**
* @file aes.js
@ -25096,7 +25095,8 @@ class PlaylistController extends videojs__default["default"].EventTarget {
cacheEncryptionKeys,
bufferBasedABR,
leastPixelDiffSelector,
captionServices
captionServices,
experimentalUseMMS
} = options;
if (!src) {
@ -25136,7 +25136,16 @@ class PlaylistController extends videojs__default["default"].EventTarget {
};
this.on('error', this.pauseLoading);
this.mediaTypes_ = createMediaTypes();
this.mediaSource = new window__default["default"].MediaSource();
if (experimentalUseMMS && window__default["default"].ManagedMediaSource) {
// Airplay source not yet implemented. Remote playback must be disabled.
this.tech_.el_.disableRemotePlayback = true;
this.mediaSource = new window__default["default"].ManagedMediaSource();
videojs__default["default"].log('Using ManagedMediaSource');
} else if (window__default["default"].MediaSource) {
this.mediaSource = new window__default["default"].MediaSource();
}
this.handleDurationChange_ = this.handleDurationChange_.bind(this);
this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);
@ -25842,28 +25851,25 @@ class PlaylistController extends videojs__default["default"].EventTarget {
this.mainSegmentLoader_.on('ended', () => {
this.logger_('main segment loader ended');
this.onEndOfStream();
}); // In DASH, there is the possibility of the video segment and the audio segment
}); // There is the possibility of the video segment and the audio segment
// at a current time to be on different timelines. When this occurs, the player
// forwards playback to a point where these two segment types are back on the same
// timeline. This time will be just after the end of the audio segment that is on
// a previous timeline.
if (this.sourceType_ === 'dash') {
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
}
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
this.mainSegmentLoader_.on('earlyabort', event => {
// never try to early abort with the new ABR algorithm
if (this.bufferBasedABR) {
@ -28302,15 +28308,15 @@ const reloadSourceOnError = function (options) {
initPlugin(this, options);
};
var version$4 = "3.13.3";
var version$4 = "3.14.2";
var version$3 = "7.0.3";
var version$2 = "1.3.0";
var version$1 = "7.1.0";
var version$1 = "7.2.0";
var version = "4.0.1";
var version = "4.0.2";
const Vhs = {
PlaylistLoader,
@ -29570,7 +29576,12 @@ const VhsSourceHandler = {
VERSION: version$4,
canHandleSource(srcObj, options = {}) {
const localOptions = merge(videojs__default["default"].options, options);
const localOptions = merge(videojs__default["default"].options, options); // If not opting to experimentalUseMMS, and playback is only supported with MediaSource, cannot handle source
if (!localOptions.vhs.experimentalUseMMS && !codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2', false)) {
return false;
}
return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
},
@ -29609,14 +29620,15 @@ const VhsSourceHandler = {
};
/**
* Check to see if the native MediaSource object exists and supports
* an MP4 container with both H.264 video and AAC-LC audio.
* Check to see if either the native MediaSource or ManagedMediaSource
* objectx exist and support an MP4 container with both H.264 video
* and AAC-LC audio.
*
* @return {boolean} if native media sources are supported
*/
const supportsNativeMediaSources = () => {
return codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2');
return codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2', true);
}; // register source handlers with the appropriate techs

View file

@ -1,4 +1,4 @@
/*! @name @videojs/http-streaming @version 3.13.3 @license Apache-2.0 */
/*! @name @videojs/http-streaming @version 3.14.2 @license Apache-2.0 */
import _extends from '@babel/runtime/helpers/extends';
import document from 'global/document';
import window$1 from 'global/window';
@ -3844,7 +3844,7 @@ const parseMainXml = ({
const removeOldMediaGroupLabels = (update, newMain) => {
forEachMediaGroup(update, (properties, type, group, label) => {
if (!(label in newMain.mediaGroups[type][group])) {
if (!newMain.mediaGroups[type][group] || !(label in newMain.mediaGroups[type][group])) {
delete update.mediaGroups[type][group][label];
}
});
@ -17327,8 +17327,7 @@ const checkAndFixTimelines = segmentLoader => {
});
if (waitingForTimelineChange && shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
// Audio being behind should only happen on DASH sources.
if (segmentLoader.sourceType_ === 'dash' && isAudioTimelineBehind(segmentLoader)) {
if (isAudioTimelineBehind(segmentLoader)) {
segmentLoader.timelineChangeController_.trigger('audioTimelineBehind');
return;
}
@ -21719,7 +21718,7 @@ class VTTSegmentLoader extends SegmentLoader {
const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach(cue => {
const duration = cue.endTime - cue.startTime;
const startTime = MPEGTS === 0 ? cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);
const startTime = this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);
});
@ -23008,7 +23007,7 @@ const workerCode = transform(getWorkerString(function () {
function unpad(padded) {
return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
}
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
/*! @name aes-decrypter @version 4.0.2 @license Apache-2.0 */
/**
* @file aes.js
@ -25084,7 +25083,8 @@ class PlaylistController extends videojs.EventTarget {
cacheEncryptionKeys,
bufferBasedABR,
leastPixelDiffSelector,
captionServices
captionServices,
experimentalUseMMS
} = options;
if (!src) {
@ -25124,7 +25124,16 @@ class PlaylistController extends videojs.EventTarget {
};
this.on('error', this.pauseLoading);
this.mediaTypes_ = createMediaTypes();
this.mediaSource = new window$1.MediaSource();
if (experimentalUseMMS && window$1.ManagedMediaSource) {
// Airplay source not yet implemented. Remote playback must be disabled.
this.tech_.el_.disableRemotePlayback = true;
this.mediaSource = new window$1.ManagedMediaSource();
videojs.log('Using ManagedMediaSource');
} else if (window$1.MediaSource) {
this.mediaSource = new window$1.MediaSource();
}
this.handleDurationChange_ = this.handleDurationChange_.bind(this);
this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);
@ -25830,28 +25839,25 @@ class PlaylistController extends videojs.EventTarget {
this.mainSegmentLoader_.on('ended', () => {
this.logger_('main segment loader ended');
this.onEndOfStream();
}); // In DASH, there is the possibility of the video segment and the audio segment
}); // There is the possibility of the video segment and the audio segment
// at a current time to be on different timelines. When this occurs, the player
// forwards playback to a point where these two segment types are back on the same
// timeline. This time will be just after the end of the audio segment that is on
// a previous timeline.
if (this.sourceType_ === 'dash') {
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
}
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
this.mainSegmentLoader_.on('earlyabort', event => {
// never try to early abort with the new ABR algorithm
if (this.bufferBasedABR) {
@ -28290,15 +28296,15 @@ const reloadSourceOnError = function (options) {
initPlugin(this, options);
};
var version$4 = "3.13.3";
var version$4 = "3.14.2";
var version$3 = "7.0.3";
var version$2 = "1.3.0";
var version$1 = "7.1.0";
var version$1 = "7.2.0";
var version = "4.0.1";
var version = "4.0.2";
const Vhs = {
PlaylistLoader,
@ -29558,7 +29564,12 @@ const VhsSourceHandler = {
VERSION: version$4,
canHandleSource(srcObj, options = {}) {
const localOptions = merge(videojs.options, options);
const localOptions = merge(videojs.options, options); // If not opting to experimentalUseMMS, and playback is only supported with MediaSource, cannot handle source
if (!localOptions.vhs.experimentalUseMMS && !browserSupportsCodec('avc1.4d400d,mp4a.40.2', false)) {
return false;
}
return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
},
@ -29597,14 +29608,15 @@ const VhsSourceHandler = {
};
/**
* Check to see if the native MediaSource object exists and supports
* an MP4 container with both H.264 video and AAC-LC audio.
* Check to see if either the native MediaSource or ManagedMediaSource
* objectx exist and support an MP4 container with both H.264 video
* and AAC-LC audio.
*
* @return {boolean} if native media sources are supported
*/
const supportsNativeMediaSources = () => {
return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
return browserSupportsCodec('avc1.4d400d,mp4a.40.2', true);
}; // register source handlers with the appropriate techs

View file

@ -1,4 +1,4 @@
/*! @name @videojs/http-streaming @version 3.13.3 @license Apache-2.0 */
/*! @name @videojs/http-streaming @version 3.14.2 @license Apache-2.0 */
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
@ -27,181 +27,7 @@
return _extends.apply(this, arguments);
}
var urlToolkit = {exports: {}};
(function (module, exports) {
// see https://tools.ietf.org/html/rfc1808
(function (root) {
var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
var URLToolkit = {
// If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
// E.g
// With opts.alwaysNormalize = false (default, spec compliant)
// http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
// With opts.alwaysNormalize = true (not spec compliant)
// http://a.com/b/cd + /e/f/../g => http://a.com/e/g
buildAbsoluteURL: function (baseURL, relativeURL, opts) {
opts = opts || {}; // remove any remaining space and CRLF
baseURL = baseURL.trim();
relativeURL = relativeURL.trim();
if (!relativeURL) {
// 2a) If the embedded URL is entirely empty, it inherits the
// entire base URL (i.e., is set equal to the base URL)
// and we are done.
if (!opts.alwaysNormalize) {
return baseURL;
}
var basePartsForNormalise = URLToolkit.parseURL(baseURL);
if (!basePartsForNormalise) {
throw new Error('Error trying to parse base URL.');
}
basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
return URLToolkit.buildURLFromParts(basePartsForNormalise);
}
var relativeParts = URLToolkit.parseURL(relativeURL);
if (!relativeParts) {
throw new Error('Error trying to parse relative URL.');
}
if (relativeParts.scheme) {
// 2b) If the embedded URL starts with a scheme name, it is
// interpreted as an absolute URL and we are done.
if (!opts.alwaysNormalize) {
return relativeURL;
}
relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
return URLToolkit.buildURLFromParts(relativeParts);
}
var baseParts = URLToolkit.parseURL(baseURL);
if (!baseParts) {
throw new Error('Error trying to parse base URL.');
}
if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
// If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
// This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
baseParts.netLoc = pathParts[1];
baseParts.path = pathParts[2];
}
if (baseParts.netLoc && !baseParts.path) {
baseParts.path = '/';
}
var builtParts = {
// 2c) Otherwise, the embedded URL inherits the scheme of
// the base URL.
scheme: baseParts.scheme,
netLoc: relativeParts.netLoc,
path: null,
params: relativeParts.params,
query: relativeParts.query,
fragment: relativeParts.fragment
};
if (!relativeParts.netLoc) {
// 3) If the embedded URL's <net_loc> is non-empty, we skip to
// Step 7. Otherwise, the embedded URL inherits the <net_loc>
// (if any) of the base URL.
builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
// path is not relative and we skip to Step 7.
if (relativeParts.path[0] !== '/') {
if (!relativeParts.path) {
// 5) If the embedded URL path is empty (and not preceded by a
// slash), then the embedded URL inherits the base URL path
builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
// step 7; otherwise, it inherits the <params> of the base
// URL (if any) and
if (!relativeParts.params) {
builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
// step 7; otherwise, it inherits the <query> of the base
// URL (if any) and we skip to step 7.
if (!relativeParts.query) {
builtParts.query = baseParts.query;
}
}
} else {
// 6) The last segment of the base URL's path (anything
// following the rightmost slash "/", or the entire path if no
// slash is present) is removed and the embedded URL's path is
// appended in its place.
var baseURLPath = baseParts.path;
var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
builtParts.path = URLToolkit.normalizePath(newPath);
}
}
}
if (builtParts.path === null) {
builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
}
return URLToolkit.buildURLFromParts(builtParts);
},
parseURL: function (url) {
var parts = URL_REGEX.exec(url);
if (!parts) {
return null;
}
return {
scheme: parts[1] || '',
netLoc: parts[2] || '',
path: parts[3] || '',
params: parts[4] || '',
query: parts[5] || '',
fragment: parts[6] || ''
};
},
normalizePath: function (path) {
// The following operations are
// then applied, in order, to the new path:
// 6a) All occurrences of "./", where "." is a complete path
// segment, are removed.
// 6b) If the path ends with "." as a complete path segment,
// that "." is removed.
path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
// complete path segment not equal to "..", are removed.
// Removal of these path segments is performed iteratively,
// removing the leftmost matching pattern on each iteration,
// until no matching pattern remains.
// 6d) If the path ends with "<segment>/..", where <segment> is a
// complete path segment not equal to "..", that
// "<segment>/.." is removed.
while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}
return path.split('').reverse().join('');
},
buildURLFromParts: function (parts) {
return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
}
};
module.exports = URLToolkit;
})();
})(urlToolkit);
var URLToolkit = urlToolkit.exports;
var DEFAULT_LOCATION = 'http://example.com';
var DEFAULT_LOCATION = 'https://example.com';
var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
// return early if we don't need to resolve
@ -212,37 +38,25 @@
if (/^data:/.test(baseUrl)) {
baseUrl = window.location && window.location.href || '';
} // IE11 supports URL but not the URL constructor
// feature detect the behavior we want
}
var nativeURL = typeof window.URL === 'function';
var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
// and if baseUrl isn't an absolute url
var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
if (nativeURL) {
baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
} else if (!/\/\//i.test(baseUrl)) {
baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
if (nativeURL) {
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
return newUrl.href;
}
return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
return newUrl.href;
};
/**
@ -415,7 +229,7 @@
return array;
}
/*! @name m3u8-parser @version 7.1.0 @license Apache-2.0 */
/*! @name m3u8-parser @version 7.2.0 @license Apache-2.0 */
/**
* @file m3u8/line-stream.js
*/
@ -522,6 +336,30 @@
return result;
};
/**
* Converts a string into a resolution object
*
* @param {string} resolution a string such as 3840x2160
*
* @return {Object} An object representing the resolution
*
*/
const parseResolution = resolution => {
const split = resolution.split('x');
const result = {};
if (split[0]) {
result.width = parseInt(split[0], 10);
}
if (split[1]) {
result.height = parseInt(split[1], 10);
}
return result;
};
/**
* A line-level M3U8 parser event stream. It expects to receive input one
* line at a time and performs a context-free parse of its contents. A stream
@ -784,18 +622,7 @@
event.attributes = parseAttributes$1(match[1]);
if (event.attributes.RESOLUTION) {
const split = event.attributes.RESOLUTION.split('x');
const resolution = {};
if (split[0]) {
resolution.width = parseInt(split[0], 10);
}
if (split[1]) {
resolution.height = parseInt(split[1], 10);
}
event.attributes.RESOLUTION = resolution;
event.attributes.RESOLUTION = parseResolution(event.attributes.RESOLUTION);
}
if (event.attributes.BANDWIDTH) {
@ -951,7 +778,7 @@
return;
}
match = /^#EXT-X-CUE-IN:(.*)?$/.exec(newLine);
match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);
if (match) {
event = {
@ -1154,6 +981,16 @@
return;
}
match = /^#EXT-X-I-FRAMES-ONLY/.exec(newLine);
if (match) {
this.trigger('data', {
type: 'tag',
tagType: 'i-frames-only'
});
return;
}
match = /^#EXT-X-CONTENT-STEERING:(.*)$/.exec(newLine);
if (match) {
@ -1164,6 +1001,51 @@
event.attributes = parseAttributes$1(match[1]);
this.trigger('data', event);
return;
}
match = /^#EXT-X-I-FRAME-STREAM-INF:(.*)$/.exec(newLine);
if (match) {
event = {
type: 'tag',
tagType: 'i-frame-playlist'
};
event.attributes = parseAttributes$1(match[1]);
if (event.attributes.URI) {
event.uri = event.attributes.URI;
}
if (event.attributes.BANDWIDTH) {
event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
}
if (event.attributes.RESOLUTION) {
event.attributes.RESOLUTION = parseResolution(event.attributes.RESOLUTION);
}
if (event.attributes['AVERAGE-BANDWIDTH']) {
event.attributes['AVERAGE-BANDWIDTH'] = parseInt(event.attributes['AVERAGE-BANDWIDTH'], 10);
}
if (event.attributes['FRAME-RATE']) {
event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);
}
this.trigger('data', event);
return;
}
match = /^#EXT-X-DEFINE:(.*)$/.exec(newLine);
if (match) {
event = {
type: 'tag',
tagType: 'define'
};
event.attributes = parseAttributes$1(match[1]);
this.trigger('data', event);
return;
} // unknown tag type
@ -1314,16 +1196,21 @@
* requires some property of the manifest object to be defaulted.
*
* @class Parser
* @param {Object} [opts] Options for the constructor, needed for substitutions
* @param {string} [opts.uri] URL to check for query params
* @param {Object} [opts.mainDefinitions] Definitions on main playlist that can be imported
* @extends Stream
*/
class Parser extends Stream {
constructor() {
constructor(opts = {}) {
super();
this.lineStream = new LineStream();
this.parseStream = new ParseStream();
this.lineStream.pipe(this.parseStream);
this.mainDefinitions = opts.mainDefinitions || {};
this.params = new URL(opts.uri, 'https://a.com').searchParams;
this.lastProgramDateTime = null;
/* eslint-disable consistent-this */
@ -1356,6 +1243,7 @@
allowCache: true,
discontinuityStarts: [],
dateRanges: [],
iFramePlaylists: [],
segments: []
}; // keep track of the last seen segment's byte range end, as segments are not required
// to provide the offset, in which case it defaults to the next byte after the
@ -1389,7 +1277,24 @@
this.parseStream.on('data', function (entry) {
let mediaGroup;
let rendition;
let rendition; // Replace variables in uris and attributes as defined in #EXT-X-DEFINE tags
if (self.manifest.definitions) {
for (const def in self.manifest.definitions) {
if (entry.uri) {
entry.uri = entry.uri.replace(`{$${def}}`, self.manifest.definitions[def]);
}
if (entry.attributes) {
for (const attr in entry.attributes) {
if (typeof entry.attributes[attr] === 'string') {
entry.attributes[attr] = entry.attributes[attr].replace(`{$${def}}`, self.manifest.definitions[def]);
}
}
}
}
}
({
tag() {
// switch based on the tag type
@ -1962,9 +1867,121 @@
this.manifest.independentSegments = true;
},
'i-frames-only'() {
this.manifest.iFramesOnly = true;
this.requiredCompatibilityversion(this.manifest.version, 4);
},
'content-steering'() {
this.manifest.contentSteering = camelCaseKeys(entry.attributes);
this.warnOnMissingAttributes_('#EXT-X-CONTENT-STEERING', entry.attributes, ['SERVER-URI']);
},
/**
 * EXT-X-DEFINE tag handler (HLS variable definitions).
 *
 * A definition comes from exactly one of three attributes:
 *   NAME/VALUE — a literal definition,
 *   QUERYPARAM — the value of a query parameter on the playlist URI,
 *   IMPORT     — a value inherited from the multivariant playlist.
 * Each resolved definition is stored on `this.manifest.definitions` and is
 * substituted into URIs/attributes by the surrounding 'data' handler.
 *
 * @this {Parser}
 */
define() {
  // Lazily create the definitions map on the manifest.
  this.manifest.definitions = this.manifest.definitions || {};
  // Registers one definition, failing on duplicates as the spec requires.
  const addDef = (n, v) => {
    if (n in this.manifest.definitions) {
      // An EXT-X-DEFINE tag MUST NOT specify the same Variable Name as any other
      // EXT-X-DEFINE tag in the same Playlist. Parsers that encounter duplicate
      // Variable Name declarations MUST fail to parse the Playlist.
      this.trigger('error', {
        message: `EXT-X-DEFINE: Duplicate name ${n}`
      });
      return;
    }
    this.manifest.definitions[n] = v;
  };
  if ('QUERYPARAM' in entry.attributes) {
    if ('NAME' in entry.attributes || 'IMPORT' in entry.attributes) {
      // An EXT-X-DEFINE tag MUST contain either a NAME, an IMPORT, or a
      // QUERYPARAM attribute, but only one of the three. Otherwise, the
      // client MUST fail to parse the Playlist.
      this.trigger('error', {
        message: 'EXT-X-DEFINE: Invalid attributes'
      });
      return;
    }
    // this.params holds the query parameters of the playlist URI (set in the
    // Parser constructor from opts.uri).
    const val = this.params.get(entry.attributes.QUERYPARAM);
    if (!val) {
      // If the QUERYPARAM attribute value does not match any query parameter in
      // the URI or the matching parameter has no associated value, the parser
      // MUST fail to parse the Playlist. If more than one parameter matches,
      // any of the associated values MAY be used.
      this.trigger('error', {
        message: `EXT-X-DEFINE: No query param ${entry.attributes.QUERYPARAM}`
      });
      return;
    }
    addDef(entry.attributes.QUERYPARAM, decodeURIComponent(val));
    return;
  }
  if ('NAME' in entry.attributes) {
    if ('IMPORT' in entry.attributes) {
      // An EXT-X-DEFINE tag MUST contain either a NAME, an IMPORT, or a
      // QUERYPARAM attribute, but only one of the three. Otherwise, the
      // client MUST fail to parse the Playlist.
      this.trigger('error', {
        message: 'EXT-X-DEFINE: Invalid attributes'
      });
      return;
    }
    if (!('VALUE' in entry.attributes) || typeof entry.attributes.VALUE !== 'string') {
      // This attribute is REQUIRED if the EXT-X-DEFINE tag has a NAME attribute.
      // The quoted-string MAY be empty.
      this.trigger('error', {
        message: `EXT-X-DEFINE: No value for ${entry.attributes.NAME}`
      });
      return;
    }
    addDef(entry.attributes.NAME, entry.attributes.VALUE);
    return;
  }
  if ('IMPORT' in entry.attributes) {
    if (!this.mainDefinitions[entry.attributes.IMPORT]) {
      // Covers two conditions, as mainDefinitions will always be empty on main
      //
      // EXT-X-DEFINE tags containing the IMPORT attribute MUST NOT occur in
      // Multivariant Playlists; they are only allowed in Media Playlists.
      //
      // If the IMPORT attribute value does not match any Variable Name in the
      // Multivariant Playlist, or if the Media Playlist loaded from a
      // Multivariant Playlist, the parser MUST fail the Playlist.
      this.trigger('error', {
        message: `EXT-X-DEFINE: No value ${entry.attributes.IMPORT} to import, or IMPORT used on main playlist`
      });
      return;
    }
    addDef(entry.attributes.IMPORT, this.mainDefinitions[entry.attributes.IMPORT]);
    return;
  } // An EXT-X-DEFINE tag MUST contain either a NAME, an IMPORT, or a QUERYPARAM
  // attribute, but only one of the three. Otherwise, the client MUST fail to
  // parse the Playlist.
  this.trigger('error', {
    message: 'EXT-X-DEFINE: No attribute'
  });
},
/**
 * EXT-X-I-FRAME-STREAM-INF tag handler: records the referenced I-frame
 * playlist on the manifest and warns when required attributes are missing.
 */
'i-frame-playlist'() {
  this.manifest.iFramePlaylists.push({
    attributes: entry.attributes,
    uri: entry.uri,
    // currentTimeline is tracked by the enclosing 'data' handler scope.
    timeline: currentTimeline
  });
  this.warnOnMissingAttributes_('#EXT-X-I-FRAME-STREAM-INF', entry.attributes, ['BANDWIDTH', 'URI']);
}
})[entry.tagType] || noop).call(self);
@ -2022,6 +2039,14 @@
});
}
/**
 * Emits a 'warn' event when the playlist's declared EXT-X-VERSION is
 * missing (falsy) or lower than the version required by a tag.
 *
 * NOTE(review): the lowercase "version" in the name is upstream's; callers
 * in this bundle use it as-is, so it must not be renamed here.
 *
 * @param {number} currentVersion version declared by the playlist (may be undefined)
 * @param {number} targetVersion minimum version required by the tag encountered
 */
requiredCompatibilityversion(currentVersion, targetVersion) {
  if (currentVersion < targetVersion || !currentVersion) {
    this.trigger('warn', {
      message: `manifest must be at least version ${targetVersion}`
    });
  }
}
warnOnMissingAttributes_(identifier, attributes, required) {
const missing = [];
required.forEach(function (key) {
@ -2282,12 +2307,27 @@
return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
var browserSupportsCodec = function browserSupportsCodec(codecString) {
/**
* Tests whether the codec is supported by MediaSource. Optionally also tests ManagedMediaSource.
*
* @param {string} codecString
* Codec to test
* @param {boolean} [withMMS]
* Whether to check if ManagedMediaSource supports it
* @return {boolean}
* Codec is supported
*/
var browserSupportsCodec = function browserSupportsCodec(codecString, withMMS) {
if (codecString === void 0) {
codecString = '';
}
return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
if (withMMS === void 0) {
withMMS = false;
}
return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || withMMS && window.ManagedMediaSource && window.ManagedMediaSource.isTypeSupported && window.ManagedMediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
};
var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
if (codecString === void 0) {
@ -9813,7 +9853,7 @@
const removeOldMediaGroupLabels = (update, newMain) => {
forEachMediaGroup$1(update, (properties, type, group, label) => {
if (!(label in newMain.mediaGroups[type][group])) {
if (!newMain.mediaGroups[type][group] || !(label in newMain.mediaGroups[type][group])) {
delete update.mediaGroups[type][group][label];
}
});
@ -23356,8 +23396,7 @@
});
if (waitingForTimelineChange && shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
// Audio being behind should only happen on DASH sources.
if (segmentLoader.sourceType_ === 'dash' && isAudioTimelineBehind(segmentLoader)) {
if (isAudioTimelineBehind(segmentLoader)) {
segmentLoader.timelineChangeController_.trigger('audioTimelineBehind');
return;
}
@ -27748,7 +27787,7 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach(cue => {
const duration = cue.endTime - cue.startTime;
const startTime = MPEGTS === 0 ? cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);
const startTime = this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);
});
@ -29037,7 +29076,7 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
/**
 * Strip PKCS#7 padding from a decrypted buffer.
 *
 * @param {Uint8Array} padded decrypted bytes that still carry padding
 * @return {Uint8Array} a subarray view with the padding removed
 */
function unpad(padded) {
  // PKCS#7: the final byte holds the number of padding bytes appended.
  const padLength = padded[padded.byteLength - 1];
  return padded.subarray(0, padded.byteLength - padLength);
}
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
/*! @name aes-decrypter @version 4.0.2 @license Apache-2.0 */
/**
* @file aes.js
@ -31113,7 +31152,8 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
cacheEncryptionKeys,
bufferBasedABR,
leastPixelDiffSelector,
captionServices
captionServices,
experimentalUseMMS
} = options;
if (!src) {
@ -31153,7 +31193,16 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
};
this.on('error', this.pauseLoading);
this.mediaTypes_ = createMediaTypes();
this.mediaSource = new window.MediaSource();
if (experimentalUseMMS && window.ManagedMediaSource) {
// Airplay source not yet implemented. Remote playback must be disabled.
this.tech_.el_.disableRemotePlayback = true;
this.mediaSource = new window.ManagedMediaSource();
videojs__default["default"].log('Using ManagedMediaSource');
} else if (window.MediaSource) {
this.mediaSource = new window.MediaSource();
}
this.handleDurationChange_ = this.handleDurationChange_.bind(this);
this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);
@ -31859,28 +31908,25 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
this.mainSegmentLoader_.on('ended', () => {
this.logger_('main segment loader ended');
this.onEndOfStream();
}); // In DASH, there is the possibility of the video segment and the audio segment
}); // There is the possibility of the video segment and the audio segment
// at a current time to be on different timelines. When this occurs, the player
// forwards playback to a point where these two segment types are back on the same
// timeline. This time will be just after the end of the audio segment that is on
// a previous timeline.
if (this.sourceType_ === 'dash') {
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
} // Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
}
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
this.mainSegmentLoader_.on('earlyabort', event => {
// never try to early abort with the new ABR algorithm
if (this.bufferBasedABR) {
@ -34319,15 +34365,15 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
initPlugin(this, options);
};
var version$4 = "3.13.3";
var version$4 = "3.14.2";
var version$3 = "7.0.3";
var version$2 = "1.3.0";
var version$1 = "7.1.0";
var version$1 = "7.2.0";
var version = "4.0.1";
var version = "4.0.2";
const Vhs = {
PlaylistLoader,
@ -35587,7 +35633,12 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
VERSION: version$4,
canHandleSource(srcObj, options = {}) {
const localOptions = merge$1(videojs__default["default"].options, options);
const localOptions = merge$1(videojs__default["default"].options, options); // If not opting to experimentalUseMMS, and playback is only supported with MediaSource, cannot handle source
if (!localOptions.vhs.experimentalUseMMS && !browserSupportsCodec('avc1.4d400d,mp4a.40.2', false)) {
return false;
}
return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
},
@ -35626,14 +35677,15 @@ ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated wi
};
/**
* Check to see if the native MediaSource object exists and supports
* an MP4 container with both H.264 video and AAC-LC audio.
* Check to see if either the native MediaSource or ManagedMediaSource
* objects exist and support an MP4 container with both H.264 video
* and AAC-LC audio.
*
* @return {boolean} if native media sources are supported
*/
const supportsNativeMediaSources = () => {
return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
return browserSupportsCodec('avc1.4d400d,mp4a.40.2', true);
}; // register source handlers with the appropriate techs

File diff suppressed because one or more lines are too long

View file

@ -177,6 +177,11 @@
<label class="form-check-label" for="override-native">Override Native (reloads player)</label>
</div>
<div class="form-check">
<input id=use-mms type="checkbox" class="form-check-input" checked>
<label class="form-check-label" for="use-mms">[EXPERIMENTAL] Use ManagedMediaSource if available. Use in combination with override native (reloads player)</label>
</div>
<div class="form-check">
<input id=mirror-source type="checkbox" class="form-check-input" checked>
<label class="form-check-label" for="mirror-source">Mirror sources from player.src (reloads player, uses EXPERIMENTAL sourceset option)</label>
@ -274,6 +279,7 @@
</div>
</div>
<footer class="text-center p-3" id=unit-test-link>
<a href="test/debug.html">Run unit tests</a>
</footer>

View file

@ -1,102 +0,0 @@
<a name="4.0.1"></a>
## [4.0.1](https://github.com/videojs/aes-decrypter/compare/v4.0.0...v4.0.1) (2022-08-18)
### Chores
* do not run es-check on publish ([#87](https://github.com/videojs/aes-decrypter/issues/87)) ([6f0cbd9](https://github.com/videojs/aes-decrypter/commit/6f0cbd9))
<a name="4.0.0"></a>
# [4.0.0](https://github.com/videojs/aes-decrypter/compare/v3.1.3...v4.0.0) (2022-08-18)
### Chores
* **package:** remove IE11 support ([#86](https://github.com/videojs/aes-decrypter/issues/86)) ([3338e9b](https://github.com/videojs/aes-decrypter/commit/3338e9b))
### BREAKING CHANGES
* **package:** Internet Explorer is no longer supported.
<a name="3.1.3"></a>
## [3.1.3](https://github.com/videojs/aes-decrypter/compare/v3.1.2...v3.1.3) (2022-04-05)
### Bug Fixes
* update vhs-utils to 3.0.5 for tizen 2.4 support ([#85](https://github.com/videojs/aes-decrypter/issues/85)) ([1ab933b](https://github.com/videojs/aes-decrypter/commit/1ab933b))
<a name="3.1.2"></a>
## [3.1.2](https://github.com/videojs/aes-decrypter/compare/v3.1.1...v3.1.2) (2021-01-12)
### Bug Fixes
* cjs dist should only import cjs ([#83](https://github.com/videojs/aes-decrypter/issues/83)) ([a8a5fbf](https://github.com/videojs/aes-decrypter/commit/a8a5fbf))
<a name="3.1.1"></a>
## [3.1.1](https://github.com/videojs/aes-decrypter/compare/v3.1.0...v3.1.1) (2021-01-11)
### Chores
* update to use vhs-utils 3 ([#81](https://github.com/videojs/aes-decrypter/issues/81)) ([8ead5d9](https://github.com/videojs/aes-decrypter/commit/8ead5d9))
<a name="3.1.0"></a>
# [3.1.0](https://github.com/videojs/aes-decrypter/compare/v3.0.2...v3.1.0) (2020-11-03)
### Chores
* **package:** update to vhs-utils[@2](https://github.com/2) ([#80](https://github.com/videojs/aes-decrypter/issues/80)) ([63b9cb9](https://github.com/videojs/aes-decrypter/commit/63b9cb9))
<a name="3.0.2"></a>
## [3.0.2](https://github.com/videojs/aes-decrypter/compare/v3.0.1...v3.0.2) (2020-09-09)
### Chores
* **package:** update pkcs7 to remove engine check ([062c952](https://github.com/videojs/aes-decrypter/commit/062c952))
<a name="3.0.1"></a>
## [3.0.1](https://github.com/videojs/aes-decrypter/compare/v3.0.0...v3.0.1) (2019-08-21)
### Chores
* **package:** update rollup to version 0.66.0 ([#38](https://github.com/videojs/aes-decrypter/issues/38)) ([634556b](https://github.com/videojs/aes-decrypter/commit/634556b))
* bump videojs-generate-karma-config version ([#51](https://github.com/videojs/aes-decrypter/issues/51)) ([195b923](https://github.com/videojs/aes-decrypter/commit/195b923))
* **package:** update videojs-generate-karma-config to version 5.0.2 ([#57](https://github.com/videojs/aes-decrypter/issues/57)) ([be8bd81](https://github.com/videojs/aes-decrypter/commit/be8bd81))
* update generator version and use [@videojs](https://github.com/videojs)/vhs-utils ([#68](https://github.com/videojs/aes-decrypter/issues/68)) ([9a6ab2f](https://github.com/videojs/aes-decrypter/commit/9a6ab2f))
* Update to generator v7 standards ([#37](https://github.com/videojs/aes-decrypter/issues/37)) ([fcf96c4](https://github.com/videojs/aes-decrypter/commit/fcf96c4))
* Update videojs-generate-karma-config to the latest version 🚀 ([#42](https://github.com/videojs/aes-decrypter/issues/42)) ([2b16de3](https://github.com/videojs/aes-decrypter/commit/2b16de3))
* Update videojs-generate-karma-config to the latest version 🚀 ([#43](https://github.com/videojs/aes-decrypter/issues/43)) ([cb63ccd](https://github.com/videojs/aes-decrypter/commit/cb63ccd))
<a name="3.0.0"></a>
# [3.0.0](https://github.com/videojs/aes-decrypter/compare/v2.0.0...v3.0.0) (2017-07-24)
### Features
* Use Rollup for packaging ([bda57ab](https://github.com/videojs/aes-decrypter/commit/bda57ab))
### Chores
* prepare CHANGELOG for new process ([1a5175c](https://github.com/videojs/aes-decrypter/commit/1a5175c))
### BREAKING CHANGES
* revert to 1.x and stop using web crypto.
## 2.0.0 (2016-11-15)
* Use webcrypto for aes-cbc segment decryption when supported (#4)
* Lock the linter to a specific version
## 1.1.1 (2016-11-17)
* version to revert 1.1.0
## 1.0.3 (2016-06-16)
* dont do browserify-shim globally since we only use it in tests (#1)
## 1.0.2 (2016-06-16)
* specify browserify transform globally
## 1.0.1 (2016-06-16)
* fixing the build pipeline
## 1.0.0 (2016-06-16)
* initial

View file

@ -1,30 +0,0 @@
# CONTRIBUTING
We welcome contributions from everyone!
## Getting Started
Make sure you have Node.js 4.8 or higher and npm installed.
1. Fork this repository and clone your fork
1. Install dependencies: `npm install`
1. Run a development server: `npm start`
### Making Changes
Refer to the [video.js plugin conventions][conventions] for more detail on best practices and tooling for video.js plugin authorship.
When you've made your changes, push your commit(s) to your fork and issue a pull request against the original repository.
### Running Tests
Testing is a crucial part of any software project. For all but the most trivial changes (typos, etc) test cases are expected. Tests are run in actual browsers using [Karma][karma].
- In all available and supported browsers: `npm test`
- In a specific browser: `npm run test:chrome`, `npm run test:firefox`, etc.
- While development server is running (`npm start`), navigate to [`http://localhost:9999/test/`][local]
[karma]: http://karma-runner.github.io/
[local]: http://localhost:9999/test/
[conventions]: https://github.com/videojs/generator-videojs-plugin/blob/master/docs/conventions.md

View file

@ -1,13 +0,0 @@
Copyright Brightcove, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,72 +0,0 @@
# aes-decrypter
[![Build Status](https://travis-ci.org/videojs/aes-decrypter.svg?branch=master)](https://travis-ci.org/videojs/aes-decrypter)
[![Greenkeeper badge](https://badges.greenkeeper.io/videojs/aes-decrypter.svg)](https://greenkeeper.io/)
[![Slack Status](http://slack.videojs.com/badge.svg)](http://slack.videojs.com)
[![NPM](https://nodei.co/npm/aes-decrypter.png?downloads=true&downloadRank=true)](https://nodei.co/npm/aes-decrypter/)
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Installation](#installation)
- [Usage](#usage)
- [License](#license)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Installation
```sh
npm install --save aes-decrypter
```
Also available to install globally:
```sh
npm install --global aes-decrypter
```
The npm installation is preferred, but Bower works, too.
```sh
bower install --save aes-decrypter
```
## Usage
To include decrypter on your website or npm application, use any of the following methods.
```js
var Decrypter = require('aes-decrypter').Decrypter;
var fs = require('fs');
var keyContent = fs.readFileSync('something.key');
var encryptedBytes = fs.readFileSync('something.txt');
// keyContent is a string of the aes-keys content
var keyContent = fs.readFileSync(keyFile);
var view = new DataView(keyContent.buffer);
var key = {};
key.bytes = new Uint32Array([
view.getUint32(0),
view.getUint32(4),
view.getUint32(8),
view.getUint32(12)
]);
key.iv = new Uint32Array([
0, 0, 0, 0
]);
var d = new Decrypter(
encryptedBytes,
key.bytes,
key.iv,
function(err, decryptedBytes) {
// err always null
});
```
## [License](LICENSE)
Apache-2.0. Copyright (c) Brightcove, Inc.

View file

@ -1,431 +0,0 @@
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var Stream = require('@videojs/vhs-utils/cjs/stream.js');
var pkcs7 = require('pkcs7');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var Stream__default = /*#__PURE__*/_interopDefaultLegacy(Stream);
/**
* @file aes.js
*
* This file contains an adaptation of the AES decryption algorithm
* from the Stanford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
/**
 * Build the AES lookup tables: the S-box / inverse S-box plus the four
 * round tables (S-box composed with MixColumns) for both encryption and
 * decryption, as in the Stanford Javascript Cryptography Library.
 *
 * @return {Array} [encTables, decTables]; index 4 of each is the (inverse) S-box
 * @private
 */
const precompute = function () {
  // tables[0] -> encryption, tables[1] -> decryption; each holds four
  // mixed-column round tables plus the (inverse) S-box at index 4.
  const tables = [[[], [], [], [], []], [[], [], [], [], []]];
  const encTable = tables[0];
  const decTable = tables[1];
  const sbox = encTable[4];
  const sboxInv = decTable[4];
  const dbl = [];
  const third = [];

  // Doubling table over GF(2^8) (xtime) and its "third" companion table.
  for (let n = 0; n < 256; n++) {
    const doubled = n << 1 ^ (n >> 7) * 283;
    dbl[n] = doubled;
    third[doubled ^ n] = n;
  }

  // Walk every field element once (sbox[0] is set on the first pass, so the
  // loop terminates when x cycles back to 0).
  let x = 0;
  let xInv = 0;
  while (!sbox[x]) {
    // Compute the S-box entry for x from its inverse.
    let s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
    s = s >> 8 ^ s & 255 ^ 99;
    sbox[x] = s;
    sboxInv[s] = x;

    // MixColumns round-table entries, built from x, 2x, 4x and 8x.
    const x2 = dbl[x];
    const x4 = dbl[x2];
    const x8 = dbl[x4];
    let tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
    let tEnc = dbl[s] * 0x101 ^ s * 0x1010100;
    for (let i = 0; i < 4; i++) {
      tEnc = tEnc << 24 ^ tEnc >>> 8;
      tDec = tDec << 24 ^ tDec >>> 8;
      encTable[i][x] = tEnc;
      decTable[i][s] = tDec;
    }

    // Advance to the next field element (|| 1 keeps us off zero).
    x ^= x2 || 1;
    xInv = third[xInv] || 1;
  }

  // Compactify. Considerable speedup on Firefox.
  for (let i = 0; i < 5; i++) {
    encTable[i] = encTable[i].slice(0);
    decTable[i] = decTable[i].slice(0);
  }
  return tables;
};
let aesTables = null;
/**
 * Schedule out an AES key for both encryption and decryption. This
 * is a low-level class. Use a cipher mode to do bulk encryption.
 *
 * @class AES
 * @param key {Array} The key as an array of 4, 6 or 8 words.
 */
class AES {
  constructor(key) {
    /**
     * The expanded S-box and inverse S-box tables. These will be computed
     * on the client so that we don't have to send them down the wire.
     *
     * There are two tables, _tables[0] is for encryption and
     * _tables[1] is for decryption.
     *
     * The first 4 sub-tables are the expanded S-box with MixColumns. The
     * last (_tables[01][4]) is the S-box itself.
     *
     * @private
     */
    // if we have yet to precompute the S-box tables
    // do so now
    if (!aesTables) {
      aesTables = precompute();
    } // then make a copy of that object for use
    this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
    let i;
    let j;
    let tmp;
    const sbox = this._tables[0][4];
    const decTable = this._tables[1];
    const keyLen = key.length;
    let rcon = 1; // round-constant accumulator for the key schedule
    // AES only defines 128/192/256-bit keys (4, 6 or 8 32-bit words).
    if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
      throw new Error('Invalid aes key size');
    }
    const encKey = key.slice(0);
    const decKey = [];
    this._key = [encKey, decKey]; // schedule encryption keys
    for (i = keyLen; i < 4 * keyLen + 28; i++) {
      tmp = encKey[i - 1]; // apply sbox
      if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
        tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
        if (i % keyLen === 0) {
          tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
          rcon = rcon << 1 ^ (rcon >> 7) * 283;
        }
      }
      encKey[i] = encKey[i - keyLen] ^ tmp;
    } // schedule decryption keys
    // Walks the encryption schedule backwards, transforming the middle
    // round keys through the decryption tables (equivalent inverse cipher).
    for (j = 0; i; j++, i--) {
      tmp = encKey[j & 3 ? i : i - 4];
      if (i <= 4 || j < 4) {
        decKey[j] = tmp;
      } else {
        decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
      }
    }
  }
  /**
   * Decrypt 16 bytes, specified as four 32-bit words.
   *
   * @param {number} encrypted0 the first word to decrypt
   * @param {number} encrypted1 the second word to decrypt
   * @param {number} encrypted2 the third word to decrypt
   * @param {number} encrypted3 the fourth word to decrypt
   * @param {Int32Array} out the array to write the decrypted words
   * into
   * @param {number} offset the offset into the output array to start
   * writing results
   * @return {Array} The plaintext.
   */
  decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
    const key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
    let a = encrypted0 ^ key[0];
    let b = encrypted3 ^ key[1];
    let c = encrypted2 ^ key[2];
    let d = encrypted1 ^ key[3];
    let a2;
    let b2;
    let c2; // key.length === 2 ?
    const nInnerRounds = key.length / 4 - 2;
    let i;
    let kIndex = 4;
    const table = this._tables[1]; // load up the tables
    const table0 = table[0];
    const table1 = table[1];
    const table2 = table[2];
    const table3 = table[3];
    const sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
    for (i = 0; i < nInnerRounds; i++) {
      a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
      b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
      c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
      d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
      kIndex += 4;
      a = a2;
      b = b2;
      c = c2;
    } // Last round.
    // NOTE(review): the final round uses the plain S-box (no MixColumns);
    // the (3 & -i) index ordering is inherited from SJCL — confirm against
    // the upstream library before changing.
    for (i = 0; i < 4; i++) {
      out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
      a2 = a;
      a = b;
      b = c;
      c = d;
      d = a2;
    }
  }
}
/**
* @file async-stream.js
*/
/**
 * A small job queue built on top of Stream: pushed "jobs" (thunks) are run
 * asynchronously, one per setTimeout tick, so long-running work does not
 * block the event loop.
 *
 * @class AsyncStream
 * @extends Stream
 */
class AsyncStream extends Stream__default["default"] {
  constructor() {
    super(Stream__default["default"]);
    this.jobs = [];
    this.delay = 1;
    this.timeout_ = null;
  }
  /**
   * Run the job at the head of the queue, then schedule the next tick if
   * any jobs remain; otherwise mark the stream idle.
   *
   * @private
   */
  processJob_() {
    const job = this.jobs.shift();
    job();
    this.timeout_ = this.jobs.length ? setTimeout(this.processJob_.bind(this), this.delay) : null;
  }
  /**
   * Queue a job, kicking off processing when the stream is idle.
   *
   * @param {Function} job the job to push into the stream
   */
  push(job) {
    this.jobs.push(job);
    if (this.timeout_) {
      return;
    }
    this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
  }
}
/**
* @file decrypter.js
*
* An asynchronous implementation of AES-128 CBC decryption with
* PKCS#7 padding.
*/
/**
 * Byte-swap a 32-bit word, converting between network (big-endian) order
 * and the little-endian order used by JavaScript bitwise arithmetic.
 *
 * @param {number} word 32-bit integer to byte-swap
 * @return {number} the byte-swapped word (as a signed 32-bit integer)
 */
const ntoh = (word) => word >>> 24 | (word >> 8 & 0xff00) | (word & 0xff00) << 8 | word << 24;
/**
 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 *  use for the first round of CBC.
 * @return {Uint8Array} the decrypted bytes
 *
 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
 * @see https://tools.ietf.org/html/rfc2315
 */
const decrypt = function (encrypted, key, initVector) {
  // Whole-word (32-bit) view of the ciphertext.
  const encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
  const decipher = new AES(Array.prototype.slice.call(key));
  // Byte- and word-level views over the same output buffer.
  const decrypted = new Uint8Array(encrypted.byteLength);
  const decrypted32 = new Int32Array(decrypted.buffer);
  // Copy the IV words locally so the caller's array is never mutated.
  let iv0 = initVector[0];
  let iv1 = initVector[1];
  let iv2 = initVector[2];
  let iv3 = initVector[3];
  // Decrypt four-word blocks, applying cipher-block chaining (CBC).
  for (let wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
    // Convert big-endian (network order) words into little-endian
    // (javascript order).
    const e0 = ntoh(encrypted32[wordIx]);
    const e1 = ntoh(encrypted32[wordIx + 1]);
    const e2 = ntoh(encrypted32[wordIx + 2]);
    const e3 = ntoh(encrypted32[wordIx + 3]);
    // Decrypt the block directly into the output words.
    decipher.decrypt(e0, e1, e2, e3, decrypted32, wordIx);
    // CBC: XOR with the previous ciphertext block (or the IV for the first
    // block), restoring network byte order on the way out.
    decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ iv0);
    decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ iv1);
    decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ iv2);
    decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ iv3);
    // The current ciphertext block chains into the next one.
    iv0 = e0;
    iv1 = e1;
    iv2 = e2;
    iv3 = e3;
  }
  return decrypted;
};
/**
 * Manages AES decryption by slicing the ciphertext into fixed-size chunks
 * and decrypting each one on its own `AsyncStream` job via the `decrypt`
 * function, so large segments do not block the event loop.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * @param {Function} done the function to run when done
 * @class Decrypter
 */
class Decrypter {
  constructor(encrypted, key, initVector, done) {
    const step = Decrypter.STEP;
    const encrypted32 = new Int32Array(encrypted.buffer);
    const decrypted = new Uint8Array(encrypted.byteLength);
    this.asyncStream_ = new AsyncStream();
    // The first chunk is chained from the caller-supplied IV.
    this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(0, step), key, initVector, decrypted));
    // Every later chunk chains (CBC) from the last four ciphertext words of
    // the chunk before it.
    for (let i = step; i < encrypted32.length; i += step) {
      const chunkIv = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, chunkIv, decrypted));
    }
    // Once all chunks are queued, queue the completion callback: strip the
    // PKCS#7 padding and hand the plaintext to the caller.
    this.asyncStream_.push(function () {
      // remove pkcs#7 padding from the decrypted bytes
      done(null, pkcs7.unpad(decrypted));
    });
  }
  /**
   * The chunk size processed per job: 32000 32-bit words (8000 AES blocks).
   *
   * @return {number} the value of step 32000
   */
  static get STEP() {
    // 4 * 8000;
    return 32000;
  }
  /**
   * Build a job that decrypts one chunk into the shared output buffer at
   * the chunk's own byte offset.
   *
   * @private
   */
  decryptChunk_(encrypted, key, initVector, decrypted) {
    return function () {
      const bytes = decrypt(encrypted, key, initVector);
      decrypted.set(bytes, encrypted.byteOffset);
    };
  }
}
exports.AsyncStream = AsyncStream;
exports.Decrypter = Decrypter;
exports.decrypt = decrypt;

View file

@ -1,421 +0,0 @@
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
import Stream from '@videojs/vhs-utils/es/stream.js';
import { unpad } from 'pkcs7';
/**
* @file aes.js
*
* This file contains an adaptation of the AES decryption algorithm
 * from the Stanford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
/**
 * Expand the S-box tables.
 *
 * Builds the lookup tables used by AES: for encryption and decryption
 * each, four 256-entry MixColumns tables plus the (inverse) S-box.
 *
 * @return {Array} [encTables, decTables], each [t0, t1, t2, t3, sbox]
 * @private
 */
const precompute = function () {
  const tables = [[[], [], [], [], []], [[], [], [], [], []]];
  const encTable = tables[0];
  const decTable = tables[1];
  const sbox = encTable[4];
  const sboxInv = decTable[4];
  const dbl = [];
  const third = [];

  // Compute doubling and "divide by three" tables over GF(2^8)
  for (let n = 0; n < 256; n++) {
    dbl[n] = n << 1 ^ (n >> 7) * 283;
    third[dbl[n] ^ n] = n;
  }

  for (let x = 0, xInv = 0; !sbox[x]; xInv = third[xInv] || 1) {
    // Compute the S-box entry for x
    let s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
    s = s >> 8 ^ s & 255 ^ 99;
    sbox[x] = s;
    sboxInv[s] = x;

    // Compute the MixColumns multiples of x
    const x2 = dbl[x];
    const x4 = dbl[x2];
    const x8 = dbl[x4];
    let tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
    let tEnc = dbl[s] * 0x101 ^ s * 0x1010100;
    for (let t = 0; t < 4; t++) {
      encTable[t][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
      decTable[t][s] = tDec = tDec << 24 ^ tDec >>> 8;
    }
    x ^= x2 || 1;
  }

  // Compactify. Considerable speedup on Firefox.
  for (let t = 0; t < 5; t++) {
    encTable[t] = encTable[t].slice(0);
    decTable[t] = decTable[t].slice(0);
  }
  return tables;
};
let aesTables = null; // lazily-built cache of the precomputed S-box/MixColumns tables, shared by all AES instances
/**
 * Schedule out an AES key for both encryption and decryption. This
 * is a low-level class. Use a cipher mode to do bulk encryption.
 *
 * @class AES
 * @param key {Array} The key as an array of 4, 6 or 8 words.
 */
class AES {
  constructor(key) {
    /**
     * The expanded S-box and inverse S-box tables. These will be computed
     * on the client so that we don't have to send them down the wire.
     *
     * There are two tables, _tables[0] is for encryption and
     * _tables[1] is for decryption.
     *
     * The first 4 sub-tables are the expanded S-box with MixColumns. The
     * last (_tables[01][4]) is the S-box itself.
     *
     * @private
     */
    // if we have yet to precompute the S-box tables
    // do so now
    if (!aesTables) {
      aesTables = precompute();
    } // then make a copy of that object for use
    this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
    let i;
    let j;
    let tmp;
    const sbox = this._tables[0][4];
    const decTable = this._tables[1];
    const keyLen = key.length;
    let rcon = 1; // round constant for the key schedule
    // only 128-, 192-, and 256-bit keys (4/6/8 32-bit words) are valid
    if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
      throw new Error('Invalid aes key size');
    }
    const encKey = key.slice(0);
    const decKey = [];
    this._key = [encKey, decKey]; // schedule encryption keys
    for (i = keyLen; i < 4 * keyLen + 28; i++) {
      tmp = encKey[i - 1]; // apply sbox
      if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
        tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
        if (i % keyLen === 0) {
          tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
          rcon = rcon << 1 ^ (rcon >> 7) * 283; // advance rcon (doubling in GF(2^8))
        }
      }
      encKey[i] = encKey[i - keyLen] ^ tmp;
    } // schedule decryption keys
    // walk the encryption schedule backwards, running each round-key word
    // through the S-box and the decryption tables
    for (j = 0; i; j++, i--) {
      tmp = encKey[j & 3 ? i : i - 4];
      if (i <= 4 || j < 4) {
        decKey[j] = tmp;
      } else {
        decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
      }
    }
  }
  /**
   * Decrypt 16 bytes, specified as four 32-bit words.
   *
   * @param {number} encrypted0 the first word to decrypt
   * @param {number} encrypted1 the second word to decrypt
   * @param {number} encrypted2 the third word to decrypt
   * @param {number} encrypted3 the fourth word to decrypt
   * @param {Int32Array} out the array to write the decrypted words
   * into
   * @param {number} offset the offset into the output array to start
   * writing results
   * @return {Array} The plaintext.
   */
  decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
    const key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
    let a = encrypted0 ^ key[0];
    let b = encrypted3 ^ key[1];
    let c = encrypted2 ^ key[2];
    let d = encrypted1 ^ key[3];
    let a2;
    let b2;
    let c2; // key.length === 2 ?
    const nInnerRounds = key.length / 4 - 2;
    let i;
    let kIndex = 4;
    const table = this._tables[1]; // load up the tables
    const table0 = table[0];
    const table1 = table[1];
    const table2 = table[2];
    const table3 = table[3];
    const sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
    for (i = 0; i < nInnerRounds; i++) {
      a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
      b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
      c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
      d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
      kIndex += 4;
      a = a2;
      b = b2;
      c = c2;
    } // Last round.
    // (3 & -i) writes the output words in the order 0, 3, 2, 1
    for (i = 0; i < 4; i++) {
      out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
      a2 = a;
      a = b;
      b = c;
      c = d;
      d = a2;
    }
  }
}
/**
* @file async-stream.js
*/
/**
 * A wrapper around the Stream class to use setTimeout
 * and run stream "jobs" Asynchronously
 *
 * @class AsyncStream
 * @extends Stream
 */
class AsyncStream extends Stream {
  constructor() {
    super(Stream);
    this.jobs = [];
    this.delay = 1;
    this.timeout_ = null;
  }

  /**
   * Run the next queued job, then schedule another run if more remain.
   *
   * @private
   */
  processJob_() {
    const job = this.jobs.shift();
    job();
    this.timeout_ = this.jobs.length ? setTimeout(this.processJob_.bind(this), this.delay) : null;
  }

  /**
   * Queue a job; kick off asynchronous processing if the queue was idle.
   *
   * @param {Function} job the job to push into the stream
   */
  push(job) {
    this.jobs.push(job);
    if (this.timeout_ === null) {
      this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
    }
  }
}
/**
* @file decrypter.js
*
* An asynchronous implementation of AES-128 CBC decryption with
* PKCS#7 padding.
*/
/**
 * Convert network-order (big-endian) bytes into their little-endian
 * representation.
 *
 * @param {number} word a 32-bit integer in network byte order
 * @return {number} the byte-swapped 32-bit integer
 */
const ntoh = function (word) {
  const b0 = word >>> 24;
  const b1 = word >> 16 & 0xff;
  const b2 = word >> 8 & 0xff;
  const b3 = word & 0xff;
  return b3 << 24 | b2 << 16 | b1 << 8 | b0;
};
/**
 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
 *
 * NOTE: the PKCS#7 padding is NOT stripped here; callers (see Decrypter)
 * apply unpad() to the returned bytes.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first round of CBC.
 * @return {Uint8Array} the decrypted bytes
 *
 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
 * @see https://tools.ietf.org/html/rfc2315
 */
const decrypt = function (encrypted, key, initVector) {
  // word-level access to the encrypted bytes (honors the view's byteOffset)
  // assumes encrypted.byteLength is a multiple of 16 — TODO confirm at call sites
  const encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
  const decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
  const decrypted = new Uint8Array(encrypted.byteLength);
  const decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
  // decrypted data
  let init0;
  let init1;
  let init2;
  let init3;
  let encrypted0;
  let encrypted1;
  let encrypted2;
  let encrypted3; // iteration variable
  let wordIx; // pull out the words of the IV to ensure we don't modify the
  // passed-in reference and easier access
  init0 = initVector[0];
  init1 = initVector[1];
  init2 = initVector[2];
  init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
  // to each decrypted block
  for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
    // convert big-endian (network order) words into little-endian
    // (javascript order)
    encrypted0 = ntoh(encrypted32[wordIx]);
    encrypted1 = ntoh(encrypted32[wordIx + 1]);
    encrypted2 = ntoh(encrypted32[wordIx + 2]);
    encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
    decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
    // plaintext
    decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
    decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
    decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
    decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
    init0 = encrypted0;
    init1 = encrypted1;
    init2 = encrypted2;
    init3 = encrypted3;
  }
  return decrypted;
};
/**
 * The `Decrypter` class that manages decryption of AES
 * data through `AsyncStream` objects and the `decrypt`
 * function
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first block
 * @param {Function} done callback invoked as done(null, decryptedBytes)
 * once every chunk has been decrypted and unpadded
 * @class Decrypter
 */
class Decrypter {
  constructor(encrypted, key, initVector, done) {
    const step = Decrypter.STEP;
    // `encrypted` may be a view into a larger ArrayBuffer (non-zero
    // byteOffset / shorter byteLength). `new Int32Array(buffer)` would
    // ignore the view and read the whole buffer from offset 0, so copy
    // the viewed region out first when the view is not the full buffer.
    const buffer = encrypted.byteOffset === 0 && encrypted.byteLength === encrypted.buffer.byteLength ? encrypted.buffer : encrypted.buffer.slice(encrypted.byteOffset, encrypted.byteOffset + encrypted.byteLength);
    const encrypted32 = new Int32Array(buffer);
    const decrypted = new Uint8Array(encrypted.byteLength);
    let i = 0;
    this.asyncStream_ = new AsyncStream(); // split up the decryption job and do the individual chunks asynchronously
    this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    for (i = step; i < encrypted32.length; i += step) {
      // CBC: the IV for each chunk is the final ciphertext block of the previous chunk
      initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    } // invoke the done() callback when everything is finished
    this.asyncStream_.push(function () {
      // remove pkcs#7 padding from the decrypted bytes
      done(null, unpad(decrypted));
    });
  }
  /**
   * a getter for step the maximum number of bytes to process at one time
   *
   * @return {number} the value of step 32000
   */
  static get STEP() {
    // 4 * 8000;
    return 32000;
  }
  /**
   * Build a job that decrypts one chunk and writes the plaintext into
   * `decrypted` at the chunk's byte offset.
   *
   * @private
   */
  decryptChunk_(encrypted, key, initVector, decrypted) {
    return function () {
      const bytes = decrypt(encrypted, key, initVector);
      decrypted.set(bytes, encrypted.byteOffset);
    };
  }
}
export { AsyncStream, Decrypter, decrypt };

View file

@ -1,563 +0,0 @@
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.aesDecrypter = {}));
})(this, (function (exports) { 'use strict';
/**
* @file aes.js
*
* This file contains an adaptation of the AES decryption algorithm
 * from the Stanford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
/**
 * Expand the S-box tables.
 *
 * Builds the lookup tables used by AES: for encryption and decryption
 * each, four 256-entry MixColumns tables plus the (inverse) S-box.
 *
 * @return {Array} [encTables, decTables], each [t0, t1, t2, t3, sbox]
 * @private
 */
const precompute = function () {
  const tables = [[[], [], [], [], []], [[], [], [], [], []]];
  const encTable = tables[0];
  const decTable = tables[1];
  const sbox = encTable[4];
  const sboxInv = decTable[4];
  const dbl = [];
  const third = [];

  // Compute doubling and "divide by three" tables over GF(2^8)
  for (let n = 0; n < 256; n++) {
    dbl[n] = n << 1 ^ (n >> 7) * 283;
    third[dbl[n] ^ n] = n;
  }

  for (let x = 0, xInv = 0; !sbox[x]; xInv = third[xInv] || 1) {
    // Compute the S-box entry for x
    let s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
    s = s >> 8 ^ s & 255 ^ 99;
    sbox[x] = s;
    sboxInv[s] = x;

    // Compute the MixColumns multiples of x
    const x2 = dbl[x];
    const x4 = dbl[x2];
    const x8 = dbl[x4];
    let tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
    let tEnc = dbl[s] * 0x101 ^ s * 0x1010100;
    for (let t = 0; t < 4; t++) {
      encTable[t][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
      decTable[t][s] = tDec = tDec << 24 ^ tDec >>> 8;
    }
    x ^= x2 || 1;
  }

  // Compactify. Considerable speedup on Firefox.
  for (let t = 0; t < 5; t++) {
    encTable[t] = encTable[t].slice(0);
    decTable[t] = decTable[t].slice(0);
  }
  return tables;
};
let aesTables = null; // lazily-built cache of the precomputed S-box/MixColumns tables, shared by all AES instances
/**
 * Schedule out an AES key for both encryption and decryption. This
 * is a low-level class. Use a cipher mode to do bulk encryption.
 *
 * @class AES
 * @param key {Array} The key as an array of 4, 6 or 8 words.
 */
class AES {
  constructor(key) {
    /**
     * The expanded S-box and inverse S-box tables. These will be computed
     * on the client so that we don't have to send them down the wire.
     *
     * There are two tables, _tables[0] is for encryption and
     * _tables[1] is for decryption.
     *
     * The first 4 sub-tables are the expanded S-box with MixColumns. The
     * last (_tables[01][4]) is the S-box itself.
     *
     * @private
     */
    // if we have yet to precompute the S-box tables
    // do so now
    if (!aesTables) {
      aesTables = precompute();
    } // then make a copy of that object for use
    this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
    let i;
    let j;
    let tmp;
    const sbox = this._tables[0][4];
    const decTable = this._tables[1];
    const keyLen = key.length;
    let rcon = 1; // round constant for the key schedule
    // only 128-, 192-, and 256-bit keys (4/6/8 32-bit words) are valid
    if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
      throw new Error('Invalid aes key size');
    }
    const encKey = key.slice(0);
    const decKey = [];
    this._key = [encKey, decKey]; // schedule encryption keys
    for (i = keyLen; i < 4 * keyLen + 28; i++) {
      tmp = encKey[i - 1]; // apply sbox
      if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
        tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
        if (i % keyLen === 0) {
          tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
          rcon = rcon << 1 ^ (rcon >> 7) * 283; // advance rcon (doubling in GF(2^8))
        }
      }
      encKey[i] = encKey[i - keyLen] ^ tmp;
    } // schedule decryption keys
    // walk the encryption schedule backwards, running each round-key word
    // through the S-box and the decryption tables
    for (j = 0; i; j++, i--) {
      tmp = encKey[j & 3 ? i : i - 4];
      if (i <= 4 || j < 4) {
        decKey[j] = tmp;
      } else {
        decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
      }
    }
  }
  /**
   * Decrypt 16 bytes, specified as four 32-bit words.
   *
   * @param {number} encrypted0 the first word to decrypt
   * @param {number} encrypted1 the second word to decrypt
   * @param {number} encrypted2 the third word to decrypt
   * @param {number} encrypted3 the fourth word to decrypt
   * @param {Int32Array} out the array to write the decrypted words
   * into
   * @param {number} offset the offset into the output array to start
   * writing results
   * @return {Array} The plaintext.
   */
  decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
    const key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
    let a = encrypted0 ^ key[0];
    let b = encrypted3 ^ key[1];
    let c = encrypted2 ^ key[2];
    let d = encrypted1 ^ key[3];
    let a2;
    let b2;
    let c2; // key.length === 2 ?
    const nInnerRounds = key.length / 4 - 2;
    let i;
    let kIndex = 4;
    const table = this._tables[1]; // load up the tables
    const table0 = table[0];
    const table1 = table[1];
    const table2 = table[2];
    const table3 = table[3];
    const sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
    for (i = 0; i < nInnerRounds; i++) {
      a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
      b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
      c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
      d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
      kIndex += 4;
      a = a2;
      b = b2;
      c = c2;
    } // Last round.
    // (3 & -i) writes the output words in the order 0, 3, 2, 1
    for (i = 0; i < 4; i++) {
      out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
      a2 = a;
      a = b;
      b = c;
      c = d;
      d = a2;
    }
  }
}
/**
* @file stream.js
*/
/**
 * A lightweight readable stream implementation that handles event dispatching.
 *
 * @class Stream
 */
var Stream = /*#__PURE__*/function () {
  function Stream() {
    this.listeners = {};
  }
  var proto = Stream.prototype;

  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  proto.on = function on(type, listener) {
    var handlers = this.listeners[type];
    if (!handlers) {
      handlers = this.listeners[type] = [];
    }
    handlers.push(listener);
  };

  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  proto.off = function off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }
    var index = this.listeners[type].indexOf(listener);
    // Copy-on-write so an in-flight trigger() keeps iterating its old
    // reference to the listener list without the order being disturbed.
    this.listeners[type] = this.listeners[type].slice(0);
    this.listeners[type].splice(index, 1);
    return index > -1;
  };

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];
    if (!callbacks) {
      return;
    }
    if (arguments.length === 2) {
      // Fast path: avoid building an intermediate args array for the
      // common single-argument case.
      for (var i = 0, n = callbacks.length; i < n; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      var args = Array.prototype.slice.call(arguments, 1);
      for (var j = 0, m = callbacks.length; j < m; ++j) {
        callbacks[j].apply(this, args);
      }
    }
  };

  /**
   * Destroys the stream and cleans up.
   */
  proto.dispose = function dispose() {
    this.listeners = {};
  };

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };
  return Stream;
}();
/**
* @file async-stream.js
*/
/**
 * A wrapper around the Stream class to use setTimeout
 * and run stream "jobs" Asynchronously
 *
 * @class AsyncStream
 * @extends Stream
 */
class AsyncStream extends Stream {
  constructor() {
    super(Stream);
    this.jobs = [];
    this.delay = 1;
    this.timeout_ = null;
  }

  /**
   * Run the next queued job, then schedule another run if more remain.
   *
   * @private
   */
  processJob_() {
    const job = this.jobs.shift();
    job();
    this.timeout_ = this.jobs.length ? setTimeout(this.processJob_.bind(this), this.delay) : null;
  }

  /**
   * Queue a job; kick off asynchronous processing if the queue was idle.
   *
   * @param {Function} job the job to push into the stream
   */
  push(job) {
    this.jobs.push(job);
    if (this.timeout_ === null) {
      this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
    }
  }
}
/*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
/**
 * Returns the subarray of a Uint8Array without PKCS#7 padding.
 *
 * @param padded {Uint8Array} unencrypted bytes that have been padded
 * @return {Uint8Array} the unpadded bytes
 * @see http://tools.ietf.org/html/rfc5652
 */
function unpad(padded) {
  // The final byte of a PKCS#7-padded buffer is the number of padding bytes.
  const padLength = padded[padded.byteLength - 1];
  return padded.subarray(0, padded.byteLength - padLength);
}
/**
* @file decrypter.js
*
* An asynchronous implementation of AES-128 CBC decryption with
* PKCS#7 padding.
*/
/**
 * Convert network-order (big-endian) bytes into their little-endian
 * representation.
 *
 * @param {number} word a 32-bit integer in network byte order
 * @return {number} the byte-swapped 32-bit integer
 */
const ntoh = function (word) {
  const b0 = word >>> 24;
  const b1 = word >> 16 & 0xff;
  const b2 = word >> 8 & 0xff;
  const b3 = word & 0xff;
  return b3 << 24 | b2 << 16 | b1 << 8 | b0;
};
/**
 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
 *
 * NOTE: the PKCS#7 padding is NOT stripped here; callers (see Decrypter)
 * apply unpad() to the returned bytes.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first round of CBC.
 * @return {Uint8Array} the decrypted bytes
 *
 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
 * @see https://tools.ietf.org/html/rfc2315
 */
const decrypt = function (encrypted, key, initVector) {
  // word-level access to the encrypted bytes (honors the view's byteOffset)
  // assumes encrypted.byteLength is a multiple of 16 — TODO confirm at call sites
  const encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
  const decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
  const decrypted = new Uint8Array(encrypted.byteLength);
  const decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
  // decrypted data
  let init0;
  let init1;
  let init2;
  let init3;
  let encrypted0;
  let encrypted1;
  let encrypted2;
  let encrypted3; // iteration variable
  let wordIx; // pull out the words of the IV to ensure we don't modify the
  // passed-in reference and easier access
  init0 = initVector[0];
  init1 = initVector[1];
  init2 = initVector[2];
  init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
  // to each decrypted block
  for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
    // convert big-endian (network order) words into little-endian
    // (javascript order)
    encrypted0 = ntoh(encrypted32[wordIx]);
    encrypted1 = ntoh(encrypted32[wordIx + 1]);
    encrypted2 = ntoh(encrypted32[wordIx + 2]);
    encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
    decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
    // plaintext
    decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
    decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
    decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
    decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
    init0 = encrypted0;
    init1 = encrypted1;
    init2 = encrypted2;
    init3 = encrypted3;
  }
  return decrypted;
};
/**
 * The `Decrypter` class that manages decryption of AES
 * data through `AsyncStream` objects and the `decrypt`
 * function
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first block
 * @param {Function} done callback invoked as done(null, decryptedBytes)
 * once every chunk has been decrypted and unpadded
 * @class Decrypter
 */
class Decrypter {
  constructor(encrypted, key, initVector, done) {
    const step = Decrypter.STEP;
    // `encrypted` may be a view into a larger ArrayBuffer (non-zero
    // byteOffset / shorter byteLength). `new Int32Array(buffer)` would
    // ignore the view and read the whole buffer from offset 0, so copy
    // the viewed region out first when the view is not the full buffer.
    const buffer = encrypted.byteOffset === 0 && encrypted.byteLength === encrypted.buffer.byteLength ? encrypted.buffer : encrypted.buffer.slice(encrypted.byteOffset, encrypted.byteOffset + encrypted.byteLength);
    const encrypted32 = new Int32Array(buffer);
    const decrypted = new Uint8Array(encrypted.byteLength);
    let i = 0;
    this.asyncStream_ = new AsyncStream(); // split up the decryption job and do the individual chunks asynchronously
    this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    for (i = step; i < encrypted32.length; i += step) {
      // CBC: the IV for each chunk is the final ciphertext block of the previous chunk
      initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    } // invoke the done() callback when everything is finished
    this.asyncStream_.push(function () {
      // remove pkcs#7 padding from the decrypted bytes
      done(null, unpad(decrypted));
    });
  }
  /**
   * a getter for step the maximum number of bytes to process at one time
   *
   * @return {number} the value of step 32000
   */
  static get STEP() {
    // 4 * 8000;
    return 32000;
  }
  /**
   * Build a job that decrypts one chunk and writes the plaintext into
   * `decrypted` at the chunk's byte offset.
   *
   * @private
   */
  decryptChunk_(encrypted, key, initVector, decrypted) {
    return function () {
      const bytes = decrypt(encrypted, key, initVector);
      decrypted.set(bytes, encrypted.byteOffset);
    };
  }
}
exports.AsyncStream = AsyncStream;
exports.Decrypter = Decrypter;
exports.decrypt = decrypt;
Object.defineProperty(exports, '__esModule', { value: true });
}));

View file

@ -1,3 +0,0 @@
/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 */
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).aesDecrypter={})}(this,(function(t){"use strict";let e=null;class s{constructor(t){let s,i,n;e||(e=function(){const t=[[[],[],[],[],[]],[[],[],[],[],[]]],e=t[0],s=t[1],i=e[4],n=s[4];let r,o,l;const c=[],h=[];let u,f,a,y,p,b;for(r=0;r<256;r++)h[(c[r]=r<<1^283*(r>>7))^r]=r;for(o=l=0;!i[o];o^=u||1,l=h[l]||1)for(y=l^l<<1^l<<2^l<<3^l<<4,y=y>>8^255&y^99,i[o]=y,n[y]=o,a=c[f=c[u=c[o]]],b=16843009*a^65537*f^257*u^16843008*o,p=257*c[y]^16843008*y,r=0;r<4;r++)e[r][o]=p=p<<24^p>>>8,s[r][y]=b=b<<24^b>>>8;for(r=0;r<5;r++)e[r]=e[r].slice(0),s[r]=s[r].slice(0);return t}()),this._tables=[[e[0][0].slice(),e[0][1].slice(),e[0][2].slice(),e[0][3].slice(),e[0][4].slice()],[e[1][0].slice(),e[1][1].slice(),e[1][2].slice(),e[1][3].slice(),e[1][4].slice()]];const r=this._tables[0][4],o=this._tables[1],l=t.length;let c=1;if(4!==l&&6!==l&&8!==l)throw new Error("Invalid aes key size");const h=t.slice(0),u=[];for(this._key=[h,u],s=l;s<4*l+28;s++)n=h[s-1],(s%l==0||8===l&&s%l==4)&&(n=r[n>>>24]<<24^r[n>>16&255]<<16^r[n>>8&255]<<8^r[255&n],s%l==0&&(n=n<<8^n>>>24^c<<24,c=c<<1^283*(c>>7))),h[s]=h[s-l]^n;for(i=0;s;i++,s--)n=h[3&i?s:s-4],u[i]=s<=4||i<4?n:o[0][r[n>>>24]]^o[1][r[n>>16&255]]^o[2][r[n>>8&255]]^o[3][r[255&n]]}decrypt(t,e,s,i,n,r){const o=this._key[1];let l,c,h,u=t^o[0],f=i^o[1],a=s^o[2],y=e^o[3];const p=o.length/4-2;let b,d=4;const _=this._tables[1],g=_[0],m=_[1],w=_[2],v=_[3],A=_[4];for(b=0;b<p;b++)l=g[u>>>24]^m[f>>16&255]^w[a>>8&255]^v[255&y]^o[d],c=g[f>>>24]^m[a>>16&255]^w[y>>8&255]^v[255&u]^o[d+1],h=g[a>>>24]^m[y>>16&255]^w[u>>8&255]^v[255&f]^o[d+2],y=g[y>>>24]^m[u>>16&255]^w[f>>8&255]^v[255&a]^o[d+3],d+=4,u=l,f=c,a=h;for(b=0;b<4;b++)n[(3&-b)+r]=A[u>>>24]<<24^A[f>>16&255]<<16^A[a>>8&255]<<8^A[255&y]^o[d++],l=u,u=f,f=a,a=y,y=l}}var i=function(){function t(){this.listeners={}}var 
e=t.prototype;return e.on=function(t,e){this.listeners[t]||(this.listeners[t]=[]),this.listeners[t].push(e)},e.off=function(t,e){if(!this.listeners[t])return!1;var s=this.listeners[t].indexOf(e);return this.listeners[t]=this.listeners[t].slice(0),this.listeners[t].splice(s,1),s>-1},e.trigger=function(t){var e=this.listeners[t];if(e)if(2===arguments.length)for(var s=e.length,i=0;i<s;++i)e[i].call(this,arguments[1]);else for(var n=Array.prototype.slice.call(arguments,1),r=e.length,o=0;o<r;++o)e[o].apply(this,n)},e.dispose=function(){this.listeners={}},e.pipe=function(t){this.on("data",(function(e){t.push(e)}))},t}();class n extends i{constructor(){super(i),this.jobs=[],this.delay=1,this.timeout_=null}processJob_(){this.jobs.shift()(),this.jobs.length?this.timeout_=setTimeout(this.processJob_.bind(this),this.delay):this.timeout_=null}push(t){this.jobs.push(t),this.timeout_||(this.timeout_=setTimeout(this.processJob_.bind(this),this.delay))}}
/*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */const r=function(t){return t<<24|(65280&t)<<8|(16711680&t)>>8|t>>>24},o=function(t,e,i){const n=new Int32Array(t.buffer,t.byteOffset,t.byteLength>>2),o=new s(Array.prototype.slice.call(e)),l=new Uint8Array(t.byteLength),c=new Int32Array(l.buffer);let h,u,f,a,y,p,b,d,_;for(h=i[0],u=i[1],f=i[2],a=i[3],_=0;_<n.length;_+=4)y=r(n[_]),p=r(n[_+1]),b=r(n[_+2]),d=r(n[_+3]),o.decrypt(y,p,b,d,c,_),c[_]=r(c[_]^h),c[_+1]=r(c[_+1]^u),c[_+2]=r(c[_+2]^f),c[_+3]=r(c[_+3]^a),h=y,u=p,f=b,a=d;return l};class l{constructor(t,e,s,i){const o=l.STEP,c=new Int32Array(t.buffer),h=new Uint8Array(t.byteLength);let u=0;for(this.asyncStream_=new n,this.asyncStream_.push(this.decryptChunk_(c.subarray(u,u+o),e,s,h)),u=o;u<c.length;u+=o)s=new Uint32Array([r(c[u-4]),r(c[u-3]),r(c[u-2]),r(c[u-1])]),this.asyncStream_.push(this.decryptChunk_(c.subarray(u,u+o),e,s,h));this.asyncStream_.push((function(){var t;i(null,(t=h).subarray(0,t.byteLength-t[t.byteLength-1]))}))}static get STEP(){return 32e3}decryptChunk_(t,e,s,i){return function(){const n=o(t,e,s);i.set(n,t.byteOffset)}}}t.AsyncStream=n,t.Decrypter=l,t.decrypt=o,Object.defineProperty(t,"__esModule",{value:!0})}));

View file

@ -1,15 +0,0 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>aes-decrypter Demo</title>
<script src="dist/aes-decrypter.js"></script>
</head>
<body>
<p>To test this out, open up your developer console.</p>
<ul>
<li><a href="test/">Run unit tests in browser.</a></li>
<li><a href="docs/api/">Read generated docs.</a></li>
</ul>
</body>
</html>

View file

@ -1,169 +0,0 @@
<a name="3.0.5"></a>
## [3.0.5](https://github.com/videojs/vhs-utils/compare/v3.0.4...v3.0.5) (2022-03-16)
### Bug Fixes
* ArrayBuffer.isView may not be available everywhere ([#33](https://github.com/videojs/vhs-utils/issues/33)) ([bff9147](https://github.com/videojs/vhs-utils/commit/bff9147)), closes [#1134](https://github.com/videojs/vhs-utils/issues/1134)
<a name="3.0.4"></a>
## [3.0.4](https://github.com/videojs/vhs-utils/compare/v3.0.3...v3.0.4) (2021-09-22)
### Bug Fixes
* mark global/window/document as external globals ([#30](https://github.com/videojs/vhs-utils/issues/30)) ([8216630](https://github.com/videojs/vhs-utils/commit/8216630))
### Chores
* don't run tests on version ([#31](https://github.com/videojs/vhs-utils/issues/31)) ([24dab1d](https://github.com/videojs/vhs-utils/commit/24dab1d))
* switch generate-formats to shared [@brandonocasey](https://github.com/brandonocasey)/spawn-promise ([873b43f](https://github.com/videojs/vhs-utils/commit/873b43f))
<a name="3.0.3"></a>
## [3.0.3](https://github.com/videojs/vhs-utils/compare/v3.0.2...v3.0.3) (2021-07-26)
### Bug Fixes
* detect mp4 starting with moof/moov box as mp4 ([#29](https://github.com/videojs/vhs-utils/issues/29)) ([51d995d](https://github.com/videojs/vhs-utils/commit/51d995d))
* look at all program map tables for ts stream types ([#28](https://github.com/videojs/vhs-utils/issues/28)) ([1edb519](https://github.com/videojs/vhs-utils/commit/1edb519))
<a name="3.0.2"></a>
## [3.0.2](https://github.com/videojs/vhs-utils/compare/v3.0.1...v3.0.2) (2021-05-20)
### Bug Fixes
* properly handle data URIs ([#27](https://github.com/videojs/vhs-utils/issues/27)) ([9b10245](https://github.com/videojs/vhs-utils/commit/9b10245)), closes [videojs/video.js#7240](https://github.com/videojs/video.js/issues/7240)
<a name="3.0.1"></a>
## [3.0.1](https://github.com/videojs/vhs-utils/compare/v3.0.0...v3.0.1) (2021-04-29)
### Bug Fixes
* binary issues ([e9f5079](https://github.com/videojs/vhs-utils/commit/e9f5079))
### Chores
* update vjsverify ([105c26a](https://github.com/videojs/vhs-utils/commit/105c26a))
### Performance Improvements
* use native URL when available ([#26](https://github.com/videojs/vhs-utils/issues/26)) ([e7eaab9](https://github.com/videojs/vhs-utils/commit/e7eaab9))
<a name="3.0.0"></a>
# [3.0.0](https://github.com/videojs/vhs-utils/compare/v2.3.0...v3.0.0) (2020-12-18)
### Features
* Extend our current container parsing logic and add logic for parsing codecs from files ([#14](https://github.com/videojs/vhs-utils/issues/14)) ([d425956](https://github.com/videojs/vhs-utils/commit/d425956))
* parse any number of codecs rather than just the last audio or the last video codec. ([#23](https://github.com/videojs/vhs-utils/issues/23)) ([33ec9f5](https://github.com/videojs/vhs-utils/commit/33ec9f5))
* use [@videojs](https://github.com/videojs)/babel-config to transpile code to cjs/es for node ([#20](https://github.com/videojs/vhs-utils/issues/20)) ([c6dbd0b](https://github.com/videojs/vhs-utils/commit/c6dbd0b))
### Chores
* switch from travis to github ci ([#24](https://github.com/videojs/vhs-utils/issues/24)) ([cfee30b](https://github.com/videojs/vhs-utils/commit/cfee30b))
### BREAKING CHANGES
* cjs dist files changed from './dist' to './cjs'
* parseCodecs now returns an array of the codecs that were parsed, so that we can support any number of codecs instead of just two.
* toUint8 in byte-helpers functions slightly differently
* getId3Offset is exported from id3-helpers rather than containers
We can now parse the container for and many of the codecs within (where applicable) for mp4, avi, ts, mkv, webm, ogg, wav, aac, ac3 (and ec3 which is contained in ac3 files), mp3, flac, raw h265, and raw h264.
Codec parsing has also been extended to parse codec details in a file for vp09, avc (h264), hevc (h265), av1, and opus
Finally we have the following additional features to our parsing of codec/container information:
* skipping multiple id3 tags at the start of a file for flac, mp3, and aac
* discarding emulation prevention bits (in h264, h265)
* parsing raw h264/h265 to get codec params for ts, avi, and even raw h264/h265 files
<a name="2.3.0"></a>
# [2.3.0](https://github.com/videojs/vhs-utils/compare/v2.2.1...v2.3.0) (2020-12-03)
### Features
* parse unknown and text codecs ([#19](https://github.com/videojs/vhs-utils/issues/19)) ([9c90076](https://github.com/videojs/vhs-utils/commit/9c90076))
### Chores
* Add repository info to package.json ([#22](https://github.com/videojs/vhs-utils/issues/22)) ([a22ae78](https://github.com/videojs/vhs-utils/commit/a22ae78))
<a name="2.2.1"></a>
## [2.2.1](https://github.com/videojs/stream/compare/v2.2.0...v2.2.1) (2020-10-06)
### Bug Fixes
* check for multiple id3 sections in a file (#21) ([759a039](https://github.com/videojs/stream/commit/759a039)), closes [#21](https://github.com/videojs/stream/issues/21)
* parse unknown codecs as audio or video (#15) ([cd2c9bb](https://github.com/videojs/stream/commit/cd2c9bb)), closes [#15](https://github.com/videojs/stream/issues/15)
### Reverts
* "fix: parse unknown codecs as audio or video (#15)" (#18) ([9983be8](https://github.com/videojs/stream/commit/9983be8)), closes [#15](https://github.com/videojs/stream/issues/15) [#18](https://github.com/videojs/stream/issues/18)
<a name="2.2.0"></a>
# [2.2.0](https://github.com/videojs/stream/compare/v2.1.0...v2.2.0) (2020-05-01)
### Features
* Add a function to concat typed arrays into one Uint8Array (#13) ([e733509](https://github.com/videojs/stream/commit/e733509)), closes [#13](https://github.com/videojs/stream/issues/13)
<a name="2.1.0"></a>
# [2.1.0](https://github.com/videojs/stream/compare/v2.0.0...v2.1.0) (2020-04-27)
### Features
* Add functions for byte manipulation and segment container detection (#12) ([325f677](https://github.com/videojs/stream/commit/325f677)), closes [#12](https://github.com/videojs/stream/issues/12)
<a name="2.0.0"></a>
# [2.0.0](https://github.com/videojs/stream/compare/v1.3.0...v2.0.0) (2020-04-07)
### Features
* **codec:** changes to handle muxer/browser/video/audio support separately (#10) ([1f92865](https://github.com/videojs/stream/commit/1f92865)), closes [#10](https://github.com/videojs/stream/issues/10)
### Bug Fixes
* Allow VP9 and AV1 codecs through in VHS ([b32e35b](https://github.com/videojs/stream/commit/b32e35b))
### BREAKING CHANGES
* **codec:** parseCodecs output has been changed. It now returns an object that can have an audio or video property, depending on the codecs found. Those properties are objects that contain `type` and `details`, with `type` being the codec name and `details` being codec-specific information, usually with a leading period.
* **codec:** `audioProfileFromDefault` has been renamed to `codecsFromDefault` and now returns all output from `parseCodecs` not just audio or audio profile.
<a name="1.3.0"></a>
# [1.3.0](https://github.com/videojs/vhs-utils/compare/v1.2.1...v1.3.0) (2020-02-05)
### Features
* add forEachMediaGroup in media-groups module (#8) ([a1eacf4](https://github.com/videojs/vhs-utils/commit/a1eacf4)), closes [#8](https://github.com/videojs/vhs-utils/issues/8)
<a name="1.2.1"></a>
## [1.2.1](https://github.com/videojs/vhs-utils/compare/v1.2.0...v1.2.1) (2020-01-15)
### Bug Fixes
* include videojs in VHS JSON media type (#7) ([da072f0](https://github.com/videojs/vhs-utils/commit/da072f0)), closes [#7](https://github.com/videojs/vhs-utils/issues/7)
<a name="1.2.0"></a>
# [1.2.0](https://github.com/videojs/vhs-utils/compare/v1.1.0...v1.2.0) (2019-12-06)
### Features
* add media-types module with simpleTypeFromSourceType function (#4) ([d3ebd3f](https://github.com/videojs/vhs-utils/commit/d3ebd3f)), closes [#4](https://github.com/videojs/vhs-utils/issues/4)
* add VHS codec parsing and translation functions (#5) ([4fe0e22](https://github.com/videojs/vhs-utils/commit/4fe0e22)), closes [#5](https://github.com/videojs/vhs-utils/issues/5)
<a name="1.1.0"></a>
# [1.1.0](https://github.com/videojs/stream/compare/v1.0.0...v1.1.0) (2019-08-30)
### Features
* node support and more stream tests ([315ab8d](https://github.com/videojs/stream/commit/315ab8d))
<a name="1.0.0"></a>
# 1.0.0 (2019-08-21)
### Features
* clones from mpd-parser, m3u8-parser, mux.js, aes-decrypter, and vhs ([5e89042](https://github.com/videojs/stream/commit/5e89042))

View file

@ -1,19 +0,0 @@
Copyright (c) brandonocasey <brandonocasey@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,41 +0,0 @@
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [@videojs/vhs-utils](#videojsvhs-utils)
- [Installation](#installation)
- [Usage](#usage)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
# @videojs/vhs-utils
vhs-utils serves two purposes:
1. It extracts objects and functions shared throughout @videojs/http-streaming code to save on package size. See [the original @videojs/http-streaming PR](https://github.com/videojs/http-streaming/pull/637) for details.
2. It exports generic functions from VHS that may be useful to plugin authors.
## Installation
```sh
npm install --save @videojs/vhs-utils
```
## Usage
All utility functions are published under dist and can be required/imported like so:
> es import using es dist
```js
import resolveUrl from '@videojs/vhs-utils/es/resolve-url';
```
> cjs import using cjs dist
```js
const resolveUrl = require('@videojs/vhs-utils/cjs/resolve-url');
```
> deprecated cjs dist
```js
const resolveUrl = require('@videojs/vhs-utils/dist/resolve-url');
```

View file

@ -1,333 +0,0 @@
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.reverseBytes = exports.sliceBytes = exports.bytesMatch = exports.concatTypedArrays = exports.stringToBytes = exports.bytesToString = exports.numberToBytes = exports.bytesToNumber = exports.IS_LITTLE_ENDIAN = exports.IS_BIG_ENDIAN = exports.ENDIANNESS = exports.toBinaryString = exports.toHexString = exports.toUint8 = exports.isTypedArray = exports.isArrayBufferView = exports.padStart = exports.countBytes = exports.countBits = void 0;
var _window = _interopRequireDefault(require("global/window"));
// const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
// Build a string consisting of `str` repeated `len` times.
// Hand-rolled because the transpiled target cannot assume String#repeat.
var repeat = function repeat(str, len) {
  var acc = '';
  for (; len--;) {
    acc += str;
  }
  return acc;
}; // count the number of bits it would take to represent a number
// we used to do this with log2 but BigInt does not support builtin math
// Math.ceil(log2(x));
// Number of bits needed to represent x, taken from its base-2 string length.
// Works for both Number and BigInt, unlike Math.log2.
var countBits = function countBits(x) {
  var binary = x.toString(2);
  return binary.length;
}; // count the number of whole bytes it would take to represent a number
exports.countBits = countBits;
// Whole bytes needed to represent x: the bit count rounded up to a multiple of 8.
var countBytes = function countBytes(x) {
  var bits = countBits(x);
  return Math.ceil(bits / 8);
};
exports.countBytes = countBytes;
// Left-pad b's string form with `str` to exactly `len` characters.
// Note: over-long inputs are truncated from the left (slice(-len)), which is
// why the native String#padStart is NOT a drop-in replacement here.
var padStart = function padStart(b, len, str) {
  if (str === void 0) {
    str = ' ';
  }
  var padded = repeat(str, len) + b.toString();
  return padded.slice(-len);
};
exports.padStart = padStart;
/**
 * Determine whether `obj` is a view over an ArrayBuffer (TypedArray/DataView).
 *
 * Prefers the native `ArrayBuffer.isView` when available and falls back to a
 * duck-type check on `obj.buffer` for environments that lack it.
 *
 * @param {*} obj - Value to test.
 * @return {boolean} True when `obj` views an ArrayBuffer.
 */
var isArrayBufferView = function isArrayBufferView(obj) {
  // Bug fix: the original compared the function itself to the string
  // 'function' (always false), so the native check was never taken and
  // plain objects with a `buffer` property were misclassified as views.
  if (typeof ArrayBuffer.isView === 'function') {
    return ArrayBuffer.isView(obj);
  }
  return obj && obj.buffer instanceof ArrayBuffer;
};
exports.isArrayBufferView = isArrayBufferView;
// Alias of isArrayBufferView kept for API compatibility: the "typed arrays"
// this module cares about are exactly the ArrayBuffer views.
var isTypedArray = function isTypedArray(obj) {
return isArrayBufferView(obj);
};
exports.isTypedArray = isTypedArray;
/**
 * Coerce `bytes` into a Uint8Array without copying when possible.
 * Accepts a Uint8Array (returned as-is), any other ArrayBuffer view,
 * an ArrayBuffer, an Array, or a single number; anything else (including
 * NaN) yields an empty Uint8Array.
 */
var toUint8 = function toUint8(bytes) {
if (bytes instanceof Uint8Array) {
return bytes;
}
if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
// any non-number or NaN leads to empty uint8array
// eslint-disable-next-line
if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
bytes = 0;
} else {
bytes = [bytes];
}
}
// For views this wraps the same underlying buffer (shares memory, no copy);
// the || fallbacks cover arrays/numbers, which construct a fresh Uint8Array.
return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
};
exports.toUint8 = toUint8;
/**
 * Hex-encode bytes: two lowercase hex digits per byte, concatenated.
 */
var toHexString = function toHexString(bytes) {
  var view = toUint8(bytes);
  var pieces = [];
  for (var i = 0; i < view.length; i++) {
    pieces.push(padStart(view[i].toString(16), 2, '0'));
  }
  return pieces.join('');
};
exports.toHexString = toHexString;
/**
 * Binary-encode bytes: eight '0'/'1' characters per byte, concatenated.
 */
var toBinaryString = function toBinaryString(bytes) {
  var view = toUint8(bytes);
  var pieces = [];
  for (var i = 0; i < view.length; i++) {
    pieces.push(padStart(view[i].toString(2), 8, '0'));
  }
  return pieces.join('');
};
exports.toBinaryString = toBinaryString;
// Use native BigInt when available; the Number fallback limits the byte math
// below to Number's 2^53 integer precision.
var BigInt = _window.default.BigInt || Number;
// Powers of 256 (0x100^i): positional value of the i-th byte.
var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
// Probe the platform's byte order by viewing a known 16-bit value as bytes.
var ENDIANNESS = function () {
var a = new Uint16Array([0xFFCC]);
var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
if (b[0] === 0xFF) {
return 'big';
}
if (b[0] === 0xCC) {
return 'little';
}
return 'unknown';
}();
exports.ENDIANNESS = ENDIANNESS;
var IS_BIG_ENDIAN = ENDIANNESS === 'big';
exports.IS_BIG_ENDIAN = IS_BIG_ENDIAN;
var IS_LITTLE_ENDIAN = ENDIANNESS === 'little';
exports.IS_LITTLE_ENDIAN = IS_LITTLE_ENDIAN;
/**
 * Interpret a sequence of bytes as a single (optionally signed) integer.
 *
 * @param {Uint8Array|Array|*} bytes - Byte source; coerced via toUint8.
 * @param {Object} [_temp] - Options object.
 * @param {boolean} [_temp.signed=false] - Apply two's-complement sign handling.
 * @param {boolean} [_temp.le=false] - Treat bytes as little-endian.
 * @return {number} Decoded value (precision capped by Number conversion).
 */
var bytesToNumber = function bytesToNumber(bytes, _temp) {
var _ref = _temp === void 0 ? {} : _temp,
_ref$signed = _ref.signed,
signed = _ref$signed === void 0 ? false : _ref$signed,
_ref$le = _ref.le,
le = _ref$le === void 0 ? false : _ref$le;
bytes = toUint8(bytes);
// reduce walks lowest byte first (little-endian); reduceRight the reverse
var fn = le ? 'reduce' : 'reduceRight';
var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
var number = obj.call(bytes, function (total, byte, i) {
var exponent = le ? i : Math.abs(i + 1 - bytes.length);
return total + BigInt(byte) * BYTE_TABLE[exponent];
}, BigInt(0));
if (signed) {
// two's complement: values above the signed max wrap around to negative
var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
number = BigInt(number);
if (number > max) {
number -= max;
number -= max;
number -= BigInt(2);
}
}
return Number(number);
};
exports.bytesToNumber = bytesToNumber;
/**
 * Encode an integer as the minimal byte sequence that represents it.
 *
 * @param {number|bigint} number - Value to encode; non-numbers/NaN become 0.
 * @param {Object} [_temp2] - Options object.
 * @param {boolean} [_temp2.le=false] - Emit little-endian byte order.
 * @return {Uint8Array} Encoded bytes (countBytes(number) long).
 */
var numberToBytes = function numberToBytes(number, _temp2) {
var _ref2 = _temp2 === void 0 ? {} : _temp2,
_ref2$le = _ref2.le,
le = _ref2$le === void 0 ? false : _ref2$le;
// eslint-disable-next-line
if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
number = 0;
}
number = BigInt(number);
var byteCount = countBytes(number);
var bytes = new Uint8Array(new ArrayBuffer(byteCount));
for (var i = 0; i < byteCount; i++) {
var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
// divide by the byte's positional value, keep the low 8 bits
bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
// negative values: adjust each byte toward a two's-complement encoding
if (number < 0) {
bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
bytes[byteIndex] -= i === 0 ? 1 : 2;
}
}
return bytes;
};
exports.numberToBytes = numberToBytes;
/**
 * Decode a byte sequence into a string, attempting a UTF-8 interpretation
 * first and falling back to raw char codes when the data is not valid UTF-8.
 */
var bytesToString = function bytesToString(bytes) {
if (!bytes) {
return '';
} // TODO: should toUint8 handle cases where we only have 8 bytes
// but report more since this is a Uint16+ Array?
bytes = Array.prototype.slice.call(bytes);
var string = String.fromCharCode.apply(null, toUint8(bytes));
try {
// escape + decodeURIComponent reinterprets the raw char codes as UTF-8
return decodeURIComponent(escape(string));
} catch (e) {// if decodeURIComponent/escape fails, we are dealing with partial
// or full non string data. Just return the potentially garbled string.
}
return string;
};
exports.bytesToString = bytesToString;
/**
 * Convert a string (or any value with a toString method) to a Uint8Array
 * of byte values.
 *
 * @param {string|Object} string - Input; non-strings are coerced via toString.
 * @param {boolean} [stringIsBytes] - When true, each char code is assumed to
 *        already be a raw byte and no UTF-8 expansion is performed.
 * @return {Uint8Array} One byte per character of the (possibly expanded) string.
 */
var stringToBytes = function stringToBytes(string, stringIsBytes) {
  var canStringify = string && typeof string.toString === 'function';
  if (typeof string !== 'string' && canStringify) {
    string = string.toString();
  }
  if (typeof string !== 'string') {
    return new Uint8Array();
  }
  // Expand multi-byte characters into individual bytes unless the caller
  // says the string already holds raw byte values.
  if (!stringIsBytes) {
    string = unescape(encodeURIComponent(string));
  }
  var view = new Uint8Array(string.length);
  var i = string.length;
  while (i--) {
    view[i] = string.charCodeAt(i);
  }
  return view;
};
exports.stringToBytes = stringToBytes;
/**
 * Concatenate any number of byte sources into one Uint8Array.
 * Falsy, zero-length and string arguments are ignored.
 */
var concatTypedArrays = function concatTypedArrays() {
  var buffers = Array.prototype.slice.call(arguments).filter(function (b) {
    return b && (b.byteLength || b.length) && typeof b !== 'string';
  });
  if (buffers.length <= 1) {
    // zero inputs -> empty Uint8Array; one input -> that input as Uint8Array
    return toUint8(buffers[0]);
  }
  var totalLen = 0;
  buffers.forEach(function (buf) {
    totalLen += buf.byteLength || buf.length;
  });
  var result = new Uint8Array(totalLen);
  var offset = 0;
  buffers.forEach(function (buf) {
    var view = toUint8(buf);
    result.set(view, offset);
    offset += view.byteLength;
  });
  return result;
};
/**
* Check if the bytes "b" are contained within bytes "a".
*
* @param {Uint8Array|Array} a
* Bytes to check in
*
* @param {Uint8Array|Array} b
* Bytes to check for
*
* @param {Object} options
* options
*
* @param {Array|Uint8Array} [offset=0]
* offset to use when looking at bytes in a
*
* @param {Array|Uint8Array} [mask=[]]
* mask to use on bytes before comparison.
*
* @return {boolean}
* If all bytes in b are inside of a, taking into account
* bit masks.
*/
exports.concatTypedArrays = concatTypedArrays;
var bytesMatch = function bytesMatch(a, b, _temp3) {
var _ref3 = _temp3 === void 0 ? {} : _temp3,
_ref3$offset = _ref3.offset,
offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
_ref3$mask = _ref3.mask,
mask = _ref3$mask === void 0 ? [] : _ref3$mask;
a = toUint8(a);
b = toUint8(b); // ie 11 does not support uint8 every
var fn = b.every ? b.every : Array.prototype.every;
// an empty needle never matches, and b must fit within a at the offset
return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uin8
fn.call(b, function (bByte, i) {
// apply the optional bit mask to a's byte before comparing
var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
return bByte === aByte;
});
};
exports.bytesMatch = bytesMatch;
// Copy [start, end) of `src` into a new Uint8Array, with a fallback for
// environments whose Uint8Array lacks slice().
var sliceBytes = function sliceBytes(src, start, end) {
  var nativeSlice = Uint8Array.prototype.slice;
  if (nativeSlice) {
    return nativeSlice.call(src, start, end);
  }
  return new Uint8Array(Array.prototype.slice.call(src, start, end));
};
exports.sliceBytes = sliceBytes;
// Reverse `src` in place when it has its own reverse(), otherwise reverse it
// via the generic Array helper.
var reverseBytes = function reverseBytes(src) {
  return src.reverse ? src.reverse() : Array.prototype.reverse.call(src);
};
exports.reverseBytes = reverseBytes;

View file

@ -1,112 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getHvcCodec = exports.getAvcCodec = exports.getAv1Codec = void 0;
var _byteHelpers = require("./byte-helpers.js");
// https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/codecs_parameter#AV1
/**
 * Build an AV1 codec string (profile.level+tier.bitDepth.monochrome.chroma)
 * from the bit fields in bytes[1] and bytes[2].
 * NOTE(review): assumes `bytes` is an av1C configuration payload per the
 * referenced AV1-ISOBMFF spec - confirm with callers.
 */
var getAv1Codec = function getAv1Codec(bytes) {
var codec = '';
var profile = bytes[1] >>> 3;
var level = bytes[1] & 0x1F;
var tier = bytes[2] >>> 7;
var highBitDepth = (bytes[2] & 0x40) >> 6;
var twelveBit = (bytes[2] & 0x20) >> 5;
var monochrome = (bytes[2] & 0x10) >> 4;
var chromaSubsamplingX = (bytes[2] & 0x08) >> 3;
var chromaSubsamplingY = (bytes[2] & 0x04) >> 2;
var chromaSamplePosition = bytes[2] & 0x03;
codec += profile + "." + (0, _byteHelpers.padStart)(level, 2, '0');
// tier: 0 -> Main ('M'), 1 -> High ('H')
if (tier === 0) {
codec += 'M';
} else if (tier === 1) {
codec += 'H';
}
// bit depth: only profile 2 can signal 12-bit via the twelveBit flag
var bitDepth;
if (profile === 2 && highBitDepth) {
bitDepth = twelveBit ? 12 : 10;
} else {
bitDepth = highBitDepth ? 10 : 8;
}
codec += "." + (0, _byteHelpers.padStart)(bitDepth, 2, '0'); // TODO: can we parse color range??
codec += "." + monochrome;
codec += "." + chromaSubsamplingX + chromaSubsamplingY + chromaSamplePosition;
return codec;
};
exports.getAv1Codec = getAv1Codec;
/**
 * Build the hex suffix of an avc1 codec string (profile + constraint flags
 * + level) from bytes[1..3].
 * NOTE(review): looks like `bytes` is an AVCDecoderConfigurationRecord
 * (profile_idc / profile_compatibility / level_idc) - confirm with callers.
 */
var getAvcCodec = function getAvcCodec(bytes) {
var profileId = (0, _byteHelpers.toHexString)(bytes[1]);
// mask off the two reserved low bits of the constraint flags byte
var constraintFlags = (0, _byteHelpers.toHexString)(bytes[2] & 0xFC);
var levelId = (0, _byteHelpers.toHexString)(bytes[3]);
return "" + profileId + constraintFlags + levelId;
};
exports.getAvcCodec = getAvcCodec;
/**
 * Build an HEVC (hvc1/hev1) codec string suffix from configuration bytes:
 * profile space letter, profile id, compatibility flags (hex), tier letter,
 * level id, and any non-zero constraint bytes joined by '.'.
 * NOTE(review): assumes `bytes` follows the HEVCDecoderConfigurationRecord
 * layout (bytes[1..12]) - confirm with callers.
 */
var getHvcCodec = function getHvcCodec(bytes) {
var codec = '';
var profileSpace = bytes[1] >> 6;
var profileId = bytes[1] & 0x1F;
var tierFlag = (bytes[1] & 0x20) >> 5;
var profileCompat = bytes.subarray(2, 6);
var constraintIds = bytes.subarray(6, 12);
var levelId = bytes[12];
// profile space 1/2/3 maps to prefix A/B/C; 0 has no prefix
if (profileSpace === 1) {
codec += 'A';
} else if (profileSpace === 2) {
codec += 'B';
} else if (profileSpace === 3) {
codec += 'C';
}
codec += profileId + "."; // ffmpeg does this in big endian
var profileCompatVal = parseInt((0, _byteHelpers.toBinaryString)(profileCompat).split('').reverse().join(''), 2); // apple does this in little endian...
if (profileCompatVal > 255) {
profileCompatVal = parseInt((0, _byteHelpers.toBinaryString)(profileCompat), 2);
}
codec += profileCompatVal.toString(16) + ".";
// tier: 0 -> 'L' (low/main), 1 -> 'H' (high)
if (tierFlag === 0) {
codec += 'L';
} else {
codec += 'H';
}
codec += levelId;
// append only the non-zero constraint bytes, '.'-separated, in hex
var constraints = '';
for (var i = 0; i < constraintIds.length; i++) {
var v = constraintIds[i];
if (v) {
if (constraints) {
constraints += '.';
}
constraints += v.toString(16);
}
}
if (constraints) {
codec += "." + constraints;
}
return codec;
};
exports.getHvcCodec = getHvcCodec;

View file

@ -1,297 +0,0 @@
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.DEFAULT_VIDEO_CODEC = exports.DEFAULT_AUDIO_CODEC = exports.muxerSupportsCodec = exports.browserSupportsCodec = exports.getMimeForCodec = exports.isTextCodec = exports.isAudioCodec = exports.isVideoCodec = exports.codecsFromDefault = exports.parseCodecs = exports.mapLegacyAvcCodecs = exports.translateLegacyCodecs = exports.translateLegacyCodec = void 0;
var _window = _interopRequireDefault(require("global/window"));
// Codec-name regexes used throughout this module for container selection,
// media-type classification, and muxer support checks.
var regexs = {
// to determine mime types
mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
// to determine if a codec is audio or video
video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
text: /^(stpp.ttml.im1t)/,
// mux.js support regex
muxerVideo: /^(avc0?1)/,
muxerAudio: /^(mp4a)/,
// match nothing as muxer does not support text right now.
// there can never be a character before the start of a string,
// so this matches nothing.
muxerText: /a^/
};
// Lowercase names index into `regexs`; the capitalized variants build the
// "muxerVideo"/"muxerAudio"/"muxerText" regex keys.
var mediaTypes = ['video', 'audio', 'text'];
var upperMediaTypes = ['Video', 'Audio', 'Text'];
/**
* Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
* `avc1.<hhhhhh>`
*
* @param {string} codec
* Codec string to translate
* @return {string}
* The translated codec string
*/
var translateLegacyCodec = function translateLegacyCodec(codec) {
  if (!codec) {
    return codec;
  }
  // Rewrite the first apple-style "avc1.<dd>.<dd>" occurrence with both
  // numeric parts converted to zero-padded hex, joined by a literal "00".
  return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
    var toHexByte = function (value) {
      return ('00' + Number(value).toString(16)).slice(-2);
    };
    return 'avc1.' + toHexByte(profile) + '00' + toHexByte(avcLevel);
  });
};
/**
* Replace the old apple-style `avc1.<dd>.<dd>` codec strings with the standard
* `avc1.<hhhhhh>`
*
* @param {string[]} codecs
* An array of codec strings to translate
* @return {string[]}
* The translated array of codec strings
*/
exports.translateLegacyCodec = translateLegacyCodec;
var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
  // Translate each codec independently; input order is preserved.
  return codecs.map(function (codec) {
    return translateLegacyCodec(codec);
  });
};
/**
* Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
* standard `avc1.<hhhhhh>`.
*
* @param {string} codecString
* The codec string
* @return {string}
* The codec string with old apple-style codecs replaced
*
* @private
*/
exports.translateLegacyCodecs = translateLegacyCodecs;
var mapLegacyAvcCodecs = function mapLegacyAvcCodecs(codecString) {
// no /g flag: only the first legacy avc1 occurrence is rewritten
return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
return translateLegacyCodecs([match])[0];
});
};
/**
* @typedef {Object} ParsedCodecInfo
* @property {number} codecCount
* Number of codecs parsed
* @property {string} [videoCodec]
* Parsed video codec (if found)
* @property {string} [videoObjectTypeIndicator]
* Video object type indicator (if found)
* @property {string|null} audioProfile
* Audio profile
*/
/**
* Parses a codec string to retrieve the number of codecs specified, the video codec and
* object type indicator, and the audio profile.
*
* @param {string} [codecString]
* The codec string to parse
* @return {ParsedCodecInfo}
* Parsed codec info
*/
exports.mapLegacyAvcCodecs = mapLegacyAvcCodecs;
var parseCodecs = function parseCodecs(codecString) {
if (codecString === void 0) {
codecString = '';
}
var codecs = codecString.split(',');
var result = [];
codecs.forEach(function (codec) {
codec = codec.trim();
var codecType;
// classify against each media-type regex (video/audio/text)
mediaTypes.forEach(function (name) {
var match = regexs[name].exec(codec.toLowerCase());
if (!match || match.length <= 1) {
return;
}
codecType = name; // maintain codec case
// split the matched prefix (type, original case) from the trailing details
var type = codec.substring(0, match[1].length);
var details = codec.replace(type, '');
result.push({
type: type,
details: details,
mediaType: name
});
});
// codec matched no known media type; record it as 'unknown'
if (!codecType) {
result.push({
type: codec,
details: '',
mediaType: 'unknown'
});
}
});
return result;
};
/**
* Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
* a default alternate audio playlist for the provided audio group.
*
* @param {Object} master
* The master playlist
* @param {string} audioGroupId
* ID of the audio group for which to find the default codec info
* @return {ParsedCodecInfo}
* Parsed codec info
*/
exports.parseCodecs = parseCodecs;
var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
if (!master.mediaGroups.AUDIO || !audioGroupId) {
return null;
}
var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
if (!audioGroup) {
return null;
}
// return codecs from the first rendition flagged default that has playlists
for (var name in audioGroup) {
var audioType = audioGroup[name];
if (audioType.default && audioType.playlists) {
// codec should be the same for all playlists within the audio type
return parseCodecs(audioType.playlists[0].attributes.CODECS);
}
}
// no default rendition found in the group
return null;
};
exports.codecsFromDefault = codecsFromDefault;
// True when the (trimmed, lowercased) codec matches a known video codec.
var isVideoCodec = function isVideoCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }
  var normalized = codec.trim().toLowerCase();
  return regexs.video.test(normalized);
};
exports.isVideoCodec = isVideoCodec;
// True when the (trimmed, lowercased) codec matches a known audio codec.
var isAudioCodec = function isAudioCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }
  var normalized = codec.trim().toLowerCase();
  return regexs.audio.test(normalized);
};
exports.isAudioCodec = isAudioCodec;
// True when the (trimmed, lowercased) codec matches a known text codec.
var isTextCodec = function isTextCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }
  var normalized = codec.trim().toLowerCase();
  return regexs.text.test(normalized);
};
exports.isTextCodec = isTextCodec;
/**
 * Build a full MIME type (e.g. 'video/mp4;codecs="..."') for a codec string.
 * Returns undefined for a missing/non-string input.
 */
var getMimeForCodec = function getMimeForCodec(codecString) {
if (!codecString || typeof codecString !== 'string') {
return;
}
var codecs = codecString.toLowerCase().split(',').map(function (c) {
return translateLegacyCodec(c.trim());
}); // default to video type
var type = 'video'; // only change to audio type if the only codec we have is
// audio
if (codecs.length === 1 && isAudioCodec(codecs[0])) {
type = 'audio';
} else if (codecs.length === 1 && isTextCodec(codecs[0])) {
// text uses application/<container> for now
type = 'application';
} // default the container to mp4
var container = 'mp4'; // every codec must be able to go into the container
// for that container to be the correct one
if (codecs.every(function (c) {
return regexs.mp4.test(c);
})) {
container = 'mp4';
} else if (codecs.every(function (c) {
return regexs.webm.test(c);
})) {
container = 'webm';
} else if (codecs.every(function (c) {
return regexs.ogg.test(c);
})) {
container = 'ogg';
}
// note: the original (untranslated) codecString goes into the codecs param
return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
exports.getMimeForCodec = getMimeForCodec;
/**
 * Whether this browser's MediaSource reports support for the given codec
 * string; false when MediaSource or isTypeSupported is unavailable.
 */
var browserSupportsCodec = function browserSupportsCodec(codecString) {
if (codecString === void 0) {
codecString = '';
}
return _window.default.MediaSource && _window.default.MediaSource.isTypeSupported && _window.default.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
};
exports.browserSupportsCodec = browserSupportsCodec;
/**
 * Whether every comma-separated codec in the string matches one of the
 * muxer support regexes (muxerVideo / muxerAudio / muxerText).
 */
var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
if (codecString === void 0) {
codecString = '';
}
return codecString.toLowerCase().split(',').every(function (codec) {
codec = codec.trim(); // any match is supported.
for (var i = 0; i < upperMediaTypes.length; i++) {
var type = upperMediaTypes[i];
if (regexs["muxer" + type].test(codec)) {
return true;
}
}
return false;
});
};
exports.muxerSupportsCodec = muxerSupportsCodec;
// Fallback codecs when none are declared: AAC-LC audio and an H.264 video
// profile (avc1.4d400d).
var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
exports.DEFAULT_AUDIO_CODEC = DEFAULT_AUDIO_CODEC;
var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
exports.DEFAULT_VIDEO_CODEC = DEFAULT_VIDEO_CODEC;

View file

@ -1,202 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isLikelyFmp4MediaSegment = exports.detectContainerForBytes = exports.isLikely = void 0;
var _byteHelpers = require("./byte-helpers.js");
var _mp4Helpers = require("./mp4-helpers.js");
var _ebmlHelpers = require("./ebml-helpers.js");
var _id3Helpers = require("./id3-helpers.js");
var _nalHelpers = require("./nal-helpers.js");
// Magic-byte signatures used by the detectors below to sniff container
// formats from raw file content.
var CONSTANTS = {
// "webm" string literal in hex
'webm': (0, _byteHelpers.toUint8)([0x77, 0x65, 0x62, 0x6d]),
// "matroska" string literal in hex
'matroska': (0, _byteHelpers.toUint8)([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
// "fLaC" string literal in hex
'flac': (0, _byteHelpers.toUint8)([0x66, 0x4c, 0x61, 0x43]),
// "OggS" string literal in hex
'ogg': (0, _byteHelpers.toUint8)([0x4f, 0x67, 0x67, 0x53]),
// ac-3 sync byte, also works for ec-3 as that is simply a codec
// of ac-3
'ac3': (0, _byteHelpers.toUint8)([0x0b, 0x77]),
// "RIFF" string literal in hex used for wav and avi
'riff': (0, _byteHelpers.toUint8)([0x52, 0x49, 0x46, 0x46]),
// "AVI" string literal in hex
'avi': (0, _byteHelpers.toUint8)([0x41, 0x56, 0x49]),
// "WAVE" string literal in hex
'wav': (0, _byteHelpers.toUint8)([0x57, 0x41, 0x56, 0x45]),
// "ftyp3g" string literal in hex
'3gp': (0, _byteHelpers.toUint8)([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
// "ftyp" string literal in hex
'mp4': (0, _byteHelpers.toUint8)([0x66, 0x74, 0x79, 0x70]),
// "styp" string literal in hex
'fmp4': (0, _byteHelpers.toUint8)([0x73, 0x74, 0x79, 0x70]),
// "ftypqt" string literal in hex
'mov': (0, _byteHelpers.toUint8)([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
// moov string literal in hex
'moov': (0, _byteHelpers.toUint8)([0x6D, 0x6F, 0x6F, 0x76]),
// moof string literal in hex
'moof': (0, _byteHelpers.toUint8)([0x6D, 0x6F, 0x6F, 0x66])
};
// Per-container probe functions. Each takes bytes (Uint8Array after the
// toUint8 wrapping applied below) and returns a truthy value when the bytes
// look like that container. Fix: the mp4 probe previously fell through and
// returned undefined; it now returns false explicitly on no match.
var _isLikely = {
  aac: function aac(bytes) {
    // skip any leading ID3 tag(s) before looking for the ADTS sync pattern
    var offset = (0, _id3Helpers.getId3Offset)(bytes);
    return (0, _byteHelpers.bytesMatch)(bytes, [0xFF, 0x10], {
      offset: offset,
      mask: [0xFF, 0x16]
    });
  },
  mp3: function mp3(bytes) {
    // skip any leading ID3 tag(s) before looking for the MPEG audio sync pattern
    var offset = (0, _id3Helpers.getId3Offset)(bytes);
    return (0, _byteHelpers.bytesMatch)(bytes, [0xFF, 0x02], {
      offset: offset,
      mask: [0xFF, 0x06]
    });
  },
  webm: function webm(bytes) {
    var docType = (0, _ebmlHelpers.findEbml)(bytes, [_ebmlHelpers.EBML_TAGS.EBML, _ebmlHelpers.EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
    return (0, _byteHelpers.bytesMatch)(docType, CONSTANTS.webm);
  },
  mkv: function mkv(bytes) {
    var docType = (0, _ebmlHelpers.findEbml)(bytes, [_ebmlHelpers.EBML_TAGS.EBML, _ebmlHelpers.EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
    return (0, _byteHelpers.bytesMatch)(docType, CONSTANTS.matroska);
  },
  mp4: function mp4(bytes) {
    // if this file is another base media file format, it is not mp4
    if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
      return false;
    } // if this file starts with a ftyp or styp box its mp4
    if ((0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.mp4, {
      offset: 4
    }) || (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.fmp4, {
      offset: 4
    })) {
      return true;
    } // if this file starts with a moof/moov box its mp4
    if ((0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.moof, {
      offset: 4
    }) || (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.moov, {
      offset: 4
    })) {
      return true;
    }
    // no mp4 signature found; previously this fell through to undefined
    return false;
  },
  mov: function mov(bytes) {
    // "ftypqt" at offset 4 (after the 4-byte box size)
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.mov, {
      offset: 4
    });
  },
  '3gp': function gp(bytes) {
    // "ftyp3g" at offset 4 (after the 4-byte box size)
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS['3gp'], {
      offset: 4
    });
  },
  ac3: function ac3(bytes) {
    // skip any leading ID3 tag(s) before looking for the ac-3 sync bytes
    var offset = (0, _id3Helpers.getId3Offset)(bytes);
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.ac3, {
      offset: offset
    });
  },
  ts: function ts(bytes) {
    // with less than a full 188-byte packet we can only check the first sync byte
    if (bytes.length < 189 && bytes.length >= 1) {
      return bytes[0] === 0x47;
    }
    var i = 0; // check the first 376 bytes for two matching sync bytes
    while (i + 188 < bytes.length && i < 188) {
      if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
        return true;
      }
      i += 1;
    }
    return false;
  },
  flac: function flac(bytes) {
    // skip any leading ID3 tag(s) before looking for "fLaC"
    var offset = (0, _id3Helpers.getId3Offset)(bytes);
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.flac, {
      offset: offset
    });
  },
  ogg: function ogg(bytes) {
    // "OggS" capture-pattern at the very start
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.ogg);
  },
  avi: function avi(bytes) {
    // RIFF header with an "AVI" form type at offset 8
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.riff) && (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.avi, {
      offset: 8
    });
  },
  wav: function wav(bytes) {
    // RIFF header with a "WAVE" form type at offset 8
    return (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.riff) && (0, _byteHelpers.bytesMatch)(bytes, CONSTANTS.wav, {
      offset: 8
    });
  },
  'h264': function h264(bytes) {
    // find seq_parameter_set_rbsp
    return (0, _nalHelpers.findH264Nal)(bytes, 7, 3).length;
  },
  'h265': function h265(bytes) {
    // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    return (0, _nalHelpers.findH265Nal)(bytes, [32, 33], 3).length;
  }
}; // get all the isLikely functions
// Probe order matters: 'ts' must run before h264/h265 (which are very loose
// matches) but after every other, more specific, container probe.
var LEAST_SPECIFIC = ['ts', 'h264', 'h265'];
var isLikelyTypes = Object.keys(_isLikely).filter(function (t) {
  return LEAST_SPECIFIC.indexOf(t) === -1;
}).concat(LEAST_SPECIFIC);

// Wrap every probe so callers may pass anything toUint8 understands.
var wrapWithToUint8 = function wrapWithToUint8(fn) {
  return function (bytes) {
    return fn((0, _byteHelpers.toUint8)(bytes));
  };
};
for (var typeIndex = 0; typeIndex < isLikelyTypes.length; typeIndex++) {
  var likelyType = isLikelyTypes[typeIndex];
  _isLikely[likelyType] = wrapWithToUint8(_isLikely[likelyType]);
}

// export after wrapping
var isLikely = _isLikely; // A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures
exports.isLikely = isLikely;
// Run the ordered probes until one matches; returns the container name
// ('mp4', 'webm', …) or '' when nothing matched.
var detectContainerForBytes = function detectContainerForBytes(bytes) {
  var normalized = (0, _byteHelpers.toUint8)(bytes);
  var detected = '';
  isLikelyTypes.some(function (type) {
    if (isLikely[type](normalized)) {
      detected = type;
      return true;
    }
    return false;
  });
  return detected;
}; // fmp4 is not a container
exports.detectContainerForBytes = detectContainerForBytes;
// A fragmented-mp4 media segment is identified by the presence of a moof box.
var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
  var moofBoxes = (0, _mp4Helpers.findBox)(bytes, ['moof']);
  return moofBoxes.length > 0;
};
exports.isLikelyFmp4MediaSegment = isLikelyFmp4MediaSegment;

View file

@ -1,27 +0,0 @@
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = decodeB64ToUint8Array;
var _window = _interopRequireDefault(require("global/window"));
// Browser-or-Node base64 decoder: prefer window.atob, fall back to Buffer.
var atob = function atob(s) {
  if (_window.default.atob) {
    return _window.default.atob(s);
  }
  return Buffer.from(s, 'base64').toString('binary');
};
// Decode a base64 string into a Uint8Array of its raw bytes.
function decodeB64ToUint8Array(b64Text) {
  var binary = atob(b64Text);
  var len = binary.length;
  var result = new Uint8Array(len);
  var i = 0;
  while (i < len) {
    result[i] = binary.charCodeAt(i);
    i++;
  }
  return result;
}
module.exports = exports.default;

View file

@ -1,518 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.parseData = exports.parseTracks = exports.decodeBlock = exports.findEbml = exports.EBML_TAGS = void 0;
var _byteHelpers = require("./byte-helpers");
var _codecHelpers = require("./codec-helpers.js");
// relevant specs for this parser:
// https://matroska-org.github.io/libebml/specs.html
// https://www.matroska.org/technical/elements.html
// https://www.webmproject.org/docs/container/
// EBML element IDs used by the matroska/webm parser below.
// IDs are matched as raw bytes (length bits included), per the EBML spec.
var EBML_TAGS = {
  EBML: (0, _byteHelpers.toUint8)([0x1A, 0x45, 0xDF, 0xA3]),
  DocType: (0, _byteHelpers.toUint8)([0x42, 0x82]),
  Segment: (0, _byteHelpers.toUint8)([0x18, 0x53, 0x80, 0x67]),
  SegmentInfo: (0, _byteHelpers.toUint8)([0x15, 0x49, 0xA9, 0x66]),
  Tracks: (0, _byteHelpers.toUint8)([0x16, 0x54, 0xAE, 0x6B]),
  Track: (0, _byteHelpers.toUint8)([0xAE]),
  TrackNumber: (0, _byteHelpers.toUint8)([0xd7]),
  DefaultDuration: (0, _byteHelpers.toUint8)([0x23, 0xe3, 0x83]),
  TrackEntry: (0, _byteHelpers.toUint8)([0xAE]),
  TrackType: (0, _byteHelpers.toUint8)([0x83]),
  FlagDefault: (0, _byteHelpers.toUint8)([0x88]),
  CodecID: (0, _byteHelpers.toUint8)([0x86]),
  CodecPrivate: (0, _byteHelpers.toUint8)([0x63, 0xA2]),
  VideoTrack: (0, _byteHelpers.toUint8)([0xe0]),
  AudioTrack: (0, _byteHelpers.toUint8)([0xe1]),
  // Not used yet, but will be used for live webm/mkv
  // see https://www.matroska.org/technical/basics.html#block-structure
  // see https://www.matroska.org/technical/basics.html#simpleblock-structure
  Cluster: (0, _byteHelpers.toUint8)([0x1F, 0x43, 0xB6, 0x75]),
  Timestamp: (0, _byteHelpers.toUint8)([0xE7]),
  TimestampScale: (0, _byteHelpers.toUint8)([0x2A, 0xD7, 0xB1]),
  BlockGroup: (0, _byteHelpers.toUint8)([0xA0]),
  BlockDuration: (0, _byteHelpers.toUint8)([0x9B]),
  Block: (0, _byteHelpers.toUint8)([0xA1]),
  SimpleBlock: (0, _byteHelpers.toUint8)([0xA3])
};
/**
 * This is a simple table to determine the length
 * of things in ebml. The length is one based (starts at 1,
 * rather than zero) and for every zero bit before a one bit
 * we add one to length. We also need this table because in some
 * cases we have to xor all the length bits from another value.
 */
exports.EBML_TAGS = EBML_TAGS;
// Bit masks for each position in a byte, most-significant first.
var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];

// EBML vint length: 1 + the number of leading zero bits in the first byte.
// e.g. 1xxxxxxx => 1, 01xxxxxx => 2, 00000001 => 8, 00000000 => 9.
var getLength = function getLength(byte) {
  var i;
  for (i = 0; i < LENGTH_TABLE.length; i++) {
    if (byte & LENGTH_TABLE[i]) {
      break;
    }
  }
  return i + 1;
}; // length in ebml is stored in the first 4 to 8 bits
// of the first byte. 4 for the id length and 8 for the
// data size length. Length is measured by converting the number to binary
// then 1 + the number of zeros before a 1 is encountered starting
// from the left.
// Read one EBML variable-length integer starting at `offset`.
// removeLength (default true): strip the length-marker bit before decoding
// the value — required for data sizes, but NOT for element IDs, whose bytes
// are matched with the length bits intact.
// signed (default false): decode as a signed value (used by EBML lacing).
// Returns { length, value, bytes }.
var getvint = function getvint(bytes, offset, removeLength, signed) {
  if (removeLength === void 0) {
    removeLength = true;
  }
  if (signed === void 0) {
    signed = false;
  }
  var length = getLength(bytes[offset]);
  var valueBytes = bytes.subarray(offset, offset + length); // NOTE that we do **not** subarray here because we need to copy these bytes
  // as they will be modified below to remove the dataSizeLen bits and we do not
  // want to modify the original data. normally we could just call slice on
  // uint8array but ie 11 does not support that...
  if (removeLength) {
    valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
    // clear the single marker bit that encodes the length
    valueBytes[0] ^= LENGTH_TABLE[length - 1];
  }
  return {
    length: length,
    value: (0, _byteHelpers.bytesToNumber)(valueBytes, {
      signed: signed
    }),
    bytes: valueBytes
  };
};
// Normalize a single path element to a byte representation.
// Numbers are converted via numberToBytes; byte arrays pass through.
// FIXME(review): the string branch splits into 2-char chunks and recurses,
// but each 2-char chunk is itself a string and re-enters this branch —
// this looks like it would recurse without terminating for any string
// input. Callers in this file only ever pass numbers or byte arrays;
// confirm intent before relying on string paths.
var normalizePath = function normalizePath(path) {
  if (typeof path === 'string') {
    return path.match(/.{1,2}/g).map(function (p) {
      return normalizePath(p);
    });
  }
  if (typeof path === 'number') {
    return (0, _byteHelpers.numberToBytes)(path);
  }
  return path;
};
// Normalize a path or list of paths to a list of normalized path elements.
var normalizePaths = function normalizePaths(paths) {
  var list = Array.isArray(paths) ? paths : [paths];
  return list.map(function (p) {
    return normalizePath(p);
  });
};
// Determine the real end offset of an element whose declared data size is
// "unknown" (0x7f, used by live streams): walk sibling elements until we hit
// another element with the same id (the next segment/cluster) or run out of
// bytes. Returns an absolute offset into `bytes`.
var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
  if (offset >= bytes.length) {
    return bytes.length;
  }
  var innerid = getvint(bytes, offset, false);
  // a repeat of the same element id marks the end of the unknown-size element
  if ((0, _byteHelpers.bytesMatch)(id.bytes, innerid.bytes)) {
    return offset;
  }
  var dataHeader = getvint(bytes, offset + innerid.length);
  // skip past this child element (id + size header + payload) and recurse
  return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
};
/**
 * Notes on the EBML format.
 *
 * EBML uses "vints" tags. Every vint tag contains
* two parts
*
* 1. The length from the first byte. You get this by
* converting the byte to binary and counting the zeros
* before a 1. Then you add 1 to that. Examples
* 00011111 = length 4 because there are 3 zeros before a 1.
* 00100000 = length 3 because there are 2 zeros before a 1.
* 00000011 = length 7 because there are 6 zeros before a 1.
*
* 2. The bits used for length are removed from the first byte
* Then all the bytes are merged into a value. NOTE: this
* is not the case for id ebml tags as there id includes
* length bits.
*
*/
// Walk the EBML element tree in `bytes` and collect the payloads of every
// element matching `paths` (a nested id path, e.g. [Segment, Tracks, Track]).
// Returns an array of Uint8Array payloads; empty array when nothing matches.
var findEbml = function findEbml(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var results = [];
  if (!paths.length) {
    return results;
  }
  var i = 0;
  while (i < bytes.length) {
    // element id keeps its length bits; data size header does not
    var id = getvint(bytes, i, false);
    var dataHeader = getvint(bytes, i + id.length);
    var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
    if (dataHeader.value === 0x7f) {
      dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
      if (dataHeader.value !== bytes.length) {
        dataHeader.value -= dataStart;
      }
    }
    // clamp to the available bytes in case of a truncated element
    var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
    var data = bytes.subarray(dataStart, dataEnd);
    if ((0, _byteHelpers.bytesMatch)(paths[0], id.bytes)) {
      if (paths.length === 1) {
        // this is the end of the paths and we've found the tag we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next tag inside of the data
        // of this one
        results = results.concat(findEbml(data, paths.slice(1)));
      }
    }
    var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
    i += totalLength;
  }
  return results;
}; // see https://www.matroska.org/technical/basics.html#block-structure
exports.findEbml = findEbml;
// Decode one matroska Block/SimpleBlock (or BlockGroup wrapping a Block)
// into { duration, trackNumber, keyframe, invisible, lacing, discardable,
// frames, pts, dts, timestamp }. `type` is 'simple', 'block', or 'group'.
// timestampScale is in nanoseconds; pts/dts come out in seconds.
var decodeBlock = function decodeBlock(block, type, timestampScale, clusterTimestamp) {
  var duration;
  if (type === 'group') {
    // BlockGroup may carry an explicit duration; unwrap to the inner Block
    duration = findEbml(block, [EBML_TAGS.BlockDuration])[0];
    if (duration) {
      duration = (0, _byteHelpers.bytesToNumber)(duration);
      duration = 1 / timestampScale * duration * timestampScale / 1000;
    }
    block = findEbml(block, [EBML_TAGS.Block])[0];
    type = 'block'; // treat data as a block after this point
  }
  var dv = new DataView(block.buffer, block.byteOffset, block.byteLength);
  var trackNumber = getvint(block, 0);
  // 16-bit signed timestamp relative to the cluster timestamp
  var timestamp = dv.getInt16(trackNumber.length, false);
  var flags = block[trackNumber.length + 2];
  var data = block.subarray(trackNumber.length + 3); // pts/dts in seconds
  var ptsdts = 1 / timestampScale * (clusterTimestamp + timestamp) * timestampScale / 1000; // return the frame
  var parsed = {
    duration: duration,
    trackNumber: trackNumber.value,
    // keyframe flag is only meaningful on SimpleBlock
    keyframe: type === 'simple' && flags >> 7 === 1,
    invisible: (flags & 0x08) >> 3 === 1,
    // lacing: 0 = none, 1 = xiph, 2 = fixed-size, 3 = ebml
    lacing: (flags & 0x06) >> 1,
    discardable: type === 'simple' && (flags & 0x01) === 1,
    frames: [],
    pts: ptsdts,
    dts: ptsdts,
    timestamp: timestamp
  };
  if (!parsed.lacing) {
    parsed.frames.push(data);
    return parsed;
  }
  // laced block: first payload byte is (frame count - 1)
  var numberOfFrames = data[0] + 1;
  var frameSizes = [];
  var offset = 1; // Fixed
  if (parsed.lacing === 2) {
    var sizeOfFrame = (data.length - offset) / numberOfFrames;
    for (var i = 0; i < numberOfFrames; i++) {
      frameSizes.push(sizeOfFrame);
    }
  } // xiph
  if (parsed.lacing === 1) {
    // xiph lacing: each size is a run of 0xFF bytes plus a terminator byte
    for (var _i = 0; _i < numberOfFrames - 1; _i++) {
      var size = 0;
      do {
        size += data[offset];
        offset++;
      } while (data[offset - 1] === 0xFF);
      frameSizes.push(size);
    }
  } // ebml
  if (parsed.lacing === 3) {
    // first vint is unsigned
    // after that vints are signed and
    // based on a compounding size
    var _size = 0;
    for (var _i2 = 0; _i2 < numberOfFrames - 1; _i2++) {
      var vint = _i2 === 0 ? getvint(data, offset) : getvint(data, offset, true, true);
      _size += vint.value;
      frameSizes.push(_size);
      offset += vint.length;
    }
  }
  // the final frame's size is implicit: it runs to the end of the data
  frameSizes.forEach(function (size) {
    parsed.frames.push(data.subarray(offset, offset + size));
    offset += size;
  });
  return parsed;
}; // VP9 Codec Feature Metadata (CodecPrivate)
// https://www.webmproject.org/docs/container/
exports.decodeBlock = decodeBlock;
// Parse VP9 CodecPrivate (id/length/value triples) into named parameters:
// profile, level, bitDepth, chromaSubsampling; unknown ids keep numeric keys.
var parseVp9Private = function parseVp9Private(bytes) {
  var params = {};
  var i = 0;
  while (i < bytes.length) {
    var id = bytes[i] & 0x7f;
    var len = bytes[i + 1];
    // single-byte values are unwrapped; longer values stay as byte views
    var val = len === 1 ? bytes[i + 2] : bytes.subarray(i + 2, i + 2 + len);
    switch (id) {
      case 1:
        params.profile = val;
        break;
      case 2:
        params.level = val;
        break;
      case 3:
        params.bitDepth = val;
        break;
      case 4:
        params.chromaSubsampling = val;
        break;
      default:
        params[id] = val;
    }
    i += 2 + len;
  }
  return params;
};
// Parse matroska/webm track entries into an array of
// { rawCodec, type, codecPrivate, number, defaultDuration, default, rawData,
// codec } objects, sorted by track number. Tracks that are not video, audio,
// or subtitle are skipped. The `codec` field is a best-effort MIME codec
// string derived from the matroska CodecID and CodecPrivate data.
var parseTracks = function parseTracks(bytes) {
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var decodedTracks = [];
  // the Tracks element may appear under Segment, at the top level, or the
  // caller may have passed the Tracks payload directly — try each in turn
  var tracks = findEbml(bytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.Track]);
  if (!tracks.length) {
    tracks = findEbml(bytes, [EBML_TAGS.Tracks, EBML_TAGS.Track]);
  }
  if (!tracks.length) {
    tracks = findEbml(bytes, [EBML_TAGS.Track]);
  }
  if (!tracks.length) {
    return decodedTracks;
  }
  tracks.forEach(function (track) {
    var trackType = findEbml(track, EBML_TAGS.TrackType)[0];
    if (!trackType || !trackType.length) {
      return;
    } // 1 is video, 2 is audio, 17 is subtitle
    // other values are unimportant in this context
    if (trackType[0] === 1) {
      trackType = 'video';
    } else if (trackType[0] === 2) {
      trackType = 'audio';
    } else if (trackType[0] === 17) {
      trackType = 'subtitle';
    } else {
      return;
    } // todo parse language
    var decodedTrack = {
      rawCodec: (0, _byteHelpers.bytesToString)(findEbml(track, [EBML_TAGS.CodecID])[0]),
      type: trackType,
      codecPrivate: findEbml(track, [EBML_TAGS.CodecPrivate])[0],
      number: (0, _byteHelpers.bytesToNumber)(findEbml(track, [EBML_TAGS.TrackNumber])[0]),
      defaultDuration: (0, _byteHelpers.bytesToNumber)(findEbml(track, [EBML_TAGS.DefaultDuration])[0]),
      default: findEbml(track, [EBML_TAGS.FlagDefault])[0],
      rawData: track
    };
    // map the matroska CodecID to an RFC 6381-style codec string
    var codec = '';
    if (/V_MPEG4\/ISO\/AVC/.test(decodedTrack.rawCodec)) {
      codec = "avc1." + (0, _codecHelpers.getAvcCodec)(decodedTrack.codecPrivate);
    } else if (/V_MPEGH\/ISO\/HEVC/.test(decodedTrack.rawCodec)) {
      codec = "hev1." + (0, _codecHelpers.getHvcCodec)(decodedTrack.codecPrivate);
    } else if (/V_MPEG4\/ISO\/ASP/.test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        codec = 'mp4v.20.' + decodedTrack.codecPrivate[4].toString();
      } else {
        codec = 'mp4v.20.9';
      }
    } else if (/^V_THEORA/.test(decodedTrack.rawCodec)) {
      codec = 'theora';
    } else if (/^V_VP8/.test(decodedTrack.rawCodec)) {
      codec = 'vp8';
    } else if (/^V_VP9/.test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        var _parseVp9Private = parseVp9Private(decodedTrack.codecPrivate),
          profile = _parseVp9Private.profile,
          level = _parseVp9Private.level,
          bitDepth = _parseVp9Private.bitDepth,
          chromaSubsampling = _parseVp9Private.chromaSubsampling;
        codec = 'vp09.';
        codec += (0, _byteHelpers.padStart)(profile, 2, '0') + ".";
        codec += (0, _byteHelpers.padStart)(level, 2, '0') + ".";
        codec += (0, _byteHelpers.padStart)(bitDepth, 2, '0') + ".";
        codec += "" + (0, _byteHelpers.padStart)(chromaSubsampling, 2, '0'); // Video -> Colour -> Ebml name
        var matrixCoefficients = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB1]])[0] || [];
        var videoFullRangeFlag = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB9]])[0] || [];
        var transferCharacteristics = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBA]])[0] || [];
        var colourPrimaries = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBB]])[0] || []; // if we find any optional codec parameter specify them all.
        if (matrixCoefficients.length || videoFullRangeFlag.length || transferCharacteristics.length || colourPrimaries.length) {
          codec += "." + (0, _byteHelpers.padStart)(colourPrimaries[0], 2, '0');
          codec += "." + (0, _byteHelpers.padStart)(transferCharacteristics[0], 2, '0');
          codec += "." + (0, _byteHelpers.padStart)(matrixCoefficients[0], 2, '0');
          codec += "." + (0, _byteHelpers.padStart)(videoFullRangeFlag[0], 2, '0');
        }
      } else {
        codec = 'vp9';
      }
    } else if (/^V_AV1/.test(decodedTrack.rawCodec)) {
      codec = "av01." + (0, _codecHelpers.getAv1Codec)(decodedTrack.codecPrivate);
    } else if (/A_ALAC/.test(decodedTrack.rawCodec)) {
      codec = 'alac';
    } else if (/A_MPEG\/L2/.test(decodedTrack.rawCodec)) {
      codec = 'mp2';
    } else if (/A_MPEG\/L3/.test(decodedTrack.rawCodec)) {
      codec = 'mp3';
    } else if (/^A_AAC/.test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        // AAC audioObjectType lives in the top 5 bits of CodecPrivate
        codec = 'mp4a.40.' + (decodedTrack.codecPrivate[0] >>> 3).toString();
      } else {
        codec = 'mp4a.40.2';
      }
    } else if (/^A_AC3/.test(decodedTrack.rawCodec)) {
      codec = 'ac-3';
    } else if (/^A_PCM/.test(decodedTrack.rawCodec)) {
      codec = 'pcm';
    } else if (/^A_MS\/ACM/.test(decodedTrack.rawCodec)) {
      codec = 'speex';
    } else if (/^A_EAC3/.test(decodedTrack.rawCodec)) {
      codec = 'ec-3';
    } else if (/^A_VORBIS/.test(decodedTrack.rawCodec)) {
      codec = 'vorbis';
    } else if (/^A_FLAC/.test(decodedTrack.rawCodec)) {
      codec = 'flac';
    } else if (/^A_OPUS/.test(decodedTrack.rawCodec)) {
      codec = 'opus';
    }
    decodedTrack.codec = codec;
    decodedTracks.push(decodedTrack);
  });
  return decodedTracks.sort(function (a, b) {
    return a.number - b.number;
  });
};
exports.parseTracks = parseTracks;
// Parse a matroska/webm Segment into { tracks, blocks }: track metadata
// (via parseTracks, unless `tracks` is supplied) plus every decoded
// Block/SimpleBlock across all clusters, in byte order within each cluster.
var parseData = function parseData(data, tracks) {
  var allBlocks = [];
  var segment = findEbml(data, [EBML_TAGS.Segment])[0];
  var timestampScale = findEbml(segment, [EBML_TAGS.SegmentInfo, EBML_TAGS.TimestampScale])[0]; // in nanoseconds, defaults to 1ms
  if (timestampScale && timestampScale.length) {
    timestampScale = (0, _byteHelpers.bytesToNumber)(timestampScale);
  } else {
    timestampScale = 1000000;
  }
  var clusters = findEbml(segment, [EBML_TAGS.Cluster]);
  if (!tracks) {
    tracks = parseTracks(segment);
  }
  clusters.forEach(function (cluster, ci) {
    var simpleBlocks = findEbml(cluster, [EBML_TAGS.SimpleBlock]).map(function (b) {
      return {
        type: 'simple',
        data: b
      };
    });
    var blockGroups = findEbml(cluster, [EBML_TAGS.BlockGroup]).map(function (b) {
      return {
        type: 'group',
        data: b
      };
    });
    // cluster timestamp defaults to 0 when the element is absent
    var timestamp = findEbml(cluster, [EBML_TAGS.Timestamp])[0] || 0;
    if (timestamp && timestamp.length) {
      timestamp = (0, _byteHelpers.bytesToNumber)(timestamp);
    } // get all blocks then sort them into the correct order
    var blocks = simpleBlocks.concat(blockGroups).sort(function (a, b) {
      return a.data.byteOffset - b.data.byteOffset;
    });
    blocks.forEach(function (block, bi) {
      var decoded = decodeBlock(block.data, block.type, timestampScale, timestamp);
      allBlocks.push(decoded);
    });
  });
  return {
    tracks: tracks,
    blocks: allBlocks
  };
};
exports.parseData = parseData;

View file

@ -1,408 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.parseFormatForBytes = void 0;
var _byteHelpers = require("./byte-helpers.js");
var _ebmlHelpers = require("./ebml-helpers.js");
var _mp4Helpers = require("./mp4-helpers.js");
var _riffHelpers = require("./riff-helpers.js");
var _oggHelpers = require("./ogg-helpers.js");
var _containers = require("./containers.js");
var _nalHelpers = require("./nal-helpers.js");
var _m2tsHelpers = require("./m2ts-helpers.js");
var _codecHelpers = require("./codec-helpers.js");
var _id3Helpers = require("./id3-helpers.js");
// https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// https://tools.ietf.org/html/rfc2361
// Map a RIFF wFormatTag (two bytes, already byte-swapped by callers) to a
// codec name. Returns '' for unrecognized tags.
// See https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// and https://tools.ietf.org/html/rfc2361
var wFormatTagCodec = function wFormatTagCodec(wFormatTag) {
  wFormatTag = (0, _byteHelpers.toUint8)(wFormatTag);
  var tagToCodec = [
    [[0x00, 0x55], 'mp3'],
    [[0x16, 0x00], 'aac'],
    [[0x00, 0xFF], 'aac'],
    [[0x70, 0x4f], 'opus'],
    [[0x6C, 0x61], 'alac'],
    [[0xF1, 0xAC], 'flac'],
    [[0x20, 0x00], 'ac-3'],
    [[0xFF, 0xFE], 'ec-3'],
    [[0x00, 0x50], 'mp2'],
    [[0x56, 0x6f], 'vorbis'],
    [[0xA1, 0x09], 'speex']
  ];
  for (var i = 0; i < tagToCodec.length; i++) {
    if ((0, _byteHelpers.bytesMatch)(wFormatTag, tagToCodec[i][0])) {
      return tagToCodec[i][1];
    }
  }
  return '';
};
// Build a mimetype string such as 'video/mp4;codecs="avc1...,mp4a..."'.
// The top-level type is video when a video codec is present, else audio;
// video codec is listed before audio codec in the codecs parameter.
var formatMimetype = function formatMimetype(name, codecs) {
  var codecList = [];
  if (codecs.video) {
    codecList.push(codecs.video);
  }
  if (codecs.audio) {
    codecList.push(codecs.audio);
  }
  var codecString = codecList.join(',');
  var topLevelType = codecs.video ? 'video' : 'audio';
  var suffix = codecString ? ";codecs=\"" + codecString + "\"" : '';
  return topLevelType + "/" + name + suffix;
};
// Per-container codec sniffers. Each returns { codecs: {video?, audio?},
// mimetype } for bytes already identified as that container.
// Fix: the avi sniffer's uncompressed-video fallback previously called
// findFourCC(movi, ['00db'][0]) — passing the bare string '00db' as the
// paths argument and never indexing the result; corrected to
// findFourCC(movi, ['00db'])[0].
var parseCodecFrom = {
  mov: function mov(bytes) {
    // mov and mp4 both use a nearly identical box structure.
    var retval = parseCodecFrom.mp4(bytes);
    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('mp4', 'quicktime');
    }
    return retval;
  },
  mp4: function mp4(bytes) {
    bytes = (0, _byteHelpers.toUint8)(bytes);
    var codecs = {};
    var tracks = (0, _mp4Helpers.parseTracks)(bytes);
    // first audio track and first video track win
    for (var i = 0; i < tracks.length; i++) {
      var track = tracks[i];
      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }
      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }
    return {
      codecs: codecs,
      mimetype: formatMimetype('mp4', codecs)
    };
  },
  '3gp': function gp(bytes) {
    return {
      codecs: {},
      mimetype: 'video/3gpp'
    };
  },
  ogg: function ogg(bytes) {
    // identify the codec from the magic bytes of the first few page payloads
    var pages = (0, _oggHelpers.getPages)(bytes, 0, 4);
    var codecs = {};
    pages.forEach(function (page) {
      if ((0, _byteHelpers.bytesMatch)(page, [0x4F, 0x70, 0x75, 0x73], {
        offset: 28
      })) {
        codecs.audio = 'opus';
      } else if ((0, _byteHelpers.bytesMatch)(page, [0x56, 0x50, 0x38, 0x30], {
        offset: 29
      })) {
        codecs.video = 'vp8';
      } else if ((0, _byteHelpers.bytesMatch)(page, [0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {
        offset: 29
      })) {
        codecs.video = 'theora';
      } else if ((0, _byteHelpers.bytesMatch)(page, [0x46, 0x4C, 0x41, 0x43], {
        offset: 29
      })) {
        codecs.audio = 'flac';
      } else if ((0, _byteHelpers.bytesMatch)(page, [0x53, 0x70, 0x65, 0x65, 0x78], {
        offset: 28
      })) {
        codecs.audio = 'speex';
      } else if ((0, _byteHelpers.bytesMatch)(page, [0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {
        offset: 29
      })) {
        codecs.audio = 'vorbis';
      }
    });
    return {
      codecs: codecs,
      mimetype: formatMimetype('ogg', codecs)
    };
  },
  wav: function wav(bytes) {
    var format = (0, _riffHelpers.findFourCC)(bytes, ['WAVE', 'fmt'])[0];
    // wFormatTag is little-endian in the fmt chunk; reverse to big-endian
    var wFormatTag = Array.prototype.slice.call(format, 0, 2).reverse();
    var mimetype = 'audio/vnd.wave';
    var codecs = {
      audio: wFormatTagCodec(wFormatTag)
    };
    var codecString = wFormatTag.reduce(function (acc, v) {
      if (v) {
        acc += (0, _byteHelpers.toHexString)(v);
      }
      return acc;
    }, '');
    if (codecString) {
      mimetype += ";codec=" + codecString;
    }
    if (codecString && !codecs.audio) {
      codecs.audio = codecString;
    }
    return {
      codecs: codecs,
      mimetype: mimetype
    };
  },
  avi: function avi(bytes) {
    var movi = (0, _riffHelpers.findFourCC)(bytes, ['AVI', 'movi'])[0];
    var strls = (0, _riffHelpers.findFourCC)(bytes, ['AVI', 'hdrl', 'strl']);
    var codecs = {};
    strls.forEach(function (strl) {
      var strh = (0, _riffHelpers.findFourCC)(strl, ['strh'])[0];
      var strf = (0, _riffHelpers.findFourCC)(strl, ['strf'])[0]; // now parse AVIStreamHeader to get codec and type:
      // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/api/avifmt/ns-avifmt-avistreamheader
      var type = (0, _byteHelpers.bytesToString)(strh.subarray(0, 4));
      var codec;
      var codecType;
      if (type === 'vids') {
        // https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapinfoheader
        var handler = (0, _byteHelpers.bytesToString)(strh.subarray(4, 8));
        var compression = (0, _byteHelpers.bytesToString)(strf.subarray(16, 20)); // look for 00dc (compressed video fourcc code) or 00db (uncompressed video fourcc code)
        // fix: '00db' must be passed as a paths array and the result indexed
        var videoData = (0, _riffHelpers.findFourCC)(movi, ['00dc'])[0] || (0, _riffHelpers.findFourCC)(movi, ['00db'])[0];
        if (handler === 'H264' || compression === 'H264') {
          if (videoData && videoData.length) {
            codec = parseCodecFrom.h264(videoData).codecs.video;
          } else {
            codec = 'avc1';
          }
        } else if (handler === 'HEVC' || compression === 'HEVC') {
          if (videoData && videoData.length) {
            codec = parseCodecFrom.h265(videoData).codecs.video;
          } else {
            codec = 'hev1';
          }
        } else if (handler === 'FMP4' || compression === 'FMP4') {
          if (movi.length) {
            codec = 'mp4v.20.' + movi[12].toString();
          } else {
            codec = 'mp4v.20';
          }
        } else if (handler === 'VP80' || compression === 'VP80') {
          codec = 'vp8';
        } else if (handler === 'VP90' || compression === 'VP90') {
          codec = 'vp9';
        } else if (handler === 'AV01' || compression === 'AV01') {
          codec = 'av01';
        } else if (handler === 'theo' || compression === 'theora') {
          codec = 'theora';
        } else {
          // unknown handler: sniff the raw video data directly
          // NOTE(review): the h264/h265 calls below pass `movi` rather than
          // `videoData` — looks inconsistent with the branches above; confirm
          // against upstream before changing.
          if (videoData && videoData.length) {
            var result = (0, _containers.detectContainerForBytes)(videoData);
            if (result === 'h264') {
              codec = parseCodecFrom.h264(movi).codecs.video;
            }
            if (result === 'h265') {
              codec = parseCodecFrom.h265(movi).codecs.video;
            }
          }
          if (!codec) {
            codec = handler || compression;
          }
        }
        codecType = 'video';
      } else if (type === 'auds') {
        codecType = 'audio'; // look for 00wb (audio data fourcc)
        // const audioData = findFourCC(movi, ['01wb']);
        var wFormatTag = Array.prototype.slice.call(strf, 0, 2).reverse();
        codecs.audio = wFormatTagCodec(wFormatTag);
      } else {
        return;
      }
      if (codec) {
        codecs[codecType] = codec;
      }
    });
    return {
      codecs: codecs,
      mimetype: formatMimetype('avi', codecs)
    };
  },
  ts: function ts(bytes) {
    var result = (0, _m2tsHelpers.parseTs)(bytes);
    var codecs = {};
    Object.keys(result.streams).forEach(function (esPid) {
      var stream = result.streams[esPid];
      // refine generic avc1/hev1 ids using the first PES packet when available
      if (stream.codec === 'avc1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h264(stream.packets[0]).codecs.video;
      } else if (stream.codec === 'hev1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h265(stream.packets[0]).codecs.video;
      }
      codecs[stream.type] = stream.codec;
    });
    return {
      codecs: codecs,
      mimetype: formatMimetype('mp2t', codecs)
    };
  },
  webm: function webm(bytes) {
    // mkv and webm both use ebml to store code info
    var retval = parseCodecFrom.mkv(bytes);
    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('x-matroska', 'webm');
    }
    return retval;
  },
  mkv: function mkv(bytes) {
    var codecs = {};
    var tracks = (0, _ebmlHelpers.parseTracks)(bytes);
    // first audio track and first video track win
    for (var i = 0; i < tracks.length; i++) {
      var track = tracks[i];
      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }
      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }
    return {
      codecs: codecs,
      mimetype: formatMimetype('x-matroska', codecs)
    };
  },
  aac: function aac(bytes) {
    return {
      codecs: {
        audio: 'aac'
      },
      mimetype: 'audio/aac'
    };
  },
  ac3: function ac3(bytes) {
    // past id3 and syncword
    var offset = (0, _id3Helpers.getId3Offset)(bytes) + 2; // default to ac-3
    var codec = 'ac-3';
    if ((0, _byteHelpers.bytesMatch)(bytes, [0xB8, 0xE0], {
      offset: offset
    })) {
      codec = 'ac-3'; // 0x01, 0x7F
    } else if ((0, _byteHelpers.bytesMatch)(bytes, [0x01, 0x7f], {
      offset: offset
    })) {
      codec = 'ec-3';
    }
    return {
      codecs: {
        audio: codec
      },
      mimetype: 'audio/vnd.dolby.dd-raw'
    };
  },
  mp3: function mp3(bytes) {
    return {
      codecs: {
        audio: 'mp3'
      },
      mimetype: 'audio/mpeg'
    };
  },
  flac: function flac(bytes) {
    return {
      codecs: {
        audio: 'flac'
      },
      mimetype: 'audio/flac'
    };
  },
  'h264': function h264(bytes) {
    // find seq_parameter_set_rbsp to get encoding settings for codec
    var nal = (0, _nalHelpers.findH264Nal)(bytes, 7, 3);
    var retval = {
      codecs: {
        video: 'avc1'
      },
      mimetype: 'video/h264'
    };
    if (nal.length) {
      retval.codecs.video += "." + (0, _codecHelpers.getAvcCodec)(nal);
    }
    return retval;
  },
  'h265': function h265(bytes) {
    var retval = {
      codecs: {
        video: 'hev1'
      },
      mimetype: 'video/h265'
    }; // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    // to get encoding settings for codec
    var nal = (0, _nalHelpers.findH265Nal)(bytes, [32, 33], 3);
    if (nal.length) {
      var type = nal[0] >> 1 & 0x3F; // profile_tier_level starts at byte 5 for video_parameter_set_rbsp
      // byte 2 for seq_parameter_set_rbsp
      retval.codecs.video += "." + (0, _codecHelpers.getHvcCodec)(nal.subarray(type === 32 ? 5 : 2));
    }
    return retval;
  }
};
// Detect the container for `bytes` and, when a sniffer exists for it,
// fill in codecs and mimetype. Returns { codecs, container, mimetype }.
// Fix: removed the redundant `parseCodecFn ? ... : {}` ternary that was
// nested inside the `if (parseCodecFn)` guard.
var parseFormatForBytes = function parseFormatForBytes(bytes) {
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var result = {
    codecs: {},
    container: (0, _containers.detectContainerForBytes)(bytes),
    mimetype: ''
  };
  var parseCodecFn = parseCodecFrom[result.container];
  if (parseCodecFn) {
    var parsed = parseCodecFn(bytes);
    result.codecs = parsed.codecs || {};
    result.mimetype = parsed.mimetype || '';
  }
  return result;
};
exports.parseFormatForBytes = parseFormatForBytes;

View file

@ -1,51 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getId3Offset = exports.getId3Size = void 0;
var _byteHelpers = require("./byte-helpers.js");
// "ID3" magic bytes marking an ID3v2 tag header.
var ID3 = (0, _byteHelpers.toUint8)([0x49, 0x44, 0x33]);

// Total byte size of the ID3v2 tag starting at `offset` (default 0),
// including the 10-byte header and, when the footer flag is set, the
// 10-byte footer. The size field is a 28-bit synchsafe integer
// (four bytes, 7 significant bits each).
var getId3Size = function getId3Size(bytes, offset) {
  if (offset === void 0) {
    offset = 0;
  }
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var flags = bytes[offset + 5];
  var size = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
  var hasFooter = (flags & 16) >> 4;
  return hasFooter ? size + 20 : size + 10;
};
exports.getId3Size = getId3Size;
// Offset of the first non-ID3 byte in `bytes`, starting the search at
// `offset` (default 0). Skips consecutive ID3 tags, since some broken
// files stack several even though they should not.
var getId3Offset = function getId3Offset(bytes, offset) {
  if (offset === void 0) {
    offset = 0;
  }
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var tooShort = bytes.length - offset < 10;
  if (tooShort || !(0, _byteHelpers.bytesMatch)(bytes, ID3, {
    offset: offset
  })) {
    return offset;
  }
  return getId3Offset(bytes, offset + getId3Size(bytes, offset));
};
exports.getId3Offset = getId3Offset;

View file

@ -1,36 +0,0 @@
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var codecs = _interopRequireWildcard(require("./codecs"));
var byteHelpers = _interopRequireWildcard(require("./byte-helpers.js"));
var containers = _interopRequireWildcard(require("./containers.js"));
var _decodeB64ToUint8Array = _interopRequireDefault(require("./decode-b64-to-uint8-array.js"));
var mediaGroups = _interopRequireWildcard(require("./media-groups.js"));
var _resolveUrl = _interopRequireDefault(require("./resolve-url.js"));
var _stream = _interopRequireDefault(require("./stream.js"));
// Aggregate default export bundling the package's sub-modules under one
// object for consumers that import the package root.
var _default = {
  codecs: codecs,
  byteHelpers: byteHelpers,
  containers: containers,
  decodeB64ToUint8Array: _decodeB64ToUint8Array.default,
  mediaGroups: mediaGroups,
  resolveUrl: _resolveUrl.default,
  Stream: _stream.default
};
exports.default = _default;
module.exports = exports.default;

View file

@ -1,118 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.parseTs = void 0;
var _byteHelpers = require("./byte-helpers.js");

// every MPEG-TS packet starts with this sync byte
var SYNC_BYTE = 0x47; // use of maxPes is deprecated as we should always look at
// all pes packets to prevent being caught off guard by changes
// in that stream that happen after the pes specified

/**
 * Walk an MPEG2-TS byte stream, 188-byte packet by packet, and build a
 * summary of its program map table (PMT): the pmt pid plus a map of
 * elementary stream pid -> { type, codec, typeNumber, esInfo, packets }.
 *
 * @param {TypedArray} bytes raw transport stream bytes
 * @param {number} [maxPes=Infinity] deprecated limit on collected pes packets
 * @return {Object} pmt summary; pmt.streams is always at least {}
 */
var parseTs = function parseTs(bytes, maxPes) {
  if (maxPes === void 0) {
    maxPes = Infinity;
  }
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var startIndex = 0;
  var endIndex = 188;
  var pmt = {};
  var pesCount = 0;
  while (endIndex < bytes.byteLength && pesCount < maxPes) {
    // resync one byte at a time until both packet boundaries land on 0x47
    if (bytes[startIndex] !== SYNC_BYTE && bytes[endIndex] !== SYNC_BYTE) {
      endIndex += 1;
      startIndex += 1;
      continue;
    }
    var packet = bytes.subarray(startIndex, endIndex);
    var pid = (packet[1] & 0x1f) << 8 | packet[2];
    var hasPusi = !!(packet[1] & 0x40);
    var hasAdaptationHeader = (packet[3] & 0x30) >>> 4 > 0x01;
    var payloadOffset = 4 + (hasAdaptationHeader ? packet[4] + 1 : 0);
    if (hasPusi) {
      payloadOffset += packet[payloadOffset] + 1;
    }
    if (pid === 0 && !pmt.pid) {
      // program association table: remember which pid carries the pmt
      pmt.pid = (packet[payloadOffset + 10] & 0x1f) << 8 | packet[payloadOffset + 11];
    } else if (pmt.pid && pid === pmt.pid) {
      var isNotForward = packet[payloadOffset + 5] & 0x01; // ignore forward pmt declarations
      if (!isNotForward) {
        // BUGFIX: advance to the next packet before continuing; a bare
        // `continue` here skipped the index increments at the bottom of
        // the loop and spun forever on the first forward pmt declaration.
        startIndex += 188;
        endIndex += 188;
        continue;
      }
      pmt.streams = pmt.streams || {};
      var sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
      var tableEnd = 3 + sectionLength - 4;
      var programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
      var offset = 12 + programInfoLength;
      while (offset < tableEnd) {
        // add an entry that maps the elementary_pid to the stream_type
        var i = payloadOffset + offset;
        var type = packet[i];
        var esPid = (packet[i + 1] & 0x1F) << 8 | packet[i + 2];
        var esLength = (packet[i + 3] & 0x0f) << 8 | packet[i + 4];
        var esInfo = packet.subarray(i + 5, i + 5 + esLength);
        var stream = pmt.streams[esPid] = {
          esInfo: esInfo,
          typeNumber: type,
          packets: [],
          type: '',
          codec: ''
        };

        // map the registered stream_type to a type/codec pair; opus is
        // carried as private data (0x06) with an "Opus" tag in esInfo
        if (type === 0x06 && (0, _byteHelpers.bytesMatch)(esInfo, [0x4F, 0x70, 0x75, 0x73], {
          offset: 2
        })) {
          stream.type = 'audio';
          stream.codec = 'opus';
        } else if (type === 0x1B || type === 0x20) {
          stream.type = 'video';
          stream.codec = 'avc1';
        } else if (type === 0x24) {
          stream.type = 'video';
          stream.codec = 'hev1';
        } else if (type === 0x10) {
          stream.type = 'video';
          stream.codec = 'mp4v.20';
        } else if (type === 0x0F) {
          stream.type = 'audio';
          stream.codec = 'aac';
        } else if (type === 0x81) {
          stream.type = 'audio';
          stream.codec = 'ac-3';
        } else if (type === 0x87) {
          stream.type = 'audio';
          stream.codec = 'ec-3';
        } else if (type === 0x03 || type === 0x04) {
          stream.type = 'audio';
          stream.codec = 'mp3';
        }
        offset += esLength + 5;
      }
    } else if (pmt.pid && pmt.streams) {
      // BUGFIX: only collect pes payloads for pids declared in the pmt;
      // this previously threw a TypeError on undeclared pids (including
      // repeated pat packets on pid 0).
      if (pmt.streams[pid]) {
        pmt.streams[pid].packets.push(packet.subarray(payloadOffset));
        pesCount++;
      }
    }
    startIndex += 188;
    endIndex += 188;
  }
  if (!pmt.streams) {
    pmt.streams = {};
  }
  return pmt;
};
exports.parseTs = parseTs;

View file

@ -1,30 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.forEachMediaGroup = void 0;
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {string[]} groups
 *        The media groups to call the callback for
 * @param {Function} callback
 *        Callback to call for each media group
 */
var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
  groups.forEach(function (mediaType) {
    var groupsForType = master.mediaGroups[mediaType];

    // walk every group of this media type, then every label inside that
    // group, invoking the callback once per label
    for (var groupKey in groupsForType) {
      var labels = groupsForType[groupKey];
      for (var labelKey in labels) {
        callback(labels[labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};
exports.forEachMediaGroup = forEachMediaGroup;

View file

@ -1,44 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.simpleTypeFromSourceType = void 0;
// media types that denote an HLS manifest, e.g. application/x-mpegurl,
// audio/mpegurl or video/vnd.apple.mpegurl (case-insensitive)
var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;

// media type that denotes a DASH manifest
var DASH_REGEX = /^application\/dash\+xml/i;

/**
 * Returns a string that describes the type of source based on a video source object's
 * media type.
 *
 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
 *
 * @param {string} type
 *        Video source object media type
 * @return {('hls'|'dash'|'vhs-json'|null)}
 *         VHS source type string
 */
var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
  var matchers = [[MPEGURL_REGEX, 'hls'], [DASH_REGEX, 'dash']];
  for (var i = 0; i < matchers.length; i++) {
    if (matchers[i][0].test(type)) {
      return matchers[i][1];
    }
  }

  // Denotes the special case of a manifest object passed to http-streaming
  // instead of a source URL. "vnd" stands for vendor, video.js for the
  // organization, VHS for this project, and the +json suffix identifies
  // the structure of the media type.
  // See https://en.wikipedia.org/wiki/Media_type for details.
  if (type === 'application/vnd.videojs.vhs+json') {
    return 'vhs-json';
  }
  return null;
};
exports.simpleTypeFromSourceType = simpleTypeFromSourceType;

View file

@ -1,581 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.parseMediaInfo = exports.parseTracks = exports.addSampleDescription = exports.buildFrameTable = exports.findNamedBox = exports.findBox = exports.parseDescriptors = void 0;
var _byteHelpers = require("./byte-helpers.js");
var _codecHelpers = require("./codec-helpers.js");
var _opusHelpers = require("./opus-helpers.js");
// Coerce a single path segment into the form bytesMatch understands:
// strings become byte arrays, numbers and byte arrays pass through.
var normalizePath = function normalizePath(path) {
  return typeof path === 'string' ? (0, _byteHelpers.stringToBytes)(path) : path;
};

// Coerce a path or a list of paths into an array of normalized segments.
var normalizePaths = function normalizePaths(paths) {
  if (Array.isArray(paths)) {
    return paths.map(function (p) {
      return normalizePath(p);
    });
  }
  return [normalizePath(paths)];
};
// forward declaration: the table below is defined after parseDescriptors
// because the ES descriptor parser recurses back into parseDescriptors
var DESCRIPTORS;
/**
 * Parse a sequence of MPEG-4 (esds-style) descriptors. Each descriptor is
 * a tag byte followed by a variable-length size (high bit marks a
 * continuation byte) and `size` payload bytes, which are handed to the
 * matching parser from DESCRIPTORS. Unknown tags are skipped.
 *
 * @param {TypedArray} bytes descriptor bytes (e.g. an esds payload)
 * @return {Object[]} parsed descriptor objects
 */
var parseDescriptors = function parseDescriptors(bytes) {
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var results = [];
  var i = 0;
  while (bytes.length > i) {
    var tag = bytes[i];
    var size = 0;
    var headerSize = 0; // tag
    headerSize++;
    // NOTE(review): indexes relative to the buffer start rather than `i`,
    // which looks correct only for the first descriptor in `bytes` — confirm.
    var byte = bytes[headerSize]; // first byte
    headerSize++;
    while (byte & 0x80) {
      // NOTE(review): `size =` overwrites rather than accumulating, so only
      // the final continuation byte contributes high bits; looks correct
      // only for sizes encoded in at most two bytes — confirm.
      size = (byte & 0x7F) << 7;
      byte = bytes[headerSize];
      headerSize++;
    }
    size += byte & 0x7F;
    // dispatch the payload to the parser registered for this tag, if any
    for (var z = 0; z < DESCRIPTORS.length; z++) {
      var _DESCRIPTORS$z = DESCRIPTORS[z],
        id = _DESCRIPTORS$z.id,
        parser = _DESCRIPTORS$z.parser;
      if (tag === id) {
        results.push(parser(bytes.subarray(headerSize, headerSize + size)));
        break;
      }
    }
    i += size + headerSize;
  }
  return results;
};
exports.parseDescriptors = parseDescriptors;
// tag -> parser table for the descriptor types this code understands
DESCRIPTORS = [{
  id: 0x03,
  parser: function parser(bytes) {
    // ES_Descriptor: id, flags, then optional fields gated by flag bits
    var desc = {
      tag: 0x03,
      id: bytes[0] << 8 | bytes[1],
      flags: bytes[2],
      size: 3,
      dependsOnEsId: 0,
      ocrEsId: 0,
      descriptors: [],
      url: ''
    }; // depends on es id
    if (desc.flags & 0x80) {
      desc.dependsOnEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    } // url
    if (desc.flags & 0x40) {
      var len = bytes[desc.size];
      desc.url = (0, _byteHelpers.bytesToString)(bytes.subarray(desc.size + 1, desc.size + 1 + len));
      desc.size += len;
    } // ocr es id
    if (desc.flags & 0x20) {
      desc.ocrEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    }
    // everything after the fixed fields is nested descriptors
    desc.descriptors = parseDescriptors(bytes.subarray(desc.size)) || [];
    return desc;
  }
}, {
  id: 0x04,
  parser: function parser(bytes) {
    // DecoderConfigDescriptor
    var desc = {
      tag: 0x04,
      oti: bytes[0],
      streamType: bytes[1],
      bufferSize: bytes[2] << 16 | bytes[3] << 8 | bytes[4],
      maxBitrate: bytes[5] << 24 | bytes[6] << 16 | bytes[7] << 8 | bytes[8],
      avgBitrate: bytes[9] << 24 | bytes[10] << 16 | bytes[11] << 8 | bytes[12],
      descriptors: parseDescriptors(bytes.subarray(13))
    };
    return desc;
  }
}, {
  id: 0x05,
  parser: function parser(bytes) {
    // DecoderSpecificInfo
    return {
      tag: 0x05,
      bytes: bytes
    };
  }
}, {
  id: 0x06,
  parser: function parser(bytes) {
    // SLConfigDescriptor
    return {
      tag: 0x06,
      bytes: bytes
    };
  }
}];
/**
 * find any number of boxes by name given a path to it in an iso bmff
 * such as mp4.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {Uint8Array[]|string[]|string|Uint8Array} paths
 *        An array of paths or a single path representing the name
 *        of boxes to search through in bytes. Paths may be
 *        uint8 (character codes) or strings.
 *
 * @param {boolean} [complete=false]
 *        Should we search only for complete boxes on the final path.
 *        This is very useful when you do not want to get back partial boxes
 *        in the case of streaming files.
 *
 * @return {Uint8Array[]}
 *         An array of the end paths that we found.
 */
var findBox = function findBox(bytes, paths, complete) {
  if (complete === void 0) {
    complete = false;
  }
  paths = normalizePaths(paths);
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var results = [];
  if (!paths.length) {
    // short-circuit the search for empty paths
    return results;
  }
  var i = 0;
  while (i < bytes.length) {
    // box layout: 32-bit big-endian size, 4-byte type, then payload
    var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
    var type = bytes.subarray(i + 4, i + 8); // invalid box format.
    if (size === 0) {
      break;
    }
    var end = i + size;
    if (end > bytes.length) {
      // this box is bigger than the number of bytes we have
      // and complete is set, we cannot find any more boxes.
      if (complete) {
        break;
      }
      // otherwise accept the partial box that runs to the end of bytes
      end = bytes.length;
    }
    var data = bytes.subarray(i + 8, end);
    if ((0, _byteHelpers.bytesMatch)(type, paths[0])) {
      if (paths.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next box along the path
        results.push.apply(results, findBox(data, paths.slice(1), complete));
      }
    }
    i = end;
  } // we've finished searching all of bytes
  return results;
};
/**
 * Search for a single matching box by name in an iso bmff format like
 * mp4. This function is useful for finding codec boxes which
 * can be placed arbitrarily in sample descriptions depending
 * on the version of the file or file type.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {string|Uint8Array} name
 *        The name of the box to find.
 *
 * @return {Uint8Array}
 *         a subarray of bytes representing the named box we found
 *         (empty when no match).
 */
exports.findBox = findBox;
var findNamedBox = function findNamedBox(bytes, name) {
  name = normalizePath(name);
  if (!name.length) {
    // short-circuit the search for empty paths
    return bytes.subarray(bytes.length);
  }
  var i = 0;
  while (i < bytes.length) {
    if ((0, _byteHelpers.bytesMatch)(bytes.subarray(i, i + name.length), name)) {
      // the 32-bit box size immediately precedes the 4-byte type that was
      // just matched, so it is read from the four bytes before `i`
      var size = (bytes[i - 4] << 24 | bytes[i - 3] << 16 | bytes[i - 2] << 8 | bytes[i - 1]) >>> 0;
      // NOTE(review): sizes 0 and 1 are treated as "extends to end of
      // buffer"; in iso-bmff a size of 1 normally means a 64-bit largesize
      // follows — confirm this shortcut is intended.
      var end = size > 1 ? i + size : bytes.byteLength;
      return bytes.subarray(i + 4, end);
    }
    i++;
  } // we've finished searching all of bytes
  // no match: return an empty view over bytes
  return bytes.subarray(bytes.length);
};
exports.findNamedBox = findNamedBox;
// Parse a "full box" style sample table: a 4-byte version/flags word, a
// 4-byte big-endian entry count, then `entryCount` fixed-size entries,
// each decoded by `parseEntry` (default: big-endian number).
var parseSamples = function parseSamples(data, entrySize, parseEntry) {
  if (entrySize === void 0) {
    entrySize = 4;
  }
  if (parseEntry === void 0) {
    parseEntry = function parseEntry(d) {
      return (0, _byteHelpers.bytesToNumber)(d);
    };
  }
  var entries = [];
  if (!data || !data.length) {
    return entries;
  }
  var remaining = (0, _byteHelpers.bytesToNumber)(data.subarray(4, 8));
  var offset = 8;
  while (remaining) {
    entries.push(parseEntry(data.subarray(offset, offset + entrySize)));
    offset += entrySize;
    remaining--;
  }
  return entries;
};
/**
 * Build a flat frame table from an mp4 sample table (stbl) box by
 * combining stss (sync samples), stco (chunk offsets), stts (timing),
 * stsc (sample-to-chunk) and stsz (sizes) into one record per sample:
 * { keyframe, start, end, timestamp, duration }.
 *
 * @param {Uint8Array} stbl payload of the stbl box
 * @param {number} timescale media timescale units-per-second for the track
 * @return {Object[]} one entry per sample/frame
 */
var buildFrameTable = function buildFrameTable(stbl, timescale) {
  var keySamples = parseSamples(findBox(stbl, ['stss'])[0]);
  var chunkOffsets = parseSamples(findBox(stbl, ['stco'])[0]);
  var timeToSamples = parseSamples(findBox(stbl, ['stts'])[0], 8, function (entry) {
    return {
      sampleCount: (0, _byteHelpers.bytesToNumber)(entry.subarray(0, 4)),
      sampleDelta: (0, _byteHelpers.bytesToNumber)(entry.subarray(4, 8))
    };
  });
  var samplesToChunks = parseSamples(findBox(stbl, ['stsc'])[0], 12, function (entry) {
    return {
      firstChunk: (0, _byteHelpers.bytesToNumber)(entry.subarray(0, 4)),
      samplesPerChunk: (0, _byteHelpers.bytesToNumber)(entry.subarray(4, 8)),
      sampleDescriptionIndex: (0, _byteHelpers.bytesToNumber)(entry.subarray(8, 12))
    };
  });
  var stsz = findBox(stbl, ['stsz'])[0]; // stsz starts with a 4 byte sampleSize which we don't need
  var sampleSizes = parseSamples(stsz && stsz.length && stsz.subarray(4) || null);
  var frames = [];
  for (var chunkIndex = 0; chunkIndex < chunkOffsets.length; chunkIndex++) {
    var samplesInChunk = void 0;
    // find the stsc entry covering this chunk: entries apply from their
    // firstChunk until the next entry's firstChunk (chunk ids are 1-based)
    for (var i = 0; i < samplesToChunks.length; i++) {
      var sampleToChunk = samplesToChunks[i];
      var isThisOne = chunkIndex + 1 >= sampleToChunk.firstChunk && (i + 1 >= samplesToChunks.length || chunkIndex + 1 < samplesToChunks[i + 1].firstChunk);
      if (isThisOne) {
        samplesInChunk = sampleToChunk.samplesPerChunk;
        break;
      }
    }
    var chunkOffset = chunkOffsets[chunkIndex];
    for (var _i = 0; _i < samplesInChunk; _i++) {
      var frameEnd = sampleSizes[frames.length]; // if we don't have key samples every frame is a keyframe
      var keyframe = !keySamples.length;
      // stss lists keyframes by 1-based sample number
      if (keySamples.length && keySamples.indexOf(frames.length + 1) !== -1) {
        keyframe = true;
      }
      var frame = {
        keyframe: keyframe,
        start: chunkOffset,
        end: chunkOffset + frameEnd
      };
      // locate the stts bucket this frame falls into to derive its
      // timestamp (milliseconds) and duration (timescale units)
      for (var k = 0; k < timeToSamples.length; k++) {
        var _timeToSamples$k = timeToSamples[k],
          sampleCount = _timeToSamples$k.sampleCount,
          sampleDelta = _timeToSamples$k.sampleDelta;
        if (frames.length <= sampleCount) {
          // ms to ns
          var lastTimestamp = frames.length ? frames[frames.length - 1].timestamp : 0;
          frame.timestamp = lastTimestamp + sampleDelta / timescale * 1000;
          frame.duration = sampleDelta;
          break;
        }
      }
      frames.push(frame);
      chunkOffset += frameEnd;
    }
  }
  return frames;
};
exports.buildFrameTable = buildFrameTable;
/**
 * Decode one stsd sample description entry and attach codec information
 * (and basic width/height or channel/sample-rate info) to `track`.
 * Mutates `track.codec` and `track.info` in place.
 *
 * NOTE(review): the width/height and audio fields are read at fixed byte
 * offsets, which assumes the version-0 sample entry layout — confirm.
 *
 * @param {Object} track track object being populated (must have .type)
 * @param {Uint8Array} bytes one sample description entry from stsd
 */
var addSampleDescription = function addSampleDescription(track, bytes) {
  // first four bytes of the entry are the codec fourcc
  var codec = (0, _byteHelpers.bytesToString)(bytes.subarray(0, 4));
  if (track.type === 'video') {
    track.info = track.info || {};
    track.info.width = bytes[28] << 8 | bytes[29];
    track.info.height = bytes[30] << 8 | bytes[31];
  } else if (track.type === 'audio') {
    track.info = track.info || {};
    track.info.channels = bytes[20] << 8 | bytes[21];
    track.info.bitDepth = bytes[22] << 8 | bytes[23];
    track.info.sampleRate = bytes[28] << 8 | bytes[29];
  }
  if (codec === 'avc1') {
    var avcC = findNamedBox(bytes, 'avcC'); // AVCDecoderConfigurationRecord
    // append the profile/level suffix, e.g. avc1.64001f
    codec += "." + (0, _codecHelpers.getAvcCodec)(avcC);
    track.info.avcC = avcC; // TODO: do we need to parse all this?
    /* {
      configurationVersion: avcC[0],
      profile: avcC[1],
      profileCompatibility: avcC[2],
      level: avcC[3],
      lengthSizeMinusOne: avcC[4] & 0x3
    };
     let spsNalUnitCount = avcC[5] & 0x1F;
    const spsNalUnits = track.info.avc.spsNalUnits = [];
     // past spsNalUnitCount
    let offset = 6;
     while (spsNalUnitCount--) {
      const nalLen = avcC[offset] << 8 | avcC[offset + 1];
       spsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
       offset += nalLen + 2;
    }
     let ppsNalUnitCount = avcC[offset];
     const ppsNalUnits = track.info.avc.ppsNalUnits = [];
     // past ppsNalUnitCount
    offset += 1;
     while (ppsNalUnitCount--) {
      const nalLen = avcC[offset] << 8 | avcC[offset + 1];
       ppsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
       offset += nalLen + 2;
    }*/
    // HEVCDecoderConfigurationRecord
  } else if (codec === 'hvc1' || codec === 'hev1') {
    codec += "." + (0, _codecHelpers.getHvcCodec)(findNamedBox(bytes, 'hvcC'));
  } else if (codec === 'mp4a' || codec === 'mp4v') {
    // the real codec is described by the esds DecoderConfigDescriptor
    var esds = findNamedBox(bytes, 'esds');
    var esDescriptor = parseDescriptors(esds.subarray(4))[0];
    var decoderConfig = esDescriptor && esDescriptor.descriptors.filter(function (_ref) {
      var tag = _ref.tag;
      return tag === 0x04;
    })[0];
    if (decoderConfig) {
      // most codecs do not have a further '.'
      // such as 0xa5 for ac-3 and 0xa6 for e-ac-3
      codec += '.' + (0, _byteHelpers.toHexString)(decoderConfig.oti);
      if (decoderConfig.oti === 0x40) {
        codec += '.' + (decoderConfig.descriptors[0].bytes[0] >> 3).toString();
      } else if (decoderConfig.oti === 0x20) {
        codec += '.' + decoderConfig.descriptors[0].bytes[4].toString();
      } else if (decoderConfig.oti === 0xdd) {
        codec = 'vorbis';
      }
    } else if (track.type === 'audio') {
      // no decoder config: fall back to a generic aac / mpeg4 suffix
      codec += '.40.2';
    } else {
      codec += '.20.9';
    }
  } else if (codec === 'av01') {
    // AV1DecoderConfigurationRecord
    codec += "." + (0, _codecHelpers.getAv1Codec)(findNamedBox(bytes, 'av1C'));
  } else if (codec === 'vp09') {
    // VPCodecConfigurationRecord
    var vpcC = findNamedBox(bytes, 'vpcC'); // https://www.webmproject.org/vp9/mp4/
    var profile = vpcC[0];
    var level = vpcC[1];
    var bitDepth = vpcC[2] >> 4;
    var chromaSubsampling = (vpcC[2] & 0x0F) >> 1;
    var videoFullRangeFlag = (vpcC[2] & 0x0F) >> 3;
    var colourPrimaries = vpcC[3];
    var transferCharacteristics = vpcC[4];
    var matrixCoefficients = vpcC[5];
    // vp09 codec strings are dot-joined, zero-padded two-digit fields
    codec += "." + (0, _byteHelpers.padStart)(profile, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(level, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(bitDepth, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(chromaSubsampling, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(colourPrimaries, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(transferCharacteristics, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(matrixCoefficients, 2, '0');
    codec += "." + (0, _byteHelpers.padStart)(videoFullRangeFlag, 2, '0');
  } else if (codec === 'theo') {
    codec = 'theora';
  } else if (codec === 'spex') {
    codec = 'speex';
  } else if (codec === '.mp3') {
    codec = 'mp4a.40.34';
  } else if (codec === 'msVo') {
    codec = 'vorbis';
  } else if (codec === 'Opus') {
    codec = 'opus';
    var dOps = findNamedBox(bytes, 'dOps');
    track.info.opus = (0, _opusHelpers.parseOpusHead)(dOps); // TODO: should this go into the webm code??
    // Firefox requires a codecDelay for opus playback
    // see https://bugzilla.mozilla.org/show_bug.cgi?id=1276238
    track.info.codecDelay = 6500000;
  } else {
    // unrecognized fourcc: normalize to lowercase and use as-is
    codec = codec.toLowerCase();
  }
  /* eslint-enable */
  // flac, ac-3, ec-3, opus
  track.codec = codec;
};
exports.addSampleDescription = addSampleDescription;
/**
 * Parse every trak box inside moov into a track summary object with
 * { bytes, type, number, timescale, codec, info, frameTable? }.
 *
 * @param {TypedArray} bytes full iso-bmff bytes containing a moov box
 * @param {boolean} [frameTable=true] whether to also build the per-frame
 *        table (can be expensive for long files)
 * @return {Object[]} one entry per trak
 */
var parseTracks = function parseTracks(bytes, frameTable) {
  if (frameTable === void 0) {
    frameTable = true;
  }
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var traks = findBox(bytes, ['moov', 'trak'], true);
  var tracks = [];
  traks.forEach(function (trak) {
    var track = {
      bytes: trak
    };
    var mdia = findBox(trak, ['mdia'])[0];
    var hdlr = findBox(mdia, ['hdlr'])[0];
    // hdlr handler_type at bytes 8-12 identifies the track kind
    var trakType = (0, _byteHelpers.bytesToString)(hdlr.subarray(8, 12));
    if (trakType === 'soun') {
      track.type = 'audio';
    } else if (trakType === 'vide') {
      track.type = 'video';
    } else {
      track.type = trakType;
    }
    var tkhd = findBox(trak, ['tkhd'])[0];
    if (tkhd) {
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      var tkhdVersion = view.getUint8(0);
      // track_ID sits at a different offset for tkhd version 1 (64-bit times)
      track.number = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }
    var mdhd = findBox(mdia, ['mdhd'])[0];
    if (mdhd) {
      // mdhd is a FullBox, meaning it will have its own version as the first byte
      var version = mdhd[0];
      var index = version === 0 ? 12 : 20;
      track.timescale = (mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]) >>> 0;
    }
    var stbl = findBox(mdia, ['minf', 'stbl'])[0];
    var stsd = findBox(stbl, ['stsd'])[0];
    var descriptionCount = (0, _byteHelpers.bytesToNumber)(stsd.subarray(4, 8));
    var offset = 8; // add codec and codec info
    while (descriptionCount--) {
      var len = (0, _byteHelpers.bytesToNumber)(stsd.subarray(offset, offset + 4));
      // NOTE(review): stsd entry sizes normally include their own 4-byte
      // size field, but this slices `len` bytes starting after it and then
      // advances by 4 + len — confirm the intended entry framing.
      var sampleDescriptor = stsd.subarray(offset + 4, offset + 4 + len);
      addSampleDescription(track, sampleDescriptor);
      offset += 4 + len;
    }
    if (frameTable) {
      track.frameTable = buildFrameTable(stbl, track.timescale);
    } // codec has no sub parameters
    tracks.push(track);
  });
  return tracks;
};
exports.parseTracks = parseTracks;
// Extract overall movie info (timestampScale, duration, raw bytes) from
// the mvhd box inside moov. Returns undefined when no complete mvhd box
// is present in the given bytes.
var parseMediaInfo = function parseMediaInfo(bytes) {
  var mvhd = findBox(bytes, ['moov', 'mvhd'], true)[0];
  if (!mvhd || !mvhd.length) {
    return;
  }

  // mvhd version 1 uses 64-bit durations and wider fields, so the
  // timescale and duration live at different offsets than in version 0
  var isVersion1 = mvhd[0] === 1;
  var scaleStart = isVersion1 ? 20 : 12;
  var durationStart = isVersion1 ? 24 : 16;
  var durationEnd = isVersion1 ? 32 : 20;
  return {
    timestampScale: (0, _byteHelpers.bytesToNumber)(mvhd.subarray(scaleStart, scaleStart + 4)),
    duration: (0, _byteHelpers.bytesToNumber)(mvhd.subarray(durationStart, durationEnd)),
    bytes: mvhd
  };
};
exports.parseMediaInfo = parseMediaInfo;

View file

@ -1,135 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.findH265Nal = exports.findH264Nal = exports.findNal = exports.discardEmulationPreventionBytes = exports.EMULATION_PREVENTION = exports.NAL_TYPE_TWO = exports.NAL_TYPE_ONE = void 0;
var _byteHelpers = require("./byte-helpers.js");
// 4-byte Annex B NAL start code (0x00000001)
var NAL_TYPE_ONE = (0, _byteHelpers.toUint8)([0x00, 0x00, 0x00, 0x01]);
exports.NAL_TYPE_ONE = NAL_TYPE_ONE;
// 3-byte Annex B NAL start code (0x000001)
var NAL_TYPE_TWO = (0, _byteHelpers.toUint8)([0x00, 0x00, 0x01]);
exports.NAL_TYPE_TWO = NAL_TYPE_TWO;
// the emulation prevention sequence 0x000003, searched for below
var EMULATION_PREVENTION = (0, _byteHelpers.toUint8)([0x00, 0x00, 0x03]);
/**
 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
 * Sequence Payload"
 *
 * @param bytes {Uint8Array} the bytes of a RBSP from a NAL
 * unit
 * @return {Uint8Array} the RBSP without any Emulation
 * Prevention Bytes; the original array is returned untouched
 * when none are present
 */
exports.EMULATION_PREVENTION = EMULATION_PREVENTION;
var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
  var positions = [];
  var i = 1; // Find all `Emulation Prevention Bytes`
  while (i < bytes.length - 2) {
    if ((0, _byteHelpers.bytesMatch)(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
      // record the index of the 0x03 byte itself so it can be dropped
      positions.push(i + 2);
      i++;
    }
    i++;
  } // If no Emulation Prevention Bytes were found just return the original
  // array
  if (positions.length === 0) {
    return bytes;
  } // Create a new array to hold the NAL unit data
  var newLength = bytes.length - positions.length;
  var newData = new Uint8Array(newLength);
  var sourceIndex = 0;
  // copy everything except the recorded 0x03 positions
  for (i = 0; i < newLength; sourceIndex++, i++) {
    if (sourceIndex === positions[0]) {
      // Skip this byte
      sourceIndex++; // Remove this position index
      positions.shift();
    }
    newData[i] = bytes[sourceIndex];
  }
  return newData;
};
exports.discardEmulationPreventionBytes = discardEmulationPreventionBytes;
/**
 * Scan annex-b formatted data for a NAL unit whose type is in `types`
 * and return its payload with emulation prevention bytes removed.
 * Returns an empty subarray when no complete matching nal is found.
 *
 * @param {TypedArray} bytes annex-b formatted bytes
 * @param {string} dataType 'h264' or 'h265'; controls nal type decoding
 * @param {number[]|number} types nal unit type number(s) to accept
 * @param {number} [nalLimit=Infinity] maximum nal units to scan
 * @return {Uint8Array} the matched nal payload, or an empty view
 */
var findNal = function findNal(bytes, dataType, types, nalLimit) {
  if (nalLimit === void 0) {
    nalLimit = Infinity;
  }
  bytes = (0, _byteHelpers.toUint8)(bytes);
  types = [].concat(types);
  var i = 0;
  var nalStart;
  var nalsFound = 0; // keep searching until:
  // we reach the end of bytes
  // we reach the maximum number of nals they want to seach
  // NOTE: that we disregard nalLimit when we have found the start
  // of the nal we want so that we can find the end of the nal we want.
  while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
    var nalOffset = void 0;
    if ((0, _byteHelpers.bytesMatch)(bytes.subarray(i), NAL_TYPE_ONE)) {
      nalOffset = 4;
    } else if ((0, _byteHelpers.bytesMatch)(bytes.subarray(i), NAL_TYPE_TWO)) {
      nalOffset = 3;
    } // we are unsynced,
    // find the next nal unit
    if (!nalOffset) {
      i++;
      continue;
    }
    nalsFound++;
    if (nalStart) {
      // the next start code marks the end of the nal we were collecting
      return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
    }
    var nalType = void 0;
    if (dataType === 'h264') {
      // h264: nal_unit_type is the low 5 bits of the first header byte
      nalType = bytes[i + nalOffset] & 0x1f;
    } else if (dataType === 'h265') {
      // h265: nal_unit_type occupies bits 6..1 of the first header byte
      nalType = bytes[i + nalOffset] >> 1 & 0x3f;
    }
    if (types.indexOf(nalType) !== -1) {
      nalStart = i + nalOffset;
    } // nal header is 1 length for h264, and 2 for h265
    i += nalOffset + (dataType === 'h264' ? 1 : 2);
  }
  // nothing matched (or the match ran past the end of bytes)
  return bytes.subarray(0, 0);
};
exports.findNal = findNal;
// convenience wrapper: search for h264 nal units of the given type(s)
var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
  return findNal(bytes, 'h264', type, nalLimit);
};
exports.findH264Nal = findH264Nal;
// convenience wrapper: search for h265 nal units of the given type(s)
var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
  return findNal(bytes, 'h265', type, nalLimit);
};
exports.findH265Nal = findH265Nal;

View file

@ -1,39 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getPages = void 0;
var _byteHelpers = require("./byte-helpers");
// "OggS" capture pattern that starts every Ogg page
var SYNC_WORD = (0, _byteHelpers.toUint8)([0x4f, 0x67, 0x67, 0x53]);

// Collect Ogg pages from a byte stream by scanning for the "OggS"
// capture pattern, then return the pages in the [start, end) range.
var getPages = function getPages(bytes, start, end) {
  if (end === void 0) {
    end = Infinity;
  }
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var pages = [];
  var offset = 0;
  while (offset < bytes.length && pages.length < end) {
    // resync: advance one byte at a time until we land on "OggS"
    if (!(0, _byteHelpers.bytesMatch)(bytes, SYNC_WORD, {
      offset: offset
    })) {
      offset++;
      continue;
    }

    // byte 27 of the page header holds the segment-table length; the
    // fixed header is 27 bytes plus that table
    var page = bytes.subarray(offset, offset + 28 + bytes[offset + 27]);
    pages.push(page);
    offset += page.length;
  }
  return pages.slice(start, end);
};
exports.getPages = getPages;

View file

@ -1,65 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.setOpusHead = exports.parseOpusHead = exports.OPUS_HEAD = void 0;
var OPUS_HEAD = new Uint8Array([// O, p, u, s
0x4f, 0x70, 0x75, 0x73, // H, e, a, d
0x48, 0x65, 0x61, 0x64]); // https://wiki.xiph.org/OggOpus
// https://vfrmaniac.fushizen.eu/contents/opus_in_isobmff.html
// https://opus-codec.org/docs/opusfile_api-0.7/structOpusHead.html
exports.OPUS_HEAD = OPUS_HEAD;
// Decode an OpusHead structure into a plain config object.
var parseOpusHead = function parseOpusHead(bytes) {
  var view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  var version = view.getUint8(0);

  // version 0 heads come from mp4 (dOps) and are big-endian; all other
  // versions come from Ogg and are little-endian
  var le = version !== 0;
  var config = {
    version: version,
    channels: view.getUint8(1),
    preSkip: view.getUint16(2, le),
    sampleRate: view.getUint32(4, le),
    outputGain: view.getUint16(8, le),
    channelMappingFamily: view.getUint8(10)
  };

  // mapping families above zero append a stream count, a coupled-stream
  // count and one mapping byte per channel
  if (config.channelMappingFamily > 0 && bytes.length > 10) {
    config.streamCount = view.getUint8(11);
    config.twoChannelStreamCount = view.getUint8(12);
    var mapping = [];
    for (var ch = 0; ch < config.channels; ch++) {
      mapping.push(view.getUint8(13 + ch));
    }
    config.channelMapping = mapping;
  }
  return config;
};
exports.parseOpusHead = parseOpusHead;
/**
 * Serialize an Opus config object (as produced by parseOpusHead) back
 * into OpusHead bytes. Version 0 writes big-endian (mp4 dOps layout);
 * any other version writes little-endian (Ogg layout).
 *
 * @param {Object} config parsed OpusHead fields
 * @return {Uint8Array} the serialized OpusHead payload
 */
var setOpusHead = function setOpusHead(config) {
  var size = config.channelMappingFamily <= 0 ? 11 : 12 + config.channels;
  var view = new DataView(new ArrayBuffer(size));
  var littleEndian = config.version !== 0;
  view.setUint8(0, config.version);
  view.setUint8(1, config.channels);
  view.setUint16(2, config.preSkip, littleEndian);
  view.setUint32(4, config.sampleRate, littleEndian);
  view.setUint16(8, config.outputGain, littleEndian);
  view.setUint8(10, config.channelMappingFamily);
  if (config.channelMappingFamily > 0) {
    view.setUint8(11, config.streamCount);

    // BUGFIX: this previously called `foreach`, which is not a function
    // and threw a TypeError whenever channelMappingFamily > 0.
    config.channelMapping.forEach(function (cm, i) {
      view.setUint8(12 + i, cm);
    });
    // NOTE(review): parseOpusHead reads twoChannelStreamCount at byte 12
    // and the mapping starting at byte 13, so this layout does not
    // round-trip exactly for family > 0 — confirm intended.
  }
  return new Uint8Array(view.buffer);
};
exports.setOpusHead = setOpusHead;

View file

@ -1,60 +0,0 @@
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;
var _urlToolkit = _interopRequireDefault(require("url-toolkit"));
var _window = _interopRequireDefault(require("global/window"));
// placeholder origin used when no window.location exists; it is sliced
// back off the result below when location-less resolution was used
var DEFAULT_LOCATION = 'http://example.com';
/**
 * Resolve a relative url against a base url. Uses the native URL
 * constructor when available and falls back to url-toolkit otherwise;
 * also handles environments without window.location (e.g. node).
 *
 * @param {string} baseUrl url to resolve against
 * @param {string} relativeUrl url to resolve
 * @return {string} the resolved url
 */
var resolveUrl = function resolveUrl(baseUrl, relativeUrl) {
  // return early if we don't need to resolve
  if (/^[a-z]+:/i.test(relativeUrl)) {
    return relativeUrl;
  } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
  if (/^data:/.test(baseUrl)) {
    baseUrl = _window.default.location && _window.default.location.href || '';
  } // IE11 supports URL but not the URL constructor
  // feature detect the behavior we want
  var nativeURL = typeof _window.default.URL === 'function';
  var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
  // and if baseUrl isn't an absolute url
  var removeLocation = !_window.default.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
  if (nativeURL) {
    baseUrl = new _window.default.URL(baseUrl, _window.default.location || DEFAULT_LOCATION);
  } else if (!/\/\//i.test(baseUrl)) {
    baseUrl = _urlToolkit.default.buildAbsoluteURL(_window.default.location && _window.default.location.href || '', baseUrl);
  }
  if (nativeURL) {
    var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
    // and if we're location-less, remove the location
    // otherwise, return the url unmodified
    if (removeLocation) {
      return newUrl.href.slice(DEFAULT_LOCATION.length);
    } else if (protocolLess) {
      return newUrl.href.slice(newUrl.protocol.length);
    }
    return newUrl.href;
  }
  // no native URL support: delegate entirely to url-toolkit
  return _urlToolkit.default.buildAbsoluteURL(baseUrl, relativeUrl);
};
var _default = resolveUrl;
exports.default = _default;
module.exports = exports.default;

View file

@ -1,84 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.findFourCC = void 0;
var _byteHelpers = require("./byte-helpers.js");
var CONSTANTS = {
LIST: (0, _byteHelpers.toUint8)([0x4c, 0x49, 0x53, 0x54]),
RIFF: (0, _byteHelpers.toUint8)([0x52, 0x49, 0x46, 0x46]),
WAVE: (0, _byteHelpers.toUint8)([0x57, 0x41, 0x56, 0x45])
};
// Coerce a single path segment into the form bytesMatch understands:
// strings become byte arrays, numbers and byte arrays pass through.
var normalizePath = function normalizePath(path) {
  return typeof path === 'string' ? (0, _byteHelpers.stringToBytes)(path) : path;
};

// Coerce a path or a list of paths into an array of normalized segments.
var normalizePaths = function normalizePaths(paths) {
  if (Array.isArray(paths)) {
    return paths.map(function (p) {
      return normalizePath(p);
    });
  }
  return [normalizePath(paths)];
};
/**
 * Find chunks by path in a RIFF container, analogous to findBox for
 * iso-bmff. RIFF chunk sizes are little-endian, and LIST/RIFF/WAVE
 * wrapper chunks carry an extra 4-byte form type that is skipped over.
 *
 * @param {TypedArray} bytes riff bytes to search
 * @param {string[]|string} paths chunk name path(s) to walk
 * @return {Uint8Array[]} payloads of the matched chunks
 */
var findFourCC = function findFourCC(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = (0, _byteHelpers.toUint8)(bytes);
  var results = [];
  if (!paths.length) {
    // short-circuit the search for empty paths
    return results;
  }
  var i = 0;
  while (i < bytes.length) {
    var type = bytes.subarray(i, i + 4);
    // chunk size is stored little-endian, unlike iso-bmff box sizes
    var size = (bytes[i + 7] << 24 | bytes[i + 6] << 16 | bytes[i + 5] << 8 | bytes[i + 4]) >>> 0; // skip LIST/RIFF and get the actual type
    if ((0, _byteHelpers.bytesMatch)(type, CONSTANTS.LIST) || (0, _byteHelpers.bytesMatch)(type, CONSTANTS.RIFF) || (0, _byteHelpers.bytesMatch)(type, CONSTANTS.WAVE)) {
      type = bytes.subarray(i + 8, i + 12);
      i += 4;
      size -= 4;
    }
    var data = bytes.subarray(i + 8, i + 8 + size);
    if ((0, _byteHelpers.bytesMatch)(type, paths[0])) {
      if (paths.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next box along the path
        var subresults = findFourCC(data, paths.slice(1));
        if (subresults.length) {
          results = results.concat(subresults);
        }
      }
    }
    i += 8 + data.length;
  } // we've finished searching all of bytes
  return results;
};
exports.findFourCC = findFourCC;

View file

@ -1,129 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
/**
 * @file stream.js
 */

/**
 * A lightweight readable stream implementation that handles event
 * dispatching. Listeners are stored per event type and invoked in the
 * order they were added.
 *
 * @class Stream
 */
function Stream() {
  this.listeners = {};
}

/**
 * Add a listener for a specified event type.
 *
 * @param {string} type the event name
 * @param {Function} listener the callback to be invoked when an event of
 * the specified type occurs
 */
Stream.prototype.on = function on(type, listener) {
  if (!this.listeners[type]) {
    this.listeners[type] = [];
  }
  this.listeners[type].push(listener);
};

/**
 * Remove a listener for a specified event type.
 *
 * @param {string} type the event name
 * @param {Function} listener a function previously registered for this
 * type of event through `on`
 * @return {boolean} if we could turn it off or not
 */
Stream.prototype.off = function off(type, listener) {
  var handlers = this.listeners[type];
  if (!handlers) {
    return false;
  }
  var index = handlers.indexOf(listener);

  // replace the handler array before mutating it so that an in-flight
  // trigger() keeps iterating over its original snapshot
  this.listeners[type] = handlers.slice(0);
  this.listeners[type].splice(index, 1);
  return index > -1;
};

/**
 * Trigger an event of the specified type on this stream. Any additional
 * arguments to this function are passed as parameters to event listeners.
 *
 * @param {string} type the event name
 */
Stream.prototype.trigger = function trigger(type) {
  var callbacks = this.listeners[type];
  if (!callbacks) {
    return;
  }

  // snapshot the length so listeners added during dispatch are not called
  var total = callbacks.length;

  // fast path: avoid building an args array for the common
  // single-argument case
  if (arguments.length === 2) {
    for (var i = 0; i < total; ++i) {
      callbacks[i].call(this, arguments[1]);
    }
    return;
  }
  var args = Array.prototype.slice.call(arguments, 1);
  for (var j = 0; j < total; ++j) {
    callbacks[j].apply(this, args);
  }
};

/**
 * Destroys the stream and cleans up.
 */
Stream.prototype.dispose = function dispose() {
  this.listeners = {};
};

/**
 * Forwards all `data` events on this stream to the destination stream. The
 * destination stream should provide a method `push` to receive the data
 * events as they arrive.
 *
 * @param {Stream} destination the stream that will receive all `data` events
 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
 */
Stream.prototype.pipe = function pipe(destination) {
  this.on('data', function (data) {
    destination.push(data);
  });
};
// Interop: expose Stream as both the ES default export and the direct
// module.exports value so require() callers receive the class itself.
exports.default = Stream;
module.exports = exports.default;

File diff suppressed because one or more lines are too long

View file

@ -1,271 +0,0 @@
import window from 'global/window'; // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
// Build a string of `len` copies of `str`.
var repeat = function repeat(str, len) {
  var out = '';
  while (len--) {
    out += str;
  }
  return out;
};

// Number of bits needed to represent `x`. Uses toString(2) rather than
// Math.log2 so it also works for BigInt values.
export var countBits = function countBits(x) {
  return x.toString(2).length;
};

// Number of whole bytes needed to represent `x`.
export var countBytes = function countBytes(x) {
  var bits = countBits(x);
  return Math.ceil(bits / 8);
};

// Left-pad the stringified `b` with `str` up to `len` characters; inputs
// longer than `len` are truncated to their trailing `len` characters.
export var padStart = function padStart(b, len, str) {
  if (str === void 0) {
    str = ' ';
  }
  var text = b.toString();
  return (repeat(str, len) + text).slice(-len);
};
/**
 * Check whether `obj` is a view on an ArrayBuffer (TypedArray or DataView).
 *
 * Fix: the feature detect previously compared the function itself to the
 * string 'function' (`ArrayBuffer.isView === 'function'`), which is always
 * false, so the native check was never used; `typeof` was missing.
 *
 * @param {*} obj value to test
 * @return {boolean} whether obj is a view on an ArrayBuffer
 */
export var isArrayBufferView = function isArrayBufferView(obj) {
  if (typeof ArrayBuffer.isView === 'function') {
    return ArrayBuffer.isView(obj);
  }
  // Fallback for engines without ArrayBuffer.isView.
  return obj && obj.buffer instanceof ArrayBuffer;
};

/**
 * Alias of isArrayBufferView, kept for API compatibility.
 *
 * @param {*} obj value to test
 * @return {boolean} whether obj is a view on an ArrayBuffer
 */
export var isTypedArray = function isTypedArray(obj) {
  return isArrayBufferView(obj);
};

/**
 * Coerce bytes-ish input (Uint8Array, other TypedArray view, ArrayBuffer,
 * Array, or single number) to a Uint8Array. Non-numbers and NaN produce an
 * empty Uint8Array; an existing Uint8Array is returned as-is.
 *
 * @param {*} bytes input to coerce
 * @return {Uint8Array} the bytes as a Uint8Array
 */
export var toUint8 = function toUint8(bytes) {
  if (bytes instanceof Uint8Array) {
    return bytes;
  }
  if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
    // any non-number or NaN leads to empty uint8array
    // eslint-disable-next-line
    if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
      bytes = 0;
    } else {
      bytes = [bytes];
    }
  }
  // For TypedArray views this reuses the underlying buffer at the same
  // offset/length rather than copying.
  return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
};

/**
 * Render bytes as a lowercase hex string, two characters per byte.
 *
 * @param {*} bytes bytes-ish input (see toUint8)
 * @return {string} hex representation
 */
export var toHexString = function toHexString(bytes) {
  bytes = toUint8(bytes);
  var str = '';
  for (var i = 0; i < bytes.length; i++) {
    str += padStart(bytes[i].toString(16), 2, '0');
  }
  return str;
};

/**
 * Render bytes as a binary string, eight characters per byte.
 *
 * @param {*} bytes bytes-ish input (see toUint8)
 * @return {string} binary representation
 */
export var toBinaryString = function toBinaryString(bytes) {
  bytes = toUint8(bytes);
  var str = '';
  for (var i = 0; i < bytes.length; i++) {
    str += padStart(bytes[i].toString(2), 8, '0');
  }
  return str;
};
// Prefer the platform BigInt so values above 2^53 stay exact; fall back to
// Number (with possible precision loss) where BigInt is unavailable.
var BigInt = window.BigInt || Number;
// BYTE_TABLE[i] === 2^(8*i): the place value of the i-th byte, where index 0
// is the least-significant byte.
var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
// Detect host byte order by viewing a known 16-bit pattern one byte at a time.
export var ENDIANNESS = function () {
  var a = new Uint16Array([0xFFCC]);
  var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
  if (b[0] === 0xFF) {
    return 'big';
  }
  if (b[0] === 0xCC) {
    return 'little';
  }
  return 'unknown';
}();
export var IS_BIG_ENDIAN = ENDIANNESS === 'big';
export var IS_LITTLE_ENDIAN = ENDIANNESS === 'little';
/**
 * Decode a byte sequence into a Number.
 *
 * @param {Uint8Array|Array} bytes bytes to decode
 * @param {Object} [_temp] options
 * @param {boolean} [_temp.signed=false] interpret the bytes as a signed
 *        (two's-complement) value
 * @param {boolean} [_temp.le=false] treat the bytes as little-endian
 * @return {number} the decoded value
 */
export var bytesToNumber = function bytesToNumber(bytes, _temp) {
  var _ref = _temp === void 0 ? {} : _temp,
    _ref$signed = _ref.signed,
    signed = _ref$signed === void 0 ? false : _ref$signed,
    _ref$le = _ref.le,
    le = _ref$le === void 0 ? false : _ref$le;
  bytes = toUint8(bytes);
  // Accumulate from the least-significant byte: left-to-right for LE,
  // right-to-left for BE. Fall back to Array.prototype for engines whose
  // Uint8Array lacks reduce/reduceRight.
  var fn = le ? 'reduce' : 'reduceRight';
  var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
  var number = obj.call(bytes, function (total, byte, i) {
    var exponent = le ? i : Math.abs(i + 1 - bytes.length);
    return total + BigInt(byte) * BYTE_TABLE[exponent];
  }, BigInt(0));
  if (signed) {
    // Two's complement: values above the signed maximum wrap negative.
    // The three subtractions below total 2^(8 * byteCount).
    var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
    number = BigInt(number);
    if (number > max) {
      number -= max;
      number -= max;
      number -= BigInt(2);
    }
  }
  return Number(number);
};
/**
 * Encode a number (or BigInt) as bytes, big-endian by default.
 * NaN and non-number/non-bigint inputs encode as 0.
 *
 * @param {number|bigint} number value to encode
 * @param {Object} [_temp2] options
 * @param {boolean} [_temp2.le=false] emit the bytes little-endian instead
 * @return {Uint8Array} the encoded bytes
 */
export var numberToBytes = function numberToBytes(number, _temp2) {
  var _ref2 = _temp2 === void 0 ? {} : _temp2,
    _ref2$le = _ref2.le,
    le = _ref2$le === void 0 ? false : _ref2$le;
  // eslint-disable-next-line
  if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
    number = 0;
  }
  number = BigInt(number);
  var byteCount = countBytes(number);
  var bytes = new Uint8Array(new ArrayBuffer(byteCount));
  for (var i = 0; i < byteCount; i++) {
    var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
    bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
    if (number < 0) {
      // NOTE(review): negative values are adjusted per byte via ~ and
      // Math.abs; verify against callers before changing this encoding.
      bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
      bytes[byteIndex] -= i === 0 ? 1 : 2;
    }
  }
  return bytes;
};
/**
 * Decode bytes into a string, attempting UTF-8 first and falling back to the
 * raw code-unit string when decoding fails.
 *
 * @param {Uint8Array|Array} bytes bytes to decode; falsy input yields ''
 * @return {string} decoded string
 */
export var bytesToString = function bytesToString(bytes) {
  if (!bytes) {
    return '';
  }
  // TODO: should toUint8 handle cases where we only have 8 bytes
  // but report more since this is a Uint16+ Array?
  bytes = Array.prototype.slice.call(bytes);
  var raw = String.fromCharCode.apply(null, toUint8(bytes));
  try {
    return decodeURIComponent(escape(raw));
  } catch (e) {
    // decodeURIComponent/escape failed: partial or non-string data.
    // Fall through and return the possibly garbled raw string.
  }
  return raw;
};
/**
 * Encode a string as bytes. Unless `stringIsBytes` is set, multi-byte
 * characters are first expanded into their individual UTF-8 bytes.
 *
 * @param {string} string value to encode (non-strings are stringified)
 * @param {boolean} [stringIsBytes] the string already holds raw byte values
 * @return {Uint8Array} encoded bytes
 */
export var stringToBytes = function stringToBytes(string, stringIsBytes) {
  var stringable = string && typeof string.toString === 'function';
  if (typeof string !== 'string' && stringable) {
    string = string.toString();
  }
  if (typeof string !== 'string') {
    return new Uint8Array();
  }
  if (!stringIsBytes) {
    string = unescape(encodeURIComponent(string));
  }
  var view = new Uint8Array(string.length);
  for (var i = 0; i < string.length; i++) {
    view[i] = string.charCodeAt(i);
  }
  return view;
};
/**
 * Concatenate any number of bytes-ish arguments into one Uint8Array.
 * Strings and empty/falsy arguments are ignored.
 *
 * @return {Uint8Array} the concatenated bytes
 */
export var concatTypedArrays = function concatTypedArrays() {
  var buffers = Array.prototype.slice.call(arguments).filter(function (b) {
    return b && (b.byteLength || b.length) && typeof b !== 'string';
  });
  if (buffers.length <= 1) {
    // zero inputs -> empty Uint8Array; one input -> that input as Uint8Array
    return toUint8(buffers[0]);
  }
  var totalLen = 0;
  buffers.forEach(function (buf) {
    totalLen += buf.byteLength || buf.length;
  });
  var merged = new Uint8Array(totalLen);
  var cursor = 0;
  buffers.forEach(function (buf) {
    buf = toUint8(buf);
    merged.set(buf, cursor);
    cursor += buf.byteLength;
  });
  return merged;
};
/**
 * Check if the bytes "b" are contained within bytes "a".
 *
 * @param {Uint8Array|Array} a
 *        Bytes to check in
 *
 * @param {Uint8Array|Array} b
 *        Bytes to check for
 *
 * @param {Object} options
 *        options
 *
 * @param {number} [options.offset=0]
 *        offset into `a` at which the comparison starts
 *
 * @param {Array|Uint8Array} [options.mask=[]]
 *        per-byte mask applied to bytes of `a` before comparison
 *
 * @return {boolean}
 *         If all bytes in b are inside of a, taking into account
 *         bit masks.
 */
export var bytesMatch = function bytesMatch(a, b, options) {
  var opts = options === void 0 ? {} : options;
  var offset = opts.offset === void 0 ? 0 : opts.offset;
  var mask = opts.mask === void 0 ? [] : opts.mask;
  a = toUint8(a);
  b = toUint8(b);
  // IE 11's Uint8Array has no `every`; use Array.prototype's instead.
  var every = b.every ? b.every : Array.prototype.every;
  return b.length && a.length - offset >= b.length && every.call(b, function (bByte, i) {
    var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
    return bByte === aByte;
  });
};
// Uint8Array#slice with an Array.prototype fallback for engines (IE 11)
// whose Uint8Array lacks slice. Always returns a copy.
export var sliceBytes = function sliceBytes(src, start, end) {
  var nativeSlice = Uint8Array.prototype.slice;
  if (nativeSlice) {
    return nativeSlice.call(src, start, end);
  }
  return new Uint8Array(Array.prototype.slice.call(src, start, end));
};
// Reverse bytes IN PLACE (mutates src) and return it, falling back to
// Array.prototype.reverse when src has no reverse of its own.
export var reverseBytes = function reverseBytes(src) {
  return src.reverse ? src.reverse() : Array.prototype.reverse.call(src);
};

View file

@ -1,96 +0,0 @@
import { padStart, toHexString, toBinaryString } from './byte-helpers.js'; // https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/codecs_parameter#AV1
/**
 * Build the AV1 codec suffix ("<profile>.<level><tier>.<bitDepth>.<mono>.<chroma>")
 * from an av1C configuration payload (see the spec links above).
 * Bit layout read here:
 *   byte 1: seq_profile (top 3 bits) + seq_level_idx_0 (low 5 bits)
 *   byte 2: tier | high_bitdepth | twelve_bit | monochrome |
 *           chroma_subsampling_x | chroma_subsampling_y | chroma_sample_position
 *
 * @param {Uint8Array} bytes av1C payload starting at its version byte
 * @return {string} codec suffix, e.g. "0.04M.08.0.110"
 */
export var getAv1Codec = function getAv1Codec(bytes) {
  var codec = '';
  var profile = bytes[1] >>> 3;
  var level = bytes[1] & 0x1F;
  var tier = bytes[2] >>> 7;
  var highBitDepth = (bytes[2] & 0x40) >> 6;
  var twelveBit = (bytes[2] & 0x20) >> 5;
  var monochrome = (bytes[2] & 0x10) >> 4;
  var chromaSubsamplingX = (bytes[2] & 0x08) >> 3;
  var chromaSubsamplingY = (bytes[2] & 0x04) >> 2;
  var chromaSamplePosition = bytes[2] & 0x03;
  codec += profile + "." + padStart(level, 2, '0');
  // tier 0 -> Main ('M'), tier 1 -> High ('H')
  if (tier === 0) {
    codec += 'M';
  } else if (tier === 1) {
    codec += 'H';
  }
  // bit depth: profile 2 with high_bitdepth may be 12-bit; otherwise
  // high_bitdepth means 10-bit and the default is 8-bit
  var bitDepth;
  if (profile === 2 && highBitDepth) {
    bitDepth = twelveBit ? 12 : 10;
  } else {
    bitDepth = highBitDepth ? 10 : 8;
  }
  codec += "." + padStart(bitDepth, 2, '0'); // TODO: can we parse color range??
  codec += "." + monochrome;
  codec += "." + chromaSubsamplingX + chromaSubsamplingY + chromaSamplePosition;
  return codec;
};
/**
 * Build the avc1 codec suffix (profile, constraint flags, level) from an
 * avcC configuration payload, e.g. "64001f".
 *
 * @param {Uint8Array} bytes avcC payload starting at its version byte
 * @return {string} six hex characters: profile + constraints + level
 */
export var getAvcCodec = function getAvcCodec(bytes) {
  var profile = toHexString(bytes[1]);
  // the low two bits of the constraint byte are masked off
  var constraints = toHexString(bytes[2] & 0xFC);
  var level = toHexString(bytes[3]);
  return '' + profile + constraints + level;
};
/**
 * Build the HEVC codec suffix from an hvcC configuration payload, e.g.
 * "1.6.L93.B0": profile (with space letter), compatibility flags, tier+level,
 * then any non-zero constraint bytes dot-separated.
 *
 * @param {Uint8Array} bytes hvcC payload starting at its version byte
 * @return {string} the hev1/hvc1 codec suffix
 */
export var getHvcCodec = function getHvcCodec(bytes) {
  var codec = '';
  var profileSpace = bytes[1] >> 6;
  var profileId = bytes[1] & 0x1F;
  var tierFlag = (bytes[1] & 0x20) >> 5;
  var profileCompat = bytes.subarray(2, 6);
  var constraintIds = bytes.subarray(6, 12);
  var levelId = bytes[12];
  // profile space 0 has no prefix; 1-3 map to 'A'-'C'
  if (profileSpace === 1) {
    codec += 'A';
  } else if (profileSpace === 2) {
    codec += 'B';
  } else if (profileSpace === 3) {
    codec += 'C';
  }
  codec += profileId + "."; // ffmpeg does this in big endian
  var profileCompatVal = parseInt(toBinaryString(profileCompat).split('').reverse().join(''), 2); // apple does this in little endian...
  // if the reversed (big-endian) read overflows a byte, fall back to the
  // little-endian interpretation of the compatibility flags
  if (profileCompatVal > 255) {
    profileCompatVal = parseInt(toBinaryString(profileCompat), 2);
  }
  codec += profileCompatVal.toString(16) + ".";
  // tier 0 -> 'L' (Main tier), tier 1 -> 'H' (High tier)
  if (tierFlag === 0) {
    codec += 'L';
  } else {
    codec += 'H';
  }
  codec += levelId;
  // append each non-zero constraint byte in hex, separated by dots
  var constraints = '';
  for (var i = 0; i < constraintIds.length; i++) {
    var v = constraintIds[i];
    if (v) {
      if (constraints) {
        constraints += '.';
      }
      constraints += v.toString(16);
    }
  }
  if (constraints) {
    codec += "." + constraints;
  }
  return codec;
};

View file

@ -1,253 +0,0 @@
import window from 'global/window';
// Regexes used to classify codec strings: which container a codec fits in,
// which media type it is, and whether mux.js can remux it. The capitalized
// names in upperMediaTypes are used to build the "muxer<Type>" keys below.
var regexs = {
  // to determine mime types
  mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
  webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
  ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
  // to determine if a codec is audio or video
  video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
  audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
  text: /^(stpp.ttml.im1t)/,
  // mux.js support regex
  muxerVideo: /^(avc0?1)/,
  muxerAudio: /^(mp4a)/,
  // match nothing as muxer does not support text right now.
  // there cannot never be a character before the start of a string
  // so this matches nothing.
  muxerText: /a^/
};
// media type names in the order parseCodecs probes them
var mediaTypes = ['video', 'audio', 'text'];
var upperMediaTypes = ['Video', 'Audio', 'Text'];
/**
 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
 * `avc1.<hhhhhh>` form.
 *
 * @param {string} codec
 *        Codec string to translate
 * @return {string}
 *         The translated codec string
 */
export var translateLegacyCodec = function translateLegacyCodec(codec) {
  if (!codec) {
    return codec;
  }
  return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
    var toHexPair = function (value) {
      return ('00' + Number(value).toString(16)).slice(-2);
    };
    return 'avc1.' + toHexPair(profile) + '00' + toHexPair(avcLevel);
  });
};
/**
 * Replace the old apple-style `avc1.<dd>.<dd>` codec strings with the standard
 * `avc1.<hhhhhh>` form.
 *
 * @param {string[]} codecs
 *        An array of codec strings to translate
 * @return {string[]}
 *         The translated array of codec strings
 */
export var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
  return codecs.map(function (codec) {
    return translateLegacyCodec(codec);
  });
};
/**
 * Replace codecs in a codec string from the old apple-style `avc1.<dd>.<dd>`
 * to the standard `avc1.<hhhhhh>`.
 *
 * @param {string} codecString
 *        The codec string
 * @return {string}
 *         The codec string with old apple-style codecs replaced
 *
 * @private
 */
export var mapLegacyAvcCodecs = function mapLegacyAvcCodecs(codecString) {
  return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
    return translateLegacyCodecs([match])[0];
  });
};
/**
 * @typedef {Object} ParsedCodecInfo
 * @property {number} codecCount
 *           Number of codecs parsed
 * @property {string} [videoCodec]
 *           Parsed video codec (if found)
 * @property {string} [videoObjectTypeIndicator]
 *           Video object type indicator (if found)
 * @property {string|null} audioProfile
 *           Audio profile
 */
/**
 * Parse a comma separated codec string into descriptors.
 * Note: despite the legacy typedef above, this returns an array of
 * { type, details, mediaType } objects, one per recognized codec, plus a
 * mediaType 'unknown' entry for anything unrecognized.
 *
 * @param {string} [codecString]
 *        The codec string to parse
 * @return {Object[]}
 *         Parsed codec descriptors
 */
export var parseCodecs = function parseCodecs(codecString) {
  if (codecString === void 0) {
    codecString = '';
  }
  var result = [];
  var codecs = codecString.split(',');
  for (var i = 0; i < codecs.length; i++) {
    var codec = codecs[i].trim();
    var codecType = void 0;
    for (var j = 0; j < mediaTypes.length; j++) {
      var name = mediaTypes[j];
      var match = regexs[name].exec(codec.toLowerCase());
      if (!match || match.length <= 1) {
        continue;
      }
      codecType = name;
      // keep the codec name's original casing
      var type = codec.substring(0, match[1].length);
      var details = codec.replace(type, '');
      result.push({
        type: type,
        details: details,
        mediaType: name
      });
    }
    if (!codecType) {
      result.push({
        type: codec,
        details: '',
        mediaType: 'unknown'
      });
    }
  }
  return result;
};
/**
 * Returns parsed codec info for the default alternate audio playlist, if the
 * provided audio group has one.
 *
 * @param {Object} master
 *        The master playlist
 * @param {string} audioGroupId
 *        ID of the audio group for which to find the default codec info
 * @return {Object[]|null}
 *         Parsed codec info, or null when there is no matching default
 */
export var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
  var audioGroups = master.mediaGroups.AUDIO;
  if (!audioGroups || !audioGroupId) {
    return null;
  }
  var audioGroup = audioGroups[audioGroupId];
  if (!audioGroup) {
    return null;
  }
  for (var label in audioGroup) {
    var audioType = audioGroup[label];
    if (audioType.default && audioType.playlists) {
      // codec should be the same for all playlists within the audio type
      return parseCodecs(audioType.playlists[0].attributes.CODECS);
    }
  }
  return null;
};
// Shared helper: does `regex` match the trimmed, lower-cased codec string?
var codecMatches = function codecMatches(regex, codec) {
  return regex.test((codec === void 0 ? '' : codec).trim().toLowerCase());
};
// True when `codec` names a video codec (see regexs.video).
export var isVideoCodec = function isVideoCodec(codec) {
  return codecMatches(regexs.video, codec);
};
// True when `codec` names an audio codec (see regexs.audio).
export var isAudioCodec = function isAudioCodec(codec) {
  return codecMatches(regexs.audio, codec);
};
// True when `codec` names a text codec (see regexs.text).
export var isTextCodec = function isTextCodec(codec) {
  return codecMatches(regexs.text, codec);
};
/**
 * Build a MIME type string (e.g. 'video/mp4;codecs="avc1.42001e"') for a
 * codec string, or undefined for non-string/empty input.
 *
 * @param {string} codecString comma separated codec string
 * @return {string|undefined} the MIME type
 */
export var getMimeForCodec = function getMimeForCodec(codecString) {
  if (!codecString || typeof codecString !== 'string') {
    return;
  }
  var codecs = codecString.toLowerCase().split(',').map(function (c) {
    return translateLegacyCodec(c.trim());
  });
  // major type defaults to video; a single audio or text codec changes it
  var type = 'video';
  if (codecs.length === 1) {
    if (isAudioCodec(codecs[0])) {
      type = 'audio';
    } else if (isTextCodec(codecs[0])) {
      // text uses application/<container> for now
      type = 'application';
    }
  }
  // pick the first container that every codec fits into; mp4 is the default
  var allMatch = function (regex) {
    return codecs.every(function (c) {
      return regex.test(c);
    });
  };
  var container = 'mp4';
  if (allMatch(regexs.mp4)) {
    container = 'mp4';
  } else if (allMatch(regexs.webm)) {
    container = 'webm';
  } else if (allMatch(regexs.ogg)) {
    container = 'ogg';
  }
  return type + '/' + container + ';codecs="' + codecString + '"';
};
// Whether the current browser's MediaSource reports support for the codec
// string; always a boolean.
export var browserSupportsCodec = function browserSupportsCodec(codecString) {
  if (codecString === void 0) {
    codecString = '';
  }
  var ms = window.MediaSource;
  return ms && ms.isTypeSupported && ms.isTypeSupported(getMimeForCodec(codecString)) || false;
};
// Whether mux.js can remux every codec in the comma separated codec string.
export var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
  if (codecString === void 0) {
    codecString = '';
  }
  return codecString.toLowerCase().split(',').every(function (codec) {
    codec = codec.trim();
    // supported when any of the muxerVideo/muxerAudio/muxerText regexes match
    return upperMediaTypes.some(function (mediaType) {
      return regexs['muxer' + mediaType].test(codec);
    });
  });
};
// Fallback RFC 6381 codec strings used when a playlist does not declare
// CODECS: AAC-LC audio and H.264 Main Profile Level 1.3 video.
export var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
export var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';

View file

@ -1,183 +0,0 @@
import { toUint8, bytesMatch } from './byte-helpers.js';
import { findBox } from './mp4-helpers.js';
import { findEbml, EBML_TAGS } from './ebml-helpers.js';
import { getId3Offset } from './id3-helpers.js';
import { findH264Nal, findH265Nal } from './nal-helpers.js';
// Byte signatures used to sniff container formats; each value is the hex
// encoding of the ASCII marker named in the per-entry comment.
var CONSTANTS = {
  // "webm" string literal in hex
  'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
  // "matroska" string literal in hex
  'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
  // "fLaC" string literal in hex
  'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
  // "OggS" string literal in hex
  'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
  // ac-3 sync byte, also works for ec-3 as that is simply a codec
  // of ac-3
  'ac3': toUint8([0x0b, 0x77]),
  // "RIFF" string literal in hex used for wav and avi
  'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
  // "AVI" string literal in hex
  'avi': toUint8([0x41, 0x56, 0x49]),
  // "WAVE" string literal in hex
  'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
  // "ftyp3g" string literal in hex
  '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
  // "ftyp" string literal in hex
  'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
  // "styp" string literal in hex
  'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
  // "ftypqt" string literal in hex
  'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
  // moov string literal in hex
  'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
  // moof string literal in hex
  'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
};
// Per-format detectors: each takes raw bytes and returns a truthy value when
// the bytes look like that format. The audio detectors first skip any leading
// ID3 tag, then match the format's sync-byte signature (see the masks).
var _isLikely = {
  aac: function aac(bytes) {
    var offset = getId3Offset(bytes);
    return bytesMatch(bytes, [0xFF, 0x10], {
      offset: offset,
      mask: [0xFF, 0x16]
    });
  },
  mp3: function mp3(bytes) {
    var offset = getId3Offset(bytes);
    return bytesMatch(bytes, [0xFF, 0x02], {
      offset: offset,
      mask: [0xFF, 0x06]
    });
  },
  webm: function webm(bytes) {
    var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
    return bytesMatch(docType, CONSTANTS.webm);
  },
  mkv: function mkv(bytes) {
    var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
    return bytesMatch(docType, CONSTANTS.matroska);
  },
  mp4: function mp4(bytes) {
    // if this file is another base media file format, it is not mp4
    if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
      return false;
    } // if this file starts with a ftyp or styp box its mp4
    if (bytesMatch(bytes, CONSTANTS.mp4, {
      offset: 4
    }) || bytesMatch(bytes, CONSTANTS.fmp4, {
      offset: 4
    })) {
      return true;
    } // if this file starts with a moof/moov box its mp4
    if (bytesMatch(bytes, CONSTANTS.moof, {
      offset: 4
    }) || bytesMatch(bytes, CONSTANTS.moov, {
      offset: 4
    })) {
      return true;
    }
    // no ftyp/styp/moof/moov box: implicitly returns undefined (falsy)
  },
  mov: function mov(bytes) {
    return bytesMatch(bytes, CONSTANTS.mov, {
      offset: 4
    });
  },
  '3gp': function gp(bytes) {
    return bytesMatch(bytes, CONSTANTS['3gp'], {
      offset: 4
    });
  },
  ac3: function ac3(bytes) {
    var offset = getId3Offset(bytes);
    return bytesMatch(bytes, CONSTANTS.ac3, {
      offset: offset
    });
  },
  ts: function ts(bytes) {
    // shorter than one full 188-byte packet: only check the first sync byte
    if (bytes.length < 189 && bytes.length >= 1) {
      return bytes[0] === 0x47;
    }
    var i = 0; // check the first 376 bytes for two matching sync bytes
    while (i + 188 < bytes.length && i < 188) {
      if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
        return true;
      }
      i += 1;
    }
    return false;
  },
  flac: function flac(bytes) {
    var offset = getId3Offset(bytes);
    return bytesMatch(bytes, CONSTANTS.flac, {
      offset: offset
    });
  },
  ogg: function ogg(bytes) {
    return bytesMatch(bytes, CONSTANTS.ogg);
  },
  avi: function avi(bytes) {
    return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
      offset: 8
    });
  },
  wav: function wav(bytes) {
    return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
      offset: 8
    });
  },
  'h264': function h264(bytes) {
    // find seq_parameter_set_rbsp
    return findH264Nal(bytes, 7, 3).length;
  },
  'h265': function h265(bytes) {
    // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    return findH265Nal(bytes, [32, 33], 3).length;
  }
}; // get all the isLikely functions
// but make sure 'ts' is above h264 and h265
// but below everything else as it is the least specific
// Probe order matters: every container-specific detector runs first, then the
// generic ts check, then the raw h264/h265 NAL scans (the least specific).
var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
.filter(function (t) {
  return t !== 'ts' && t !== 'h264' && t !== 'h265';
}) // add it back to the bottom
.concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
// Wrap each detector so it always receives a Uint8Array regardless of the
// caller's input type (Array, ArrayBuffer, other TypedArray views).
isLikelyTypes.forEach(function (type) {
  var isLikelyFn = _isLikely[type];
  _isLikely[type] = function (bytes) {
    return isLikelyFn(toUint8(bytes));
  };
}); // export after wrapping
export var isLikely = _isLikely; // A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures
/**
 * Sniff the container type for the given bytes, probing detectors in
 * isLikelyTypes order (most specific first).
 *
 * @param {Uint8Array|Array} bytes bytes to sniff
 * @return {string} detected type key, or '' when nothing matched
 */
export var detectContainerForBytes = function detectContainerForBytes(bytes) {
  bytes = toUint8(bytes);
  var found = isLikelyTypes.find(function (type) {
    return isLikely[type](bytes);
  });
  return found || '';
}; // fmp4 is not a container
/**
 * A fragmented-mp4 media segment is identified by the presence of a moof box.
 *
 * @param {Uint8Array|Array} bytes bytes to check
 * @return {boolean} whether the bytes look like an fmp4 media segment
 */
export var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
  var moofBoxes = findBox(bytes, ['moof']);
  return moofBoxes.length > 0;
};

View file

@ -1,16 +0,0 @@
import window from 'global/window';
// Prefer the browser's atob; fall back to Node's Buffer base64 decoding.
var atob = function atob(s) {
  return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
};
/**
 * Decode a base64 string into a Uint8Array of its raw bytes.
 *
 * @param {string} b64Text base64 encoded text
 * @return {Uint8Array} decoded bytes
 */
export default function decodeB64ToUint8Array(b64Text) {
  var decoded = atob(b64Text);
  var bytes = new Uint8Array(decoded.length);
  for (var i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i);
  }
  return bytes;
}

View file

@ -1,497 +0,0 @@
import { toUint8, bytesToNumber, bytesMatch, bytesToString, numberToBytes, padStart } from './byte-helpers';
import { getAvcCodec, getHvcCodec, getAv1Codec } from './codec-helpers.js'; // relevant specs for this parser:
// https://matroska-org.github.io/libebml/specs.html
// https://www.matroska.org/technical/elements.html
// https://www.webmproject.org/docs/container/
// EBML element ids used by this parser (see the matroska element reference
// linked above). Ids keep their length-marker bits, unlike data sizes.
// Note: Track and TrackEntry intentionally share the same id (0xAE).
export var EBML_TAGS = {
  EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
  DocType: toUint8([0x42, 0x82]),
  Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
  SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
  Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
  Track: toUint8([0xAE]),
  TrackNumber: toUint8([0xd7]),
  DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
  TrackEntry: toUint8([0xAE]),
  TrackType: toUint8([0x83]),
  FlagDefault: toUint8([0x88]),
  CodecID: toUint8([0x86]),
  CodecPrivate: toUint8([0x63, 0xA2]),
  VideoTrack: toUint8([0xe0]),
  AudioTrack: toUint8([0xe1]),
  // Not used yet, but will be used for live webm/mkv
  // see https://www.matroska.org/technical/basics.html#block-structure
  // see https://www.matroska.org/technical/basics.html#simpleblock-structure
  Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
  Timestamp: toUint8([0xE7]),
  TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
  BlockGroup: toUint8([0xA0]),
  BlockDuration: toUint8([0x9B]),
  Block: toUint8([0xA1]),
  SimpleBlock: toUint8([0xA3])
};
/**
* This is a simple table to determine the length
* of things in ebml. The length is one based (starts at 1,
* rather than zero) and for every zero bit before a one bit
* we add one to length. We also need this table because in some
* case we have to xor all the length bits from another value.
*/
// Single-bit masks from the high bit down; used both to locate a vint's
// length-marker bit and (elsewhere) to xor it off.
var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
// A vint's byte length is one plus the number of leading zero bits in its
// first byte (a zero byte yields 9).
var getLength = function getLength(byte) {
  var len = 1;
  var bit = 0;
  while (bit < LENGTH_TABLE.length && !(byte & LENGTH_TABLE[bit])) {
    len++;
    bit++;
  }
  return len;
}; // length in ebml is stored in the first 4 to 8 bits
// of the first byte. 4 for the id length and 8 for the
// data size length. Length is measured by converting the number to binary
// then 1 + the number of zeros before a 1 is encountered starting
// from the left.
/**
 * Read one EBML variable-size integer (vint) starting at `offset`.
 *
 * @param {Uint8Array} bytes buffer to read from
 * @param {number} offset index of the vint's first byte
 * @param {boolean} [removeLength=true] strip the length-marker bit before
 *        decoding (required for data sizes; element ids keep their bits)
 * @param {boolean} [signed=false] decode the value as signed
 * @return {{length: number, value: number, bytes: (Uint8Array|Array)}}
 */
var getvint = function getvint(bytes, offset, removeLength, signed) {
  if (removeLength === void 0) {
    removeLength = true;
  }
  if (signed === void 0) {
    signed = false;
  }
  var length = getLength(bytes[offset]);
  var valueBytes = bytes.subarray(offset, offset + length);
  if (removeLength) {
    // Copy with slice (not subarray) because the length-marker bit is xor-ed
    // off below and we must not modify the caller's buffer; slice is invoked
    // via Array.prototype since IE 11's Uint8Array has no slice.
    valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
    valueBytes[0] ^= LENGTH_TABLE[length - 1];
  }
  return {
    length: length,
    value: bytesToNumber(valueBytes, {
      signed: signed
    }),
    bytes: valueBytes
  };
};
// Normalize one path element into byte form: numbers become their byte
// representation; strings are split into 1-2 character chunks.
// NOTE(review): the string branch calls normalizePath on each chunk, which is
// itself still a string and re-enters the same branch, so any non-empty
// string input recurses without terminating. Callers in this file only pass
// numbers and byte arrays; confirm intent before using string paths.
var normalizePath = function normalizePath(path) {
  if (typeof path === 'string') {
    return path.match(/.{1,2}/g).map(function (p) {
      return normalizePath(p);
    });
  }
  if (typeof path === 'number') {
    return numberToBytes(path);
  }
  return path;
};
// Accept either a single path element or an array of them; always return an
// array of normalized elements.
var normalizePaths = function normalizePaths(paths) {
  if (!Array.isArray(paths)) {
    return [normalizePath(paths)];
  }
  return paths.map(function (p) {
    return normalizePath(p);
  });
};
// For an element whose declared data size is "unknown", walk the following
// sibling elements until the same element id appears again (or the buffer
// ends) to determine where this element's data actually stops.
var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
  if (offset >= bytes.length) {
    return bytes.length;
  }
  var innerid = getvint(bytes, offset, false);
  if (bytesMatch(id.bytes, innerid.bytes)) {
    return offset;
  }
  // skip this child entirely (id + size header + payload) and recurse
  var dataHeader = getvint(bytes, offset + innerid.length);
  return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
};
/**
 * Notes on the EBML format.
 *
 * EBML uses "vint" tags. Every vint tag contains
 * two parts
 *
 * 1. The length from the first byte. You get this by
 *    converting the byte to binary and counting the zeros
 *    before a 1. Then you add 1 to that. Examples
 *    00011111 = length 4 because there are 3 zeros before a 1.
 *    00100000 = length 3 because there are 2 zeros before a 1.
 *    00000011 = length 7 because there are 6 zeros before a 1.
 *
 * 2. The bits used for length are removed from the first byte
 *    Then all the bytes are merged into a value. NOTE: this
 *    is not the case for id ebml tags as their id includes
 *    length bits.
 *
 */
/**
 * Walk the EBML element tree and collect the payloads of every element whose
 * id path matches `paths`.
 *
 * @param {Uint8Array|Array} bytes buffer to search
 * @param {Array} paths element-id path (see EBML_TAGS), outermost id first
 * @return {Uint8Array[]} payloads of all matching elements
 */
export var findEbml = function findEbml(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);
  var results = [];
  if (!paths.length) {
    return results;
  }
  var i = 0;
  while (i < bytes.length) {
    var id = getvint(bytes, i, false);
    var dataHeader = getvint(bytes, i + id.length);
    var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
    if (dataHeader.value === 0x7f) {
      // one-byte "unknown size" marker: scan ahead to find the real end
      dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
      if (dataHeader.value !== bytes.length) {
        dataHeader.value -= dataStart;
      }
    }
    // clamp the payload to the end of the buffer
    var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
    var data = bytes.subarray(dataStart, dataEnd);
    if (bytesMatch(paths[0], id.bytes)) {
      if (paths.length === 1) {
        // this is the end of the paths and we've found the tag we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next tag inside of the data
        // of this one
        results = results.concat(findEbml(data, paths.slice(1)));
      }
    }
    var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
    i += totalLength;
  }
  return results;
}; // see https://www.matroska.org/technical/basics.html#block-structure
/**
 * Decode a matroska Block/SimpleBlock (or BlockGroup wrapper) into a frame
 * descriptor with its laced frames split apart.
 *
 * @param {Uint8Array} block block bytes (a BlockGroup when type is 'group')
 * @param {string} type 'simple', 'block', or 'group'
 * @param {number} timestampScale segment TimestampScale value
 * @param {number} clusterTimestamp timestamp of the enclosing Cluster
 * @return {Object} parsed frame info: frames, pts/dts, flags, duration
 */
export var decodeBlock = function decodeBlock(block, type, timestampScale, clusterTimestamp) {
  var duration;
  if (type === 'group') {
    // a BlockGroup carries an optional BlockDuration plus the real Block
    duration = findEbml(block, [EBML_TAGS.BlockDuration])[0];
    if (duration) {
      duration = bytesToNumber(duration);
      // NOTE(review): algebraically this reduces to duration / 1000 (modulo
      // floating-point rounding); kept as written.
      duration = 1 / timestampScale * duration * timestampScale / 1000;
    }
    block = findEbml(block, [EBML_TAGS.Block])[0];
    type = 'block'; // treat data as a block after this point
  }
  var dv = new DataView(block.buffer, block.byteOffset, block.byteLength);
  var trackNumber = getvint(block, 0);
  // the block-relative timestamp is a signed big-endian 16-bit value
  var timestamp = dv.getInt16(trackNumber.length, false);
  var flags = block[trackNumber.length + 2];
  var data = block.subarray(trackNumber.length + 3); // pts/dts in seconds
  // NOTE(review): as above, this reduces to (clusterTimestamp + timestamp) / 1000.
  var ptsdts = 1 / timestampScale * (clusterTimestamp + timestamp) * timestampScale / 1000; // return the frame
  var parsed = {
    duration: duration,
    trackNumber: trackNumber.value,
    keyframe: type === 'simple' && flags >> 7 === 1,
    invisible: (flags & 0x08) >> 3 === 1,
    // lacing: 0 = none, 1 = xiph, 2 = fixed-size, 3 = ebml
    lacing: (flags & 0x06) >> 1,
    discardable: type === 'simple' && (flags & 0x01) === 1,
    frames: [],
    pts: ptsdts,
    dts: ptsdts,
    timestamp: timestamp
  };
  if (!parsed.lacing) {
    parsed.frames.push(data);
    return parsed;
  }
  var numberOfFrames = data[0] + 1;
  var frameSizes = [];
  var offset = 1; // Fixed
  if (parsed.lacing === 2) {
    var sizeOfFrame = (data.length - offset) / numberOfFrames;
    for (var i = 0; i < numberOfFrames; i++) {
      frameSizes.push(sizeOfFrame);
    }
  } // xiph
  if (parsed.lacing === 1) {
    // each size is a run of bytes summed until a byte below 0xFF appears
    for (var _i = 0; _i < numberOfFrames - 1; _i++) {
      var size = 0;
      do {
        size += data[offset];
        offset++;
      } while (data[offset - 1] === 0xFF);
      frameSizes.push(size);
    }
  } // ebml
  if (parsed.lacing === 3) {
    // first vint is unsigned
    // after that vints are signed and
    // based on a compounding size
    var _size = 0;
    for (var _i2 = 0; _i2 < numberOfFrames - 1; _i2++) {
      var vint = _i2 === 0 ? getvint(data, offset) : getvint(data, offset, true, true);
      _size += vint.value;
      frameSizes.push(_size);
      offset += vint.length;
    }
  }
  // the final frame's size is implicit: it runs to the end of the data
  frameSizes.forEach(function (size) {
    parsed.frames.push(data.subarray(offset, offset + size));
    offset += size;
  });
  return parsed;
}; // VP9 Codec Feature Metadata (CodecPrivate)
// https://www.webmproject.org/docs/container/
/**
 * Parse the VP9 CodecPrivate block: a flat sequence of {id, length, value}
 * records. Ids 1-4 map to profile, level, bitDepth and chromaSubsampling;
 * unknown ids are kept under their numeric key.
 *
 * @param {Uint8Array} bytes CodecPrivate payload
 * @return {Object} parsed feature parameters
 */
var parseVp9Private = function parseVp9Private(bytes) {
  var params = {};
  var offset = 0;
  while (offset < bytes.length) {
    var id = bytes[offset] & 0x7f;
    var len = bytes[offset + 1];
    var val = void 0;
    // single-byte values are unwrapped; longer ones stay as byte views
    if (len === 1) {
      val = bytes[offset + 2];
    } else {
      val = bytes.subarray(offset + 2, offset + 2 + len);
    }
    switch (id) {
      case 1:
        params.profile = val;
        break;
      case 2:
        params.level = val;
        break;
      case 3:
        params.bitDepth = val;
        break;
      case 4:
        params.chromaSubsampling = val;
        break;
      default:
        params[id] = val;
    }
    offset += 2 + len;
  }
  return params;
};
/**
 * Parse the Track elements of an ebml (webm/mkv) file into track objects
 * with a best-effort mime codec string derived from CodecID/CodecPrivate.
 *
 * @param {TypedArray} bytes
 *        bytes of the ebml file
 * @return {Object[]}
 *        decoded tracks sorted by track number; each has rawCodec, type
 *        ('video'|'audio'|'subtitle'), codecPrivate, number,
 *        defaultDuration, default, rawData, and codec.
 */
export var parseTracks = function parseTracks(bytes) {
  bytes = toUint8(bytes);
  var decodedTracks = [];
  // search progressively shallower paths so both whole files and
  // partial segments can be parsed
  var tracks = findEbml(bytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.Track]);
  if (!tracks.length) {
    tracks = findEbml(bytes, [EBML_TAGS.Tracks, EBML_TAGS.Track]);
  }
  if (!tracks.length) {
    tracks = findEbml(bytes, [EBML_TAGS.Track]);
  }
  if (!tracks.length) {
    return decodedTracks;
  }
  tracks.forEach(function (track) {
    var trackType = findEbml(track, EBML_TAGS.TrackType)[0];
    if (!trackType || !trackType.length) {
      return;
    } // 1 is video, 2 is audio, 17 is subtitle
    // other values are unimportant in this context
    if (trackType[0] === 1) {
      trackType = 'video';
    } else if (trackType[0] === 2) {
      trackType = 'audio';
    } else if (trackType[0] === 17) {
      trackType = 'subtitle';
    } else {
      // skip tracks of any other type
      return;
    } // todo parse language
    var decodedTrack = {
      rawCodec: bytesToString(findEbml(track, [EBML_TAGS.CodecID])[0]),
      type: trackType,
      codecPrivate: findEbml(track, [EBML_TAGS.CodecPrivate])[0],
      number: bytesToNumber(findEbml(track, [EBML_TAGS.TrackNumber])[0]),
      defaultDuration: bytesToNumber(findEbml(track, [EBML_TAGS.DefaultDuration])[0]),
      default: findEbml(track, [EBML_TAGS.FlagDefault])[0],
      rawData: track
    };
    // map the matroska CodecID to a mime codec string, refining it with
    // CodecPrivate sub-parameters where available
    var codec = '';
    if (/V_MPEG4\/ISO\/AVC/.test(decodedTrack.rawCodec)) {
      codec = "avc1." + getAvcCodec(decodedTrack.codecPrivate);
    } else if (/V_MPEGH\/ISO\/HEVC/.test(decodedTrack.rawCodec)) {
      codec = "hev1." + getHvcCodec(decodedTrack.codecPrivate);
    } else if (/V_MPEG4\/ISO\/ASP/.test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        codec = 'mp4v.20.' + decodedTrack.codecPrivate[4].toString();
      } else {
        codec = 'mp4v.20.9';
      }
    } else if (/^V_THEORA/.test(decodedTrack.rawCodec)) {
      codec = 'theora';
    } else if (/^V_VP8/.test(decodedTrack.rawCodec)) {
      codec = 'vp8';
    } else if (/^V_VP9/.test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        var _parseVp9Private = parseVp9Private(decodedTrack.codecPrivate),
          profile = _parseVp9Private.profile,
          level = _parseVp9Private.level,
          bitDepth = _parseVp9Private.bitDepth,
          chromaSubsampling = _parseVp9Private.chromaSubsampling;
        codec = 'vp09.';
        codec += padStart(profile, 2, '0') + ".";
        codec += padStart(level, 2, '0') + ".";
        codec += padStart(bitDepth, 2, '0') + ".";
        codec += "" + padStart(chromaSubsampling, 2, '0'); // Video -> Colour -> Ebml name
        var matrixCoefficients = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB1]])[0] || [];
        var videoFullRangeFlag = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB9]])[0] || [];
        var transferCharacteristics = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBA]])[0] || [];
        var colourPrimaries = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBB]])[0] || []; // if we find any optional codec parameter specify them all.
        if (matrixCoefficients.length || videoFullRangeFlag.length || transferCharacteristics.length || colourPrimaries.length) {
          codec += "." + padStart(colourPrimaries[0], 2, '0');
          codec += "." + padStart(transferCharacteristics[0], 2, '0');
          codec += "." + padStart(matrixCoefficients[0], 2, '0');
          codec += "." + padStart(videoFullRangeFlag[0], 2, '0');
        }
      } else {
        codec = 'vp9';
      }
    } else if (/^V_AV1/.test(decodedTrack.rawCodec)) {
      codec = "av01." + getAv1Codec(decodedTrack.codecPrivate);
    } else if (/A_ALAC/.test(decodedTrack.rawCodec)) {
      codec = 'alac';
    } else if (/A_MPEG\/L2/.test(decodedTrack.rawCodec)) {
      codec = 'mp2';
    } else if (/A_MPEG\/L3/.test(decodedTrack.rawCodec)) {
      codec = 'mp3';
    } else if (/^A_AAC/.test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        // the audio object type is the top 5 bits of the first
        // AudioSpecificConfig byte
        codec = 'mp4a.40.' + (decodedTrack.codecPrivate[0] >>> 3).toString();
      } else {
        codec = 'mp4a.40.2';
      }
    } else if (/^A_AC3/.test(decodedTrack.rawCodec)) {
      codec = 'ac-3';
    } else if (/^A_PCM/.test(decodedTrack.rawCodec)) {
      codec = 'pcm';
    } else if (/^A_MS\/ACM/.test(decodedTrack.rawCodec)) {
      codec = 'speex';
    } else if (/^A_EAC3/.test(decodedTrack.rawCodec)) {
      codec = 'ec-3';
    } else if (/^A_VORBIS/.test(decodedTrack.rawCodec)) {
      codec = 'vorbis';
    } else if (/^A_FLAC/.test(decodedTrack.rawCodec)) {
      codec = 'flac';
    } else if (/^A_OPUS/.test(decodedTrack.rawCodec)) {
      codec = 'opus';
    }
    decodedTrack.codec = codec;
    decodedTracks.push(decodedTrack);
  });
  // return tracks in TrackNumber order
  return decodedTracks.sort(function (a, b) {
    return a.number - b.number;
  });
};
/**
 * Decode the Clusters of an ebml Segment into a flat, byte-ordered list
 * of blocks.
 *
 * @param {TypedArray} data
 *        bytes of the ebml file
 * @param {Object[]} [tracks]
 *        previously parsed tracks; parsed from the segment when omitted
 * @return {Object}
 *        {tracks, blocks} where blocks are the decoded SimpleBlock and
 *        BlockGroup elements of every cluster
 */
export var parseData = function parseData(data, tracks) {
  var segment = findEbml(data, [EBML_TAGS.Segment])[0];
  // TimestampScale is in nanoseconds and defaults to 1ms
  var scaleBytes = findEbml(segment, [EBML_TAGS.SegmentInfo, EBML_TAGS.TimestampScale])[0];
  var timestampScale = scaleBytes && scaleBytes.length ? bytesToNumber(scaleBytes) : 1000000;
  if (!tracks) {
    tracks = parseTracks(segment);
  }
  var decodedBlocks = [];
  findEbml(segment, [EBML_TAGS.Cluster]).forEach(function (cluster) {
    // collect blocks of one kind, tagging each with its type
    var collect = function collect(tag, type) {
      return findEbml(cluster, [tag]).map(function (b) {
        return {
          type: type,
          data: b
        };
      });
    };
    var rawTimestamp = findEbml(cluster, [EBML_TAGS.Timestamp])[0] || 0;
    var clusterTimestamp = rawTimestamp && rawTimestamp.length ? bytesToNumber(rawTimestamp) : rawTimestamp;
    // gather all blocks, then restore their original byte order
    var blocks = collect(EBML_TAGS.SimpleBlock, 'simple').concat(collect(EBML_TAGS.BlockGroup, 'group'));
    blocks.sort(function (a, b) {
      return a.data.byteOffset - b.data.byteOffset;
    });
    blocks.forEach(function (block) {
      decodedBlocks.push(decodeBlock(block.data, block.type, timestampScale, clusterTimestamp));
    });
  });
  return {
    tracks: tracks,
    blocks: decodedBlocks
  };
};

View file

@ -1,388 +0,0 @@
import { bytesToString, toUint8, toHexString, bytesMatch } from './byte-helpers.js';
import { parseTracks as parseEbmlTracks } from './ebml-helpers.js';
import { parseTracks as parseMp4Tracks } from './mp4-helpers.js';
import { findFourCC } from './riff-helpers.js';
import { getPages } from './ogg-helpers.js';
import { detectContainerForBytes } from './containers.js';
import { findH264Nal, findH265Nal } from './nal-helpers.js';
import { parseTs } from './m2ts-helpers.js';
import { getAvcCodec, getHvcCodec } from './codec-helpers.js';
import { getId3Offset } from './id3-helpers.js'; // https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// https://tools.ietf.org/html/rfc2361
// Map a 2-byte wFormatTag (big-endian order) to a codec name, or ''
// when the tag is unrecognized. aac has two registered tags, so it
// appears twice in the lookup table.
var wFormatTagCodec = function wFormatTagCodec(wFormatTag) {
  wFormatTag = toUint8(wFormatTag);
  var lookup = [
    [[0x00, 0x55], 'mp3'],
    [[0x16, 0x00], 'aac'],
    [[0x00, 0xFF], 'aac'],
    [[0x70, 0x4f], 'opus'],
    [[0x6C, 0x61], 'alac'],
    [[0xF1, 0xAC], 'flac'],
    [[0x20, 0x00], 'ac-3'],
    [[0xFF, 0xFE], 'ec-3'],
    [[0x00, 0x50], 'mp2'],
    [[0x56, 0x6f], 'vorbis'],
    [[0xA1, 0x09], 'speex']
  ];
  for (var i = 0; i < lookup.length; i++) {
    if (bytesMatch(wFormatTag, lookup[i][0])) {
      return lookup[i][1];
    }
  }
  return '';
};
/**
 * Build a mimetype string such as `video/mp4;codecs="avc1,mp4a"` from a
 * container name and a {video, audio} codec map. Video takes precedence
 * for the top-level type and is listed first in the codecs parameter;
 * the codecs suffix is omitted when no codecs are known.
 */
var formatMimetype = function formatMimetype(name, codecs) {
  var codecList = [];
  if (codecs.video) {
    codecList.push(codecs.video);
  }
  if (codecs.audio) {
    codecList.push(codecs.audio);
  }
  var mimetype = (codecs.video ? 'video' : 'audio') + "/" + name;
  if (codecList.length) {
    mimetype += ";codecs=\"" + codecList.join(',') + "\"";
  }
  return mimetype;
};
/**
 * Per-container codec parsers. Each function takes the file bytes and
 * returns {codecs: {video?, audio?}, mimetype}; containers that carry no
 * codec information return a fixed mimetype with empty codecs.
 */
var parseCodecFrom = {
  mov: function mov(bytes) {
    // mov and mp4 both use a nearly identical box structure.
    var retval = parseCodecFrom.mp4(bytes);
    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('mp4', 'quicktime');
    }
    return retval;
  },
  mp4: function mp4(bytes) {
    bytes = toUint8(bytes);
    var codecs = {};
    var tracks = parseMp4Tracks(bytes);
    // keep the first audio and first video track found
    for (var i = 0; i < tracks.length; i++) {
      var track = tracks[i];
      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }
      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }
    return {
      codecs: codecs,
      mimetype: formatMimetype('mp4', codecs)
    };
  },
  '3gp': function gp(bytes) {
    // no codec detection for 3gp
    return {
      codecs: {},
      mimetype: 'video/3gpp'
    };
  },
  ogg: function ogg(bytes) {
    // codec magic strings live at fixed offsets within the first pages
    var pages = getPages(bytes, 0, 4);
    var codecs = {};
    pages.forEach(function (page) {
      if (bytesMatch(page, [0x4F, 0x70, 0x75, 0x73], {
        offset: 28
      })) {
        codecs.audio = 'opus';
      } else if (bytesMatch(page, [0x56, 0x50, 0x38, 0x30], {
        offset: 29
      })) {
        codecs.video = 'vp8';
      } else if (bytesMatch(page, [0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {
        offset: 29
      })) {
        codecs.video = 'theora';
      } else if (bytesMatch(page, [0x46, 0x4C, 0x41, 0x43], {
        offset: 29
      })) {
        codecs.audio = 'flac';
      } else if (bytesMatch(page, [0x53, 0x70, 0x65, 0x65, 0x78], {
        offset: 28
      })) {
        codecs.audio = 'speex';
      } else if (bytesMatch(page, [0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {
        offset: 29
      })) {
        codecs.audio = 'vorbis';
      }
    });
    return {
      codecs: codecs,
      mimetype: formatMimetype('ogg', codecs)
    };
  },
  wav: function wav(bytes) {
    var format = findFourCC(bytes, ['WAVE', 'fmt'])[0];
    // wFormatTag is stored little-endian; reverse for big-endian matching
    var wFormatTag = Array.prototype.slice.call(format, 0, 2).reverse();
    var mimetype = 'audio/vnd.wave';
    var codecs = {
      audio: wFormatTagCodec(wFormatTag)
    };
    var codecString = wFormatTag.reduce(function (acc, v) {
      if (v) {
        acc += toHexString(v);
      }
      return acc;
    }, '');
    if (codecString) {
      mimetype += ";codec=" + codecString;
    }
    if (codecString && !codecs.audio) {
      // fall back to the raw tag hex when the tag is unrecognized
      codecs.audio = codecString;
    }
    return {
      codecs: codecs,
      mimetype: mimetype
    };
  },
  avi: function avi(bytes) {
    var movi = findFourCC(bytes, ['AVI', 'movi'])[0];
    var strls = findFourCC(bytes, ['AVI', 'hdrl', 'strl']);
    var codecs = {};
    strls.forEach(function (strl) {
      var strh = findFourCC(strl, ['strh'])[0];
      var strf = findFourCC(strl, ['strf'])[0]; // now parse AVIStreamHeader to get codec and type:
      // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/api/avifmt/ns-avifmt-avistreamheader
      var type = bytesToString(strh.subarray(0, 4));
      var codec;
      var codecType;
      if (type === 'vids') {
        // https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapinfoheader
        var handler = bytesToString(strh.subarray(4, 8));
        var compression = bytesToString(strf.subarray(16, 20)); // look for 00dc (compressed video fourcc code) or 00db (uncompressed video fourcc code)
        // fix: the 00db lookup previously read `findFourCC(movi, ['00db'][0])`,
        // which indexed the path-literal and used the returned *array* as
        // videoData; index the result instead so we get the first chunk.
        var videoData = findFourCC(movi, ['00dc'])[0] || findFourCC(movi, ['00db'])[0];
        if (handler === 'H264' || compression === 'H264') {
          if (videoData && videoData.length) {
            codec = parseCodecFrom.h264(videoData).codecs.video;
          } else {
            codec = 'avc1';
          }
        } else if (handler === 'HEVC' || compression === 'HEVC') {
          if (videoData && videoData.length) {
            codec = parseCodecFrom.h265(videoData).codecs.video;
          } else {
            codec = 'hev1';
          }
        } else if (handler === 'FMP4' || compression === 'FMP4') {
          if (movi.length) {
            codec = 'mp4v.20.' + movi[12].toString();
          } else {
            codec = 'mp4v.20';
          }
        } else if (handler === 'VP80' || compression === 'VP80') {
          codec = 'vp8';
        } else if (handler === 'VP90' || compression === 'VP90') {
          codec = 'vp9';
        } else if (handler === 'AV01' || compression === 'AV01') {
          codec = 'av01';
        } else if (handler === 'theo' || compression === 'theora') {
          codec = 'theora';
        } else {
          // unknown handler: sniff the stream bytes themselves
          if (videoData && videoData.length) {
            var result = detectContainerForBytes(videoData);
            if (result === 'h264') {
              codec = parseCodecFrom.h264(movi).codecs.video;
            }
            if (result === 'h265') {
              codec = parseCodecFrom.h265(movi).codecs.video;
            }
          }
          if (!codec) {
            codec = handler || compression;
          }
        }
        codecType = 'video';
      } else if (type === 'auds') {
        codecType = 'audio'; // look for 00wb (audio data fourcc)
        // const audioData = findFourCC(movi, ['01wb']);
        var wFormatTag = Array.prototype.slice.call(strf, 0, 2).reverse();
        codecs.audio = wFormatTagCodec(wFormatTag);
      } else {
        // not a stream type we understand
        return;
      }
      if (codec) {
        codecs[codecType] = codec;
      }
    });
    return {
      codecs: codecs,
      mimetype: formatMimetype('avi', codecs)
    };
  },
  ts: function ts(bytes) {
    var result = parseTs(bytes);
    var codecs = {};
    Object.keys(result.streams).forEach(function (esPid) {
      var stream = result.streams[esPid];
      // refine generic avc1/hev1 with profile info from the first packet
      if (stream.codec === 'avc1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h264(stream.packets[0]).codecs.video;
      } else if (stream.codec === 'hev1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h265(stream.packets[0]).codecs.video;
      }
      codecs[stream.type] = stream.codec;
    });
    return {
      codecs: codecs,
      mimetype: formatMimetype('mp2t', codecs)
    };
  },
  webm: function webm(bytes) {
    // mkv and webm both use ebml to store codec info
    var retval = parseCodecFrom.mkv(bytes);
    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('x-matroska', 'webm');
    }
    return retval;
  },
  mkv: function mkv(bytes) {
    var codecs = {};
    var tracks = parseEbmlTracks(bytes);
    // keep the first audio and first video track found
    for (var i = 0; i < tracks.length; i++) {
      var track = tracks[i];
      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }
      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }
    return {
      codecs: codecs,
      mimetype: formatMimetype('x-matroska', codecs)
    };
  },
  aac: function aac(bytes) {
    return {
      codecs: {
        audio: 'aac'
      },
      mimetype: 'audio/aac'
    };
  },
  ac3: function ac3(bytes) {
    // past id3 and syncword
    var offset = getId3Offset(bytes) + 2; // default to ac-3
    var codec = 'ac-3';
    if (bytesMatch(bytes, [0xB8, 0xE0], {
      offset: offset
    })) {
      codec = 'ac-3'; // 0x01, 0x7F
    } else if (bytesMatch(bytes, [0x01, 0x7f], {
      offset: offset
    })) {
      codec = 'ec-3';
    }
    return {
      codecs: {
        audio: codec
      },
      mimetype: 'audio/vnd.dolby.dd-raw'
    };
  },
  mp3: function mp3(bytes) {
    return {
      codecs: {
        audio: 'mp3'
      },
      mimetype: 'audio/mpeg'
    };
  },
  flac: function flac(bytes) {
    return {
      codecs: {
        audio: 'flac'
      },
      mimetype: 'audio/flac'
    };
  },
  'h264': function h264(bytes) {
    // find seq_parameter_set_rbsp to get encoding settings for codec
    var nal = findH264Nal(bytes, 7, 3);
    var retval = {
      codecs: {
        video: 'avc1'
      },
      mimetype: 'video/h264'
    };
    if (nal.length) {
      retval.codecs.video += "." + getAvcCodec(nal);
    }
    return retval;
  },
  'h265': function h265(bytes) {
    var retval = {
      codecs: {
        video: 'hev1'
      },
      mimetype: 'video/h265'
    }; // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    // to get encoding settings for codec
    var nal = findH265Nal(bytes, [32, 33], 3);
    if (nal.length) {
      var type = nal[0] >> 1 & 0x3F; // profile_tier_level starts at byte 5 for video_parameter_set_rbsp
      // byte 2 for seq_parameter_set_rbsp
      retval.codecs.video += "." + getHvcCodec(nal.subarray(type === 32 ? 5 : 2));
    }
    return retval;
  }
};
/**
 * Detect the container and codecs for arbitrary media bytes.
 *
 * @param {TypedArray} bytes
 *        the media bytes to inspect
 * @return {Object}
 *        {container, codecs, mimetype}; codecs is {} and mimetype is ''
 *        when the container is unknown or carries no codec info
 */
export var parseFormatForBytes = function parseFormatForBytes(bytes) {
  bytes = toUint8(bytes);
  var result = {
    codecs: {},
    container: detectContainerForBytes(bytes),
    mimetype: ''
  };
  var parseCodecFn = parseCodecFrom[result.container];
  if (parseCodecFn) {
    // guard against a parser returning nothing
    var parsed = parseCodecFn(bytes) || {};
    result.codecs = parsed.codecs || {};
    result.mimetype = parsed.mimetype || '';
  }
  return result;
};

View file

@ -1,37 +0,0 @@
import { toUint8, bytesMatch } from './byte-helpers.js';
var ID3 = toUint8([0x49, 0x44, 0x33]); // ASCII "ID3", the tag header magic bytes
/**
 * Total byte size of the ID3 tag starting at `offset`: the synchsafe
 * size field plus the 10 byte header, plus 10 more when the footer
 * flag (bit 4 of the flags byte) is set.
 *
 * @param {TypedArray} bytes - bytes containing the tag
 * @param {number} [offset=0] - where the tag header starts
 * @return {number} total tag size in bytes
 */
export var getId3Size = function getId3Size(bytes, offset) {
  if (offset === void 0) {
    offset = 0;
  }
  bytes = toUint8(bytes);
  // the size is a 28-bit synchsafe integer: four bytes, 7 bits each
  var tagSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
  var hasFooter = (bytes[offset + 5] & 16) !== 0;
  return tagSize + (hasFooter ? 20 : 10);
};
/**
 * Return the offset of the first non-ID3 byte, skipping every ID3 tag
 * found at the front of the bytes.
 *
 * @param {TypedArray} bytes - bytes to scan
 * @param {number} [offset=0] - where to start scanning
 * @return {number} offset just past any ID3 tags
 */
export var getId3Offset = function getId3Offset(bytes, offset) {
  if (offset === void 0) {
    offset = 0;
  }
  bytes = toUint8(bytes);
  // some files contain multiple back-to-back ID3 sections even though
  // they should not, so keep skipping tags until none remain
  while (bytes.length - offset >= 10 && bytesMatch(bytes, ID3, {
    offset: offset
  })) {
    offset += getId3Size(bytes, offset);
  }
  return offset;
};

View file

@ -1,16 +0,0 @@
import * as codecs from './codecs';
import * as byteHelpers from './byte-helpers.js';
import * as containers from './containers.js';
import decodeB64ToUint8Array from './decode-b64-to-uint8-array.js';
import * as mediaGroups from './media-groups.js';
import resolveUrl from './resolve-url.js';
import Stream from './stream.js';
// aggregate default export bundling the individual vhs-utils helpers
export default {
  codecs: codecs,
  byteHelpers: byteHelpers,
  containers: containers,
  decodeB64ToUint8Array: decodeB64ToUint8Array,
  mediaGroups: mediaGroups,
  resolveUrl: resolveUrl,
  Stream: Stream
};

View file

@ -1,108 +0,0 @@
import { bytesMatch, toUint8 } from './byte-helpers.js';
var SYNC_BYTE = 0x47; // use of maxPes is deprecated as we should always look at
// all pes packets to prevent being caught off guard by changes
// in that stream that happen after the pes specified

/**
 * Parse an MPEG2-TS buffer into a pmt description of its elementary
 * streams.
 *
 * @param {TypedArray} bytes - the transport stream bytes
 * @param {number} [maxPes=Infinity] - deprecated; max pes packets to collect
 * @return {Object}
 *         {pid, streams: {[esPid]: {esInfo, typeNumber, packets, type, codec}}};
 *         streams is always present (possibly empty)
 */
export var parseTs = function parseTs(bytes, maxPes) {
  if (maxPes === void 0) {
    maxPes = Infinity;
  }
  bytes = toUint8(bytes);
  var startIndex = 0;
  var endIndex = 188; // ts packets are always 188 bytes
  var pmt = {};
  var pesCount = 0;
  while (endIndex < bytes.byteLength && pesCount < maxPes) {
    // resync: slide forward one byte at a time until both packet edges
    // land on a sync byte
    if (bytes[startIndex] !== SYNC_BYTE && bytes[endIndex] !== SYNC_BYTE) {
      endIndex += 1;
      startIndex += 1;
      continue;
    }
    var packet = bytes.subarray(startIndex, endIndex);
    // pid: low 5 bits of byte 1 plus all of byte 2
    var pid = (packet[1] & 0x1f) << 8 | packet[2];
    var hasPusi = !!(packet[1] & 0x40); // payload unit start indicator
    var hasAdaptationHeader = (packet[3] & 0x30) >>> 4 > 0x01;
    var payloadOffset = 4 + (hasAdaptationHeader ? packet[4] + 1 : 0);
    if (hasPusi) {
      payloadOffset += packet[payloadOffset] + 1;
    }
    if (pid === 0 && !pmt.pid) {
      // PAT packet: read the pid of the program map table
      pmt.pid = (packet[payloadOffset + 10] & 0x1f) << 8 | packet[payloadOffset + 11];
    } else if (pmt.pid && pid === pmt.pid) {
      var isNotForward = packet[payloadOffset + 5] & 0x01; // ignore forward pmt declarations
      // NOTE(review): this continue skips the 188-byte index advance at
      // the bottom of the loop, so a forward pmt declaration would loop
      // on the same packet forever — confirm upstream.
      if (!isNotForward) {
        continue;
      }
      pmt.streams = pmt.streams || {};
      var sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
      var tableEnd = 3 + sectionLength - 4;
      var programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
      var offset = 12 + programInfoLength;
      while (offset < tableEnd) {
        // add an entry that maps the elementary_pid to the stream_type
        var i = payloadOffset + offset;
        var type = packet[i];
        var esPid = (packet[i + 1] & 0x1F) << 8 | packet[i + 2];
        var esLength = (packet[i + 3] & 0x0f) << 8 | packet[i + 4];
        var esInfo = packet.subarray(i + 5, i + 5 + esLength);
        var stream = pmt.streams[esPid] = {
          esInfo: esInfo,
          typeNumber: type,
          packets: [],
          type: '',
          codec: ''
        };
        // map the stream_type number to a type/codec pair; opus rides
        // in a private stream (0x06) flagged via its registration bytes
        if (type === 0x06 && bytesMatch(esInfo, [0x4F, 0x70, 0x75, 0x73], {
          offset: 2
        })) {
          stream.type = 'audio';
          stream.codec = 'opus';
        } else if (type === 0x1B || type === 0x20) {
          stream.type = 'video';
          stream.codec = 'avc1';
        } else if (type === 0x24) {
          stream.type = 'video';
          stream.codec = 'hev1';
        } else if (type === 0x10) {
          stream.type = 'video';
          stream.codec = 'mp4v.20';
        } else if (type === 0x0F) {
          stream.type = 'audio';
          stream.codec = 'aac';
        } else if (type === 0x81) {
          stream.type = 'audio';
          stream.codec = 'ac-3';
        } else if (type === 0x87) {
          stream.type = 'audio';
          stream.codec = 'ec-3';
        } else if (type === 0x03 || type === 0x04) {
          stream.type = 'audio';
          stream.codec = 'mp3';
        }
        offset += esLength + 5;
      }
    } else if (pmt.pid && pmt.streams) {
      // pes packet: collect its payload on the matching stream
      // NOTE(review): assumes pid was listed in the pmt; an unlisted pid
      // would throw here — confirm upstream.
      pmt.streams[pid].packets.push(packet.subarray(payloadOffset));
      pesCount++;
    }
    startIndex += 188;
    endIndex += 188;
  }
  if (!pmt.streams) {
    pmt.streams = {};
  }
  return pmt;
};

View file

@ -1,21 +0,0 @@
/**
 * Loops through all supported media groups in master and calls the
 * provided callback for each group.
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {string[]} groups
 *        The media groups to call the callback for
 * @param {Function} callback
 *        Called with (mediaProperties, mediaType, groupKey, labelKey)
 *        for every label of every group of every listed media type
 */
export var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
  groups.forEach(function (mediaType) {
    var groupsOfType = master.mediaGroups[mediaType] || {};
    Object.keys(groupsOfType).forEach(function (groupKey) {
      var labels = groupsOfType[groupKey] || {};
      Object.keys(labels).forEach(function (labelKey) {
        callback(labels[labelKey], mediaType, groupKey, labelKey);
      });
    });
  });
};

View file

@ -1,36 +0,0 @@
var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
var DASH_REGEX = /^application\/dash\+xml/i;
/**
 * Returns a string that describes the type of source based on a video
 * source object's media type.
 *
 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
 *
 * @param {string} type
 *        Video source object media type
 * @return {('hls'|'dash'|'vhs-json'|null)}
 *        VHS source type string, or null for anything unrecognized
 */
export var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
  // Denotes the special case of a manifest object passed to
  // http-streaming instead of a source URL.
  //
  // See https://en.wikipedia.org/wiki/Media_type for details on
  // specifying media types. Here vnd stands for vendor, video.js for
  // the organization, VHS for this project, and the +json suffix
  // identifies the structure of the media type.
  if (type === 'application/vnd.videojs.vhs+json') {
    return 'vhs-json';
  }
  if (MPEGURL_REGEX.test(type)) {
    return 'hls';
  }
  return DASH_REGEX.test(type) ? 'dash' : null;
};

View file

@ -1,553 +0,0 @@
import { stringToBytes, toUint8, bytesMatch, bytesToString, toHexString, padStart, bytesToNumber } from './byte-helpers.js';
import { getAvcCodec, getHvcCodec, getAv1Codec } from './codec-helpers.js';
import { parseOpusHead } from './opus-helpers.js';
/**
 * Normalize one box-path element: strings are converted to byte arrays
 * so they can be compared against box names; numbers and byte arrays
 * pass through unchanged.
 *
 * @param {string|number|TypedArray} path - one path element
 * @return {number|TypedArray} the normalized element
 */
var normalizePath = function normalizePath(path) {
  if (typeof path === 'string') {
    return stringToBytes(path);
  }
  // numbers and byte arrays are already in comparable form
  return path;
};
/**
 * Normalize a path or list of paths: a non-array input becomes a
 * one-element array, and every element is run through normalizePath.
 */
var normalizePaths = function normalizePaths(paths) {
  var list = Array.isArray(paths) ? paths : [paths];
  return list.map(function (p) {
    return normalizePath(p);
  });
};
var DESCRIPTORS; // populated below; declared first so parsers can recurse
/**
 * Parse a run of MPEG-4 (ISO/IEC 14496-1) descriptors. Each descriptor
 * is a tag byte followed by a base-128 varint size and a payload; only
 * tags with a registered parser in DESCRIPTORS are returned.
 *
 * @param {TypedArray} bytes - bytes containing consecutive descriptors
 * @return {Object[]} the parsed descriptors
 */
export var parseDescriptors = function parseDescriptors(bytes) {
  bytes = toUint8(bytes);
  var results = [];
  var i = 0;
  while (bytes.length > i) {
    var tag = bytes[i];
    var size = 0;
    var headerSize = 0; // tag
    headerSize++;
    // fix: size bytes were previously read from bytes[headerSize]
    // without adding i, so every descriptor after the first reread the
    // first descriptor's header.
    var byte = bytes[i + headerSize]; // first byte
    headerSize++;
    // the size is 7 bits per byte with the high bit set on every byte
    // except the last; accumulate all continuation bytes (previously
    // each iteration overwrote the running total).
    while (byte & 0x80) {
      size = (size | byte & 0x7F) << 7;
      byte = bytes[i + headerSize];
      headerSize++;
    }
    size += byte & 0x7F;
    // hand the payload to the matching registered parser, if any
    for (var z = 0; z < DESCRIPTORS.length; z++) {
      var _DESCRIPTORS$z = DESCRIPTORS[z],
        id = _DESCRIPTORS$z.id,
        parser = _DESCRIPTORS$z.parser;
      if (tag === id) {
        results.push(parser(bytes.subarray(i + headerSize, i + headerSize + size)));
        break;
      }
    }
    i += size + headerSize;
  }
  return results;
};
// parsers for the MPEG-4 descriptor tags used by parseDescriptors
DESCRIPTORS = [{
  // 0x03: ES_Descriptor
  id: 0x03,
  parser: function parser(bytes) {
    var desc = {
      tag: 0x03,
      id: bytes[0] << 8 | bytes[1],
      flags: bytes[2],
      size: 3,
      dependsOnEsId: 0,
      ocrEsId: 0,
      descriptors: [],
      url: ''
    }; // depends on es id
    if (desc.flags & 0x80) {
      desc.dependsOnEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    } // url
    if (desc.flags & 0x40) {
      var len = bytes[desc.size];
      desc.url = bytesToString(bytes.subarray(desc.size + 1, desc.size + 1 + len));
      // NOTE(review): size is advanced by the url length but not the
      // length byte itself — confirm against ISO/IEC 14496-1
      desc.size += len;
    } // ocr es id
    if (desc.flags & 0x20) {
      desc.ocrEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    }
    // whatever follows the header fields is nested descriptors
    desc.descriptors = parseDescriptors(bytes.subarray(desc.size)) || [];
    return desc;
  }
}, {
  // 0x04: DecoderConfigDescriptor
  id: 0x04,
  parser: function parser(bytes) {
    // DecoderConfigDescriptor
    var desc = {
      tag: 0x04,
      oti: bytes[0],
      streamType: bytes[1],
      bufferSize: bytes[2] << 16 | bytes[3] << 8 | bytes[4],
      maxBitrate: bytes[5] << 24 | bytes[6] << 16 | bytes[7] << 8 | bytes[8],
      avgBitrate: bytes[9] << 24 | bytes[10] << 16 | bytes[11] << 8 | bytes[12],
      descriptors: parseDescriptors(bytes.subarray(13))
    };
    return desc;
  }
}, {
  // 0x05: DecoderSpecificInfo (opaque; kept as raw bytes)
  id: 0x05,
  parser: function parser(bytes) {
    // DecoderSpecificInfo
    return {
      tag: 0x05,
      bytes: bytes
    };
  }
}, {
  // 0x06: SLConfigDescriptor (opaque; kept as raw bytes)
  id: 0x06,
  parser: function parser(bytes) {
    // SLConfigDescriptor
    return {
      tag: 0x06,
      bytes: bytes
    };
  }
}];
/**
 * find any number of boxes by name given a path to it in an iso bmff
 * such as mp4.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {Uint8Array[]|string[]|string|Uint8Array} paths
 *        An array of paths or a single path representing the name
 *        of boxes to search through in bytes. Paths may be
 *        uint8 (character codes) or strings.
 *
 * @param {boolean} [complete=false]
 *        Should we search only for complete boxes on the final path.
 *        This is very useful when you do not want to get back partial boxes
 *        in the case of streaming files.
 *
 * @return {Uint8Array[]}
 *        An array of the end paths that we found.
 */
export var findBox = function findBox(bytes, paths, complete) {
  if (complete === void 0) {
    complete = false;
  }
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);
  var results = [];
  if (!paths.length) {
    // short-circuit the search for empty paths
    return results;
  }
  var offset = 0;
  while (offset < bytes.length) {
    // a box is a 32-bit big-endian size (including the 8 byte header)
    // followed by a 4 character type
    var size = (bytes[offset] << 24 | bytes[offset + 1] << 16 | bytes[offset + 2] << 8 | bytes[offset + 3]) >>> 0;
    var type = bytes.subarray(offset + 4, offset + 8);
    if (size === 0) {
      // invalid box format
      break;
    }
    var boxEnd = offset + size;
    if (boxEnd > bytes.length) {
      // this box is bigger than the number of bytes we have and
      // complete is set, we cannot find any more boxes.
      if (complete) {
        break;
      }
      boxEnd = bytes.length;
    }
    var payload = bytes.subarray(offset + 8, boxEnd);
    if (bytesMatch(type, paths[0])) {
      if (paths.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(payload);
      } else {
        // recursively search for the next box along the path
        results.push.apply(results, findBox(payload, paths.slice(1), complete));
      }
    }
    offset = boxEnd;
  } // we've finished searching all of bytes
  return results;
};
/**
 * Search for a single matching box by name in an iso bmff format like
 * mp4. This function is useful for finding codec boxes which
 * can be placed arbitrarily in sample descriptions depending
 * on the version of the file or file type.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {string|Uint8Array} name
 *        The name of the box to find.
 *
 * @return {Uint8Array[]}
 *        a subarray of bytes representing the named box we found, or an
 *        empty view when no match exists.
 */
export var findNamedBox = function findNamedBox(bytes, name) {
  name = normalizePath(name);
  if (!name.length) {
    // short-circuit the search for empty paths
    return bytes.subarray(bytes.length);
  }
  for (var i = 0; i < bytes.length; i++) {
    if (!bytesMatch(bytes.subarray(i, i + name.length), name)) {
      continue;
    }
    // the 32-bit big-endian box size sits directly before the name
    var size = (bytes[i - 4] << 24 | bytes[i - 3] << 16 | bytes[i - 2] << 8 | bytes[i - 1]) >>> 0;
    var end = size > 1 ? i + size : bytes.byteLength;
    return bytes.subarray(i + 4, end);
  } // we've finished searching all of bytes
  return bytes.subarray(bytes.length);
};
// Parse an mp4 sample-table full box (stco/stss/stts/stsc/...) into an
// array of entries. Each entry is entrySize bytes and is handed to
// parseEntry; by default entries are 4 byte big-endian numbers.
var parseSamples = function parseSamples(data, entrySize, parseEntry) {
  if (entrySize === void 0) {
    entrySize = 4;
  }
  if (parseEntry === void 0) {
    parseEntry = function parseEntry(d) {
      return bytesToNumber(d);
    };
  }
  var entries = [];
  if (!data || !data.length) {
    return entries;
  }
  // bytes 4-8 hold the entry count; entries start at byte 8
  var remaining = bytesToNumber(data.subarray(4, 8));
  var offset = 8;
  while (remaining--) {
    entries.push(parseEntry(data.subarray(offset, offset + entrySize)));
    offset += entrySize;
  }
  return entries;
};
/**
 * Build a frame table from an mp4 sample table (stbl) box, combining
 * the stss/stco/stts/stsc/stsz sub-boxes into one frame list.
 *
 * @param {TypedArray} stbl - the stbl box payload
 * @param {number} timescale - the track timescale (units per second)
 * @return {Object[]} frames with keyframe, start, end, timestamp, duration
 */
export var buildFrameTable = function buildFrameTable(stbl, timescale) {
  // sync-sample (keyframe) numbers, 1-indexed
  var keySamples = parseSamples(findBox(stbl, ['stss'])[0]);
  // file offset of each chunk
  var chunkOffsets = parseSamples(findBox(stbl, ['stco'])[0]);
  // time-to-sample runs: sampleCount samples each lasting sampleDelta
  var timeToSamples = parseSamples(findBox(stbl, ['stts'])[0], 8, function (entry) {
    return {
      sampleCount: bytesToNumber(entry.subarray(0, 4)),
      sampleDelta: bytesToNumber(entry.subarray(4, 8))
    };
  });
  // sample-to-chunk runs: how many samples each chunk holds
  var samplesToChunks = parseSamples(findBox(stbl, ['stsc'])[0], 12, function (entry) {
    return {
      firstChunk: bytesToNumber(entry.subarray(0, 4)),
      samplesPerChunk: bytesToNumber(entry.subarray(4, 8)),
      sampleDescriptionIndex: bytesToNumber(entry.subarray(8, 12))
    };
  });
  var stsz = findBox(stbl, ['stsz'])[0]; // stsz starts with a 4 byte sampleSize which we don't need
  var sampleSizes = parseSamples(stsz && stsz.length && stsz.subarray(4) || null);
  var frames = [];
  for (var chunkIndex = 0; chunkIndex < chunkOffsets.length; chunkIndex++) {
    var samplesInChunk = void 0;
    // find the stsc entry covering this chunk: entries apply from their
    // firstChunk (1-indexed) until the next entry's firstChunk
    for (var i = 0; i < samplesToChunks.length; i++) {
      var sampleToChunk = samplesToChunks[i];
      var isThisOne = chunkIndex + 1 >= sampleToChunk.firstChunk && (i + 1 >= samplesToChunks.length || chunkIndex + 1 < samplesToChunks[i + 1].firstChunk);
      if (isThisOne) {
        samplesInChunk = sampleToChunk.samplesPerChunk;
        break;
      }
    }
    var chunkOffset = chunkOffsets[chunkIndex];
    for (var _i = 0; _i < samplesInChunk; _i++) {
      var frameEnd = sampleSizes[frames.length]; // if we don't have key samples every frame is a keyframe
      var keyframe = !keySamples.length;
      if (keySamples.length && keySamples.indexOf(frames.length + 1) !== -1) {
        keyframe = true;
      }
      var frame = {
        keyframe: keyframe,
        start: chunkOffset,
        end: chunkOffset + frameEnd
      };
      // derive the timestamp/duration from the time-to-sample table
      for (var k = 0; k < timeToSamples.length; k++) {
        var _timeToSamples$k = timeToSamples[k],
          sampleCount = _timeToSamples$k.sampleCount,
          sampleDelta = _timeToSamples$k.sampleDelta;
        if (frames.length <= sampleCount) {
          // ms to ns
          var lastTimestamp = frames.length ? frames[frames.length - 1].timestamp : 0;
          frame.timestamp = lastTimestamp + sampleDelta / timescale * 1000;
          frame.duration = sampleDelta;
          break;
        }
      }
      frames.push(frame);
      chunkOffset += frameEnd;
    }
  }
  return frames;
};
/**
 * Derive the codec string for a single sample description (stsd entry)
 * and attach it, plus basic stream info, to the given track.
 *
 * @param {Object} track - track object to mutate; codec and info are set
 * @param {TypedArray} bytes - the sample description bytes
 */
export var addSampleDescription = function addSampleDescription(track, bytes) {
  // the first 4 bytes of a sample description are its format fourcc
  var codec = bytesToString(bytes.subarray(0, 4));
  if (track.type === 'video') {
    track.info = track.info || {};
    track.info.width = bytes[28] << 8 | bytes[29];
    track.info.height = bytes[30] << 8 | bytes[31];
  } else if (track.type === 'audio') {
    track.info = track.info || {};
    track.info.channels = bytes[20] << 8 | bytes[21];
    track.info.bitDepth = bytes[22] << 8 | bytes[23];
    // NOTE(review): only two bytes of the sample rate field are read
    // here — presumably the integer part of the 16.16 value; confirm
    track.info.sampleRate = bytes[28] << 8 | bytes[29];
  }
  if (codec === 'avc1') {
    var avcC = findNamedBox(bytes, 'avcC'); // AVCDecoderConfigurationRecord
    codec += "." + getAvcCodec(avcC);
    track.info.avcC = avcC; // TODO: do we need to parse all this?
    /* {
       configurationVersion: avcC[0],
       profile: avcC[1],
       profileCompatibility: avcC[2],
       level: avcC[3],
       lengthSizeMinusOne: avcC[4] & 0x3
     };
     let spsNalUnitCount = avcC[5] & 0x1F;
     const spsNalUnits = track.info.avc.spsNalUnits = [];
     // past spsNalUnitCount
     let offset = 6;
     while (spsNalUnitCount--) {
       const nalLen = avcC[offset] << 8 | avcC[offset + 1];
       spsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
       offset += nalLen + 2;
     }
     let ppsNalUnitCount = avcC[offset];
     const ppsNalUnits = track.info.avc.ppsNalUnits = [];
     // past ppsNalUnitCount
     offset += 1;
     while (ppsNalUnitCount--) {
       const nalLen = avcC[offset] << 8 | avcC[offset + 1];
       ppsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
       offset += nalLen + 2;
     }*/
    // HEVCDecoderConfigurationRecord
  } else if (codec === 'hvc1' || codec === 'hev1') {
    codec += "." + getHvcCodec(findNamedBox(bytes, 'hvcC'));
  } else if (codec === 'mp4a' || codec === 'mp4v') {
    // the codec sub-parameters live in the esds descriptor chain
    var esds = findNamedBox(bytes, 'esds');
    var esDescriptor = parseDescriptors(esds.subarray(4))[0];
    var decoderConfig = esDescriptor && esDescriptor.descriptors.filter(function (_ref) {
      var tag = _ref.tag;
      return tag === 0x04;
    })[0];
    if (decoderConfig) {
      // most codecs do not have a further '.'
      // such as 0xa5 for ac-3 and 0xa6 for e-ac-3
      codec += '.' + toHexString(decoderConfig.oti);
      if (decoderConfig.oti === 0x40) {
        codec += '.' + (decoderConfig.descriptors[0].bytes[0] >> 3).toString();
      } else if (decoderConfig.oti === 0x20) {
        codec += '.' + decoderConfig.descriptors[0].bytes[4].toString();
      } else if (decoderConfig.oti === 0xdd) {
        codec = 'vorbis';
      }
    } else if (track.type === 'audio') {
      // no decoder config; fall back to the common defaults
      codec += '.40.2';
    } else {
      codec += '.20.9';
    }
  } else if (codec === 'av01') {
    // AV1DecoderConfigurationRecord
    codec += "." + getAv1Codec(findNamedBox(bytes, 'av1C'));
  } else if (codec === 'vp09') {
    // VPCodecConfigurationRecord
    var vpcC = findNamedBox(bytes, 'vpcC'); // https://www.webmproject.org/vp9/mp4/
    var profile = vpcC[0];
    var level = vpcC[1];
    var bitDepth = vpcC[2] >> 4;
    var chromaSubsampling = (vpcC[2] & 0x0F) >> 1;
    var videoFullRangeFlag = (vpcC[2] & 0x0F) >> 3;
    var colourPrimaries = vpcC[3];
    var transferCharacteristics = vpcC[4];
    var matrixCoefficients = vpcC[5];
    codec += "." + padStart(profile, 2, '0');
    codec += "." + padStart(level, 2, '0');
    codec += "." + padStart(bitDepth, 2, '0');
    codec += "." + padStart(chromaSubsampling, 2, '0');
    codec += "." + padStart(colourPrimaries, 2, '0');
    codec += "." + padStart(transferCharacteristics, 2, '0');
    codec += "." + padStart(matrixCoefficients, 2, '0');
    codec += "." + padStart(videoFullRangeFlag, 2, '0');
  } else if (codec === 'theo') {
    codec = 'theora';
  } else if (codec === 'spex') {
    codec = 'speex';
  } else if (codec === '.mp3') {
    codec = 'mp4a.40.34';
  } else if (codec === 'msVo') {
    codec = 'vorbis';
  } else if (codec === 'Opus') {
    codec = 'opus';
    var dOps = findNamedBox(bytes, 'dOps');
    track.info.opus = parseOpusHead(dOps); // TODO: should this go into the webm code??
    // Firefox requires a codecDelay for opus playback
    // see https://bugzilla.mozilla.org/show_bug.cgi?id=1276238
    track.info.codecDelay = 6500000;
  } else {
    codec = codec.toLowerCase();
  }
  /* eslint-enable */
  // flac, ac-3, ec-3, opus
  track.codec = codec;
};
/**
 * Parse every `trak` box out of an mp4 `moov` and describe each one.
 *
 * @param {Uint8Array} bytes mp4 bytes containing a moov box
 * @param {boolean} [frameTable=true] whether to also build a frame table
 *        for each track (slower, needed for frame-accurate work)
 * @return {Object[]} one object per trak with type, number, timescale,
 *         codec info (via addSampleDescription) and optionally frameTable
 */
export var parseTracks = function parseTracks(bytes, frameTable) {
  if (frameTable === void 0) {
    frameTable = true;
  }
  bytes = toUint8(bytes);
  var traks = findBox(bytes, ['moov', 'trak'], true);
  var tracks = [];
  traks.forEach(function (trak) {
    var track = {
      bytes: trak
    };
    var mdia = findBox(trak, ['mdia'])[0];
    var hdlr = findBox(mdia, ['hdlr'])[0];
    // hdlr handler_type (bytes 8-12 of the FullBox payload) identifies the track kind
    var trakType = bytesToString(hdlr.subarray(8, 12));
    if (trakType === 'soun') {
      track.type = 'audio';
    } else if (trakType === 'vide') {
      track.type = 'video';
    } else {
      // pass unknown handler types (e.g. subtitles/metadata) through as-is
      track.type = trakType;
    }
    var tkhd = findBox(trak, ['tkhd'])[0];
    if (tkhd) {
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      var tkhdVersion = view.getUint8(0);
      // tkhd v1 has 64-bit creation/modification times, shifting track_ID to offset 20
      track.number = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }
    var mdhd = findBox(mdia, ['mdhd'])[0];
    if (mdhd) {
      // mdhd is a FullBox, meaning it will have its own version as the first byte
      var version = mdhd[0];
      var index = version === 0 ? 12 : 20;
      // read the 32-bit timescale; >>> 0 forces an unsigned result
      track.timescale = (mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]) >>> 0;
    }
    var stbl = findBox(mdia, ['minf', 'stbl'])[0];
    var stsd = findBox(stbl, ['stsd'])[0];
    // stsd: 4 bytes version/flags, then a 32-bit entry count at bytes 4-8
    var descriptionCount = bytesToNumber(stsd.subarray(4, 8));
    var offset = 8; // add codec and codec info
    while (descriptionCount--) {
      // each sample description entry starts with its own 32-bit length
      var len = bytesToNumber(stsd.subarray(offset, offset + 4));
      var sampleDescriptor = stsd.subarray(offset + 4, offset + 4 + len);
      addSampleDescription(track, sampleDescriptor);
      offset += 4 + len;
    }
    if (frameTable) {
      track.frameTable = buildFrameTable(stbl, track.timescale);
    } // codec has no sub parameters
    tracks.push(track);
  });
  return tracks;
};
/**
 * Read the movie-level timescale and duration from an mp4 `mvhd` box.
 *
 * @param {Uint8Array} bytes mp4 bytes containing a moov/mvhd box
 * @return {Object|undefined} {timestampScale, duration, bytes} or
 *         undefined when no mvhd box is present
 */
export var parseMediaInfo = function parseMediaInfo(bytes) {
  var mvhd = findBox(bytes, ['moov', 'mvhd'], true)[0];
  if (!mvhd || !mvhd.length) {
    return;
  }
  var info = {};
  // mvhd v1 stores 64-bit creation/modification times, pushing the
  // timescale/duration fields to later offsets (and duration is 64-bit)
  var isVersion1 = mvhd[0] === 1;
  var timescaleStart = isVersion1 ? 20 : 12;
  var durationStart = isVersion1 ? 24 : 16;
  var durationLength = isVersion1 ? 8 : 4;
  info.timestampScale = bytesToNumber(mvhd.subarray(timescaleStart, timescaleStart + 4));
  info.duration = bytesToNumber(mvhd.subarray(durationStart, durationStart + durationLength));
  info.bytes = mvhd;
  return info;
};

View file

@ -1,112 +0,0 @@
import { bytesMatch, toUint8 } from './byte-helpers.js';
export var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
export var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
export var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
/**
* Expunge any "Emulation Prevention" bytes from a "Raw Byte
* Sequence Payload"
*
* @param data {Uint8Array} the bytes of a RBSP from a NAL
* unit
* @return {Uint8Array} the RBSP without any Emulation
* Prevention Bytes
*/
export var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
  // collect the index of the 0x03 byte of every 0x00 0x00 0x03 run
  var toRemove = [];
  for (var i = 1; i < bytes.length - 2; i++) {
    if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
      toRemove.push(i + 2);
      // skip past the matched 0x00 so overlapping runs are not double counted
      i++;
    }
  }
  // nothing to strip, hand back the original view untouched
  if (toRemove.length === 0) {
    return bytes;
  }
  // copy everything except the marked bytes into a fresh array
  var output = new Uint8Array(bytes.length - toRemove.length);
  var readIndex = 0;
  for (var writeIndex = 0; writeIndex < output.length; writeIndex++, readIndex++) {
    if (readIndex === toRemove[0]) {
      // skip this emulation prevention byte
      readIndex++;
      toRemove.shift();
    }
    output[writeIndex] = bytes[readIndex];
  }
  return output;
};
/**
 * Scan an annex-b byte stream for the first NAL unit whose type is in
 * `types` and return its payload (emulation prevention bytes removed).
 *
 * @param {Uint8Array} bytes annex-b formatted bytes
 * @param {string} dataType 'h264' or 'h265' (controls nal type parsing)
 * @param {number|number[]} types nal type value(s) to look for
 * @param {number} [nalLimit=Infinity] max number of nals to examine
 * @return {Uint8Array} the matching nal payload, or an empty view when
 *         no match is found
 */
export var findNal = function findNal(bytes, dataType, types, nalLimit) {
  if (nalLimit === void 0) {
    nalLimit = Infinity;
  }
  bytes = toUint8(bytes);
  types = [].concat(types);
  var i = 0;
  var nalStart;
  var nalsFound = 0; // keep searching until:
  // we reach the end of bytes
  // we reach the maximum number of nals they want to search
  // NOTE: that we disregard nalLimit when we have found the start
  // of the nal we want so that we can find the end of the nal we want.
  while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
    var nalOffset = void 0;
    // a nal may be preceded by a 4-byte or a 3-byte start code
    if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
      nalOffset = 4;
    } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
      nalOffset = 3;
    } // we are unsynced,
    // find the next nal unit
    if (!nalOffset) {
      i++;
      continue;
    }
    nalsFound++;
    if (nalStart) {
      // the next start code marks the end of the nal we previously matched
      return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
    }
    var nalType = void 0;
    if (dataType === 'h264') {
      // h264: nal_unit_type is the low 5 bits of the first header byte
      nalType = bytes[i + nalOffset] & 0x1f;
    } else if (dataType === 'h265') {
      // h265: nal_unit_type is bits 1-6 of the first header byte
      nalType = bytes[i + nalOffset] >> 1 & 0x3f;
    }
    if (types.indexOf(nalType) !== -1) {
      nalStart = i + nalOffset;
    } // nal header is 1 length for h264, and 2 for h265
    i += nalOffset + (dataType === 'h264' ? 1 : 2);
  }
  // no matching nal was found; return an empty view over the input
  return bytes.subarray(0, 0);
};
// Convenience wrapper over findNal for h264 streams.
export var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
  var result = findNal(bytes, 'h264', type, nalLimit);
  return result;
};
// Convenience wrapper over findNal for h265 streams.
export var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
  var result = findNal(bytes, 'h265', type, nalLimit);
  return result;
};

View file

@ -1,28 +0,0 @@
import { bytesMatch, toUint8 } from './byte-helpers';
var SYNC_WORD = toUint8([0x4f, 0x67, 0x67, 0x53]);
/**
 * Split an Ogg byte stream into pages and return pages [start, end).
 *
 * @param {Uint8Array} bytes ogg bytes
 * @param {number} start index of the first page to return
 * @param {number} [end=Infinity] index to stop collecting at (exclusive)
 * @return {Uint8Array[]} the requested page views
 */
export var getPages = function getPages(bytes, start, end) {
  if (end === void 0) {
    end = Infinity;
  }
  bytes = toUint8(bytes);
  var pages = [];
  var i = 0;
  while (i < bytes.length && pages.length < end) {
    // we are unsynced,
    // find the next syncword
    if (!bytesMatch(bytes, SYNC_WORD, {
      offset: i
    })) {
      i++;
      continue;
    }
    // NOTE(review): byte 27 of an Ogg page is the *first* lacing value of
    // the segment table (byte 26 is the segment count). Treating it as the
    // whole page length only holds for single-segment pages such as the
    // small header pages this helper is used on — confirm before reusing
    // for arbitrary Ogg data.
    var segmentLength = bytes[i + 27];
    // 28 = 27-byte header + the one segment-table entry read above
    pages.push(bytes.subarray(i, i + 28 + segmentLength));
    i += pages[pages.length - 1].length;
  }
  return pages.slice(start, end);
};

View file

@ -1,52 +0,0 @@
export var OPUS_HEAD = new Uint8Array([// O, p, u, s
0x4f, 0x70, 0x75, 0x73, // H, e, a, d
0x48, 0x65, 0x61, 0x64]); // https://wiki.xiph.org/OggOpus
// https://vfrmaniac.fushizen.eu/contents/opus_in_isobmff.html
// https://opus-codec.org/docs/opusfile_api-0.7/structOpusHead.html
/**
 * Parse an OpusHead identification header into a config object.
 * See https://wiki.xiph.org/OggOpus and
 * https://vfrmaniac.fushizen.eu/contents/opus_in_isobmff.html
 *
 * @param {Uint8Array} bytes the OpusHead payload (after the magic bytes)
 * @return {Object} parsed header fields
 */
export var parseOpusHead = function parseOpusHead(bytes) {
  var view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  var version = view.getUint8(0);
  // version 0 comes from mp4 boxes and is big endian; other versions are
  // little endian
  var le = version !== 0;
  var channels = view.getUint8(1);
  var config = {
    version: version,
    channels: channels,
    preSkip: view.getUint16(2, le),
    sampleRate: view.getUint32(4, le),
    outputGain: view.getUint16(8, le),
    channelMappingFamily: view.getUint8(10)
  };
  // mapping families above zero carry an explicit channel mapping table
  if (config.channelMappingFamily > 0 && bytes.length > 10) {
    config.streamCount = view.getUint8(11);
    config.twoChannelStreamCount = view.getUint8(12);
    var mapping = [];
    for (var c = 0; c < channels; c++) {
      mapping.push(view.getUint8(13 + c));
    }
    config.channelMapping = mapping;
  }
  return config;
};
/**
 * Serialize an OpusHead config object (as produced by parseOpusHead)
 * back into bytes.
 *
 * @param {Object} config parsed OpusHead fields
 * @return {Uint8Array} the serialized header payload
 */
export var setOpusHead = function setOpusHead(config) {
  var size = config.channelMappingFamily <= 0 ? 11 : 12 + config.channels;
  var view = new DataView(new ArrayBuffer(size));
  // version 0 (mp4) is big endian, everything else little endian
  var littleEndian = config.version !== 0;
  view.setUint8(0, config.version);
  view.setUint8(1, config.channels);
  view.setUint16(2, config.preSkip, littleEndian);
  view.setUint32(4, config.sampleRate, littleEndian);
  view.setUint16(8, config.outputGain, littleEndian);
  view.setUint8(10, config.channelMappingFamily);
  if (config.channelMappingFamily > 0) {
    view.setUint8(11, config.streamCount);
    // fix: use Array#forEach — the original called the non-existent
    // `foreach`, which threw a TypeError for any mapping family > 0
    config.channelMapping.forEach(function (cm, i) {
      view.setUint8(12 + i, cm);
    });
    // NOTE(review): this layout omits the twoChannelStreamCount byte that
    // parseOpusHead reads at index 12, so set/parse are not symmetric for
    // mapping families > 0 — confirm intended before relying on roundtrips.
  }
  return new Uint8Array(view.buffer);
};

View file

@ -1,47 +0,0 @@
import URLToolkit from 'url-toolkit';
import window from 'global/window';
var DEFAULT_LOCATION = 'http://example.com';
/**
 * Resolve a relative URL against a base URL, preferring the native URL
 * constructor and falling back to url-toolkit (e.g. for IE11).
 *
 * @param {string} baseUrl the url to resolve against
 * @param {string} relativeUrl the url to resolve
 * @return {string} the resolved absolute (or protocol/location-less) url
 */
var resolveUrl = function resolveUrl(baseUrl, relativeUrl) {
  // return early if we don't need to resolve
  if (/^[a-z]+:/i.test(relativeUrl)) {
    return relativeUrl;
  } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
  if (/^data:/.test(baseUrl)) {
    baseUrl = window.location && window.location.href || '';
  } // IE11 supports URL but not the URL constructor
  // feature detect the behavior we want
  var nativeURL = typeof window.URL === 'function';
  var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
  // and if baseUrl isn't an absolute url
  var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
  if (nativeURL) {
    // DEFAULT_LOCATION is only a placeholder origin; it is stripped back
    // off below when removeLocation is set
    baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
  } else if (!/\/\//i.test(baseUrl)) {
    baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
  }
  if (nativeURL) {
    var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
    // and if we're location-less, remove the location
    // otherwise, return the url unmodified
    if (removeLocation) {
      return newUrl.href.slice(DEFAULT_LOCATION.length);
    } else if (protocolLess) {
      return newUrl.href.slice(newUrl.protocol.length);
    }
    return newUrl.href;
  }
  return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};
export default resolveUrl;

View file

@ -1,74 +0,0 @@
import { toUint8, stringToBytes, bytesMatch } from './byte-helpers.js';
var CONSTANTS = {
LIST: toUint8([0x4c, 0x49, 0x53, 0x54]),
RIFF: toUint8([0x52, 0x49, 0x46, 0x46]),
WAVE: toUint8([0x57, 0x41, 0x56, 0x45])
};
// Coerce a single path segment: strings become byte arrays, anything
// else (numbers, existing byte arrays) passes through untouched.
var normalizePath = function normalizePath(path) {
  return typeof path === 'string' ? stringToBytes(path) : path;
};
// Coerce a path (or list of paths) into a list of normalized segments.
var normalizePaths = function normalizePaths(paths) {
  var list = Array.isArray(paths) ? paths : [paths];
  return list.map(function (p) {
    return normalizePath(p);
  });
};
/**
 * Walk a RIFF byte stream and return the payload of every chunk whose
 * FourCC path matches `paths` (similar to findBox for mp4).
 *
 * @param {Uint8Array} bytes riff bytes to search
 * @param {string|string[]} paths FourCC name(s), outermost first
 * @return {Uint8Array[]} payloads of all matching chunks
 */
export var findFourCC = function findFourCC(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);
  var results = [];
  if (!paths.length) {
    // short-circuit the search for empty paths
    return results;
  }
  var i = 0;
  while (i < bytes.length) {
    var type = bytes.subarray(i, i + 4);
    // chunk size is stored little endian in bytes 4-8; >>> 0 keeps it unsigned
    var size = (bytes[i + 7] << 24 | bytes[i + 6] << 16 | bytes[i + 5] << 8 | bytes[i + 4]) >>> 0; // skip LIST/RIFF and get the actual type
    if (bytesMatch(type, CONSTANTS.LIST) || bytesMatch(type, CONSTANTS.RIFF) || bytesMatch(type, CONSTANTS.WAVE)) {
      // container chunks carry their real type in the first 4 payload bytes
      type = bytes.subarray(i + 8, i + 12);
      i += 4;
      size -= 4;
    }
    var data = bytes.subarray(i + 8, i + 8 + size);
    if (bytesMatch(type, paths[0])) {
      if (paths.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next box along the path
        var subresults = findFourCC(data, paths.slice(1));
        if (subresults.length) {
          results = results.concat(subresults);
        }
      }
    }
    // advance past this chunk's 8-byte header and payload
    i += 8 + data.length;
  } // we've finished searching all of bytes
  return results;
};

View file

@ -1,121 +0,0 @@
/**
* @file stream.js
*/
/**
 * A lightweight readable stream implementation that handles event dispatching.
*
* @class Stream
*/
var Stream = /*#__PURE__*/function () {
  function Stream() {
    // map of event type -> array of listener functions
    this.listeners = {};
  }
  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  var _proto = Stream.prototype;
  _proto.on = function on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }
    this.listeners[type].push(listener);
  };
  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  _proto.off = function off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }
    var index = this.listeners[type].indexOf(listener);
    // Copy the listener list on removal so that an in-flight trigger()
    // keeps iterating its old reference without the order shifting
    // underneath it.
    this.listeners[type] = this.listeners[type].slice(0);
    // fix: only splice when the listener was actually registered.
    // splice(-1, 1) would otherwise remove the *last* listener whenever
    // an unknown listener was passed in.
    if (index > -1) {
      this.listeners[type].splice(index, 1);
    }
    return index > -1;
  };
  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  _proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];
    if (!callbacks) {
      return;
    } // Slicing the arguments on every invocation of this method
    // can add a significant amount of overhead. Avoid the
    // intermediate object creation for the common case of a
    // single callback argument
    if (arguments.length === 2) {
      var length = callbacks.length;
      for (var i = 0; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      var args = Array.prototype.slice.call(arguments, 1);
      var _length = callbacks.length;
      for (var _i = 0; _i < _length; ++_i) {
        callbacks[_i].apply(this, args);
      }
    }
  };
  /**
   * Destroys the stream and cleans up.
   */
  _proto.dispose = function dispose() {
    this.listeners = {};
  };
  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  _proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };
  return Stream;
}();
export { Stream as default };

View file

@ -1,18 +0,0 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>@videojs/vhs-utils Demo</title>
</head>
<body>
<h1>Test things with window.vhsUtils.* in the console</h1>
<ul>
<li><a href="/test/debug.html">Run unit tests in browser.</a></li>
</ul>
<script src="dist/vhs-utils.js"></script>
<script>
console.log('Test things with window.vhsUtils.*');
</script>
</body>
</html>

View file

@ -1,96 +0,0 @@
const fs = require('fs');
const path = require('path');
const baseDir = path.join(__dirname, '..');
const formatDir = path.join(baseDir, 'test', 'fixtures', 'formats');
const parsingDir = path.join(baseDir, 'test', 'fixtures', 'parsing');
// Gather every file that lives one directory deep under `dir`
// (the contents of each of dir's immediate subdirectories).
const getFiles = (dir) => (fs.readdirSync(dir) || []).reduce((acc, entry) => {
  const full = path.resolve(dir, entry);
  if (!fs.statSync(full).isDirectory()) {
    return acc;
  }
  return acc.concat(fs.readdirSync(full).map((f) => path.resolve(full, f)));
}, []);
// Render the source of a JS module that exposes each fixture file as a
// lazy base64-decoding accessor keyed by basename.
const buildDataString = function(files, id) {
  const data = {};
  files.forEach((file) => {
    // read the file directly as a buffer before converting to base64
    const base64 = fs.readFileSync(file).toString('base64');
    data[path.basename(file)] = base64;
  });
  const dataExportStrings = Object.keys(data).reduce((acc, key) => {
    // use a function since the segment may be cleared out on usage
    acc.push(`${id}Files['${key}'] = () => {
      cache['${key}'] = cache['${key}'] || base64ToUint8Array('${data[key]}');
      const dest = new Uint8Array(cache['${key}'].byteLength);
      dest.set(cache['${key}']);
      return dest;
    };`);
    return acc;
  }, []);
  // assemble the generated module: decoder import, cache, accessors, export
  const file =
    '/* istanbul ignore file */\n' +
    '\n' +
    `import base64ToUint8Array from "${path.resolve(baseDir, 'src/decode-b64-to-uint8-array.js')}";\n` +
    'const cache = {};\n' +
    `const ${id}Files = {};\n` +
    dataExportStrings.join('\n') +
    `export default ${id}Files`;
  return file;
};
/* we refer to them as .js, so that babel and other plugins can work on them */
const formatsKey = 'create-test-data!formats.js';
const parsingKey = 'create-test-data!parsing.js';
module.exports = function() {
return {
name: 'createTestData',
buildStart() {
this.addWatchFile(formatDir);
this.addWatchFile(parsingDir);
getFiles(formatDir).forEach((file) => this.addWatchFile(file));
getFiles(parsingDir).forEach((file) => this.addWatchFile(file));
},
resolveId(importee, importer) {
// if this is not an id we can resolve return
if (importee.indexOf('create-test-data!') !== 0) {
return;
}
const name = importee.split('!')[1];
if (name.indexOf('formats') !== -1) {
return formatsKey;
}
if (name.indexOf('parsing') !== -1) {
return parsingKey;
}
return null;
},
load(id) {
if (id === formatsKey) {
return buildDataString.call(this, getFiles(formatDir), 'format');
}
if (id === parsingKey) {
return buildDataString.call(this, getFiles(parsingDir), 'parsing');
}
}
};
};

View file

@ -1,16 +0,0 @@
const generate = require('videojs-generate-karma-config');
module.exports = function(config) {
// see https://github.com/videojs/videojs-generate-karma-config
// for options
const options = {
serverBrowsers() {
return [];
}
};
config = generate(config, options);
// any other custom stuff not supported by options here!
};

View file

@ -1,28 +0,0 @@
const createTestData = require('./create-test-data.js');
const generate = require('videojs-generate-rollup-config');
// see https://github.com/videojs/videojs-generate-rollup-config
// for options
const options = {
input: 'src/index.js',
exportName: 'vhsUtils',
distName: 'vhs-utils',
primedPlugins(defaults) {
return Object.assign(defaults, {
createTestData: createTestData()
});
},
plugins(defaults) {
defaults.test.splice(0, 0, 'createTestData');
return defaults;
}
};
const config = generate(options);
if (config.builds.module) {
delete config.builds.module;
}
// Add additional builds/customization here!
// export the builds to rollup
export default Object.values(config.builds);

View file

@ -1,279 +0,0 @@
import window from 'global/window';
// const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
// Build a string made of `len` copies of `str` concatenated together.
const repeat = function(str, len) {
  let out = '';
  for (let i = 0; i < len; i++) {
    out += str;
  }
  return out;
};
// count the number of bits it would take to represent a number
// we used to do this with log2 but BigInt does not support builtin math
// Math.ceil(log2(x));
export const countBits = function(x) {
  return x.toString(2).length;
};
// count the number of whole bytes it would take to represent a number
// (binary digit count, rounded up to a full byte)
export const countBytes = function(x) {
  return Math.ceil(x.toString(2).length / 8);
};
// Left-pad the string form of `b` with `str` out to `len` characters,
// truncating from the left when `b` is already longer than `len`.
export const padStart = (b, len, str = ' ') => {
  let padding = '';
  let remaining = len;
  while (remaining--) {
    padding += str;
  }
  return (padding + b.toString()).slice(-len);
};
/**
 * Determine whether `obj` is a view on an ArrayBuffer (a typed array
 * or DataView).
 *
 * @param {*} obj the value to test
 * @return {boolean} true when obj is an ArrayBuffer view
 */
export const isArrayBufferView = (obj) => {
  // fix: the original compared the function itself against the string
  // 'function' (missing `typeof`), so the native check never ran and
  // the looser fallback below was always used
  if (typeof ArrayBuffer.isView === 'function') {
    return ArrayBuffer.isView(obj);
  }
  return obj && obj.buffer instanceof ArrayBuffer;
};
// typed arrays are exactly the ArrayBuffer views we accept
export const isTypedArray = function(obj) {
  return isArrayBufferView(obj);
};
/**
 * Normalize many byte-ish inputs (Uint8Array, other typed arrays,
 * ArrayBuffer, Array, single number) into a Uint8Array. Typed-array
 * inputs become views over the same buffer (no copy); invalid inputs
 * become an empty Uint8Array.
 *
 * @param {*} bytes the value to convert
 * @return {Uint8Array} a Uint8Array over/of the given bytes
 */
export const toUint8 = function(bytes) {
  if (bytes instanceof Uint8Array) {
    return bytes;
  }
  if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
    // any non-number or NaN leads to empty uint8array
    // eslint-disable-next-line
    if (typeof bytes !== 'number' || (typeof bytes === 'number' && bytes !== bytes)) {
      bytes = 0;
    } else {
      bytes = [bytes];
    }
  }
  // typed arrays/ArrayBuffers are wrapped via their buffer/offset/length;
  // plain arrays hit the array-like constructor path (extra args ignored)
  return new Uint8Array(
    bytes && bytes.buffer || bytes,
    bytes && bytes.byteOffset || 0,
    bytes && bytes.byteLength || 0
  );
};
// Render bytes as a lowercase hex string, two characters per byte.
export const toHexString = function(bytes) {
  const view = toUint8(bytes);
  let hex = '';
  for (let i = 0; i < view.length; i++) {
    hex += padStart(view[i].toString(16), 2, '0');
  }
  return hex;
};
// Render bytes as a binary string, eight characters per byte.
export const toBinaryString = function(bytes) {
  const view = toUint8(bytes);
  let bin = '';
  for (let i = 0; i < view.length; i++) {
    bin += padStart(view[i].toString(2), 8, '0');
  }
  return bin;
};
// Prefer native BigInt so values above 2^53 - 1 stay exact; fall back to
// Number in environments without BigInt (large values may lose precision).
const BigInt = window.BigInt || Number;
// BYTE_TABLE[i] === 2^(8 * i). Multiplying/dividing by these shifts a
// value by whole bytes without 32-bit-limited bitwise operators.
const BYTE_TABLE = [
  BigInt('0x1'),
  BigInt('0x100'),
  BigInt('0x10000'),
  BigInt('0x1000000'),
  BigInt('0x100000000'),
  BigInt('0x10000000000'),
  BigInt('0x1000000000000'),
  BigInt('0x100000000000000'),
  BigInt('0x10000000000000000')
];
// Probe the platform byte order by viewing a known 16-bit value one
// byte at a time.
export const ENDIANNESS = (function() {
  const probe = new Uint16Array([0xFFCC]);
  const view = new Uint8Array(probe.buffer, probe.byteOffset, probe.byteLength);
  switch (view[0]) {
    case 0xFF:
      return 'big';
    case 0xCC:
      return 'little';
    default:
      return 'unknown';
  }
})();
export const IS_BIG_ENDIAN = ENDIANNESS === 'big';
export const IS_LITTLE_ENDIAN = ENDIANNESS === 'little';
/**
 * Interpret bytes as an integer.
 *
 * @param {Uint8Array|Array} bytes the bytes to read
 * @param {Object} [options]
 * @param {boolean} [options.signed=false] interpret as two's complement
 * @param {boolean} [options.le=false] little-endian byte order
 * @return {number} the decoded value (precision limited by Number)
 */
export const bytesToNumber = function(bytes, {signed = false, le = false} = {}) {
  bytes = toUint8(bytes);
  // little endian accumulates left-to-right, big endian right-to-left
  const fn = le ? 'reduce' : 'reduceRight';
  // some Uint8Array implementations lack reduce/reduceRight; borrow Array's
  const obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
  let number = obj.call(bytes, function(total, byte, i) {
    const exponent = le ? i : Math.abs(i + 1 - bytes.length);
    return total + (BigInt(byte) * BYTE_TABLE[exponent]);
  }, BigInt(0));
  if (signed) {
    // two's complement: values above the signed maximum wrap negative
    const max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
    number = BigInt(number);
    if (number > max) {
      number -= max;
      number -= max;
      number -= BigInt(2);
    }
  }
  return Number(number);
};
/**
 * Serialize a number/BigInt into the minimum number of bytes needed.
 *
 * @param {number|bigint} number the value to serialize (NaN/non-numbers become 0)
 * @param {Object} [options]
 * @param {boolean} [options.le=false] little-endian byte order
 * @return {Uint8Array} the serialized bytes
 */
export const numberToBytes = function(number, {le = false} = {}) {
  // eslint-disable-next-line
  if ((typeof number !== 'bigint' && typeof number !== 'number') || (typeof number === 'number' && number !== number)) {
    number = 0;
  }
  number = BigInt(number);
  const byteCount = countBytes(number);
  const bytes = new Uint8Array(new ArrayBuffer(byteCount));
  for (let i = 0; i < byteCount; i++) {
    // write low-order bytes first for le, last for be
    const byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
    bytes[byteIndex] = Number((number / BYTE_TABLE[i]) & BigInt(0xFF));
    if (number < 0) {
      // NOTE(review): this negative-number adjustment looks like a manual
      // two's-complement style encoding — confirm its exact contract with
      // bytesToNumber({signed: true}) before relying on roundtrips
      bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
      bytes[byteIndex] -= i === 0 ? 1 : 2;
    }
  }
  return bytes;
};
/**
 * Decode bytes into a string, attempting a utf-8 interpretation first
 * and falling back to the raw code-unit string on failure.
 *
 * @param {Uint8Array|Array} bytes the bytes to decode
 * @return {string} the decoded string ('' for falsy input)
 */
export const bytesToString = (bytes) => {
  if (!bytes) {
    return '';
  }
  // TODO: should toUint8 handle cases where we only have 8 bytes
  // but report more since this is a Uint16+ Array?
  bytes = Array.prototype.slice.call(bytes);
  const string = String.fromCharCode.apply(null, toUint8(bytes));
  try {
    // escape + decodeURIComponent is the legacy idiom for decoding a
    // byte string as utf-8 (works where TextDecoder may be unavailable)
    return decodeURIComponent(escape(string));
  } catch (e) {
    // if decodeURIComponent/escape fails, we are dealing with partial
    // or full non string data. Just return the potentially garbled string.
  }
  return string;
};
// Encode a string as bytes. Unless `stringIsBytes` indicates the string
// already contains raw byte values, multi-byte characters are expanded
// into their individual utf-8 bytes first.
export const stringToBytes = (string, stringIsBytes) => {
  // coerce non-strings that know how to stringify themselves
  if (typeof string !== 'string' && string && typeof string.toString === 'function') {
    string = string.toString();
  }
  if (typeof string !== 'string') {
    return new Uint8Array();
  }
  const raw = stringIsBytes ? string : unescape(encodeURIComponent(string));
  const view = new Uint8Array(raw.length);
  for (let i = 0; i < raw.length; i++) {
    view[i] = raw.charCodeAt(i);
  }
  return view;
};
// Concatenate any number of byte sources into one Uint8Array, silently
// dropping empty, falsy, or string arguments.
export const concatTypedArrays = (...buffers) => {
  const usable = buffers.filter((b) => b && (b.byteLength || b.length) && typeof b !== 'string');
  if (usable.length <= 1) {
    // zero inputs -> empty Uint8Array; one input -> that input as Uint8Array
    return toUint8(usable[0]);
  }
  const totalLength = usable.reduce((total, buf) => total + (buf.byteLength || buf.length), 0);
  const result = new Uint8Array(totalLength);
  let offset = 0;
  for (const buf of usable) {
    const u8 = toUint8(buf);
    result.set(u8, offset);
    offset += u8.byteLength;
  }
  return result;
};
/**
* Check if the bytes "b" are contained within bytes "a".
*
* @param {Uint8Array|Array} a
* Bytes to check in
*
* @param {Uint8Array|Array} b
* Bytes to check for
*
* @param {Object} options
* options
*
* @param {Array|Uint8Array} [offset=0]
* offset to use when looking at bytes in a
*
* @param {Array|Uint8Array} [mask=[]]
* mask to use on bytes before comparison.
*
* @return {boolean}
* If all bytes in b are inside of a, taking into account
* bit masks.
*/
export const bytesMatch = (a, b, {offset = 0, mask = []} = {}) => {
  a = toUint8(a);
  b = toUint8(b);
  // older Uint8Array implementations lack every; borrow Array's
  const everyFn = b.every ? b.every : Array.prototype.every;
  return b.length &&
    a.length - offset >= b.length &&
    everyFn.call(b, (bByte, i) => {
      // apply the bit mask (when provided) before comparing
      const aByte = mask[i] ? (mask[i] & a[offset + i]) : a[offset + i];
      return bByte === aByte;
    });
};
// Copy src[start, end) into a new Uint8Array, working around platforms
// without a native Uint8Array#slice.
export const sliceBytes = function(src, start, end) {
  const nativeSlice = Uint8Array.prototype.slice;
  if (nativeSlice) {
    return nativeSlice.call(src, start, end);
  }
  return new Uint8Array(Array.prototype.slice.call(src, start, end));
};
// Reverse src in place, borrowing Array's reverse for array-likes
// that lack their own.
export const reverseBytes = function(src) {
  return src.reverse ? src.reverse() : Array.prototype.reverse.call(src);
};

View file

@ -1,106 +0,0 @@
import {padStart, toHexString, toBinaryString} from './byte-helpers.js';
// https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/codecs_parameter#AV1
/**
 * Build an AV1 codec parameter string from an av1C
 * (AV1CodecConfigurationRecord) box payload.
 *
 * @param {Uint8Array} bytes the av1C payload
 * @return {string} codec string: profile.level{tier}.bitDepth.mono.subsampling
 */
export const getAv1Codec = function(bytes) {
  let codec = '';
  // byte 1: seq_profile (3 bits) + seq_level_idx_0 (5 bits)
  const profile = bytes[1] >>> 3;
  const level = bytes[1] & 0x1F;
  // byte 2 bit fields, high to low
  const tier = bytes[2] >>> 7;
  const highBitDepth = (bytes[2] & 0x40) >> 6;
  const twelveBit = (bytes[2] & 0x20) >> 5;
  const monochrome = (bytes[2] & 0x10) >> 4;
  const chromaSubsamplingX = (bytes[2] & 0x08) >> 3;
  const chromaSubsamplingY = (bytes[2] & 0x04) >> 2;
  const chromaSamplePosition = bytes[2] & 0x03;
  codec += `${profile}.${padStart(level, 2, '0')}`;
  if (tier === 0) {
    codec += 'M';
  } else if (tier === 1) {
    codec += 'H';
  }
  let bitDepth;
  // profile 2 is the only profile that can signal 12-bit
  if (profile === 2 && highBitDepth) {
    bitDepth = twelveBit ? 12 : 10;
  } else {
    bitDepth = highBitDepth ? 10 : 8;
  }
  codec += `.${padStart(bitDepth, 2, '0')}`;
  // TODO: can we parse color range??
  codec += `.${monochrome}`;
  codec += `.${chromaSubsamplingX}${chromaSubsamplingY}${chromaSamplePosition}`;
  return codec;
};
// Build an avc1 codec parameter string (profile, constraint flags,
// level — each as a hex byte) from an avcC box payload.
export const getAvcCodec = function(bytes) {
  const parts = [
    toHexString(bytes[1]),
    // only the top 6 bits of the constraint byte are meaningful
    toHexString(bytes[2] & 0xFC),
    toHexString(bytes[3])
  ];
  return parts.join('');
};
/**
 * Build an HEVC codec parameter string from an hvcC
 * (HEVCDecoderConfigurationRecord) box payload.
 *
 * @param {Uint8Array} bytes the hvcC payload
 * @return {string} codec string: [space]profile.compat.{L|H}level[.constraints]
 */
export const getHvcCodec = function(bytes) {
  let codec = '';
  // byte 1: profile_space (2 bits), tier_flag (1 bit), profile_idc (5 bits)
  const profileSpace = bytes[1] >> 6;
  const profileId = bytes[1] & 0x1F;
  const tierFlag = (bytes[1] & 0x20) >> 5;
  const profileCompat = bytes.subarray(2, 6);
  const constraintIds = bytes.subarray(6, 12);
  const levelId = bytes[12];
  // profile_space 1/2/3 is signalled as a leading A/B/C; 0 adds nothing
  if (profileSpace === 1) {
    codec += 'A';
  } else if (profileSpace === 2) {
    codec += 'B';
  } else if (profileSpace === 3) {
    codec += 'C';
  }
  codec += `${profileId}.`;
  // ffmpeg does this in big endian
  let profileCompatVal = parseInt(toBinaryString(profileCompat).split('').reverse().join(''), 2);
  // apple does this in little endian...
  if (profileCompatVal > 255) {
    profileCompatVal = parseInt(toBinaryString(profileCompat), 2);
  }
  codec += `${profileCompatVal.toString(16)}.`;
  // tier 0 = Main ('L'), tier 1 = High ('H')
  if (tierFlag === 0) {
    codec += 'L';
  } else {
    codec += 'H';
  }
  codec += levelId;
  // append each non-zero constraint byte, dot separated
  let constraints = '';
  for (let i = 0; i < constraintIds.length; i++) {
    const v = constraintIds[i];
    if (v) {
      if (constraints) {
        constraints += '.';
      }
      constraints += v.toString(16);
    }
  }
  if (constraints) {
    codec += `.${constraints}`;
  }
  return codec;
};

View file

@ -1,225 +0,0 @@
import window from 'global/window';
const regexs = {
// to determine mime types
mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
// to determine if a codec is audio or video
video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
text: /^(stpp.ttml.im1t)/,
// mux.js support regex
muxerVideo: /^(avc0?1)/,
muxerAudio: /^(mp4a)/,
// match nothing as muxer does not support text right now.
  // there can never be a character before the start of a string
// so this matches nothing.
muxerText: /a^/
};
const mediaTypes = ['video', 'audio', 'text'];
const upperMediaTypes = ['Video', 'Audio', 'Text'];
/**
* Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
* `avc1.<hhhhhh>`
*
* @param {string} codec
* Codec string to translate
* @return {string}
* The translated codec string
*/
export const translateLegacyCodec = function(codec) {
  if (!codec) {
    return codec;
  }
  return codec.replace(/avc1\.(\d+)\.(\d+)/i, (orig, profile, avcLevel) => {
    // render a decimal field as a two-character hex byte
    const toHexByte = (value) => ('00' + Number(value).toString(16)).slice(-2);
    return `avc1.${toHexByte(profile)}00${toHexByte(avcLevel)}`;
  });
};
/**
* Replace the old apple-style `avc1.<dd>.<dd>` codec strings with the standard
* `avc1.<hhhhhh>`
*
* @param {string[]} codecs
* An array of codec strings to translate
* @return {string[]}
* The translated array of codec strings
*/
export const translateLegacyCodecs = function(codecs) {
  return codecs.map((codec) => translateLegacyCodec(codec));
};
/**
* Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
* standard `avc1.<hhhhhh>`.
*
* @param {string} codecString
* The codec string
* @return {string}
* The codec string with old apple-style codecs replaced
*
* @private
*/
export const mapLegacyAvcCodecs = function(codecString) {
  return codecString.replace(
    /avc1\.(\d+)\.(\d+)/i,
    (match) => translateLegacyCodecs([match])[0]
  );
};
/**
* @typedef {Object} ParsedCodecInfo
* @property {number} codecCount
* Number of codecs parsed
* @property {string} [videoCodec]
* Parsed video codec (if found)
* @property {string} [videoObjectTypeIndicator]
* Video object type indicator (if found)
* @property {string|null} audioProfile
* Audio profile
*/
/**
* Parses a codec string to retrieve the number of codecs specified, the video codec and
* object type indicator, and the audio profile.
*
* @param {string} [codecString]
* The codec string to parse
* @return {ParsedCodecInfo}
* Parsed codec info
*/
// NOTE(review): despite the ParsedCodecInfo JSDoc above, this returns an
// array of {type, details, mediaType} entries (one per codec) — confirm
// against callers before changing either.
export const parseCodecs = function(codecString = '') {
  const codecs = codecString.split(',');
  const result = [];
  codecs.forEach(function(codec) {
    codec = codec.trim();
    let codecType;
    // classify each codec against the video/audio/text regexes
    mediaTypes.forEach(function(name) {
      const match = regexs[name].exec(codec.toLowerCase());
      if (!match || match.length <= 1) {
        return;
      }
      codecType = name;
      // maintain codec case
      const type = codec.substring(0, match[1].length);
      const details = codec.replace(type, '');
      result.push({type, details, mediaType: name});
    });
    // anything no regex recognized is passed through as unknown
    if (!codecType) {
      result.push({type: codec, details: '', mediaType: 'unknown'});
    }
  });
  return result;
};
/**
* Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
* a default alternate audio playlist for the provided audio group.
*
* @param {Object} master
* The master playlist
* @param {string} audioGroupId
* ID of the audio group for which to find the default codec info
* @return {ParsedCodecInfo}
* Parsed codec info
*/
export const codecsFromDefault = (master, audioGroupId) => {
  // nothing to look up without an AUDIO media group or a group id
  if (!master.mediaGroups.AUDIO || !audioGroupId) {
    return null;
  }
  const audioGroup = master.mediaGroups.AUDIO[audioGroupId];
  if (!audioGroup) {
    return null;
  }
  // find the group's default audio type and parse its codec attribute
  for (const name in audioGroup) {
    const audioType = audioGroup[name];
    if (audioType.default && audioType.playlists) {
      // codec should be the same for all playlists within the audio type
      return parseCodecs(audioType.playlists[0].attributes.CODECS);
    }
  }
  // no default audio type was flagged in this group
  return null;
};
// media-type predicates: trim + lowercase the codec before testing
export const isVideoCodec = function(codec = '') {
  return regexs.video.test(codec.trim().toLowerCase());
};
export const isAudioCodec = function(codec = '') {
  return regexs.audio.test(codec.trim().toLowerCase());
};
export const isTextCodec = function(codec = '') {
  return regexs.text.test(codec.trim().toLowerCase());
};
export const getMimeForCodec = (codecString) => {
if (!codecString || typeof codecString !== 'string') {
return;
}
const codecs = codecString
.toLowerCase()
.split(',')
.map((c) => translateLegacyCodec(c.trim()));
// default to video type
let type = 'video';
// only change to audio type if the only codec we have is
// audio
if (codecs.length === 1 && isAudioCodec(codecs[0])) {
type = 'audio';
} else if (codecs.length === 1 && isTextCodec(codecs[0])) {
// text uses application/<container> for now
type = 'application';
}
// default the container to mp4
let container = 'mp4';
// every codec must be able to go into the container
// for that container to be the correct one
if (codecs.every((c) => regexs.mp4.test(c))) {
container = 'mp4';
} else if (codecs.every((c) => regexs.webm.test(c))) {
container = 'webm';
} else if (codecs.every((c) => regexs.ogg.test(c))) {
container = 'ogg';
}
return `${type}/${container};codecs="${codecString}"`;
};
/**
 * Whether the current browser's MediaSource implementation reports support
 * for the given codec string. Always false when MediaSource is unavailable.
 */
export const browserSupportsCodec = (codecString = '') => {
  const MediaSource = window.MediaSource;

  if (!MediaSource || !MediaSource.isTypeSupported) {
    return false;
  }
  return MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
};
/**
 * Whether every codec in a comma-separated codec string is one the bundled
 * muxer can handle.
 */
export const muxerSupportsCodec = (codecString = '') =>
  codecString
    .toLowerCase()
    .split(',')
    .every((raw) => {
      const codec = raw.trim();

      // a codec is supported when any media type's muxer regex matches it
      return upperMediaTypes.some((type) => regexs[`muxer${type}`].test(codec));
    });
// fallback codec strings used when a source does not declare its codecs:
// 'mp4a.40.2' is AAC-LC audio and 'avc1.4d400d' is H.264 Main Profile video
export const DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
export const DEFAULT_VIDEO_CODEC = 'avc1.4d400d';

View file

@ -1,189 +0,0 @@
import {toUint8, bytesMatch} from './byte-helpers.js';
import {findBox} from './mp4-helpers.js';
import {findEbml, EBML_TAGS} from './ebml-helpers.js';
import {getId3Offset} from './id3-helpers.js';
import {findH264Nal, findH265Nal} from './nal-helpers.js';
// Byte signatures ("magic numbers") used by the detectors below to identify
// container formats. Each value is the raw bytes of an ASCII marker or a
// sync pattern as found in the file.
const CONSTANTS = {
  // "webm" string literal in hex
  'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
  // "matroska" string literal in hex
  'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
  // "fLaC" string literal in hex
  'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
  // "OggS" string literal in hex
  'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
  // ac-3 sync byte, also works for ec-3 as that is simply a codec
  // of ac-3
  'ac3': toUint8([0x0b, 0x77]),
  // "RIFF" string literal in hex used for wav and avi
  'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
  // "AVI" string literal in hex
  'avi': toUint8([0x41, 0x56, 0x49]),
  // "WAVE" string literal in hex
  'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
  // "ftyp3g" string literal in hex
  '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
  // "ftyp" string literal in hex
  'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
  // "styp" string literal in hex
  'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
  // "ftypqt" string literal in hex
  'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
  // moov string literal in hex
  'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
  // moof string literal in hex
  'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
};
// Signature-based detectors, one per container/codec. Each takes raw bytes
// and returns a truthy value when the bytes look like that format. They are
// wrapped below so callers may pass any byte-like input.
const _isLikely = {
  aac(bytes) {
    // audio files may be prefixed with id3 tags; sync patterns start after
    const offset = getId3Offset(bytes);
    // masked match for an ADTS-style audio sync pattern
    return bytesMatch(bytes, [0xFF, 0x10], {offset, mask: [0xFF, 0x16]});
  },
  mp3(bytes) {
    const offset = getId3Offset(bytes);
    // masked match for an MPEG audio frame sync pattern
    return bytesMatch(bytes, [0xFF, 0x02], {offset, mask: [0xFF, 0x06]});
  },
  webm(bytes) {
    const docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
    // check if DocType EBML tag is webm
    return bytesMatch(docType, CONSTANTS.webm);
  },
  mkv(bytes) {
    const docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
    // check if DocType EBML tag is matroska
    return bytesMatch(docType, CONSTANTS.matroska);
  },
  mp4(bytes) {
    // if this file is another base media file format, it is not mp4
    if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
      return false;
    }
    // if this file starts with a ftyp or styp box its mp4
    if (bytesMatch(bytes, CONSTANTS.mp4, {offset: 4}) || bytesMatch(bytes, CONSTANTS.fmp4, {offset: 4})) {
      return true;
    }
    // if this file starts with a moof/moov box its mp4
    if (bytesMatch(bytes, CONSTANTS.moof, {offset: 4}) || bytesMatch(bytes, CONSTANTS.moov, {offset: 4})) {
      return true;
    }
    // NOTE: falls through returning undefined (falsy) when nothing matched
  },
  mov(bytes) {
    // "ftypqt" immediately after the 4-byte box size
    return bytesMatch(bytes, CONSTANTS.mov, {offset: 4});
  },
  '3gp'(bytes) {
    // "ftyp3g" immediately after the 4-byte box size
    return bytesMatch(bytes, CONSTANTS['3gp'], {offset: 4});
  },
  ac3(bytes) {
    // ac-3/ec-3 sync bytes after any id3 tags
    const offset = getId3Offset(bytes);
    return bytesMatch(bytes, CONSTANTS.ac3, {offset});
  },
  ts(bytes) {
    // with less than one full 188-byte packet, a single leading sync byte
    // is the best evidence available
    if (bytes.length < 189 && bytes.length >= 1) {
      return bytes[0] === 0x47;
    }
    let i = 0;
    // check the first 376 bytes for two matching sync bytes
    while (i + 188 < bytes.length && i < 188) {
      if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
        return true;
      }
      i += 1;
    }
    return false;
  },
  flac(bytes) {
    // "fLaC" marker after any id3 tags
    const offset = getId3Offset(bytes);
    return bytesMatch(bytes, CONSTANTS.flac, {offset});
  },
  ogg(bytes) {
    // "OggS" page capture pattern at the very start
    return bytesMatch(bytes, CONSTANTS.ogg);
  },
  avi(bytes) {
    // a RIFF file whose form type is "AVI"
    return bytesMatch(bytes, CONSTANTS.riff) &&
      bytesMatch(bytes, CONSTANTS.avi, {offset: 8});
  },
  wav(bytes) {
    // a RIFF file whose form type is "WAVE"
    return bytesMatch(bytes, CONSTANTS.riff) &&
      bytesMatch(bytes, CONSTANTS.wav, {offset: 8});
  },
  'h264'(bytes) {
    // find seq_parameter_set_rbsp
    return findH264Nal(bytes, 7, 3).length;
  },
  'h265'(bytes) {
    // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    return findH265Nal(bytes, [32, 33], 3).length;
  }
};
// Ordering matters: 'ts', 'h264' and 'h265' are the least specific checks,
// so they move to the end of the list while every other detector keeps its
// original relative order.
const LEAST_SPECIFIC_TYPES = ['ts', 'h264', 'h265'];
const isLikelyTypes = Object.keys(_isLikely)
  .filter((type) => LEAST_SPECIFIC_TYPES.indexOf(type) === -1)
  .concat(LEAST_SPECIFIC_TYPES);

// wrap each detector so that any byte-like input is normalized to a
// Uint8Array before the real check runs
isLikelyTypes.forEach((type) => {
  const check = _isLikely[type];

  _isLikely[type] = (bytes) => check(toUint8(bytes));
});

// the wrapped detectors are the public API
export const isLikely = _isLikely;
// A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures

/**
 * Run every container detector against the given bytes, most specific
 * first, and return the name of the first match ('' when none match).
 */
export const detectContainerForBytes = (bytes) => {
  const normalized = toUint8(bytes);
  const match = isLikelyTypes.find((type) => isLikely[type](normalized));

  return match || '';
};
// fmp4 is not a container format of its own, so it is never returned from
// detectContainerForBytes; use this to check for a fragmented-mp4 media
// segment (one that contains a moof box)
export const isLikelyFmp4MediaSegment = (bytes) =>
  findBox(bytes, ['moof']).length > 0;

View file

@ -1,13 +0,0 @@
import window from 'global/window';
// prefer the browser's atob; fall back to Buffer when running under node
const atob = (s) => (window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary'));

/**
 * Decode a base64 string into the raw bytes it encodes.
 *
 * @param {string} b64Text - base64 encoded text
 * @return {Uint8Array} the decoded bytes
 */
export default function decodeB64ToUint8Array(b64Text) {
  const binary = atob(b64Text);

  // the "binary string" produced by atob only holds char codes 0-255, so
  // each character maps directly to one byte
  return Uint8Array.from(binary, (char) => char.charCodeAt(0));
}

View file

@ -1,503 +0,0 @@
import {
toUint8,
bytesToNumber,
bytesMatch,
bytesToString,
numberToBytes,
padStart
} from './byte-helpers';
import {getAvcCodec, getHvcCodec, getAv1Codec} from './codec-helpers.js';
// relevant specs for this parser:
// https://matroska-org.github.io/libebml/specs.html
// https://www.matroska.org/technical/elements.html
// https://www.webmproject.org/docs/container/
// EBML element IDs used by this parser, stored as the raw ID bytes exactly
// as they appear in a webm/mkv file. See
// https://www.matroska.org/technical/elements.html for the full registry.
// Note that Track and TrackEntry intentionally share the same ID (0xAE).
export const EBML_TAGS = {
  EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
  DocType: toUint8([0x42, 0x82]),
  Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
  SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
  Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
  Track: toUint8([0xAE]),
  TrackNumber: toUint8([0xd7]),
  DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
  TrackEntry: toUint8([0xAE]),
  TrackType: toUint8([0x83]),
  FlagDefault: toUint8([0x88]),
  CodecID: toUint8([0x86]),
  CodecPrivate: toUint8([0x63, 0xA2]),
  VideoTrack: toUint8([0xe0]),
  AudioTrack: toUint8([0xe1]),
  // Not used yet, but will be used for live webm/mkv
  // see https://www.matroska.org/technical/basics.html#block-structure
  // see https://www.matroska.org/technical/basics.html#simpleblock-structure
  Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
  Timestamp: toUint8([0xE7]),
  TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
  BlockGroup: toUint8([0xA0]),
  BlockDuration: toUint8([0x9B]),
  Block: toUint8([0xA1]),
  SimpleBlock: toUint8([0xA3])
};
/**
 * Single-bit masks for the leading byte of an ebml vint, most significant
 * bit first. A vint's length is one based (starts at 1, not zero): for each
 * zero bit before the first one bit, the length grows by one. The table is
 * also used to xor the length-marker bit out of a value's leading byte.
 */
const LENGTH_TABLE = [0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01];
// A vint's length in bytes is one plus the number of leading zero bits in
// its first byte (so a byte with no set bits among the top eight yields 9).
const getLength = function(byte) {
  let len = 1;

  for (let i = 0; i < LENGTH_TABLE.length && !(byte & LENGTH_TABLE[i]); i++) {
    len++;
  }
  return len;
};
// length in ebml is stored in the first 4 to 8 bits
// of the first byte. 4 for the id length and 8 for the
// data size length. Length is measured by converting the number to binary
// then 1 + the number of zeros before a 1 is encountered starting
// from the left.
//
// Parse one vint at `offset` and return {length, value, bytes}. When
// `removeLength` is true the length-marker bit is xor-ed out of the first
// byte before the value is computed (data sizes); ids keep the marker bit.
// `signed` is forwarded to bytesToNumber for signed vints (ebml lacing).
const getvint = function(bytes, offset, removeLength = true, signed = false) {
  const length = getLength(bytes[offset]);
  let valueBytes = bytes.subarray(offset, offset + length);
  // NOTE that we do **not** subarray here because we need to copy these bytes
  // as they will be modified below to remove the dataSizeLen bits and we do not
  // want to modify the original data. normally we could just call slice on
  // uint8array but ie 11 does not support that...
  if (removeLength) {
    valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
    valueBytes[0] ^= LENGTH_TABLE[length - 1];
  }
  return {
    length,
    value: bytesToNumber(valueBytes, {signed}),
    bytes: valueBytes
  };
};
// Accept a tag expressed as a hex string, a number, or raw bytes and
// normalize it toward byte form.
const normalizePath = function(path) {
  if (typeof path === 'string') {
    // NOTE(review): recursing on each two-character chunk re-enters this
    // string branch (a 2-char string matches /.{1,2}/g as itself), so string
    // input appears to recurse without terminating. The chunks were
    // presumably meant to be parsed as hex bytes — confirm before relying on
    // string paths; all callers in this file pass bytes or numbers.
    return path.match(/.{1,2}/g).map((p) => normalizePath(p));
  }
  if (typeof path === 'number') {
    return numberToBytes(path);
  }
  // already byte-like; pass through unchanged
  return path;
};
// Normalize a single path or a list of paths into a list of byte-form tags.
const normalizePaths = (paths) =>
  Array.isArray(paths) ? paths.map((p) => normalizePath(p)) : [normalizePath(paths)];
// When an element's data size is "unknown" (all size-value bits set), the
// element runs until the next occurrence of the same tag id or the end of
// the buffer. Walk sibling elements recursively to find where it ends.
const getInfinityDataSize = (id, bytes, offset) => {
  if (offset >= bytes.length) {
    return bytes.length;
  }
  const innerid = getvint(bytes, offset, false);
  // stop as soon as another element with the same id begins
  if (bytesMatch(id.bytes, innerid.bytes)) {
    return offset;
  }
  // skip past this child element (id + size header + data) and keep looking
  const dataHeader = getvint(bytes, offset + innerid.length);
  return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
};
/**
 * Notes on the EBLM format.
 *
 * EBLM uses "vints" tags. Every vint tag contains
 * two parts
 *
 * 1. The length from the first byte. You get this by
 *    converting the byte to binary and counting the zeros
 *    before a 1. Then you add 1 to that. Examples
 *    00011111 = length 4 because there are 3 zeros before a 1.
 *    00100000 = length 3 because there are 2 zeros before a 1.
 *    00000011 = length 7 because there are 6 zeros before a 1.
 *
 * 2. The bits used for length are removed from the first byte
 *    Then all the bytes are merged into a value. NOTE: this
 *    is not the case for id ebml tags as there id includes
 *    length bits.
 *
 */
// Walk the buffer and collect the data of every element matching the given
// id path (analogous to findBox for mp4). Returns an array of Uint8Array
// views into `bytes`.
export const findEbml = function(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);
  let results = [];
  if (!paths.length) {
    return results;
  }
  let i = 0;
  while (i < bytes.length) {
    const id = getvint(bytes, i, false);
    const dataHeader = getvint(bytes, i + id.length);
    const dataStart = i + id.length + dataHeader.length;
    // dataSize is unknown or this is a live stream
    // NOTE(review): only the one-byte unknown-size form (value 0x7f after
    // masking) is handled here — confirm longer unknown-size vints are not
    // expected from real inputs.
    if (dataHeader.value === 0x7f) {
      dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
      if (dataHeader.value !== bytes.length) {
        dataHeader.value -= dataStart;
      }
    }
    // clamp the data end to the buffer so truncated input cannot overflow
    const dataEnd = (dataStart + dataHeader.value) > bytes.length ? bytes.length : (dataStart + dataHeader.value);
    const data = bytes.subarray(dataStart, dataEnd);
    if (bytesMatch(paths[0], id.bytes)) {
      if (paths.length === 1) {
        // this is the end of the paths and we've found the tag we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next tag inside of the data
        // of this one
        results = results.concat(findEbml(data, paths.slice(1)));
      }
    }
    const totalLength = id.length + dataHeader.length + data.length;
    // move past this tag entirely, we are not looking for it
    i += totalLength;
  }
  return results;
};
// see https://www.matroska.org/technical/basics.html#block-structure
// Decode a matroska Block/SimpleBlock (or a BlockGroup wrapping one) into
// {duration, trackNumber, keyframe, invisible, lacing, discardable, frames,
// pts, dts, timestamp}. `type` is 'simple', 'block' or 'group'.
export const decodeBlock = function(block, type, timestampScale, clusterTimestamp) {
  let duration;
  if (type === 'group') {
    duration = findEbml(block, [EBML_TAGS.BlockDuration])[0];
    if (duration) {
      duration = bytesToNumber(duration);
      // NOTE(review): multiplying and dividing by timestampScale cancels
      // out, so this reduces to duration / 1000 — confirm intended scaling.
      duration = (((1 / timestampScale) * (duration)) * timestampScale) / 1000;
    }
    // a BlockGroup wraps a plain Block; unwrap it and continue
    block = findEbml(block, [EBML_TAGS.Block])[0];
    type = 'block';
    // treat data as a block after this point
  }
  // block layout: track-number vint, 2-byte signed relative timestamp,
  // 1 flags byte, then the (possibly laced) frame data
  const dv = new DataView(block.buffer, block.byteOffset, block.byteLength);
  const trackNumber = getvint(block, 0);
  const timestamp = dv.getInt16(trackNumber.length, false);
  const flags = block[trackNumber.length + 2];
  const data = block.subarray(trackNumber.length + 3);
  // pts/dts in seconds
  // NOTE(review): as above, the timestampScale factors cancel; this is
  // (clusterTimestamp + timestamp) / 1000.
  const ptsdts = (((1 / timestampScale) * (clusterTimestamp + timestamp)) * timestampScale) / 1000;
  // return the frame
  const parsed = {
    duration,
    trackNumber: trackNumber.value,
    // keyframe/discardable flags are only meaningful on SimpleBlock
    keyframe: type === 'simple' && (flags >> 7) === 1,
    invisible: ((flags & 0x08) >> 3) === 1,
    lacing: ((flags & 0x06) >> 1),
    discardable: type === 'simple' && (flags & 0x01) === 1,
    frames: [],
    pts: ptsdts,
    dts: ptsdts,
    timestamp
  };
  if (!parsed.lacing) {
    parsed.frames.push(data);
    return parsed;
  }
  // laced block: the first data byte is (frame count - 1)
  const numberOfFrames = data[0] + 1;
  const frameSizes = [];
  let offset = 1;
  // Fixed
  if (parsed.lacing === 2) {
    const sizeOfFrame = (data.length - offset) / numberOfFrames;
    for (let i = 0; i < numberOfFrames; i++) {
      frameSizes.push(sizeOfFrame);
    }
  }
  // xiph
  if (parsed.lacing === 1) {
    // each size is a sum of bytes where 0xFF means "continue adding"
    for (let i = 0; i < numberOfFrames - 1; i++) {
      let size = 0;
      do {
        size += data[offset];
        offset++;
      } while (data[offset - 1] === 0xFF);
      frameSizes.push(size);
    }
  }
  // ebml
  if (parsed.lacing === 3) {
    // first vint is unsinged
    // after that vints are singed and
    // based on a compounding size
    let size = 0;
    for (let i = 0; i < numberOfFrames - 1; i++) {
      const vint = i === 0 ? getvint(data, offset) : getvint(data, offset, true, true);
      size += vint.value;
      frameSizes.push(size);
      offset += vint.length;
    }
  }
  // NOTE(review): for xiph/ebml lacing only numberOfFrames - 1 sizes are
  // listed, and the trailing frame (the remaining bytes) is never pushed
  // here — confirm whether the last laced frame should be appended.
  frameSizes.forEach(function(size) {
    parsed.frames.push(data.subarray(offset, offset + size));
    offset += size;
  });
  return parsed;
};
// VP9 Codec Feature Metadata (CodecPrivate)
// https://www.webmproject.org/docs/container/
// Parse the (id, length, value) triples into named parameters; unknown ids
// are kept under their numeric id.
const parseVp9Private = (bytes) => {
  const params = {};
  let i = 0;

  while (i < bytes.length) {
    const id = bytes[i] & 0x7f;
    const len = bytes[i + 1];
    // single-byte values are unwrapped; longer values stay as raw bytes
    const val = len === 1 ? bytes[i + 2] : bytes.subarray(i + 2, i + 2 + len);

    switch (id) {
    case 1:
      params.profile = val;
      break;
    case 2:
      params.level = val;
      break;
    case 3:
      params.bitDepth = val;
      break;
    case 4:
      params.chromaSubsampling = val;
      break;
    default:
      params[id] = val;
    }
    i += 2 + len;
  }
  return params;
};
// Extract every audio/video/subtitle track from a webm/mkv buffer and map
// its matroska CodecID (plus CodecPrivate data, where needed) to a codec
// string usable in mime types. Returns tracks sorted by track number.
export const parseTracks = function(bytes) {
  bytes = toUint8(bytes);
  const decodedTracks = [];
  // the Tracks element may appear at several depths depending on how much
  // of the file we were given; try the deepest path first
  let tracks = findEbml(bytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.Track]);
  if (!tracks.length) {
    tracks = findEbml(bytes, [EBML_TAGS.Tracks, EBML_TAGS.Track]);
  }
  if (!tracks.length) {
    tracks = findEbml(bytes, [EBML_TAGS.Track]);
  }
  if (!tracks.length) {
    return decodedTracks;
  }
  tracks.forEach(function(track) {
    let trackType = findEbml(track, EBML_TAGS.TrackType)[0];
    if (!trackType || !trackType.length) {
      return;
    }
    // 1 is video, 2 is audio, 17 is subtitle
    // other values are unimportant in this context
    if (trackType[0] === 1) {
      trackType = 'video';
    } else if (trackType[0] === 2) {
      trackType = 'audio';
    } else if (trackType[0] === 17) {
      trackType = 'subtitle';
    } else {
      return;
    }
    // todo parse language
    const decodedTrack = {
      rawCodec: bytesToString(findEbml(track, [EBML_TAGS.CodecID])[0]),
      type: trackType,
      codecPrivate: findEbml(track, [EBML_TAGS.CodecPrivate])[0],
      number: bytesToNumber(findEbml(track, [EBML_TAGS.TrackNumber])[0]),
      defaultDuration: bytesToNumber(findEbml(track, [EBML_TAGS.DefaultDuration])[0]),
      default: findEbml(track, [EBML_TAGS.FlagDefault])[0],
      rawData: track
    };
    // map the matroska CodecID to a codec string; '' when unrecognized
    let codec = '';
    if ((/V_MPEG4\/ISO\/AVC/).test(decodedTrack.rawCodec)) {
      codec = `avc1.${getAvcCodec(decodedTrack.codecPrivate)}`;
    } else if ((/V_MPEGH\/ISO\/HEVC/).test(decodedTrack.rawCodec)) {
      codec = `hev1.${getHvcCodec(decodedTrack.codecPrivate)}`;
    } else if ((/V_MPEG4\/ISO\/ASP/).test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        codec = 'mp4v.20.' + decodedTrack.codecPrivate[4].toString();
      } else {
        codec = 'mp4v.20.9';
      }
    } else if ((/^V_THEORA/).test(decodedTrack.rawCodec)) {
      codec = 'theora';
    } else if ((/^V_VP8/).test(decodedTrack.rawCodec)) {
      codec = 'vp8';
    } else if ((/^V_VP9/).test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        // vp09.<profile>.<level>.<bitDepth>.<chromaSubsampling>[...]
        const {profile, level, bitDepth, chromaSubsampling} = parseVp9Private(decodedTrack.codecPrivate);
        codec = 'vp09.';
        codec += `${padStart(profile, 2, '0')}.`;
        codec += `${padStart(level, 2, '0')}.`;
        codec += `${padStart(bitDepth, 2, '0')}.`;
        codec += `${padStart(chromaSubsampling, 2, '0')}`;
        // Video -> Colour -> Ebml name
        const matrixCoefficients = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB1]])[0] || [];
        const videoFullRangeFlag = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB9]])[0] || [];
        const transferCharacteristics = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBA]])[0] || [];
        const colourPrimaries = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBB]])[0] || [];
        // if we find any optional codec parameter specify them all.
        if (matrixCoefficients.length ||
          videoFullRangeFlag.length ||
          transferCharacteristics.length ||
          colourPrimaries.length) {
          codec += `.${padStart(colourPrimaries[0], 2, '0')}`;
          codec += `.${padStart(transferCharacteristics[0], 2, '0')}`;
          codec += `.${padStart(matrixCoefficients[0], 2, '0')}`;
          codec += `.${padStart(videoFullRangeFlag[0], 2, '0')}`;
        }
      } else {
        codec = 'vp9';
      }
    } else if ((/^V_AV1/).test(decodedTrack.rawCodec)) {
      codec = `av01.${getAv1Codec(decodedTrack.codecPrivate)}`;
    } else if ((/A_ALAC/).test(decodedTrack.rawCodec)) {
      codec = 'alac';
    } else if ((/A_MPEG\/L2/).test(decodedTrack.rawCodec)) {
      codec = 'mp2';
    } else if ((/A_MPEG\/L3/).test(decodedTrack.rawCodec)) {
      codec = 'mp3';
    } else if ((/^A_AAC/).test(decodedTrack.rawCodec)) {
      if (decodedTrack.codecPrivate) {
        // the AAC object type lives in the top 5 bits of CodecPrivate
        codec = 'mp4a.40.' + (decodedTrack.codecPrivate[0] >>> 3).toString();
      } else {
        codec = 'mp4a.40.2';
      }
    } else if ((/^A_AC3/).test(decodedTrack.rawCodec)) {
      codec = 'ac-3';
    } else if ((/^A_PCM/).test(decodedTrack.rawCodec)) {
      codec = 'pcm';
    } else if ((/^A_MS\/ACM/).test(decodedTrack.rawCodec)) {
      codec = 'speex';
    } else if ((/^A_EAC3/).test(decodedTrack.rawCodec)) {
      codec = 'ec-3';
    } else if ((/^A_VORBIS/).test(decodedTrack.rawCodec)) {
      codec = 'vorbis';
    } else if ((/^A_FLAC/).test(decodedTrack.rawCodec)) {
      codec = 'flac';
    } else if ((/^A_OPUS/).test(decodedTrack.rawCodec)) {
      codec = 'opus';
    }
    decodedTrack.codec = codec;
    decodedTracks.push(decodedTrack);
  });
  return decodedTracks.sort((a, b) => a.number - b.number);
};
// Parse the clusters of a webm/mkv segment into decoded blocks. `tracks`
// may be passed in to skip re-parsing them; otherwise they are derived from
// the segment. Returns {tracks, blocks}.
export const parseData = function(data, tracks) {
  const allBlocks = [];
  const segment = findEbml(data, [EBML_TAGS.Segment])[0];
  let timestampScale = findEbml(segment, [EBML_TAGS.SegmentInfo, EBML_TAGS.TimestampScale])[0];
  // in nanoseconds, defaults to 1ms
  if (timestampScale && timestampScale.length) {
    timestampScale = bytesToNumber(timestampScale);
  } else {
    timestampScale = 1000000;
  }
  const clusters = findEbml(segment, [EBML_TAGS.Cluster]);
  if (!tracks) {
    tracks = parseTracks(segment);
  }
  clusters.forEach(function(cluster, ci) {
    const simpleBlocks = findEbml(cluster, [EBML_TAGS.SimpleBlock]).map((b) => ({type: 'simple', data: b}));
    const blockGroups = findEbml(cluster, [EBML_TAGS.BlockGroup]).map((b) => ({type: 'group', data: b}));
    // the cluster's base timestamp; block timestamps are relative to it
    let timestamp = findEbml(cluster, [EBML_TAGS.Timestamp])[0] || 0;
    if (timestamp && timestamp.length) {
      timestamp = bytesToNumber(timestamp);
    }
    // get all blocks then sort them into the correct order
    const blocks = simpleBlocks
      .concat(blockGroups)
      .sort((a, b) => a.data.byteOffset - b.data.byteOffset);
    blocks.forEach(function(block, bi) {
      const decoded = decodeBlock(block.data, block.type, timestampScale, timestamp);
      allBlocks.push(decoded);
    });
  });
  return {tracks, blocks: allBlocks};
};

View file

@ -1,338 +0,0 @@
import {bytesToString, toUint8, toHexString, bytesMatch} from './byte-helpers.js';
import {parseTracks as parseEbmlTracks} from './ebml-helpers.js';
import {parseTracks as parseMp4Tracks} from './mp4-helpers.js';
import {findFourCC} from './riff-helpers.js';
import {getPages} from './ogg-helpers.js';
import {detectContainerForBytes} from './containers.js';
import {findH264Nal, findH265Nal} from './nal-helpers.js';
import {parseTs} from './m2ts-helpers.js';
import {getAvcCodec, getHvcCodec} from './codec-helpers.js';
import {getId3Offset} from './id3-helpers.js';
// https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// https://tools.ietf.org/html/rfc2361
// Map a 2-byte wFormatTag (big-endian byte pair) to a codec name, or ''
// when the tag is not recognized.
const wFormatTagCodec = function(wFormatTag) {
  wFormatTag = toUint8(wFormatTag);

  // [tag bytes, codec] pairs checked in order; aac has two known tags
  const lookup = [
    [[0x00, 0x55], 'mp3'],
    [[0x16, 0x00], 'aac'],
    [[0x00, 0xFF], 'aac'],
    [[0x70, 0x4f], 'opus'],
    [[0x6C, 0x61], 'alac'],
    [[0xF1, 0xAC], 'flac'],
    [[0x20, 0x00], 'ac-3'],
    [[0xFF, 0xFE], 'ec-3'],
    [[0x00, 0x50], 'mp2'],
    [[0x56, 0x6f], 'vorbis'],
    [[0xA1, 0x09], 'speex']
  ];
  const match = lookup.find(([tag]) => bytesMatch(wFormatTag, tag));

  return match ? match[1] : '';
};
// Build a mimetype like `video/mp4;codecs="avc1,mp4a"` from a container
// name and a {video, audio} codec map. Video codecs are listed before
// audio; the codecs parameter is omitted when neither codec is known, and
// the top-level type is 'video' whenever a video codec is present.
const formatMimetype = (name, codecs) => {
  const codecList = [];

  if (codecs.video) {
    codecList.push(codecs.video);
  }
  if (codecs.audio) {
    codecList.push(codecs.audio);
  }
  const mediaType = codecs.video ? 'video' : 'audio';
  const codecParam = codecList.length ? `;codecs="${codecList.join(',')}"` : '';

  return `${mediaType}/${name}${codecParam}`;
};
const parseCodecFrom = {
mov(bytes) {
// mov and mp4 both use a nearly identical box structure.
const retval = parseCodecFrom.mp4(bytes);
if (retval.mimetype) {
retval.mimetype = retval.mimetype.replace('mp4', 'quicktime');
}
return retval;
},
mp4(bytes) {
bytes = toUint8(bytes);
const codecs = {};
const tracks = parseMp4Tracks(bytes);
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
if (track.type === 'audio' && !codecs.audio) {
codecs.audio = track.codec;
}
if (track.type === 'video' && !codecs.video) {
codecs.video = track.codec;
}
}
return {codecs, mimetype: formatMimetype('mp4', codecs)};
},
'3gp'(bytes) {
return {codecs: {}, mimetype: 'video/3gpp'};
},
  // Ogg container: look for codec magic strings in the first pages. The
  // byte patterns below are the ASCII codec names at their fixed offsets
  // within each codec's identification header.
  ogg(bytes) {
    const pages = getPages(bytes, 0, 4);
    const codecs = {};
    pages.forEach(function(page) {
      // "Opus" at offset 28
      if (bytesMatch(page, [0x4F, 0x70, 0x75, 0x73], {offset: 28})) {
        codecs.audio = 'opus';
      // "VP80" at offset 29
      } else if (bytesMatch(page, [0x56, 0x50, 0x38, 0x30], {offset: 29})) {
        codecs.video = 'vp8';
      // "theora" at offset 29
      } else if (bytesMatch(page, [0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {offset: 29})) {
        codecs.video = 'theora';
      // "FLAC" at offset 29
      } else if (bytesMatch(page, [0x46, 0x4C, 0x41, 0x43], {offset: 29})) {
        codecs.audio = 'flac';
      // "Speex" at offset 28
      } else if (bytesMatch(page, [0x53, 0x70, 0x65, 0x65, 0x78], {offset: 28})) {
        codecs.audio = 'speex';
      // "vorbis" at offset 29
      } else if (bytesMatch(page, [0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {offset: 29})) {
        codecs.audio = 'vorbis';
      }
    });
    return {codecs, mimetype: formatMimetype('ogg', codecs)};
  },
  // RIFF/WAVE: the codec is identified by the fmt chunk's wFormatTag
  wav(bytes) {
    const format = findFourCC(bytes, ['WAVE', 'fmt'])[0];
    // the tag is stored little-endian; reverse to the big-endian order that
    // wFormatTagCodec expects
    const wFormatTag = Array.prototype.slice.call(format, 0, 2).reverse();
    let mimetype = 'audio/vnd.wave';
    const codecs = {
      audio: wFormatTagCodec(wFormatTag)
    };
    const codecString = wFormatTag.reduce(function(acc, v) {
      if (v) {
        acc += toHexString(v);
      }
      return acc;
    }, '');
    if (codecString) {
      mimetype += `;codec=${codecString}`;
    }
    // fall back to the raw hex tag when the codec is unrecognized
    if (codecString && !codecs.audio) {
      codecs.audio = codecString;
    }
    return {codecs, mimetype};
  },
avi(bytes) {
const movi = findFourCC(bytes, ['AVI', 'movi'])[0];
const strls = findFourCC(bytes, ['AVI', 'hdrl', 'strl']);
const codecs = {};
strls.forEach(function(strl) {
const strh = findFourCC(strl, ['strh'])[0];
const strf = findFourCC(strl, ['strf'])[0];
// now parse AVIStreamHeader to get codec and type:
// https://docs.microsoft.com/en-us/previous-versions/windows/desktop/api/avifmt/ns-avifmt-avistreamheader
const type = bytesToString(strh.subarray(0, 4));
let codec;
let codecType;
if (type === 'vids') {
// https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapinfoheader
const handler = bytesToString(strh.subarray(4, 8));
const compression = bytesToString(strf.subarray(16, 20));
// look for 00dc (compressed video fourcc code) or 00db (uncompressed video fourcc code)
const videoData = findFourCC(movi, ['00dc'])[0] || findFourCC(movi, ['00db'][0]);
if (handler === 'H264' || compression === 'H264') {
if (videoData && videoData.length) {
codec = parseCodecFrom.h264(videoData).codecs.video;
} else {
codec = 'avc1';
}
} else if (handler === 'HEVC' || compression === 'HEVC') {
if (videoData && videoData.length) {
codec = parseCodecFrom.h265(videoData).codecs.video;
} else {
codec = 'hev1';
}
} else if (handler === 'FMP4' || compression === 'FMP4') {
if (movi.length) {
codec = 'mp4v.20.' + movi[12].toString();
} else {
codec = 'mp4v.20';
}
} else if (handler === 'VP80' || compression === 'VP80') {
codec = 'vp8';
} else if (handler === 'VP90' || compression === 'VP90') {
codec = 'vp9';
} else if (handler === 'AV01' || compression === 'AV01') {
codec = 'av01';
} else if (handler === 'theo' || compression === 'theora') {
codec = 'theora';
} else {
if (videoData && videoData.length) {
const result = detectContainerForBytes(videoData);
if (result === 'h264') {
codec = parseCodecFrom.h264(movi).codecs.video;
}
if (result === 'h265') {
codec = parseCodecFrom.h265(movi).codecs.video;
}
}
if (!codec) {
codec = handler || compression;
}
}
codecType = 'video';
} else if (type === 'auds') {
codecType = 'audio';
// look for 00wb (audio data fourcc)
// const audioData = findFourCC(movi, ['01wb']);
const wFormatTag = Array.prototype.slice.call(strf, 0, 2).reverse();
codecs.audio = wFormatTagCodec(wFormatTag);
} else {
return;
}
if (codec) {
codecs[codecType] = codec;
}
});
return {codecs, mimetype: formatMimetype('avi', codecs)};
},
  // MPEG-2 transport stream: use the PMT's stream table, refining the
  // generic avc1/hev1 codec strings from real PES packet data when present
  ts(bytes) {
    const result = parseTs(bytes);
    const codecs = {};
    Object.keys(result.streams).forEach(function(esPid) {
      const stream = result.streams[esPid];
      if (stream.codec === 'avc1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h264(stream.packets[0]).codecs.video;
      } else if (stream.codec === 'hev1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h265(stream.packets[0]).codecs.video;
      }
      codecs[stream.type] = stream.codec;
    });
    return {codecs, mimetype: formatMimetype('mp2t', codecs)};
  },
  webm(bytes) {
    // mkv and webm both use ebml to store code info
    const retval = parseCodecFrom.mkv(bytes);
    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('x-matroska', 'webm');
    }
    return retval;
  },
  // matroska: take the first audio and first video codec from the tracks
  mkv(bytes) {
    const codecs = {};
    const tracks = parseEbmlTracks(bytes);
    for (let i = 0; i < tracks.length; i++) {
      const track = tracks[i];
      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }
      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }
    return {codecs, mimetype: formatMimetype('x-matroska', codecs)};
  },
  // raw audio elementary streams: the container already implies the codec
  aac(bytes) {
    return {codecs: {audio: 'aac'}, mimetype: 'audio/aac'};
  },
  ac3(bytes) {
    // past id3 and syncword
    const offset = getId3Offset(bytes) + 2;
    // default to ac-3
    let codec = 'ac-3';
    if (bytesMatch(bytes, [0xB8, 0xE0], {offset})) {
      codec = 'ac-3';
    // 0x01, 0x7F
    } else if (bytesMatch(bytes, [0x01, 0x7f], {offset})) {
      codec = 'ec-3';
    }
    return {codecs: {audio: codec}, mimetype: 'audio/vnd.dolby.dd-raw'};
  },
  mp3(bytes) {
    return {codecs: {audio: 'mp3'}, mimetype: 'audio/mpeg'};
  },
  flac(bytes) {
    return {codecs: {audio: 'flac'}, mimetype: 'audio/flac'};
  },
  // raw H.264 stream: refine 'avc1' with profile/level from the SPS NAL
  'h264'(bytes) {
    // find seq_parameter_set_rbsp to get encoding settings for codec
    const nal = findH264Nal(bytes, 7, 3);
    const retval = {codecs: {video: 'avc1'}, mimetype: 'video/h264'};
    if (nal.length) {
      retval.codecs.video += `.${getAvcCodec(nal)}`;
    }
    return retval;
  },
  // raw H.265 stream: refine 'hev1' from the VPS or SPS NAL
  'h265'(bytes) {
    const retval = {codecs: {video: 'hev1'}, mimetype: 'video/h265'};
    // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    // to get encoding settings for codec
    const nal = findH265Nal(bytes, [32, 33], 3);
    if (nal.length) {
      const type = (nal[0] >> 1) & 0x3F;
      // profile_tier_level starts at byte 5 for video_parameter_set_rbsp
      // byte 2 for seq_parameter_set_rbsp
      retval.codecs.video += `.${getHvcCodec(nal.subarray(type === 32 ? 5 : 2))}`;
    }
    return retval;
  }
};
/**
 * Detect the container of the given bytes and, when a codec parser exists
 * for that container, fill in its codecs and mimetype.
 *
 * @param {Uint8Array|*} bytes - byte-like media data
 * @return {{codecs: Object, container: string, mimetype: string}}
 */
export const parseFormatForBytes = (bytes) => {
  bytes = toUint8(bytes);
  const result = {
    codecs: {},
    container: detectContainerForBytes(bytes),
    mimetype: ''
  };
  const parseCodecFn = parseCodecFrom[result.container];

  if (parseCodecFn) {
    // the inner `parseCodecFn ? ... : {}` ternary was redundant inside this
    // guard and has been removed
    const parsed = parseCodecFn(bytes);

    result.codecs = parsed.codecs || {};
    result.mimetype = parsed.mimetype || '';
  }
  return result;
};

View file

@ -1,35 +0,0 @@
import {toUint8, bytesMatch} from './byte-helpers.js';
// the 3-byte ASCII marker "ID3" that begins an id3v2 header
const ID3 = toUint8([0x49, 0x44, 0x33]);

/**
 * Total size in bytes of the id3v2 tag that starts at `offset`, including
 * the 10-byte header and the optional 10-byte footer.
 */
export const getId3Size = function(bytes, offset = 0) {
  bytes = toUint8(bytes);
  // the payload size is a 28-bit synchsafe integer: four bytes, each
  // contributing only its low seven bits
  let size = 0;

  for (let i = 6; i <= 9; i++) {
    size = (size << 7) | bytes[offset + i];
  }
  // bit 4 of the flags byte marks the presence of a 10-byte footer
  const hasFooter = (bytes[offset + 5] & 16) >> 4;

  return size + (hasFooter ? 20 : 10);
};
/**
 * Return the offset of the first byte after any id3v2 tags found at
 * `offset`. Some files contain several consecutive ID3 sections even though
 * they should not, so every tag found at the current position is skipped.
 */
export const getId3Offset = function(bytes, offset = 0) {
  bytes = toUint8(bytes);
  while ((bytes.length - offset) >= 10 && bytesMatch(bytes, ID3, {offset})) {
    offset += getId3Size(bytes, offset);
  }
  return offset;
};

View file

@ -1,17 +0,0 @@
import * as codecs from './codecs';
import * as byteHelpers from './byte-helpers.js';
import * as containers from './containers.js';
import decodeB64ToUint8Array from './decode-b64-to-uint8-array.js';
import * as mediaGroups from './media-groups.js';
import resolveUrl from './resolve-url.js';
import Stream from './stream.js';
// bundle every helper module under a single default export for consumers
// that cannot import the individual module entry points
export default {
  codecs,
  byteHelpers,
  containers,
  decodeB64ToUint8Array,
  mediaGroups,
  resolveUrl,
  Stream
};

View file

@ -1,104 +0,0 @@
import {bytesMatch, toUint8} from './byte-helpers.js';
const SYNC_BYTE = 0x47;

// use of maxPes is deprecated as we should always look at
// all pes packets to prevent being caught off guard by changes
// in that stream that happen after the pes specified
/**
 * Parse an MPEG-2 transport stream far enough to build its PMT: the set of
 * elementary streams with their stream types, codec guesses and collected
 * PES payloads.
 *
 * @param {Uint8Array|*} bytes - transport stream bytes
 * @param {number} [maxPes=Infinity] - deprecated; maximum PES packets to collect
 * @return {Object} pmt object: {pid, streams: {esPid: {type, codec, packets, ...}}}
 */
export const parseTs = function(bytes, maxPes = Infinity) {
  bytes = toUint8(bytes);
  let startIndex = 0;
  let endIndex = 188;
  const pmt = {};
  let pesCount = 0;
  while (endIndex < bytes.byteLength && pesCount < maxPes) {
    // resynchronize byte-by-byte until both this packet and the next start
    // with the sync byte
    if (bytes[startIndex] !== SYNC_BYTE && bytes[endIndex] !== SYNC_BYTE) {
      endIndex += 1;
      startIndex += 1;
      continue;
    }
    const packet = bytes.subarray(startIndex, endIndex);
    const pid = (((packet[1] & 0x1f) << 8) | packet[2]);
    const hasPusi = !!(packet[1] & 0x40);
    const hasAdaptationHeader = (((packet[3] & 0x30) >>> 4) > 0x01);
    let payloadOffset = 4 + (hasAdaptationHeader ? (packet[4] + 1) : 0);
    if (hasPusi) {
      payloadOffset += packet[payloadOffset] + 1;
    }
    if (pid === 0 && !pmt.pid) {
      // PAT: remember which pid carries the PMT
      pmt.pid = (packet[payloadOffset + 10] & 0x1f) << 8 | packet[payloadOffset + 11];
    } else if (pmt.pid && pid === pmt.pid) {
      // PMT: build the elementary stream table
      const isNotForward = packet[payloadOffset + 5] & 0x01;
      // ignore forward pmt declarations
      if (!isNotForward) {
        // FIX: `continue` previously ran without advancing the packet
        // indices, so a forward PMT declaration looped forever
        startIndex += 188;
        endIndex += 188;
        continue;
      }
      pmt.streams = pmt.streams || {};
      const sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
      const tableEnd = 3 + sectionLength - 4;
      const programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
      let offset = 12 + programInfoLength;
      while (offset < tableEnd) {
        // add an entry that maps the elementary_pid to the stream_type
        const i = payloadOffset + offset;
        const type = packet[i];
        const esPid = (packet[i + 1] & 0x1F) << 8 | packet[i + 2];
        const esLength = ((packet[i + 3] & 0x0f) << 8 | (packet[i + 4]));
        const esInfo = packet.subarray(i + 5, i + 5 + esLength);
        const stream = pmt.streams[esPid] = {
          esInfo,
          typeNumber: type,
          packets: [],
          type: '',
          codec: ''
        };
        if (type === 0x06 && bytesMatch(esInfo, [0x4F, 0x70, 0x75, 0x73], {offset: 2})) {
          // private data whose descriptor contains "Opus"
          stream.type = 'audio';
          stream.codec = 'opus';
        } else if (type === 0x1B || type === 0x20) {
          stream.type = 'video';
          stream.codec = 'avc1';
        } else if (type === 0x24) {
          stream.type = 'video';
          stream.codec = 'hev1';
        } else if (type === 0x10) {
          stream.type = 'video';
          stream.codec = 'mp4v.20';
        } else if (type === 0x0F) {
          stream.type = 'audio';
          stream.codec = 'aac';
        } else if (type === 0x81) {
          stream.type = 'audio';
          stream.codec = 'ac-3';
        } else if (type === 0x87) {
          stream.type = 'audio';
          stream.codec = 'ec-3';
        } else if (type === 0x03 || type === 0x04) {
          stream.type = 'audio';
          stream.codec = 'mp3';
        }
        offset += esLength + 5;
      }
    } else if (pmt.pid && pmt.streams) {
      // PES payload: only collect it for pids declared in the PMT.
      // FIX: guard against pids with no stream entry (e.g. a repeated PAT
      // packet or an undeclared stream), which previously threw a TypeError
      // on `pmt.streams[pid].packets`.
      if (pmt.streams[pid]) {
        pmt.streams[pid].packets.push(packet.subarray(payloadOffset));
        pesCount++;
      }
    }
    startIndex += 188;
    endIndex += 188;
  }
  if (!pmt.streams) {
    pmt.streams = {};
  }
  return pmt;
};

View file

@ -1,22 +0,0 @@
/**
* Loops through all supported media groups in master and calls the provided
* callback for each group
*
* @param {Object} master
* The parsed master manifest object
* @param {string[]} groups
* The media groups to call the callback for
* @param {Function} callback
* Callback to call for each media group
*/
/**
 * Invoke `callback` once for every media-group label present in the
 * manifest for each requested media type.
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {string[]} groups
 *        The media groups to call the callback for
 * @param {Function} callback
 *        Called with (mediaProperties, mediaType, groupKey, labelKey)
 */
export const forEachMediaGroup = (master, groups, callback) => {
  groups.forEach((mediaType) => {
    const groupsForType = master.mediaGroups[mediaType];

    for (const groupKey in groupsForType) {
      for (const labelKey in groupsForType[groupKey]) {
        callback(groupsForType[groupKey][labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};

View file

@ -1,36 +0,0 @@
// matches HLS mime types: audio|video|application / [x-|vnd.apple.] mpegurl
const HLS_SOURCE_RE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
// matches the DASH manifest mime type
const DASH_SOURCE_RE = /^application\/dash\+xml/i;

/**
 * Returns a string that describes the type of source based on a video source object's
 * media type.
 *
 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
 *
 * @param {string} type
 *        Video source object media type
 * @return {('hls'|'dash'|'vhs-json'|null)}
 *         VHS source type string
 */
export const simpleTypeFromSourceType = (type) => {
  if (HLS_SOURCE_RE.test(type)) {
    return 'hls';
  }

  if (DASH_SOURCE_RE.test(type)) {
    return 'dash';
  }

  // Special case: a manifest object passed to http-streaming instead of a
  // source URL (vnd = vendor, video.js = organization, VHS = project, and
  // the +json suffix identifies the structure of the media type).
  // See https://en.wikipedia.org/wiki/Media_type for media type details.
  if (type === 'application/vnd.videojs.vhs+json') {
    return 'vhs-json';
  }

  return null;
};

View file

@ -1,564 +0,0 @@
import {
stringToBytes,
toUint8,
bytesMatch,
bytesToString,
toHexString,
padStart,
bytesToNumber
} from './byte-helpers.js';
import {getAvcCodec, getHvcCodec, getAv1Codec} from './codec-helpers.js';
import {parseOpusHead} from './opus-helpers.js';
// coerce a single path component to bytes; strings become character
// codes, anything else (numbers, byte arrays) passes through untouched
const normalizePath = function(path) {
  if (typeof path === 'string') {
    return stringToBytes(path);
  }

  return path;
};

// always work with an array of normalized path components
const normalizePaths = function(paths) {
  if (Array.isArray(paths)) {
    return paths.map((p) => normalizePath(p));
  }

  return [normalizePath(paths)];
};
let DESCRIPTORS;

/**
 * Parse a buffer of back-to-back ISO/IEC 14496-1 (MPEG-4 Systems)
 * descriptors into objects, using the parsers registered in DESCRIPTORS.
 *
 * @param {TypedArray} bytes
 *        Bytes containing one or more descriptors.
 * @return {Object[]}
 *         One parsed object per descriptor that has a registered parser;
 *         descriptors with unknown tags are skipped.
 */
export const parseDescriptors = function(bytes) {
  bytes = toUint8(bytes);
  const results = [];
  let i = 0;

  while (bytes.length > i) {
    const tag = bytes[i];
    let size = 0;
    let headerSize = 0;

    // the tag is a single byte
    headerSize++;

    // NOTE: length bytes are read relative to `i`; the previous code
    // indexed `bytes[headerSize]` directly, which only worked for the
    // first descriptor in the buffer.
    let byte = bytes[i + headerSize];

    headerSize++;

    // the size is an "expandable" field: 7 bits per byte, most
    // significant first, with bit 0x80 flagging a continuation byte.
    while (byte & 0x80) {
      // accumulate 7 bits per continuation byte; the previous code
      // overwrote `size` each iteration, dropping bits for sizes
      // encoded in three or more bytes.
      size = (size << 7) | (byte & 0x7F);
      byte = bytes[i + headerSize];
      headerSize++;
    }

    size = (size << 7) | (byte & 0x7F);

    // hand the descriptor payload to the parser registered for this tag
    for (let z = 0; z < DESCRIPTORS.length; z++) {
      const {id, parser} = DESCRIPTORS[z];

      if (tag === id) {
        results.push(parser(bytes.subarray(i + headerSize, i + headerSize + size)));
        break;
      }
    }

    i += size + headerSize;
  }

  return results;
};
// parsers for the MPEG-4 Systems descriptors we care about, keyed by tag
DESCRIPTORS = [
  // 0x03: ES_Descriptor
  {id: 0x03, parser(bytes) {
    const desc = {
      tag: 0x03,
      id: bytes[0] << 8 | bytes[1],
      flags: bytes[2],
      size: 3,
      dependsOnEsId: 0,
      ocrEsId: 0,
      descriptors: [],
      url: ''
    };

    // streamDependenceFlag: a 2-byte dependsOn_ES_ID follows
    if (desc.flags & 0x80) {
      desc.dependsOnEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    }

    // URL_Flag: a 1-byte length followed by the url string
    if (desc.flags & 0x40) {
      const len = bytes[desc.size];

      desc.url = bytesToString(bytes.subarray(desc.size + 1, desc.size + 1 + len));

      // advance past the length byte AND the string itself; the previous
      // code added only `len`, leaving every later field off by one byte.
      desc.size += len + 1;
    }

    // OCRstreamFlag: a 2-byte OCR_ES_Id follows
    if (desc.flags & 0x20) {
      desc.ocrEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    }

    // whatever remains is nested descriptors (DecoderConfig, SLConfig, ...)
    desc.descriptors = parseDescriptors(bytes.subarray(desc.size)) || [];

    return desc;
  }},
  // 0x04: DecoderConfigDescriptor
  {id: 0x04, parser(bytes) {
    // DecoderConfigDescriptor
    const desc = {
      tag: 0x04,
      oti: bytes[0],
      streamType: bytes[1],
      bufferSize: bytes[2] << 16 | bytes[3] << 8 | bytes[4],
      maxBitrate: bytes[5] << 24 | bytes[6] << 16 | bytes[7] << 8 | bytes[8],
      avgBitrate: bytes[9] << 24 | bytes[10] << 16 | bytes[11] << 8 | bytes[12],
      descriptors: parseDescriptors(bytes.subarray(13))
    };

    return desc;
  }},
  // 0x05: DecoderSpecificInfo, kept as raw bytes
  {id: 0x05, parser(bytes) {
    // DecoderSpecificInfo
    return {tag: 0x05, bytes};
  }},
  // 0x06: SLConfigDescriptor, kept as raw bytes
  {id: 0x06, parser(bytes) {
    // SLConfigDescriptor
    return {tag: 0x06, bytes};
  }}
];
/**
 * Find every box matching a path of box names inside an iso bmff
 * (mp4-style) byte buffer.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {Uint8Array[]|string[]|string|Uint8Array} paths
 *        An array of paths or a single path representing the name
 *        of boxes to search through in bytes. Paths may be
 *        uint8 (character codes) or strings.
 *
 * @param {boolean} [complete=false]
 *        Should we search only for complete boxes on the final path.
 *        This is very useful when you do not want to get back partial boxes
 *        in the case of streaming files.
 *
 * @return {Uint8Array[]}
 *         An array of the end paths that we found.
 */
export const findBox = function(bytes, paths, complete = false) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);

  const matches = [];

  if (!paths.length) {
    // an empty path matches nothing
    return matches;
  }

  let offset = 0;

  while (offset < bytes.length) {
    // box layout: 4-byte big-endian size (header included), then 4-byte type
    const boxSize = (bytes[offset] << 24 | bytes[offset + 1] << 16 | bytes[offset + 2] << 8 | bytes[offset + 3]) >>> 0;
    const boxType = bytes.subarray(offset + 4, offset + 8);

    // a zero size marks an invalid box; stop scanning
    if (boxSize === 0) {
      break;
    }

    let boxEnd = offset + boxSize;

    if (boxEnd > bytes.length) {
      // this box extends past the bytes we have; stop if only complete
      // boxes were requested, otherwise clamp to the available data
      if (complete) {
        break;
      }
      boxEnd = bytes.length;
    }

    const payload = bytes.subarray(offset + 8, boxEnd);

    if (bytesMatch(boxType, paths[0])) {
      if (paths.length === 1) {
        // end of the path: this payload is a result
        matches.push(payload);
      } else {
        // descend into this box for the remainder of the path
        matches.push.apply(matches, findBox(payload, paths.slice(1), complete));
      }
    }

    offset = boxEnd;
  }

  // we've finished searching all of bytes
  return matches;
};
/**
 * Search for a single matching box by name in an iso bmff format like
 * mp4. This function is useful for finding codec boxes which
 * can be placed arbitrarily in sample descriptions depending
 * on the version of the file or file type.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {string|Uint8Array} name
 *        The name of the box to find.
 *
 * @return {Uint8Array[]}
 *         a subarray of bytes representing the named box we found.
 */
export const findNamedBox = function(bytes, name) {
  name = normalizePath(name);

  if (!name.length) {
    // nothing to search for: return an empty subarray
    return bytes.subarray(bytes.length);
  }

  for (let i = 0; i < bytes.length; i++) {
    if (!bytesMatch(bytes.subarray(i, i + name.length), name)) {
      continue;
    }

    // the 4 bytes preceding the type are the big-endian box size
    const size = (bytes[i - 4] << 24 | bytes[i - 3] << 16 | bytes[i - 2] << 8 | bytes[i - 1]) >>> 0;
    const end = size > 1 ? i + size : bytes.byteLength;

    return bytes.subarray(i + 4, end);
  }

  // we've finished searching all of bytes
  return bytes.subarray(bytes.length);
};
// Parse a full box of fixed-size sample entries (stss/stco/stts/stsc/...):
// bytes 4-8 hold the entry count, entries start at byte 8.
const parseSamples = function(data, entrySize = 4, parseEntry = (d) => bytesToNumber(d)) {
  const entries = [];

  if (!data || !data.length) {
    return entries;
  }

  let remaining = bytesToNumber(data.subarray(4, 8));
  let offset = 8;

  while (remaining--) {
    entries.push(parseEntry(data.subarray(offset, offset + entrySize)));
    offset += entrySize;
  }

  return entries;
};
/**
 * Build a per-sample frame table from an stbl (sample table) box: one
 * entry per sample with its byte range, keyframe flag, timestamp (ms)
 * and duration (timescale ticks).
 *
 * @param {TypedArray} stbl
 *        The payload of the stbl box for one track.
 * @param {number} timescale
 *        The track's timescale (ticks per second) from mdhd.
 * @return {Object[]}
 *         The frame table.
 */
export const buildFrameTable = function(stbl, timescale) {
  // stss: sync (key) sample numbers; stco: chunk byte offsets
  const keySamples = parseSamples(findBox(stbl, ['stss'])[0]);
  const chunkOffsets = parseSamples(findBox(stbl, ['stco'])[0]);
  // stts: run-length encoded sample durations
  const timeToSamples = parseSamples(findBox(stbl, ['stts'])[0], 8, (entry) => ({
    sampleCount: bytesToNumber(entry.subarray(0, 4)),
    sampleDelta: bytesToNumber(entry.subarray(4, 8))
  }));
  // stsc: how many samples live in each run of chunks
  const samplesToChunks = parseSamples(findBox(stbl, ['stsc'])[0], 12, (entry) => ({
    firstChunk: bytesToNumber(entry.subarray(0, 4)),
    samplesPerChunk: bytesToNumber(entry.subarray(4, 8)),
    sampleDescriptionIndex: bytesToNumber(entry.subarray(8, 12))
  }));
  const stsz = findBox(stbl, ['stsz'])[0];
  // stsz starts with a 4 byte sampleSize which we don't need
  const sampleSizes = parseSamples(stsz && stsz.length && stsz.subarray(4) || null);
  const frames = [];
  for (let chunkIndex = 0; chunkIndex < chunkOffsets.length; chunkIndex++) {
    let samplesInChunk;
    // find the stsc run covering this chunk (chunk numbers are 1-based)
    for (let i = 0; i < samplesToChunks.length; i++) {
      const sampleToChunk = samplesToChunks[i];
      const isThisOne = (chunkIndex + 1) >= sampleToChunk.firstChunk &&
        (i + 1 >= samplesToChunks.length || (chunkIndex + 1) < samplesToChunks[i + 1].firstChunk);
      if (isThisOne) {
        samplesInChunk = sampleToChunk.samplesPerChunk;
        break;
      }
    }
    let chunkOffset = chunkOffsets[chunkIndex];
    for (let i = 0; i < samplesInChunk; i++) {
      const frameEnd = sampleSizes[frames.length];
      // if we don't have key samples every frame is a keyframe
      let keyframe = !keySamples.length;
      // stss sample numbers are 1-based, hence frames.length + 1
      if (keySamples.length && keySamples.indexOf(frames.length + 1) !== -1) {
        keyframe = true;
      }
      const frame = {
        keyframe,
        start: chunkOffset,
        end: chunkOffset + frameEnd
      };
      // look up this sample's duration in the stts runs
      // NOTE(review): this compares the global frame index against a single
      // run's sampleCount rather than a running total across runs — confirm
      // multi-run stts tables are handled as intended.
      for (let k = 0; k < timeToSamples.length; k++) {
        const {sampleCount, sampleDelta} = timeToSamples[k];
        if ((frames.length) <= sampleCount) {
          // (sampleDelta / timescale) is seconds; * 1000 converts to ms
          const lastTimestamp = frames.length ? frames[frames.length - 1].timestamp : 0;
          frame.timestamp = lastTimestamp + ((sampleDelta / timescale) * 1000);
          frame.duration = sampleDelta;
          break;
        }
      }
      frames.push(frame);
      chunkOffset += frameEnd;
    }
  }
  return frames;
};
/**
 * Derive the RFC 6381 codec string for a track from a single sample
 * description (one entry of the stsd box) and record basic media info
 * on the track (width/height for video; channels, bit depth and sample
 * rate for audio). Mutates `track`.
 *
 * @param {Object} track
 *        Track object to mutate; `track.type` must already be set.
 * @param {TypedArray} bytes
 *        Sample description bytes, starting at the 4-byte format fourcc.
 */
export const addSampleDescription = function(track, bytes) {
  // the first four bytes are the sample-entry format (e.g. 'avc1')
  let codec = bytesToString(bytes.subarray(0, 4));
  if (track.type === 'video') {
    track.info = track.info || {};
    // 16-bit big-endian width/height from the visual sample entry
    track.info.width = bytes[28] << 8 | bytes[29];
    track.info.height = bytes[30] << 8 | bytes[31];
  } else if (track.type === 'audio') {
    track.info = track.info || {};
    // 16-bit big-endian fields from the audio sample entry
    track.info.channels = bytes[20] << 8 | bytes[21];
    track.info.bitDepth = bytes[22] << 8 | bytes[23];
    track.info.sampleRate = bytes[28] << 8 | bytes[29];
  }
  if (codec === 'avc1') {
    const avcC = findNamedBox(bytes, 'avcC');
    // AVCDecoderConfigurationRecord
    codec += `.${getAvcCodec(avcC)}`;
    track.info.avcC = avcC;
    // TODO: do we need to parse all this?
    /* {
      configurationVersion: avcC[0],
      profile: avcC[1],
      profileCompatibility: avcC[2],
      level: avcC[3],
      lengthSizeMinusOne: avcC[4] & 0x3
    };
    let spsNalUnitCount = avcC[5] & 0x1F;
    const spsNalUnits = track.info.avc.spsNalUnits = [];
    // past spsNalUnitCount
    let offset = 6;
    while (spsNalUnitCount--) {
      const nalLen = avcC[offset] << 8 | avcC[offset + 1];
      spsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
      offset += nalLen + 2;
    }
    let ppsNalUnitCount = avcC[offset];
    const ppsNalUnits = track.info.avc.ppsNalUnits = [];
    // past ppsNalUnitCount
    offset += 1;
    while (ppsNalUnitCount--) {
      const nalLen = avcC[offset] << 8 | avcC[offset + 1];
      ppsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
      offset += nalLen + 2;
    }*/
    // HEVCDecoderConfigurationRecord
  } else if (codec === 'hvc1' || codec === 'hev1') {
    codec += `.${getHvcCodec(findNamedBox(bytes, 'hvcC'))}`;
  } else if (codec === 'mp4a' || codec === 'mp4v') {
    // mpeg-4 audio/video: dig the object type out of the esds descriptors
    const esds = findNamedBox(bytes, 'esds');
    const esDescriptor = parseDescriptors(esds.subarray(4))[0];
    const decoderConfig = esDescriptor && esDescriptor.descriptors.filter(({tag}) => tag === 0x04)[0];
    if (decoderConfig) {
      // most codecs do not have a further '.'
      // such as 0xa5 for ac-3 and 0xa6 for e-ac-3
      codec += '.' + toHexString(decoderConfig.oti);
      if (decoderConfig.oti === 0x40) {
        // AAC: append the audio object type from DecoderSpecificInfo
        codec += '.' + (decoderConfig.descriptors[0].bytes[0] >> 3).toString();
      } else if (decoderConfig.oti === 0x20) {
        codec += '.' + (decoderConfig.descriptors[0].bytes[4]).toString();
      } else if (decoderConfig.oti === 0xdd) {
        codec = 'vorbis';
      }
    } else if (track.type === 'audio') {
      // no decoder config: fall back to a generic AAC-LC string
      codec += '.40.2';
    } else {
      codec += '.20.9';
    }
  } else if (codec === 'av01') {
    // AV1DecoderConfigurationRecord
    codec += `.${getAv1Codec(findNamedBox(bytes, 'av1C'))}`;
  } else if (codec === 'vp09') {
    // VPCodecConfigurationRecord
    const vpcC = findNamedBox(bytes, 'vpcC');
    // https://www.webmproject.org/vp9/mp4/
    const profile = vpcC[0];
    const level = vpcC[1];
    const bitDepth = vpcC[2] >> 4;
    const chromaSubsampling = (vpcC[2] & 0x0F) >> 1;
    // NOTE(review): per the VP codec ISO binding, videoFullRangeFlag is
    // bit 0 of this byte (vpcC[2] & 0x01); `(vpcC[2] & 0x0F) >> 3` reads
    // the top bit of chromaSubsampling instead — confirm against spec.
    const videoFullRangeFlag = (vpcC[2] & 0x0F) >> 3;
    const colourPrimaries = vpcC[3];
    const transferCharacteristics = vpcC[4];
    const matrixCoefficients = vpcC[5];
    codec += `.${padStart(profile, 2, '0')}`;
    codec += `.${padStart(level, 2, '0')}`;
    codec += `.${padStart(bitDepth, 2, '0')}`;
    codec += `.${padStart(chromaSubsampling, 2, '0')}`;
    codec += `.${padStart(colourPrimaries, 2, '0')}`;
    codec += `.${padStart(transferCharacteristics, 2, '0')}`;
    codec += `.${padStart(matrixCoefficients, 2, '0')}`;
    codec += `.${padStart(videoFullRangeFlag, 2, '0')}`;
  } else if (codec === 'theo') {
    codec = 'theora';
  } else if (codec === 'spex') {
    codec = 'speex';
  } else if (codec === '.mp3') {
    codec = 'mp4a.40.34';
  } else if (codec === 'msVo') {
    codec = 'vorbis';
  } else if (codec === 'Opus') {
    codec = 'opus';
    const dOps = findNamedBox(bytes, 'dOps');
    track.info.opus = parseOpusHead(dOps);
    // TODO: should this go into the webm code??
    // Firefox requires a codecDelay for opus playback
    // see https://bugzilla.mozilla.org/show_bug.cgi?id=1276238
    track.info.codecDelay = 6500000;
  } else {
    // unknown fourcc: use the lowercased fourcc itself
    codec = codec.toLowerCase();
  }
  /* eslint-enable */
  // flac, ac-3, ec-3, opus
  track.codec = codec;
};
/**
 * Parse every trak box out of an iso-bmff buffer into track objects
 * containing type, number, timescale, codec info and (optionally) a
 * per-sample frame table.
 *
 * @param {TypedArray} bytes
 *        iso-bmff bytes; must contain a complete moov box.
 * @param {boolean} [frameTable=true]
 *        Whether to also build the frame table via buildFrameTable
 *        (can be expensive for long files).
 * @return {Object[]}
 *         One track object per trak found.
 */
export const parseTracks = function(bytes, frameTable = true) {
  bytes = toUint8(bytes);
  const traks = findBox(bytes, ['moov', 'trak'], true);
  const tracks = [];
  traks.forEach(function(trak) {
    const track = {bytes: trak};
    const mdia = findBox(trak, ['mdia'])[0];
    const hdlr = findBox(mdia, ['hdlr'])[0];
    // handler type fourcc: 'soun' = audio, 'vide' = video
    const trakType = bytesToString(hdlr.subarray(8, 12));
    if (trakType === 'soun') {
      track.type = 'audio';
    } else if (trakType === 'vide') {
      track.type = 'video';
    } else {
      track.type = trakType;
    }
    const tkhd = findBox(trak, ['tkhd'])[0];
    if (tkhd) {
      const view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      const tkhdVersion = view.getUint8(0);
      // version 1 tkhd boxes carry 64-bit times, shifting track_ID deeper
      track.number = (tkhdVersion === 0) ? view.getUint32(12) : view.getUint32(20);
    }
    const mdhd = findBox(mdia, ['mdhd'])[0];
    if (mdhd) {
      // mdhd is a FullBox, meaning it will have its own version as the first byte
      const version = mdhd[0];
      const index = version === 0 ? 12 : 20;
      // 32-bit big-endian timescale (ticks per second)
      track.timescale = (
        mdhd[index] << 24 |
        mdhd[index + 1] << 16 |
        mdhd[index + 2] << 8 |
        mdhd[index + 3]
      ) >>> 0;
    }
    const stbl = findBox(mdia, ['minf', 'stbl'])[0];
    const stsd = findBox(stbl, ['stsd'])[0];
    // entry count lives at bytes 4-8 of the stsd full box
    let descriptionCount = bytesToNumber(stsd.subarray(4, 8));
    let offset = 8;
    // add codec and codec info
    while (descriptionCount--) {
      const len = bytesToNumber(stsd.subarray(offset, offset + 4));
      const sampleDescriptor = stsd.subarray(offset + 4, offset + 4 + len);
      addSampleDescription(track, sampleDescriptor);
      offset += 4 + len;
    }
    if (frameTable) {
      track.frameTable = buildFrameTable(stbl, track.timescale);
    }
    // codec has no sub parameters
    tracks.push(track);
  });
  return tracks;
};
/**
 * Extract top-level media info (timescale and duration) from the mvhd
 * box of an iso-bmff buffer.
 *
 * @param {TypedArray} bytes
 *        iso-bmff bytes containing a complete moov/mvhd box.
 * @return {Object|undefined}
 *         {timestampScale, duration, bytes} or undefined when no mvhd
 *         box is present.
 */
export const parseMediaInfo = function(bytes) {
  const mvhd = findBox(bytes, ['moov', 'mvhd'], true)[0];

  if (!mvhd || !mvhd.length) {
    return;
  }

  const info = {};

  // version 1 mvhd boxes use 64-bit creation/modification times, which
  // pushes timescale/duration further into the box
  if (mvhd[0] === 1) {
    info.timestampScale = bytesToNumber(mvhd.subarray(20, 24));
    info.duration = bytesToNumber(mvhd.subarray(24, 32));
  } else {
    info.timestampScale = bytesToNumber(mvhd.subarray(12, 16));
    info.duration = bytesToNumber(mvhd.subarray(16, 20));
  }

  info.bytes = mvhd;

  return info;
};

View file

@ -1,109 +0,0 @@
import {bytesMatch, toUint8} from './byte-helpers.js';
export const NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
export const NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
export const EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);

/**
 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
 * Sequence Payload"
 *
 * @param {Uint8Array} bytes the bytes of a RBSP from a NAL unit
 * @return {Uint8Array} the RBSP without any Emulation Prevention Bytes
 */
export const discardEmulationPreventionBytes = function(bytes) {
  const removals = [];

  // locate every 0x00 0x00 0x03 sequence; the trailing 0x03 is the
  // emulation prevention byte to drop
  for (let idx = 1; idx < bytes.length - 2;) {
    if (bytesMatch(bytes.subarray(idx, idx + 3), EMULATION_PREVENTION)) {
      removals.push(idx + 2);
      idx += 2;
    } else {
      idx += 1;
    }
  }

  // nothing to strip: hand back the original array untouched
  if (removals.length === 0) {
    return bytes;
  }

  // copy everything except the flagged positions
  const stripped = new Uint8Array(bytes.length - removals.length);
  let src = 0;

  for (let dst = 0; dst < stripped.length; dst++, src++) {
    if (src === removals[0]) {
      // skip the emulation prevention byte
      src++;
      removals.shift();
    }
    stripped[dst] = bytes[src];
  }

  return stripped;
};
/**
 * Find the payload of the first NAL unit whose type is in `types`,
 * scanning for Annex B start codes (00 00 00 01 or 00 00 01).
 *
 * @param {TypedArray} bytes
 *        The raw bitstream bytes to scan.
 * @param {string} dataType
 *        'h264' or 'h265'; controls how the nal type is decoded.
 * @param {number|number[]} types
 *        The nal unit type(s) to look for.
 * @param {number} [nalLimit=Infinity]
 *        Maximum number of nal units to examine before giving up.
 * @return {Uint8Array}
 *         The matched nal payload with emulation prevention bytes
 *         removed, or an empty subarray when nothing matched. Note a
 *         match is only returned once the NEXT start code is seen, so a
 *         matching nal at the very end of `bytes` yields an empty result.
 */
export const findNal = function(bytes, dataType, types, nalLimit = Infinity) {
  bytes = toUint8(bytes);
  types = [].concat(types);
  let i = 0;
  let nalStart;
  let nalsFound = 0;
  // keep searching until:
  // we reach the end of bytes
  // we reach the maximum number of nals they want to seach
  // NOTE: that we disregard nalLimit when we have found the start
  // of the nal we want so that we can find the end of the nal we want.
  while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
    let nalOffset;
    if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
      nalOffset = 4;
    } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
      nalOffset = 3;
    }
    // we are unsynced,
    // find the next nal unit
    if (!nalOffset) {
      i++;
      continue;
    }
    nalsFound++;
    if (nalStart) {
      // a new start code terminates the nal we matched earlier
      return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
    }
    let nalType;
    if (dataType === 'h264') {
      // h264: nal_unit_type is the low 5 bits of the first header byte
      nalType = (bytes[i + nalOffset] & 0x1f);
    } else if (dataType === 'h265') {
      // h265: nal_unit_type is bits 6..1 of the first header byte
      nalType = (bytes[i + nalOffset] >> 1) & 0x3f;
    }
    if (types.indexOf(nalType) !== -1) {
      nalStart = i + nalOffset;
    }
    // nal header is 1 length for h264, and 2 for h265
    i += nalOffset + (dataType === 'h264' ? 1 : 2);
  }
  return bytes.subarray(0, 0);
};
// convenience wrappers around findNal for each supported bitstream
export const findH264Nal = function(bytes, type, nalLimit) {
  return findNal(bytes, 'h264', type, nalLimit);
};

export const findH265Nal = function(bytes, type, nalLimit) {
  return findNal(bytes, 'h265', type, nalLimit);
};

View file

@ -1,27 +0,0 @@
import {bytesMatch, toUint8} from './byte-helpers';
// 'OggS', the capture pattern that starts every ogg page
const SYNC_WORD = toUint8([0x4f, 0x67, 0x67, 0x53]);

/**
 * Split an ogg byte stream into pages and return the requested slice.
 *
 * @param {TypedArray} bytes
 *        The ogg bytes to scan.
 * @param {number} start
 *        Index of the first page to return.
 * @param {number} [end=Infinity]
 *        Collect/scan at most this many pages.
 * @return {Uint8Array[]}
 *         The pages in [start, end).
 */
export const getPages = function(bytes, start, end = Infinity) {
  bytes = toUint8(bytes);
  const pages = [];
  let offset = 0;

  while (offset < bytes.length && pages.length < end) {
    // we are unsynced: scan forward to the next capture pattern
    if (!bytesMatch(bytes, SYNC_WORD, {offset})) {
      offset++;
      continue;
    }

    // byte 27 of the page header is the segment-table length; the fixed
    // header is 28 bytes
    const segmentLength = bytes[offset + 27];
    const page = bytes.subarray(offset, offset + 28 + segmentLength);

    pages.push(page);
    offset += page.length;
  }

  return pages.slice(start, end);
};

View file

@ -1,61 +0,0 @@
export const OPUS_HEAD = new Uint8Array([
  // O, p, u, s
  0x4f, 0x70, 0x75, 0x73,
  // H, e, a, d
  0x48, 0x65, 0x61, 0x64
]);

// https://wiki.xiph.org/OggOpus
// https://vfrmaniac.fushizen.eu/contents/opus_in_isobmff.html
// https://opus-codec.org/docs/opusfile_api-0.7/structOpusHead.html
/**
 * Parse an OpusHead identification header into a config object.
 *
 * @param {Uint8Array} bytes
 *        The OpusHead payload (after the 8-byte magic signature).
 * @return {Object}
 *         Parsed fields; streamCount/twoChannelStreamCount/channelMapping
 *         are only present when channelMappingFamily > 0.
 */
export const parseOpusHead = function(bytes) {
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const version = view.getUint8(0);
  // version 0, from mp4, does not use littleEndian.
  const littleEndian = version !== 0;

  const config = {
    version,
    channels: view.getUint8(1),
    preSkip: view.getUint16(2, littleEndian),
    sampleRate: view.getUint32(4, littleEndian),
    outputGain: view.getUint16(8, littleEndian),
    channelMappingFamily: view.getUint8(10)
  };

  // mapping families other than 0 append a channel mapping table
  if (config.channelMappingFamily > 0 && bytes.length > 10) {
    config.streamCount = view.getUint8(11);
    config.twoChannelStreamCount = view.getUint8(12);
    config.channelMapping = [];

    for (let ch = 0; ch < config.channels; ch++) {
      config.channelMapping.push(view.getUint8(13 + ch));
    }
  }

  return config;
};
/**
 * Serialize an OpusHead config object (as produced by parseOpusHead)
 * back into bytes.
 *
 * @param {Object} config
 *        Fields: version, channels, preSkip, sampleRate, outputGain,
 *        channelMappingFamily and, when the mapping family is greater
 *        than zero, streamCount and channelMapping.
 * @return {Uint8Array}
 *         The serialized OpusHead bytes.
 */
export const setOpusHead = function(config) {
  const size = config.channelMappingFamily <= 0 ? 11 : (12 + config.channels);
  const view = new DataView(new ArrayBuffer(size));
  // version 0, from mp4, does not use littleEndian.
  const littleEndian = config.version !== 0;

  view.setUint8(0, config.version);
  view.setUint8(1, config.channels);
  view.setUint16(2, config.preSkip, littleEndian);
  view.setUint32(4, config.sampleRate, littleEndian);
  view.setUint16(8, config.outputGain, littleEndian);
  view.setUint8(10, config.channelMappingFamily);

  if (config.channelMappingFamily > 0) {
    view.setUint8(11, config.streamCount);
    // bug fix: Array.prototype has no `foreach`; the lowercase call threw
    // a TypeError whenever a channel mapping was present.
    config.channelMapping.forEach(function(cm, i) {
      view.setUint8(12 + i, cm);
    });
  }

  return new Uint8Array(view.buffer);
};

View file

@ -1,51 +0,0 @@
import URLToolkit from 'url-toolkit';
import window from 'global/window';
const DEFAULT_LOCATION = 'http://example.com';

/**
 * Resolve a relative url against a base url, mirroring browser behavior
 * while still working in node (no window.location) and in environments
 * without the URL constructor (falls back to url-toolkit).
 *
 * @param {string} baseUrl
 *        The url to resolve against; a data: base is replaced with the
 *        current window.location when available.
 * @param {string} relativeUrl
 *        The url to resolve; returned unchanged if it already has a scheme.
 * @return {string}
 *         The resolved url.
 */
const resolveUrl = (baseUrl, relativeUrl) => {
  // return early if we don't need to resolve
  if ((/^[a-z]+:/i).test(relativeUrl)) {
    return relativeUrl;
  }
  // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
  if ((/^data:/).test(baseUrl)) {
    baseUrl = window.location && window.location.href || '';
  }
  // IE11 supports URL but not the URL constructor
  // feature detect the behavior we want
  const nativeURL = typeof window.URL === 'function';
  const protocolLess = (/^\/\//.test(baseUrl));
  // remove location if window.location isn't available (i.e. we're in node)
  // and if baseUrl isn't an absolute url
  const removeLocation = !window.location && !(/\/\//i).test(baseUrl);
  // if the base URL is relative then combine with the current location
  if (nativeURL) {
    baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
  } else if (!(/\/\//i).test(baseUrl)) {
    baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
  }
  if (nativeURL) {
    const newUrl = new URL(relativeUrl, baseUrl);
    // if we're a protocol-less url, remove the protocol
    // and if we're location-less, remove the location
    // otherwise, return the url unmodified
    if (removeLocation) {
      // strip the placeholder origin that was only added to satisfy URL()
      return newUrl.href.slice(DEFAULT_LOCATION.length);
    } else if (protocolLess) {
      return newUrl.href.slice(newUrl.protocol.length);
    }
    return newUrl.href;
  }
  // no native URL support: let url-toolkit do the resolution
  return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};
export default resolveUrl;

View file

@ -1,75 +0,0 @@
import {toUint8, stringToBytes, bytesMatch} from './byte-helpers.js';
// container chunk types whose payload starts with a nested type fourcc
const CONSTANTS = {
  LIST: toUint8([0x4c, 0x49, 0x53, 0x54]),
  RIFF: toUint8([0x52, 0x49, 0x46, 0x46]),
  WAVE: toUint8([0x57, 0x41, 0x56, 0x45])
};

// coerce a single path component to bytes; strings become character
// codes, anything else passes through untouched
const normalizePath = function(path) {
  if (typeof path === 'string') {
    return stringToBytes(path);
  }

  return path;
};

// always work with an array of normalized path components
const normalizePaths = function(paths) {
  if (Array.isArray(paths)) {
    return paths.map((p) => normalizePath(p));
  }

  return [normalizePath(paths)];
};

/**
 * Find every RIFF chunk matching a path of fourcc names.
 *
 * @param {TypedArray} bytes
 *        The riff bytes to search.
 * @param {Uint8Array[]|string[]|string|Uint8Array} paths
 *        A path (or list of path components) of chunk fourccs.
 * @return {Uint8Array[]}
 *         The payloads of every chunk matching the full path.
 */
export const findFourCC = function(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);

  let matches = [];

  if (!paths.length) {
    // an empty path matches nothing
    return matches;
  }

  let offset = 0;

  while (offset < bytes.length) {
    let chunkType = bytes.subarray(offset, offset + 4);
    // riff chunk sizes are little endian
    let chunkSize = ((bytes[offset + 7] << 24 | bytes[offset + 6] << 16 | bytes[offset + 5] << 8 | bytes[offset + 4]) >>> 0);

    // skip LIST/RIFF/WAVE wrappers and use the nested type instead
    if (bytesMatch(chunkType, CONSTANTS.LIST) || bytesMatch(chunkType, CONSTANTS.RIFF) || bytesMatch(chunkType, CONSTANTS.WAVE)) {
      chunkType = bytes.subarray(offset + 8, offset + 12);
      offset += 4;
      chunkSize -= 4;
    }

    const payload = bytes.subarray(offset + 8, offset + 8 + chunkSize);

    if (bytesMatch(chunkType, paths[0])) {
      if (paths.length === 1) {
        // end of the path: this payload is a result
        matches.push(payload);
      } else {
        // descend into this chunk for the remainder of the path
        const nested = findFourCC(payload, paths.slice(1));

        if (nested.length) {
          matches = matches.concat(nested);
        }
      }
    }

    offset += 8 + payload.length;
  }

  // we've finished searching all of bytes
  return matches;
};

View file

@ -1,108 +0,0 @@
/**
* @file stream.js
*/
/**
 * A lightweight readable stream implementation that handles event dispatching.
 *
 * @class Stream
 */
export default class Stream {
  constructor() {
    // map of event type -> array of listener functions
    this.listeners = {};
  }

  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }
    this.listeners[type].push(listener);
  }

  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} true if the listener was registered and removed
   */
  off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }

    const index = this.listeners[type].indexOf(listener);

    if (index === -1) {
      // bug fix: previously we fell through to `splice(-1, 1)` here,
      // which removed the LAST listener for this type whenever the
      // requested listener was not actually registered.
      return false;
    }

    // Copy before removing so that a `trigger` currently iterating the
    // old array keeps its reference and ordering intact.
    this.listeners[type] = this.listeners[type].slice(0);
    this.listeners[type].splice(index, 1);

    return true;
  }

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  trigger(type) {
    const callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    }

    // Slicing the arguments on every invocation of this method
    // can add a significant amount of overhead. Avoid the
    // intermediate object creation for the common case of a
    // single callback argument
    if (arguments.length === 2) {
      const length = callbacks.length;

      for (let i = 0; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      const args = Array.prototype.slice.call(arguments, 1);
      const length = callbacks.length;

      for (let i = 0; i < length; ++i) {
        callbacks[i].apply(this, args);
      }
    }
  }

  /**
   * Destroys the stream and cleans up.
   */
  dispose() {
    this.listeners = {};
  }

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  pipe(destination) {
    this.on('data', function(data) {
      destination.push(data);
    });
  }
}

View file

@ -1,93 +0,0 @@
{
"name": "aes-decrypter",
"version": "4.0.1",
"description": "decrypt aes-128 content using a key",
"main": "dist/aes-decrypter.cjs.js",
"module": "dist/aes-decrypter.es.js",
"contributors": [
"gkatsev",
"imbcmdth",
"dmlap",
"bcasey"
],
"scripts": {
"build-test": "cross-env-shell TEST_BUNDLE_ONLY=1 'npm run build'",
"build-prod": "cross-env-shell NO_TEST_BUNDLE=1 'npm run build'",
"build": "npm-run-all -s clean -p build:*",
"build:js": "rollup -c scripts/rollup.config.js",
"clean": "shx rm -rf ./dist ./test/dist && shx mkdir -p ./dist ./test/dist",
"lint": "vjsstandard",
"prepublishOnly": "npm-run-all build-prod && vjsverify --verbose --skip-es-check",
"start": "npm-run-all -p server watch",
"server": "karma start scripts/karma.conf.js --singleRun=false --auto-watch",
"test": "npm-run-all lint build-test && karma start scripts/karma.conf.js",
"posttest": "shx cat test/dist/coverage/text.txt",
"preversion": "npm test",
"version": "is-prerelease || npm run update-changelog && git add CHANGELOG.md",
"update-changelog": "conventional-changelog -p videojs -i CHANGELOG.md -s",
"watch": "npm-run-all -p watch:*",
"watch:js": "npm run build:js -- -w"
},
"author": "Brightcove, Inc.",
"license": "Apache-2.0",
"vjsstandard": {
"ignore": [
"dist",
"docs",
"test/dist"
]
},
"files": [
"CONTRIBUTING.md",
"dist/",
"docs/",
"index.html",
"scripts/",
"src/",
"test/"
],
"dependencies": {
"@babel/runtime": "^7.12.5",
"@videojs/vhs-utils": "^3.0.5",
"global": "^4.4.0",
"pkcs7": "^1.0.4"
},
"devDependencies": {
"@rollup/plugin-replace": "^2.3.4",
"@videojs/generator-helpers": "~2.0.1",
"karma": "^5.2.3",
"rollup": "^2.38.0",
"sinon": "^9.2.3",
"videojs-generate-karma-config": "^8.0.1",
"videojs-generate-rollup-config": "~7.0.0",
"videojs-generator-verify": "~3.0.1",
"videojs-standard": "^8.0.4"
},
"generator-videojs-plugin": {
"version": "7.7.3"
},
"directories": {
"test": "test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/videojs/aes-decrypter.git"
},
"bugs": {
"url": "https://github.com/videojs/aes-decrypter/issues"
},
"homepage": "https://github.com/videojs/aes-decrypter#readme",
"keywords": [
"videojs",
"videojs-plugin"
],
"husky": {
"hooks": {
"pre-commit": "lint-staged"
}
},
"lint-staged": {
"*.js": "vjsstandard --fix",
"README.md": "doctoc --notitle"
}
}

View file

@ -1,36 +0,0 @@
const generate = require('videojs-generate-rollup-config');
const replace = require('@rollup/plugin-replace');
// see https://github.com/videojs/videojs-generate-rollup-config
// for options
const options = {
  input: 'src/index.js',
  // keep runtime dependencies external to the module bundles
  externals(defaults) {
    defaults.module.push('pkcs7');
    defaults.module.push('@videojs/vhs-utils');
    return defaults;
  },
  primedPlugins(defaults) {
    // when using "require" rather than import
    // require cjs module
    defaults.replace = replace({
      // single quote replace
      "require('@videojs/vhs-utils/es": "require('@videojs/vhs-utils/cjs",
      // double quote replace
      'require("@videojs/vhs-utils/es': 'require("@videojs/vhs-utils/cjs'
    });
    return defaults;
  },
  plugins(defaults) {
    // run the replace plugin before the generated defaults
    defaults.module.unshift('replace');
    return defaults;
  }
};
const config = generate(options);
// Add additional builds/customization here!
// export the builds to rollup
export default Object.values(config.builds);

View file

@ -1,258 +0,0 @@
/**
* @file aes.js
*
* This file contains an adaptation of the AES decryption algorithm
* from the Standford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
/**
 * Expand the S-box tables.
 *
 * Builds the combined S-box/MixColumns lookup tables used by AES and
 * returns [encTables, decTables]. Each side is five 256-entry arrays:
 * indexes 0-3 are the byte-rotated round tables and index 4 is the
 * (inverse) S-box itself.
 *
 * @private
 */
const precompute = function() {
  const tables = [[[], [], [], [], []], [[], [], [], [], []]];
  const encTable = tables[0];
  const decTable = tables[1];
  const sbox = encTable[4];
  const sboxInv = decTable[4];
  let i;
  let x;
  let xInv;
  const d = [];
  const th = [];
  let x2;
  let x4;
  let x8;
  let s;
  let tEnc;
  let tDec;
  // Compute double and third tables
  // d[i] is i doubled in GF(2^8) (xtime, reduced by 283 = 0x11B);
  // th inverts the "multiply by 3" map (d[i] ^ i)
  for (i = 0; i < 256; i++) {
    th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
  }
  // walk x through the field via x ^= d[x] (and xInv via th) until the
  // whole S-box is populated; the `|| 1` guards restart the walk at 1
  for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
    // Compute sbox: affine transform of the inverse element, xor 99 (0x63)
    s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
    s = s >> 8 ^ s & 255 ^ 99;
    sbox[x] = s;
    sboxInv[s] = x;
    // Compute MixColumns
    x8 = d[x4 = d[x2 = d[x]]];
    tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
    tEnc = d[s] * 0x101 ^ s * 0x1010100;
    // each round table is the previous one rotated by one byte
    for (i = 0; i < 4; i++) {
      encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
      decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
    }
  }
  // Compactify. Considerable speedup on Firefox.
  for (i = 0; i < 5; i++) {
    encTable[i] = encTable[i].slice(0);
    decTable[i] = decTable[i].slice(0);
  }
  return tables;
};
let aesTables = null;
/**
* Schedule out an AES key for both encryption and decryption. This
* is a low-level class. Use a cipher mode to do bulk encryption.
*
* @class AES
* @param key {Array} The key as an array of 4, 6 or 8 words.
*/
export default class AES {
constructor(key) {
/**
* The expanded S-box and inverse S-box tables. These will be computed
* on the client so that we don't have to send them down the wire.
*
* There are two tables, _tables[0] is for encryption and
* _tables[1] is for decryption.
*
* The first 4 sub-tables are the expanded S-box with MixColumns. The
* last (_tables[01][4]) is the S-box itself.
*
* @private
*/
// if we have yet to precompute the S-box tables
// do so now
if (!aesTables) {
aesTables = precompute();
}
// then make a copy of that object for use
this._tables = [[aesTables[0][0].slice(),
aesTables[0][1].slice(),
aesTables[0][2].slice(),
aesTables[0][3].slice(),
aesTables[0][4].slice()],
[aesTables[1][0].slice(),
aesTables[1][1].slice(),
aesTables[1][2].slice(),
aesTables[1][3].slice(),
aesTables[1][4].slice()]];
let i;
let j;
let tmp;
const sbox = this._tables[0][4];
const decTable = this._tables[1];
const keyLen = key.length;
let rcon = 1;
if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
throw new Error('Invalid aes key size');
}
const encKey = key.slice(0);
const decKey = [];
this._key = [encKey, decKey];
// schedule encryption keys
for (i = keyLen; i < 4 * keyLen + 28; i++) {
tmp = encKey[i - 1];
// apply sbox
if (i % keyLen === 0 || (keyLen === 8 && i % keyLen === 4)) {
tmp = sbox[tmp >>> 24] << 24 ^
sbox[tmp >> 16 & 255] << 16 ^
sbox[tmp >> 8 & 255] << 8 ^
sbox[tmp & 255];
// shift rows and add rcon
if (i % keyLen === 0) {
tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
rcon = rcon << 1 ^ (rcon >> 7) * 283;
}
}
encKey[i] = encKey[i - keyLen] ^ tmp;
}
// schedule decryption keys
for (j = 0; i; j++, i--) {
tmp = encKey[j & 3 ? i : i - 4];
if (i <= 4 || j < 4) {
decKey[j] = tmp;
} else {
decKey[j] = decTable[0][sbox[tmp >>> 24 ]] ^
decTable[1][sbox[tmp >> 16 & 255]] ^
decTable[2][sbox[tmp >> 8 & 255]] ^
decTable[3][sbox[tmp & 255]];
}
}
}
/**
* Decrypt 16 bytes, specified as four 32-bit words.
*
* @param {number} encrypted0 the first word to decrypt
* @param {number} encrypted1 the second word to decrypt
* @param {number} encrypted2 the third word to decrypt
* @param {number} encrypted3 the fourth word to decrypt
* @param {Int32Array} out the array to write the decrypted words
* into
* @param {number} offset the offset into the output array to start
* writing results
* @return {Array} The plaintext.
*/
decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
const key = this._key[1];
// state variables a,b,c,d are loaded with pre-whitened data
let a = encrypted0 ^ key[0];
let b = encrypted3 ^ key[1];
let c = encrypted2 ^ key[2];
let d = encrypted1 ^ key[3];
let a2;
let b2;
let c2;
// key.length === 2 ?
const nInnerRounds = key.length / 4 - 2;
let i;
let kIndex = 4;
const table = this._tables[1];
// load up the tables
const table0 = table[0];
const table1 = table[1];
const table2 = table[2];
const table3 = table[3];
const sbox = table[4];
// Inner rounds. Cribbed from OpenSSL.
for (i = 0; i < nInnerRounds; i++) {
a2 = table0[a >>> 24] ^
table1[b >> 16 & 255] ^
table2[c >> 8 & 255] ^
table3[d & 255] ^
key[kIndex];
b2 = table0[b >>> 24] ^
table1[c >> 16 & 255] ^
table2[d >> 8 & 255] ^
table3[a & 255] ^
key[kIndex + 1];
c2 = table0[c >>> 24] ^
table1[d >> 16 & 255] ^
table2[a >> 8 & 255] ^
table3[b & 255] ^
key[kIndex + 2];
d = table0[d >>> 24] ^
table1[a >> 16 & 255] ^
table2[b >> 8 & 255] ^
table3[c & 255] ^
key[kIndex + 3];
kIndex += 4;
a = a2; b = b2; c = c2;
}
// Last round.
for (i = 0; i < 4; i++) {
out[(3 & -i) + offset] =
sbox[a >>> 24] << 24 ^
sbox[b >> 16 & 255] << 16 ^
sbox[c >> 8 & 255] << 8 ^
sbox[d & 255] ^
key[kIndex++];
a2 = a; a = b; b = c; c = d; d = a2;
}
}
}

View file

@ -1,53 +0,0 @@
/**
* @file async-stream.js
*/
import Stream from '@videojs/vhs-utils/es/stream.js';
/**
 * A wrapper around the Stream class to use setTimeout
 * and run stream "jobs" asynchronously, one job per timer tick.
 *
 * @class AsyncStream
 * @extends Stream
 */
export default class AsyncStream extends Stream {
  constructor() {
    // Fix: the previous code called `super(Stream)`, passing the Stream
    // class itself as an argument; the parent constructor takes no
    // arguments, so the value was ignored — call super() cleanly.
    super();
    // FIFO queue of pending jobs (functions)
    this.jobs = [];
    // delay in milliseconds between consecutive jobs
    this.delay = 1;
    // handle of the pending setTimeout, or null when the queue is idle
    this.timeout_ = null;
  }

  /**
   * Run the next queued job, then schedule the one after it (if any).
   *
   * @private
   */
  processJob_() {
    this.jobs.shift()();
    if (this.jobs.length) {
      this.timeout_ = setTimeout(
        this.processJob_.bind(this),
        this.delay
      );
    } else {
      this.timeout_ = null;
    }
  }

  /**
   * push a job into the stream
   *
   * @param {Function} job the job to push into the stream
   */
  push(job) {
    this.jobs.push(job);
    // kick off the processing loop if it is not already running
    if (!this.timeout_) {
      this.timeout_ = setTimeout(
        this.processJob_.bind(this),
        this.delay
      );
    }
  }
}

View file

@ -1,179 +0,0 @@
/**
* @file decrypter.js
*
* An asynchronous implementation of AES-128 CBC decryption with
* PKCS#7 padding.
*/
import AES from './aes';
import AsyncStream from './async-stream';
import {unpad} from 'pkcs7';
/**
 * Convert a 32-bit word from network order (big-endian) into the
 * little-endian representation used internally by reversing its bytes.
 *
 * @param {number} word - the 32-bit integer to byte-swap
 * @return {number} the byte-swapped word, as a signed 32-bit integer
 */
const ntoh = function(word) {
  // pull out the four bytes individually...
  const byte0 = word >>> 24;
  const byte1 = (word >>> 16) & 0xff;
  const byte2 = (word >>> 8) & 0xff;
  const byte3 = word & 0xff;

  // ...and reassemble them in the opposite order
  return (byte3 << 24) | (byte2 << 16) | (byte1 << 8) | byte0;
};
/**
 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first round of CBC.
 * @return {Uint8Array} the decrypted bytes
 *
 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
 * @see https://tools.ietf.org/html/rfc2315
 */
const decrypt = function(encrypted, key, initVector) {
  // word-level access to the encrypted bytes
  // (byteLength >> 2 truncates to whole 32-bit words)
  const encrypted32 = new Int32Array(
    encrypted.buffer,
    encrypted.byteOffset,
    encrypted.byteLength >> 2
  );
  const decipher = new AES(Array.prototype.slice.call(key));
  // byte and word-level access for the decrypted output
  const decrypted = new Uint8Array(encrypted.byteLength);
  const decrypted32 = new Int32Array(decrypted.buffer);

  // temporary variables for working with the IV, encrypted, and
  // decrypted data
  let init0;
  let init1;
  let init2;
  let init3;
  let encrypted0;
  let encrypted1;
  let encrypted2;
  let encrypted3;

  // iteration variable
  let wordIx;

  // pull out the words of the IV to ensure we don't modify the
  // passed-in reference and easier access
  init0 = initVector[0];
  init1 = initVector[1];
  init2 = initVector[2];
  init3 = initVector[3];

  // decrypt four word sequences, applying cipher-block chaining (CBC)
  // to each decrypted block
  // (each iteration handles one 16-byte AES block)
  for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
    // convert big-endian (network order) words into little-endian
    // (javascript order)
    encrypted0 = ntoh(encrypted32[wordIx]);
    encrypted1 = ntoh(encrypted32[wordIx + 1]);
    encrypted2 = ntoh(encrypted32[wordIx + 2]);
    encrypted3 = ntoh(encrypted32[wordIx + 3]);

    // decrypt the block
    decipher.decrypt(
      encrypted0,
      encrypted1,
      encrypted2,
      encrypted3,
      decrypted32,
      wordIx
    );

    // XOR with the IV, and restore network byte-order to obtain the
    // plaintext
    decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
    decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
    decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
    decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);

    // setup the IV for the next round: CBC chains each block off the
    // previous block's ciphertext
    init0 = encrypted0;
    init1 = encrypted1;
    init2 = encrypted2;
    init3 = encrypted3;
  }

  return decrypted;
};
/**
 * The `Decrypter` class that manages decryption of AES
 * data through `AsyncStream` objects and the `decrypt`
 * function.
 *
 * @param {Uint8Array} encrypted the encrypted bytes
 * @param {Uint32Array} key the bytes of the decryption key
 * @param {Uint32Array} initVector the initialization vector (IV) to
 * use for the first round of CBC
 * @param {Function} done the function to run when done
 * @class Decrypter
 */
class Decrypter {
  constructor(encrypted, key, initVector, done) {
    const step = Decrypter.STEP;
    const encrypted32 = new Int32Array(encrypted.buffer);
    const decrypted = new Uint8Array(encrypted.byteLength);

    this.asyncStream_ = new AsyncStream();

    // split up the decryption job and run the individual chunks
    // asynchronously; the first chunk is chained off the caller's IV
    this.asyncStream_.push(this.decryptChunk_(
      encrypted32.subarray(0, step),
      key,
      initVector,
      decrypted
    ));

    // every later chunk uses the final ciphertext block of the previous
    // chunk as its IV, preserving cipher-block chaining across chunks
    for (let offset = step; offset < encrypted32.length; offset += step) {
      const chunkIv = new Uint32Array([
        ntoh(encrypted32[offset - 4]),
        ntoh(encrypted32[offset - 3]),
        ntoh(encrypted32[offset - 2]),
        ntoh(encrypted32[offset - 1])
      ]);

      this.asyncStream_.push(this.decryptChunk_(
        encrypted32.subarray(offset, offset + step),
        key,
        chunkIv,
        decrypted
      ));
    }

    // invoke the done() callback when everything is finished
    this.asyncStream_.push(() => {
      // remove pkcs#7 padding from the decrypted bytes
      done(null, unpad(decrypted));
    });
  }

  /**
   * a getter for step the maximum number of bytes to process at one time
   *
   * @return {number} the value of step 32000
   */
  static get STEP() {
    // 4 * 8000;
    return 32000;
  }

  /**
   * Build a job that decrypts a single chunk into the shared output buffer.
   *
   * @private
   */
  decryptChunk_(encrypted, key, initVector, decrypted) {
    return () => {
      const bytes = decrypt(encrypted, key, initVector);

      decrypted.set(bytes, encrypted.byteOffset);
    };
  }
}
export {
Decrypter,
decrypt
};

View file

@ -1,18 +0,0 @@
/**
* @file index.js
*
* Index module to easily import the primary components of AES-128
* decryption. Like this:
*
* ```js
* import {Decrypter, decrypt, AsyncStream} from 'aes-decrypter';
* ```
*/
import {decrypt, Decrypter} from './decrypter';
import AsyncStream from './async-stream';
export {
decrypt,
Decrypter,
AsyncStream
};

View file

@ -1,6 +1,6 @@
{
"name": "@videojs/http-streaming",
"version": "3.13.3",
"version": "3.14.2",
"description": "Play back HLS and DASH with Video.js, even where it's not natively supported",
"main": "dist/videojs-http-streaming.cjs.js",
"module": "dist/videojs-http-streaming.es.js",
@ -58,10 +58,10 @@
],
"dependencies": {
"@babel/runtime": "^7.12.5",
"@videojs/vhs-utils": "4.0.0",
"aes-decrypter": "4.0.1",
"@videojs/vhs-utils": "^4.1.1",
"aes-decrypter": "^4.0.2",
"global": "^4.4.0",
"m3u8-parser": "^7.1.0",
"m3u8-parser": "^7.2.0",
"mpd-parser": "^1.3.0",
"mux.js": "7.0.3",
"video.js": "^7 || ^8"

View file

@ -471,6 +471,7 @@
'network-info',
'dts-offset',
'override-native',
'use-mms',
'preload',
'mirror-source',
'forced-subtitles'
@ -521,6 +522,7 @@
'llhls',
'buffer-water',
'override-native',
'use-mms',
'liveui',
'pixel-diff-selector',
'network-info',
@ -587,6 +589,7 @@
var videoEl = document.createElement('video-js');
videoEl.setAttribute('controls', '');
videoEl.setAttribute('playsInline', '');
videoEl.setAttribute('preload', stateEls.preload.options[stateEls.preload.selectedIndex].value || 'auto');
videoEl.className = 'vjs-default-skin';
fixture.appendChild(videoEl);
@ -602,6 +605,7 @@
html5: {
vhs: {
overrideNative: getInputValue(stateEls['override-native']),
experimentalUseMMS: getInputValue(stateEls['use-mms']),
bufferBasedABR: getInputValue(stateEls['buffer-water']),
llhls: getInputValue(stateEls.llhls),
exactManifestTimings: getInputValue(stateEls['exact-manifest-timings']),
@ -612,7 +616,6 @@
}
}
});
setupPlayerStats(player);
setupSegmentMetadata(player);
setupContentSteeringData(player);

View file

@ -145,7 +145,7 @@ export const parseMainXml = ({
*/
const removeOldMediaGroupLabels = (update, newMain) => {
forEachMediaGroup(update, (properties, type, group, label) => {
if (!(label in newMain.mediaGroups[type][group])) {
if (!newMain.mediaGroups[type][group] || !(label in newMain.mediaGroups[type][group])) {
delete update.mediaGroups[type][group][label];
}
});

View file

@ -165,7 +165,8 @@ export class PlaylistController extends videojs.EventTarget {
cacheEncryptionKeys,
bufferBasedABR,
leastPixelDiffSelector,
captionServices
captionServices,
experimentalUseMMS
} = options;
if (!src) {
@ -210,7 +211,14 @@ export class PlaylistController extends videojs.EventTarget {
this.mediaTypes_ = createMediaTypes();
this.mediaSource = new window.MediaSource();
if (experimentalUseMMS && window.ManagedMediaSource) {
// Airplay source not yet implemented. Remote playback must be disabled.
this.tech_.el_.disableRemotePlayback = true;
this.mediaSource = new window.ManagedMediaSource();
videojs.log('Using ManagedMediaSource');
} else if (window.MediaSource) {
this.mediaSource = new window.MediaSource();
}
this.handleDurationChange_ = this.handleDurationChange_.bind(this);
this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);
@ -929,27 +937,25 @@ export class PlaylistController extends videojs.EventTarget {
this.onEndOfStream();
});
// In DASH, there is the possibility of the video segment and the audio segment
// There is the possibility of the video segment and the audio segment
// at a current time to be on different timelines. When this occurs, the player
// forwards playback to a point where these two segment types are back on the same
// timeline. This time will be just after the end of the audio segment that is on
// a previous timeline.
if (this.sourceType_ === 'dash') {
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
this.timelineChangeController_.on('audioTimelineBehind', () => {
const segmentInfo = this.audioSegmentLoader_.pendingSegment_;
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
}
if (!segmentInfo || !segmentInfo.segment || !segmentInfo.segment.syncInfo) {
return;
}
// Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
// Update the current time to just after the faulty audio segment.
// This moves playback to a spot where both audio and video segments
// are on the same timeline.
const newTime = segmentInfo.segment.syncInfo.end + 0.01;
this.tech_.setCurrentTime(newTime);
});
}
this.tech_.setCurrentTime(newTime);
});
this.mainSegmentLoader_.on('earlyabort', (event) => {
// never try to early abort with the new ABR algorithm

View file

@ -475,8 +475,7 @@ const checkAndFixTimelines = (segmentLoader) => {
});
if (waitingForTimelineChange && shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
// Audio being behind should only happen on DASH sources.
if (segmentLoader.sourceType_ === 'dash' && isAudioTimelineBehind(segmentLoader)) {
if (isAudioTimelineBehind(segmentLoader)) {
segmentLoader.timelineChangeController_.trigger('audioTimelineBehind');
return;
}

View file

@ -1369,6 +1369,11 @@ const VhsSourceHandler = {
canHandleSource(srcObj, options = {}) {
const localOptions = merge(videojs.options, options);
// If not opting to experimentalUseMMS, and playback is only supported with MediaSource, cannot handle source
if (!localOptions.vhs.experimentalUseMMS && !browserSupportsCodec('avc1.4d400d,mp4a.40.2', false)) {
return false;
}
return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
},
handleSource(source, tech, options = {}) {
@ -1403,13 +1408,14 @@ const VhsSourceHandler = {
};
/**
* Check to see if the native MediaSource object exists and supports
* an MP4 container with both H.264 video and AAC-LC audio.
* Check to see if either the native MediaSource or ManagedMediaSource
* objects exist and support an MP4 container with both H.264 video
* and AAC-LC audio.
*
* @return {boolean} if native media sources are supported
*/
const supportsNativeMediaSources = () => {
return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
return browserSupportsCodec('avc1.4d400d,mp4a.40.2', true);
};
// register source handlers with the appropriate techs

View file

@ -494,9 +494,7 @@ export default class VTTSegmentLoader extends SegmentLoader {
segmentInfo.cues.forEach((cue) => {
const duration = cue.endTime - cue.startTime;
const startTime = MPEGTS === 0 ?
cue.startTime + diff :
this.handleRollover_(cue.startTime + diff, mappingObj.time);
const startTime = this.handleRollover_(cue.startTime + diff, mappingObj.time);
cue.startTime = Math.max(startTime, 0);
cue.endTime = Math.max(startTime + duration, 0);

View file

@ -1,3 +1,17 @@
<a name="4.1.1"></a>
## [4.1.1](https://github.com/videojs/vhs-utils/compare/v4.1.0...v4.1.1) (2024-07-10)
### Code Refactoring
* remove url-toolkit dependency ([#38](https://github.com/videojs/vhs-utils/issues/38)) ([e124470](https://github.com/videojs/vhs-utils/commit/e124470))
<a name="4.1.0"></a>
# [4.1.0](https://github.com/videojs/vhs-utils/compare/v4.0.0...v4.1.0) (2023-11-28)
### Features
* Check for ManagedMediaSource support ([#37](https://github.com/videojs/vhs-utils/issues/37)) ([6700c5f](https://github.com/videojs/vhs-utils/commit/6700c5f))
<a name="4.0.0"></a>
# [4.0.0](https://github.com/videojs/vhs-utils/compare/v3.0.5...v4.0.0) (2022-08-19)

View file

@ -257,15 +257,30 @@ var getMimeForCodec = function getMimeForCodec(codecString) {
return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
/**
* Tests whether the codec is supported by MediaSource. Optionally also tests ManagedMediaSource.
*
* @param {string} codecString
* Codec to test
* @param {boolean} [withMMS]
* Whether to check if ManagedMediaSource supports it
* @return {boolean}
* Codec is supported
*/
exports.getMimeForCodec = getMimeForCodec;
var browserSupportsCodec = function browserSupportsCodec(codecString) {
var browserSupportsCodec = function browserSupportsCodec(codecString, withMMS) {
if (codecString === void 0) {
codecString = '';
}
return _window.default.MediaSource && _window.default.MediaSource.isTypeSupported && _window.default.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
if (withMMS === void 0) {
withMMS = false;
}
return _window.default.MediaSource && _window.default.MediaSource.isTypeSupported && _window.default.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || withMMS && _window.default.ManagedMediaSource && _window.default.ManagedMediaSource.isTypeSupported && _window.default.ManagedMediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
};
exports.browserSupportsCodec = browserSupportsCodec;

View file

@ -7,11 +7,9 @@ Object.defineProperty(exports, "__esModule", {
});
exports.default = void 0;
var _urlToolkit = _interopRequireDefault(require("url-toolkit"));
var _window = _interopRequireDefault(require("global/window"));
var DEFAULT_LOCATION = 'http://example.com';
var DEFAULT_LOCATION = 'https://example.com';
var resolveUrl = function resolveUrl(baseUrl, relativeUrl) {
// return early if we don't need to resolve
@ -22,37 +20,25 @@ var resolveUrl = function resolveUrl(baseUrl, relativeUrl) {
if (/^data:/.test(baseUrl)) {
baseUrl = _window.default.location && _window.default.location.href || '';
} // IE11 supports URL but not the URL constructor
// feature detect the behavior we want
}
var nativeURL = typeof _window.default.URL === 'function';
var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
// and if baseUrl isn't an absolute url
var removeLocation = !_window.default.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
if (nativeURL) {
baseUrl = new _window.default.URL(baseUrl, _window.default.location || DEFAULT_LOCATION);
} else if (!/\/\//i.test(baseUrl)) {
baseUrl = _urlToolkit.default.buildAbsoluteURL(_window.default.location && _window.default.location.href || '', baseUrl);
baseUrl = new _window.default.URL(baseUrl, _window.default.location || DEFAULT_LOCATION);
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
if (nativeURL) {
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
return newUrl.href;
}
return _urlToolkit.default.buildAbsoluteURL(baseUrl, relativeUrl);
return newUrl.href;
};
var _default = resolveUrl;

Some files were not shown because too many files have changed in this diff Show more