Mirror of https://github.com/DanielnetoDotCom/YouPHPTube (synced 2025-10-05 19:42:38 +02:00)
add inputmask
parent ab84f1e730
commit e07838c6c7
203 changed files with 29712 additions and 5216 deletions
542 node_modules/hls.js/dist/hls.js (generated, vendored)
File diff suppressed because it is too large
20 node_modules/hls.js/dist/hls.js.d.ts (generated, vendored)
@@ -191,8 +191,8 @@ declare class BaseStreamController extends TaskLoop implements NetworkComponentA
protected fragmentTracker: FragmentTracker;
protected transmuxer: TransmuxerInterface | null;
protected _state: string;
protected media?: any;
protected mediaBuffer?: any;
protected media: HTMLMediaElement | null;
protected mediaBuffer: Bufferable | null;
protected config: HlsConfig;
protected bitrateTest: boolean;
protected lastCurrentTime: number;
@@ -223,6 +223,7 @@ declare class BaseStreamController extends TaskLoop implements NetworkComponentA
protected onMediaSeeking(): void;
protected onMediaEnded(): void;
onKeyLoaded(event: Events.KEY_LOADED, data: KeyLoadedData): void;
protected onLevelSwitching(event: Events.LEVEL_SWITCHING, data: LevelSwitchingData): void;
protected onHandlerDestroying(): void;
protected onHandlerDestroyed(): void;
protected loadKey(frag: Fragment, details: LevelDetails): void;
@@ -232,6 +233,7 @@ declare class BaseStreamController extends TaskLoop implements NetworkComponentA
protected _loadInitSegment(frag: Fragment): void;
protected fragContextChanged(frag: Fragment | null): boolean;
protected fragBufferedComplete(frag: Fragment, part: Part | null): void;
protected seekToStartPos(): void;
protected _handleFragmentLoadComplete(fragLoadedEndData: PartsLoadedData): void;
protected _handleFragmentLoadProgress(frag: FragLoadedData): void;
protected _doFragLoad(frag: Fragment, details?: LevelDetails, targetBufferTime?: number | null, progressCallback?: FragmentLoadProgressCallback): Promise<PartsLoadedData | FragLoadedData | null>;
@@ -245,7 +247,7 @@ declare class BaseStreamController extends TaskLoop implements NetworkComponentA
} | null;
protected bufferFragmentData(data: RemuxedTrack, frag: Fragment, part: Part | null, chunkMeta: ChunkMetadata): void;
protected flushBufferGap(frag: Fragment): void;
protected getFwdBufferInfo(bufferable: Bufferable, type: PlaylistLevelType): {
protected getFwdBufferInfo(bufferable: Bufferable | null, type: PlaylistLevelType): {
len: number;
start: number;
end: number;
@@ -269,7 +271,7 @@ declare class BaseStreamController extends TaskLoop implements NetworkComponentA
protected onFragmentOrKeyLoadError(filterType: PlaylistLevelType, data: ErrorData): void;
protected afterBufferFlushed(media: Bufferable, bufferType: SourceBufferName, playlistType: PlaylistLevelType): void;
protected resetLoadingState(): void;
protected resetLiveStartWhenNotLoaded(level: number): boolean;
protected resetStartWhenNotLoaded(level: number): void;
private updateLevelTiming;
protected resetTransmuxer(): void;
set state(nextState: string);
@@ -2506,9 +2508,8 @@ declare class StreamController extends BaseStreamController implements NetworkCo
swapAudioCodec(): void;
/**
* Seeks to the set startPosition if not equal to the mediaElement's current time.
* @private
*/
private seekToStartPos;
protected seekToStartPos(): void;
private _getAudioCodec;
private _loadBitrateTestFrag;
private _handleTransmuxComplete;
@@ -2591,7 +2592,7 @@ declare class SubtitleStreamController extends BaseStreamController implements N
_handleFragmentLoadComplete(fragLoadedData: FragLoadedData): void;
doTick(): void;
protected loadFragment(frag: Fragment, levelDetails: LevelDetails, targetBufferTime: number): void;
get mediaBufferTimeRanges(): TimeRange[];
get mediaBufferTimeRanges(): Bufferable;
}

declare class SubtitleTrackController extends BasePlaylistController {
@@ -2796,11 +2797,6 @@ export declare type TimelineControllerConfig = {
renderTextTracksNatively: boolean;
};

declare interface TimeRange {
start: number;
end: number;
}

export declare interface Track {
id: 'audio' | 'main';
buffer?: SourceBuffer;
2 node_modules/hls.js/dist/hls.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
484 node_modules/hls.js/dist/hls.light.js (generated, vendored)
File diff suppressed because it is too large
2 node_modules/hls.js/dist/hls.light.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/hls.js/dist/hls.light.min.js (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/hls.js/dist/hls.light.min.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/hls.js/dist/hls.min.js (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/hls.js/dist/hls.min.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/hls.js/package.json (generated, vendored)
@@ -1 +1 @@
{"name":"hls.js","license":"Apache-2.0","description":"JavaScript HLS client using MediaSourceExtension","homepage":"https://github.com/video-dev/hls.js","authors":"Guillaume du Pontavice <g.du.pontavice@gmail.com>","repository":{"type":"git","url":"https://github.com/video-dev/hls.js"},"bugs":{"url":"https://github.com/video-dev/hls.js/issues"},"main":"./dist/hls.js","types":"./dist/hls.js.d.ts","files":["dist/**/*","src/**/*"],"publishConfig":{"access":"public"},"scripts":{"build":"webpack --progress && npm run build:types","build:ci":"webpack && tsc --build tsconfig-lib.json && api-extractor run","build:debug":"webpack --progress --env debug --env demo","build:watch":"webpack --progress --env debug --env demo --watch","build:types":"tsc --build tsconfig-lib.json && api-extractor run --local","dev":"webpack serve --progress --env debug --env demo --port 8000 --static .","docs":"esdoc","lint":"eslint src/ tests/ --ext .js --ext .ts","lint:fix":"npm run lint -- --fix","lint:quiet":"npm run lint -- --quiet","lint:staged":"lint-staged","prettier":"prettier --write .","prettier:verify":"prettier --check .","pretest":"npm run lint","sanity-check":"npm run lint && npm run prettier:verify && npm run type-check && npm run docs && npm run build:types && npm run build && npm run test:unit","start":"npm run dev","test":"npm run test:unit && npm run test:func","test:unit":"karma start karma.conf.js","test:unit:debug":"DEBUG_UNIT_TESTS=1 karma start karma.conf.js --auto-watch --no-single-run --browsers Chrome","test:unit:watch":"karma start karma.conf.js --auto-watch --no-single-run","test:func":"BABEL_ENV=development mocha --require @babel/register tests/functional/auto/setup.js --timeout 40000 --exit","test:func:light":"BABEL_ENV=development HLSJS_LIGHT=1 mocha --require @babel/register tests/functional/auto/setup.js --timeout 40000 --exit","test:func:sauce":"SAUCE=1 UA=safari OS='OS X 10.15' BABEL_ENV=development mocha --require @babel/register tests/functional/auto/setup.js --timeout 40000 --exit","type-check":"tsc --noEmit","type-check:watch":"npm run type-check -- --watch","prepare":"husky 
install"},"devDependencies":{"@babel/core":"7.18.10","@babel/helper-module-imports":"7.18.6","@babel/plugin-proposal-class-properties":"7.18.6","@babel/plugin-proposal-object-rest-spread":"7.18.9","@babel/plugin-proposal-optional-chaining":"7.18.9","@babel/plugin-transform-object-assign":"7.18.6","@babel/preset-env":"7.18.10","@babel/preset-typescript":"7.18.6","@babel/register":"7.18.9","@itsjamie/esdoc-cli":"0.5.0","@itsjamie/esdoc-core":"0.5.0","@itsjamie/esdoc-ecmascript-proposal-plugin":"0.5.0","@itsjamie/esdoc-standard-plugin":"0.5.0","@itsjamie/esdoc-typescript-plugin":"0.5.0","@microsoft/api-extractor":"7.28.6","@types/chai":"4.3.3","@types/chart.js":"2.9.37","@types/mocha":"9.1.1","@types/sinon-chai":"3.2.8","@typescript-eslint/eslint-plugin":"5.23.0","@typescript-eslint/parser":"5.23.0","babel-loader":"8.2.5","babel-plugin-transform-remove-console":"6.9.4","chai":"4.3.6","chart.js":"2.9.4","chromedriver":"104.0.0","eslint":"8.21.0","eslint-config-prettier":"8.5.0","eslint-plugin-import":"2.26.0","eslint-plugin-mocha":"10.1.0","eslint-plugin-node":"11.1.0","eslint-plugin-promise":"6.0.0","eventemitter3":"4.0.7","http-server":"14.1.1","husky":"8.0.1","istanbul-instrumenter-loader":"3.0.1","jsonpack":"1.1.5","karma":"6.4.0","karma-chrome-launcher":"3.1.1","karma-coverage-istanbul-reporter":"3.0.3","karma-mocha":"2.0.1","karma-mocha-reporter":"2.2.5","karma-sinon-chai":"2.0.2","karma-sourcemap-loader":"0.3.8","karma-webpack":"4.0.2","lint-staged":"13.0.3","micromatch":"4.0.5","mocha":"10.0.0","netlify-cli":"10.7.1","prettier":"2.7.1","promise-polyfill":"8.2.3","sauce-connect-launcher":"1.3.2","selenium-webdriver":"4.4.0","semver":"7.3.7","sinon":"14.0.0","sinon-chai":"3.7.0","typescript":"4.7.4","url-toolkit":"2.2.5","webpack":"4.46.0","webpack-cli":"4.10.0","webpack-dev-server":"4.9.3","webpack-merge":"5.8.0","webworkify-webpack":"2.1.5"},"version":"1.2.1"}
{"name":"hls.js","license":"Apache-2.0","description":"JavaScript HLS client using MediaSourceExtension","homepage":"https://github.com/video-dev/hls.js","authors":"Guillaume du Pontavice <g.du.pontavice@gmail.com>","repository":{"type":"git","url":"https://github.com/video-dev/hls.js"},"bugs":{"url":"https://github.com/video-dev/hls.js/issues"},"main":"./dist/hls.js","types":"./dist/hls.js.d.ts","files":["dist/**/*","src/**/*"],"publishConfig":{"access":"public"},"scripts":{"build":"webpack --progress && npm run build:types","build:ci":"webpack && tsc --build tsconfig-lib.json && api-extractor run","build:debug":"webpack --progress --env debug --env demo","build:watch":"webpack --progress --env debug --env demo --watch","build:types":"tsc --build tsconfig-lib.json && api-extractor run --local","dev":"webpack serve --progress --env debug --env demo --port 8000 --static .","docs":"esdoc","lint":"eslint src/ tests/ --ext .js --ext .ts","lint:fix":"npm run lint -- --fix","lint:quiet":"npm run lint -- --quiet","lint:staged":"lint-staged","prettier":"prettier --write .","prettier:verify":"prettier --check .","pretest":"npm run lint","sanity-check":"npm run lint && npm run prettier:verify && npm run type-check && npm run docs && npm run build:types && npm run build && npm run test:unit","start":"npm run dev","test":"npm run test:unit && npm run test:func","test:unit":"karma start karma.conf.js","test:unit:debug":"DEBUG_UNIT_TESTS=1 karma start karma.conf.js --auto-watch --no-single-run --browsers Chrome","test:unit:watch":"karma start karma.conf.js --auto-watch --no-single-run","test:func":"BABEL_ENV=development mocha --require @babel/register tests/functional/auto/setup.js --timeout 40000 --exit","test:func:light":"BABEL_ENV=development HLSJS_LIGHT=1 mocha --require @babel/register tests/functional/auto/setup.js --timeout 40000 --exit","test:func:sauce":"SAUCE=1 UA=safari OS='OS X 10.15' BABEL_ENV=development mocha --require @babel/register tests/functional/auto/setup.js --timeout 40000 --exit","type-check":"tsc --noEmit","type-check:watch":"npm run type-check -- --watch","prepare":"husky 
install"},"devDependencies":{"@babel/core":"7.18.13","@babel/helper-module-imports":"7.18.6","@babel/plugin-proposal-class-properties":"7.18.6","@babel/plugin-proposal-object-rest-spread":"7.18.9","@babel/plugin-proposal-optional-chaining":"7.18.9","@babel/plugin-transform-object-assign":"7.18.6","@babel/preset-env":"7.18.10","@babel/preset-typescript":"7.18.6","@babel/register":"7.18.9","@itsjamie/esdoc-cli":"0.5.0","@itsjamie/esdoc-core":"0.5.0","@itsjamie/esdoc-ecmascript-proposal-plugin":"0.5.0","@itsjamie/esdoc-standard-plugin":"0.5.0","@itsjamie/esdoc-typescript-plugin":"0.5.0","@microsoft/api-extractor":"7.29.5","@types/chai":"4.3.3","@types/chart.js":"2.9.37","@types/mocha":"9.1.1","@types/sinon-chai":"3.2.8","@typescript-eslint/eslint-plugin":"5.23.0","@typescript-eslint/parser":"5.23.0","babel-loader":"8.2.5","babel-plugin-transform-remove-console":"6.9.4","chai":"4.3.6","chart.js":"2.9.4","chromedriver":"104.0.0","eslint":"8.23.0","eslint-config-prettier":"8.5.0","eslint-plugin-import":"2.26.0","eslint-plugin-mocha":"10.1.0","eslint-plugin-node":"11.1.0","eslint-plugin-promise":"6.0.1","eventemitter3":"4.0.7","http-server":"14.1.1","husky":"8.0.1","istanbul-instrumenter-loader":"3.0.1","jsonpack":"1.1.5","karma":"6.4.0","karma-chrome-launcher":"3.1.1","karma-coverage-istanbul-reporter":"3.0.3","karma-mocha":"2.0.1","karma-mocha-reporter":"2.2.5","karma-sinon-chai":"2.0.2","karma-sourcemap-loader":"0.3.8","karma-webpack":"4.0.2","lint-staged":"13.0.3","micromatch":"4.0.5","mocha":"10.0.0","netlify-cli":"10.18.0","node-fetch":"^3.2.10","prettier":"2.7.1","promise-polyfill":"8.2.3","sauce-connect-launcher":"1.3.2","selenium-webdriver":"4.4.0","semver":"7.3.7","sinon":"14.0.0","sinon-chai":"3.7.0","typescript":"4.8.2","url-toolkit":"2.2.5","webpack":"4.46.0","webpack-cli":"4.10.0","webpack-dev-server":"4.10.1","webpack-merge":"5.8.0","webworkify-webpack":"2.1.5"},"version":"1.2.3"}
10 node_modules/hls.js/src/controller/abr-controller.ts (generated, vendored)
@@ -240,7 +240,6 @@ class AbrController implements ComponentAPI {
id: frag.type,
};
this.onFragBuffered(Events.FRAG_BUFFERED, fragBufferedData);
frag.bitrateTest = false;
}
}
}
@@ -294,15 +293,16 @@ class AbrController implements ComponentAPI {
const forcedAutoLevel = this._nextAutoLevel;
const bwEstimator = this.bwEstimator;
// in case next auto level has been forced, and bw not available or not reliable, return forced value
if (
forcedAutoLevel !== -1 &&
(!bwEstimator || !bwEstimator.canEstimate())
) {
if (forcedAutoLevel !== -1 && !bwEstimator.canEstimate()) {
return forcedAutoLevel;
}

// compute next level using ABR logic
let nextABRAutoLevel = this.getNextABRAutoLevel();
// use forced auto level when ABR selected level has errored
if (forcedAutoLevel !== -1 && this.hls.levels[nextABRAutoLevel].loadError) {
return forcedAutoLevel;
}
// if forced auto level has been defined, use it to cap ABR computed quality level
if (forcedAutoLevel !== -1) {
nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
20 node_modules/hls.js/src/controller/audio-stream-controller.ts (generated, vendored)
@@ -1,6 +1,6 @@
import BaseStreamController, { State } from './base-stream-controller';
import { Events } from '../events';
import { BufferHelper } from '../utils/buffer-helper';
import { Bufferable, BufferHelper } from '../utils/buffer-helper';
import { FragmentState } from './fragment-tracker';
import { Level } from '../types/level';
import { PlaylistLevelType } from '../types/loader';
@@ -46,7 +46,7 @@ class AudioStreamController
extends BaseStreamController
implements NetworkComponentAPI
{
private videoBuffer: any | null = null;
private videoBuffer: Bufferable | null = null;
private videoTrackCC: number = -1;
private waitingVideoCC: number = -1;
private audioSwitch: boolean = false;
@@ -171,6 +171,7 @@ class AudioStreamController
// if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
if (!retryDate || now >= retryDate || this.media?.seeking) {
this.log('RetryDate reached, switch back to IDLE state');
this.resetStartWhenNotLoaded(this.trackId);
this.state = State.IDLE;
}
break;
@@ -289,17 +290,18 @@ class AudioStreamController
return;
}

if (this.bufferFlushed) {
const bufferable = this.mediaBuffer ? this.mediaBuffer : this.media;
if (this.bufferFlushed && bufferable) {
this.bufferFlushed = false;
this.afterBufferFlushed(
this.mediaBuffer ? this.mediaBuffer : this.media,
bufferable,
ElementaryStreamTypes.AUDIO,
PlaylistLevelType.AUDIO
);
}

const bufferInfo = this.getFwdBufferInfo(
this.mediaBuffer ? this.mediaBuffer : this.media,
bufferable,
PlaylistLevelType.AUDIO
);
if (bufferInfo === null) {
@@ -328,7 +330,7 @@ class AudioStreamController
const start = fragments[0].start;
let targetBufferTime = bufferInfo.end;

if (audioSwitch) {
if (audioSwitch && media) {
const pos = this.getLoadPosition();
targetBufferTime = pos;
// if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
@@ -587,10 +589,10 @@ class AudioStreamController
onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
const audioTrack = data.tracks.audio;
if (audioTrack) {
this.mediaBuffer = audioTrack.buffer;
this.mediaBuffer = audioTrack.buffer || null;
}
if (data.tracks.video) {
this.videoBuffer = data.tracks.video.buffer;
this.videoBuffer = data.tracks.video.buffer || null;
}
}
@@ -694,7 +696,7 @@ class AudioStreamController
this.warn(
`The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
);
this.resetLiveStartWhenNotLoaded(chunkMeta.level);
this.resetStartWhenNotLoaded(chunkMeta.level);
return;
}
const {
89 node_modules/hls.js/src/controller/base-stream-controller.ts (generated, vendored)
@@ -24,7 +24,10 @@ import FragmentLoader, {
LoadError,
} from '../loader/fragment-loader';
import { LevelDetails } from '../loader/level-details';
import {
import Decrypter from '../crypt/decrypter';
import TimeRanges from '../utils/time-ranges';
import { PlaylistLevelType } from '../types/loader';
import type {
BufferAppendingData,
ErrorData,
FragLoadedData,
@@ -32,10 +35,8 @@ import {
KeyLoadedData,
MediaAttachingData,
BufferFlushingData,
LevelSwitchingData,
} from '../types/events';
import Decrypter from '../crypt/decrypter';
import TimeRanges from '../utils/time-ranges';
import { PlaylistLevelType } from '../types/loader';
import type { FragmentTracker } from './fragment-tracker';
import type { Level } from '../types/level';
import type { RemuxedTrack } from '../types/remuxer';
@@ -74,8 +75,8 @@ export default class BaseStreamController
protected fragmentTracker: FragmentTracker;
protected transmuxer: TransmuxerInterface | null = null;
protected _state: string = State.STOPPED;
protected media?: any;
protected mediaBuffer?: any;
protected media: HTMLMediaElement | null = null;
protected mediaBuffer: Bufferable | null = null;
protected config: HlsConfig;
protected bitrateTest: boolean = false;
protected lastCurrentTime: number = 0;
@@ -108,6 +109,7 @@ export default class BaseStreamController
this.config = hls.config;
this.decrypter = new Decrypter(hls as HlsEventEmitter, hls.config);
hls.on(Events.KEY_LOADED, this.onKeyLoaded, this);
hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
}

protected doTick() {
@@ -133,7 +135,7 @@ export default class BaseStreamController
this.state = State.STOPPED;
}

protected _streamEnded(bufferInfo, levelDetails: LevelDetails) {
protected _streamEnded(bufferInfo, levelDetails: LevelDetails): boolean {
const { fragCurrent, fragmentTracker } = this;
// we just got done loading the final fragment and there is no other buffered range after ...
// rationale is that in case there are any buffered ranges after, it means that there are unbuffered portion in between
@@ -141,6 +143,7 @@ export default class BaseStreamController
if (
!levelDetails.live &&
fragCurrent &&
this.media &&
// NOTE: Because of the way parts are currently parsed/represented in the playlist, we can end up
// in situations where the current fragment is actually greater than levelDetails.endSN. While
// this feels like the "wrong place" to account for that, this is a narrower/safer change than
@@ -176,10 +179,10 @@ export default class BaseStreamController
data: MediaAttachingData
) {
const media = (this.media = this.mediaBuffer = data.media);
this.onvseeking = this.onMediaSeeking.bind(this);
this.onvended = this.onMediaEnded.bind(this);
media.addEventListener('seeking', this.onvseeking as EventListener);
media.addEventListener('ended', this.onvended as EventListener);
this.onvseeking = this.onMediaSeeking.bind(this) as EventListener;
this.onvended = this.onMediaEnded.bind(this) as EventListener;
media.addEventListener('seeking', this.onvseeking);
media.addEventListener('ended', this.onvended);
const config = this.config;
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
this.startLoad(config.startPosition);
@@ -194,7 +197,7 @@ export default class BaseStreamController
}

// remove video listeners
if (media) {
if (media && this.onvseeking && this.onvended) {
media.removeEventListener('seeking', this.onvseeking);
media.removeEventListener('ended', this.onvended);
this.onvseeking = this.onvended = null;
@@ -209,7 +212,7 @@ export default class BaseStreamController
const { config, fragCurrent, media, mediaBuffer, state } = this;
const currentTime: number = media ? media.currentTime : 0;
const bufferInfo = BufferHelper.bufferInfo(
mediaBuffer || media,
mediaBuffer ? mediaBuffer : media,
currentTime,
config.maxBufferHole
);
@@ -274,6 +277,13 @@ export default class BaseStreamController
}
}

protected onLevelSwitching(
event: Events.LEVEL_SWITCHING,
data: LevelSwitchingData
): void {
this.fragLoadError = 0;
}

protected onHandlerDestroying() {
this.stopLoad();
super.onHandlerDestroying();
@@ -282,6 +292,7 @@ export default class BaseStreamController
protected onHandlerDestroyed() {
this.state = State.STOPPED;
this.hls.off(Events.KEY_LOADED, this.onKeyLoaded, this);
this.hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
if (this.fragmentLoader) {
this.fragmentLoader.destroy();
}
@@ -367,7 +378,7 @@ export default class BaseStreamController
this._handleFragmentLoadComplete(data);
})
.catch((reason) => {
if (this.state === State.STOPPED) {
if (this.state === State.STOPPED || this.state === State.ERROR) {
return;
}
this.warn(reason);
@@ -471,6 +482,9 @@ export default class BaseStreamController
this.tick();
})
.catch((reason) => {
if (this.state === State.STOPPED || this.state === State.ERROR) {
return;
}
this.warn(reason);
this.resetFragmentLoading(frag);
});
@@ -494,12 +508,29 @@ export default class BaseStreamController
part ? ' part: ' + part.index : ''
} of ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${
frag.level
} ${TimeRanges.toString(BufferHelper.getBuffered(media))}`
} ${
media
? TimeRanges.toString(BufferHelper.getBuffered(media))
: '(detached)'
}`
);
this.state = State.IDLE;
if (!media) {
return;
}
if (
!this.loadedmetadata &&
media.buffered.length &&
this.fragCurrent === this.fragPrevious
) {
this.loadedmetadata = true;
this.seekToStartPos();
}
this.tick();
}

protected seekToStartPos() {}

protected _handleFragmentLoadComplete(fragLoadedEndData: PartsLoadedData) {
const { transmuxer } = this;
if (!transmuxer) {
@@ -745,7 +776,7 @@ export default class BaseStreamController
}

protected getFwdBufferInfo(
bufferable: Bufferable,
bufferable: Bufferable | null,
type: PlaylistLevelType
): {
len: number;
@@ -1010,12 +1041,12 @@ export default class BaseStreamController

if (frag) {
const curSNIdx = frag.sn - levelDetails.startSN;
const sameLevel = fragPrevious && frag.level === fragPrevious.level;
const nextFrag = fragments[curSNIdx + 1];
if (fragPrevious && frag.sn === fragPrevious.sn && !loadingParts) {
// Force the next fragment to load if the previous one was already selected. This can occasionally happen with
// non-uniform fragment durations
const sameLevel = fragPrevious && frag.level === fragPrevious.level;
if (sameLevel) {
const nextFrag = fragments[curSNIdx + 1];
if (
frag.sn < endSN &&
this.fragmentTracker.getState(nextFrag) !== FragmentState.OK
@@ -1182,7 +1213,11 @@ export default class BaseStreamController
}

protected resetFragmentLoading(frag: Fragment) {
if (!this.fragCurrent || !this.fragContextChanged(frag)) {
if (
!this.fragCurrent ||
(!this.fragContextChanged(frag) &&
this.state !== State.FRAG_LOADING_WAITING_RETRY)
) {
this.state = State.IDLE;
}
}
@@ -1210,8 +1245,9 @@ export default class BaseStreamController
const config = this.config;
// keep retrying until the limit will be reached
if (this.fragLoadError + 1 <= config.fragLoadingMaxRetry) {
if (this.resetLiveStartWhenNotLoaded(frag.level)) {
return;
if (!this.loadedmetadata) {
this.startFragRequested = false;
this.nextLoadPosition = this.startPosition;
}
// exponential backoff capped to config.fragLoadingMaxRetryTimeout
const delay = Math.min(
@@ -1271,22 +1307,21 @@ export default class BaseStreamController
this.state = State.IDLE;
}

protected resetLiveStartWhenNotLoaded(level: number): boolean {
// if loadedmetadata is not set, it means that we are emergency switch down on first frag
protected resetStartWhenNotLoaded(level: number): void {
// if loadedmetadata is not set, it means that first frag request failed
// in that case, reset startFragRequested flag
if (!this.loadedmetadata) {
this.startFragRequested = false;
const details = this.levels ? this.levels[level].details : null;
if (details?.live) {
// We can't afford to retry after a delay in a live scenario. Update the start position and return to IDLE.
// Update the start position and return to IDLE to recover live start
this.startPosition = -1;
this.setStartPosition(details, 0);
this.resetLoadingState();
return true;
} else {
this.nextLoadPosition = this.startPosition;
}
this.nextLoadPosition = this.startPosition;
}
return false;
}

private updateLevelTiming(
1 node_modules/hls.js/src/controller/buffer-controller.ts (generated, vendored)
@@ -420,6 +420,7 @@ export default class BufferController implements ComponentAPI {
`[buffer-controller]: Failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`
);
event.fatal = true;
hls.stopLoad();
}
}
hls.trigger(Events.ERROR, event);
35 node_modules/hls.js/src/controller/level-controller.ts (generated, vendored)
@@ -344,15 +344,21 @@ export default class LevelController extends BasePlaylistController {
case ErrorDetails.KEY_LOAD_ERROR:
case ErrorDetails.KEY_LOAD_TIMEOUT:
if (data.frag) {
const level = this._levels[data.frag.level];
// Share fragment error count accross media options (main, audio, subs)
// This allows for level based rendition switching when media option assets fail
const variantLevelIndex =
data.frag.type === PlaylistLevelType.MAIN
? data.frag.level
: this.currentLevelIndex;
const level = this._levels[variantLevelIndex];
// Set levelIndex when we're out of fragment retries
if (level) {
level.fragmentError++;
if (level.fragmentError > this.hls.config.fragLoadingMaxRetry) {
levelIndex = data.frag.level;
levelIndex = variantLevelIndex;
}
} else {
levelIndex = data.frag.level;
levelIndex = variantLevelIndex;
}
}
break;
@@ -369,7 +375,7 @@ export default class LevelController extends BasePlaylistController {
levelError = true;
break;
case ErrorDetails.REMUX_ALLOC_ERROR:
levelIndex = data.level;
levelIndex = data.level ?? this.currentLevelIndex;
levelError = true;
break;
}
@@ -412,13 +418,20 @@ export default class LevelController extends BasePlaylistController {
errorEvent.levelRetry = true;
this.redundantFailover(levelIndex);
} else if (this.manualLevelIndex === -1) {
// Search for available level in auto level selection mode, cycling from highest to lowest bitrate
const nextLevel =
levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
if (
this.currentLevelIndex !== nextLevel &&
this._levels[nextLevel].loadError === 0
) {
// Search for next level to retry
let nextLevel = -1;
const levels = this._levels;
for (let i = levels.length; i--; ) {
const candidate = (i + this.currentLevelIndex) % levels.length;
if (
candidate !== this.currentLevelIndex &&
levels[candidate].loadError === 0
) {
nextLevel = candidate;
break;
}
}
if (nextLevel > -1 && this.currentLevelIndex !== nextLevel) {
this.warn(`${errorDetails}: switch to ${nextLevel}`);
errorEvent.levelRetry = true;
this.hls.nextAutoLevel = nextLevel;
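The retry loop introduced above walks candidate levels by decrementing an index and wrapping it around the current level, so it tries the level one step below the current one first and only wraps to the top of the list afterwards. A minimal standalone sketch of that traversal order (the levelCount and currentLevelIndex values are hypothetical and not part of the commit):

// Hypothetical values, only to illustrate the candidate order produced by
// `(i + currentLevelIndex) % levels.length` with i counting down.
const levelCount = 5;
const currentLevelIndex = 2;
const order: number[] = [];
for (let i = levelCount; i--; ) {
  const candidate = (i + currentLevelIndex) % levelCount;
  if (candidate !== currentLevelIndex) {
    order.push(candidate);
  }
}
console.log(order); // [1, 0, 4, 3]: one step down from the current level, then wrapping from the top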
35 node_modules/hls.js/src/controller/stream-controller.ts (generated, vendored)
@@ -120,7 +120,7 @@ export default class StreamController
// determine load level
let startLevel = hls.startLevel;
if (startLevel === -1) {
if (hls.config.testBandwidth) {
if (hls.config.testBandwidth && this.levels.length > 1) {
// -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
startLevel = 0;
this.bitrateTest = true;
@@ -183,6 +183,7 @@ export default class StreamController
// if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
if (!retryDate || now >= retryDate || this.media?.seeking) {
this.log('retryDate reached, switch back to IDLE state');
this.resetStartWhenNotLoaded(this.level);
this.state = State.IDLE;
}
}
@@ -307,7 +308,9 @@ export default class StreamController
this.audioOnly && !this.altAudio
? ElementaryStreamTypes.AUDIO
: ElementaryStreamTypes.VIDEO;
this.afterBufferFlushed(media, type, PlaylistLevelType.MAIN);
if (media) {
this.afterBufferFlushed(media, type, PlaylistLevelType.MAIN);
}
frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
}
if (!frag) {
@@ -334,14 +337,10 @@ export default class StreamController
// Check if fragment is not loaded
const fragState = this.fragmentTracker.getState(frag);
this.fragCurrent = frag;
if (
fragState === FragmentState.NOT_LOADED ||
fragState === FragmentState.PARTIAL
) {
if (fragState === FragmentState.NOT_LOADED) {
if (frag.sn === 'initSegment') {
this._loadInitSegment(frag);
} else if (this.bitrateTest) {
frag.bitrateTest = true;
this.log(
`Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`
);
@@ -509,7 +508,7 @@ export default class StreamController

protected onMediaDetaching() {
const { media } = this;
if (media) {
if (media && this.onvplaying && this.onvseeked) {
media.removeEventListener('playing', this.onvplaying);
media.removeEventListener('seeked', this.onvseeked);
this.onvplaying = this.onvseeked = null;
@@ -532,7 +531,7 @@ export default class StreamController
const media = this.media;
const currentTime = media ? media.currentTime : null;
if (Number.isFinite(currentTime)) {
this.log(`Media seeked to ${currentTime.toFixed(3)}`);
this.log(`Media seeked to ${(currentTime as number).toFixed(3)}`);
}

// tick to speed up FRAG_CHANGED triggering
@@ -915,13 +914,7 @@ export default class StreamController
return;
}

// Check combined buffer
const buffered = BufferHelper.getBuffered(media);

if (!this.loadedmetadata && buffered.length) {
this.loadedmetadata = true;
this.seekToStartPos();
} else {
if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
// Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
gapController.poll(this.lastCurrentTime, activeFrag);
@@ -970,10 +963,12 @@ export default class StreamController

/**
* Seeks to the set startPosition if not equal to the mediaElement's current time.
* @private
*/
private seekToStartPos() {
protected seekToStartPos() {
const { media } = this;
if (!media) {
return;
}
const currentTime = media.currentTime;
let startPosition = this.startPosition;
// only adjust currentTime if different from startPosition or if startPosition not buffered
@@ -1019,6 +1014,7 @@ export default class StreamController
}

private _loadBitrateTestFrag(frag: Fragment) {
frag.bitrateTest = true;
this._doFragLoad(frag).then((data) => {
const { hls } = this;
if (!data || hls.nextLoadLevel || this.fragContextChanged(frag)) {
@@ -1036,6 +1032,7 @@ export default class StreamController
stats.buffering.end =
self.performance.now();
hls.trigger(Events.FRAG_LOADED, data as FragLoadedData);
frag.bitrateTest = false;
});
}

@@ -1049,7 +1046,7 @@ export default class StreamController
this.warn(
`The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
);
this.resetLiveStartWhenNotLoaded(chunkMeta.level);
this.resetStartWhenNotLoaded(chunkMeta.level);
return;
}
const { frag, part, level } = context;
53 node_modules/hls.js/src/controller/subtitle-stream-controller.ts (generated, vendored)
@@ -1,5 +1,5 @@
import { Events } from '../events';
import { BufferHelper } from '../utils/buffer-helper';
import { Bufferable, BufferHelper } from '../utils/buffer-helper';
import { findFragmentByPTS } from './fragment-finders';
import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
import { addSliding } from './level-helper';
@@ -351,7 +351,7 @@ export class SubtitleStreamController
const targetDuration = trackDetails.targetduration;
const { config, media } = this;
const bufferedInfo = BufferHelper.bufferedInfo(
this.mediaBufferTimeRanges,
this.tracksBuffered[this.currentTrackId] || [],
media.currentTime - targetDuration,
config.maxBufferHole
);
@@ -397,12 +397,16 @@ export class SubtitleStreamController
return;
}

// only load if fragment is not loaded
if (
this.fragmentTracker.getState(foundFrag) !== FragmentState.NOT_LOADED
) {
return;
}

if (foundFrag.encrypted) {
this.loadKey(foundFrag, trackDetails);
} else if (
this.fragmentTracker.getState(foundFrag) === FragmentState.NOT_LOADED
) {
// only load if fragment is not loaded
} else {
this.loadFragment(foundFrag, trackDetails, targetBufferTime);
}
}
@@ -421,7 +425,40 @@ export class SubtitleStreamController
}
}

get mediaBufferTimeRanges(): TimeRange[] {
return this.tracksBuffered[this.currentTrackId] || [];
get mediaBufferTimeRanges(): Bufferable {
return new BufferableInstance(
this.tracksBuffered[this.currentTrackId] || []
);
}
}

class BufferableInstance implements Bufferable {
public readonly buffered: TimeRanges;

constructor(timeranges: TimeRange[]) {
const getRange = (
name: 'start' | 'end',
index: number,
length: number
): number => {
index = index >>> 0;
if (index > length - 1) {
throw new DOMException(
`Failed to execute '${name}' on 'TimeRanges': The index provided (${index}) is greater than the maximum bound (${length})`
);
}
return timeranges[index][name];
};
this.buffered = {
get length() {
return timeranges.length;
},
end(index: number): number {
return getRange('end', index, timeranges.length);
},
start(index: number): number {
return getRange('start', index, timeranges.length);
},
};
}
}
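The BufferableInstance class added above adapts an array of { start, end } ranges to the TimeRanges shape required by the Bufferable interface. A minimal usage sketch, reusing the BufferableInstance class and TimeRange interface shown in this diff (the range values are hypothetical):

// Hypothetical subtitle ranges, only to show how the adapted `buffered` object is read.
const ranges: TimeRange[] = [
  { start: 0, end: 4.5 },
  { start: 10, end: 12 },
];
const bufferable = new BufferableInstance(ranges);
console.log(bufferable.buffered.length); // 2
console.log(bufferable.buffered.start(1)); // 10
console.log(bufferable.buffered.end(0)); // 4.5
// An out-of-range index throws a DOMException, mirroring native TimeRanges behaviour.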
2 node_modules/hls.js/src/controller/timeline-controller.ts (generated, vendored)
@@ -745,7 +745,7 @@ function newVTTCCs(): VTTCCs {
0: {
start: 0,
prevCC: -1,
new: false,
new: true,
},
};
}
1 node_modules/hls.js/src/demux/aacdemuxer.ts (generated, vendored)
@@ -11,7 +11,6 @@ import type { HlsConfig } from '../config';
class AACDemuxer extends BaseAudioDemuxer {
private readonly observer: HlsEventEmitter;
private readonly config: HlsConfig;
static readonly minProbeByteLength: number = 9;

constructor(observer, config) {
super();
2 node_modules/hls.js/src/demux/mp3demuxer.ts (generated, vendored)
@@ -7,8 +7,6 @@ import { logger } from '../utils/logger';
import * as MpegAudio from './mpegaudio';

class MP3Demuxer extends BaseAudioDemuxer {
static readonly minProbeByteLength: number = 4;

resetInitSegment(
initSegment: Uint8Array | undefined,
audioCodec: string | undefined,
1 node_modules/hls.js/src/demux/mp4demuxer.ts (generated, vendored)
@@ -27,7 +27,6 @@ import type { HlsConfig } from '../config';
const emsgSchemePattern = /\/emsg[-/]ID3/i;

class MP4Demuxer implements Demuxer {
static readonly minProbeByteLength = 1024;
private remainderData: Uint8Array | null = null;
private timeOffset: number = 0;
private config: HlsConfig;
92 node_modules/hls.js/src/demux/transmuxer.ts (generated, vendored)
@@ -8,8 +8,6 @@ import TSDemuxer, { TypeSupported } from '../demux/tsdemuxer';
import MP3Demuxer from '../demux/mp3demuxer';
import MP4Remuxer from '../remux/mp4-remuxer';
import PassThroughRemuxer from '../remux/passthrough-remuxer';
import ChunkCache from './chunk-cache';
import { appendUint8Array } from '../utils/mp4-tools';
import { logger } from '../utils/logger';
import type { Demuxer, DemuxerResult, KeyData } from '../types/demuxer';
import type { Remuxer } from '../types/remuxer';
@@ -40,11 +38,6 @@ const muxConfig: MuxConfig[] = [
{ demux: MP3Demuxer, remux: MP4Remuxer },
];

let minProbeByteLength = 1024;
muxConfig.forEach(({ demux }) => {
  minProbeByteLength = Math.max(minProbeByteLength, demux.minProbeByteLength);
});

export default class Transmuxer {
private observer: HlsEventEmitter;
private typeSupported: TypeSupported;
@@ -58,7 +51,6 @@ export default class Transmuxer {
private decryptionPromise: Promise<TransmuxerResult> | null = null;
private transmuxConfig!: TransmuxConfig;
private currentTransmuxState!: TransmuxState;
private cache: ChunkCache = new ChunkCache();

constructor(
observer: HlsEventEmitter,
@@ -91,11 +83,40 @@ export default class Transmuxer {
stats.executeStart = now();

let uintData: Uint8Array = new Uint8Array(data);
const { cache, config, currentTransmuxState, transmuxConfig } = this;
const { config, currentTransmuxState, transmuxConfig } = this;
if (state) {
this.currentTransmuxState = state;
}

const {
contiguous,
discontinuity,
trackSwitch,
accurateTimeOffset,
timeOffset,
initSegmentChange,
} = state || currentTransmuxState;
const {
audioCodec,
videoCodec,
defaultInitPts,
duration,
initSegmentData,
} = transmuxConfig;

// Reset muxers before probing to ensure that their state is clean, even if flushing occurs before a successful probe
if (discontinuity || trackSwitch || initSegmentChange) {
this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
}

if (discontinuity || initSegmentChange) {
this.resetInitialTimestamp(defaultInitPts);
}

if (!contiguous) {
this.resetContiguity();
}

const keyData = getEncryptionType(uintData, decryptdata);
if (keyData && keyData.method === 'AES-128') {
const decrypter = this.getDecrypter();
@@ -131,40 +152,7 @@ export default class Transmuxer {
}
}

const {
contiguous,
discontinuity,
trackSwitch,
accurateTimeOffset,
timeOffset,
initSegmentChange,
} = state || currentTransmuxState;
const {
audioCodec,
videoCodec,
defaultInitPts,
duration,
initSegmentData,
} = transmuxConfig;

// Reset muxers before probing to ensure that their state is clean, even if flushing occurs before a successful probe
if (discontinuity || trackSwitch || initSegmentChange) {
this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
}

if (discontinuity || initSegmentChange) {
this.resetInitialTimestamp(defaultInitPts);
}

if (!contiguous) {
this.resetContiguity();
}

if (this.needsProbing(uintData, discontinuity, trackSwitch)) {
if (cache.dataLength) {
const cachedData = cache.flush();
uintData = appendUint8Array(cachedData, uintData);
}
this.configureTransmuxer(uintData, transmuxConfig);
}

@@ -192,7 +180,7 @@ export default class Transmuxer {
const stats = chunkMeta.transmuxing;
stats.executeStart = now();

const { decrypter, cache, currentTransmuxState, decryptionPromise } = this;
const { decrypter, currentTransmuxState, decryptionPromise } = this;

if (decryptionPromise) {
// Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
@@ -217,19 +205,15 @@ export default class Transmuxer {
}
}

const bytesSeen = cache.dataLength;
cache.reset();
const { demuxer, remuxer } = this;
if (!demuxer || !remuxer) {
// If probing failed, and each demuxer saw enough bytes to be able to probe, then Hls.js has been given content its not able to handle
if (bytesSeen >= minProbeByteLength) {
this.observer.emit(Events.ERROR, Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.FRAG_PARSING_ERROR,
fatal: true,
reason: 'no demux matching with content found',
});
}
// If probing failed, then Hls.js has been given content its not able to handle
this.observer.emit(Events.ERROR, Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.FRAG_PARSING_ERROR,
fatal: true,
reason: 'no demux matching with content found',
});
stats.executeEnd = now();
return [emptyResult(chunkMeta)];
}
54 node_modules/hls.js/src/demux/tsdemuxer.ts (generated, vendored)
@@ -57,8 +57,6 @@ export interface TypeSupported {
}

class TSDemuxer implements Demuxer {
static readonly minProbeByteLength = 188;

private readonly observer: HlsEventEmitter;
private readonly config: HlsConfig;
private typeSupported: TypeSupported;
@@ -89,37 +87,8 @@ class TSDemuxer implements Demuxer {
}

static probe(data: Uint8Array) {
const syncOffset = TSDemuxer.syncOffset(data);
if (syncOffset < 0) {
return false;
} else {
if (syncOffset) {
logger.warn(
`MPEG2-TS detected but first sync word found @ offset ${syncOffset}, junk ahead ?`
);
}

return true;
}
}

static syncOffset(data: Uint8Array) {
// scan 1000 first bytes
const scanwindow = Math.min(1000, data.length - 3 * 188);
let i = 0;
while (i < scanwindow) {
// a TS fragment should contain at least 3 TS packets, a PAT, a PMT, and one PID, each starting with 0x47
if (
data[i] === 0x47 &&
data[i + 188] === 0x47 &&
data[i + 2 * 188] === 0x47
) {
return i;
} else {
i++;
}
}
return -1;
// a TS init segment should contain at least 2 TS packets: PAT and PMT, each starting with 0x47
return data[0] === 0x47 && data[188] === 0x47;
}

/**
@@ -172,6 +141,7 @@ class TSDemuxer implements Demuxer {
// flush any partial content
this.aacOverFlow = null;
this.avcSample = null;
this.remainderData = null;
this.audioCodec = audioCodec;
this.videoCodec = videoCodec;
this._duration = trackDuration;
@@ -216,7 +186,7 @@ class TSDemuxer implements Demuxer {
let id3Id = id3Track.pid;
let audioData = audioTrack.pesData;
let id3Data = id3Track.pesData;
let unknownPIDs = false;
let unknownPID: number | null = null;
let pmtParsed = this.pmtParsed;
let pmtId = this._pmtId;
@@ -237,9 +207,7 @@ class TSDemuxer implements Demuxer {
};
}

const syncOffset = Math.max(0, TSDemuxer.syncOffset(data));

len -= (len + syncOffset) % 188;
len -= len % 188;
if (len < data.byteLength && !flush) {
this.remainderData = new Uint8Array(
data.buffer,
@@ -250,7 +218,7 @@ class TSDemuxer implements Demuxer {

// loop through TS packets
let tsPacketErrors = 0;
for (let start = syncOffset; start < len; start += 188) {
for (let start = 0; start < len; start += 188) {
if (data[start] === 0x47) {
const stt = !!(data[start + 1] & 0x40);
// pid is a 13-bit field starting at the last bit of TS[1]
@@ -354,11 +322,9 @@ class TSDemuxer implements Demuxer {
id3Track.pid = id3Id;
}

if (unknownPIDs && !pmtParsed) {
logger.log('reparse from beginning');
unknownPIDs = false;
// we set it to -188, the += 188 in the for loop will reset start to 0
start = syncOffset - 188;
if (unknownPID !== null && !pmtParsed) {
logger.log(`unknown PID '${unknownPID}' in TS found`);
unknownPID = null;
}
pmtParsed = this.pmtParsed = true;
break;
@@ -367,7 +333,7 @@ class TSDemuxer implements Demuxer {
case 0x1fff:
break;
default:
unknownPIDs = true;
unknownPID = pid;
break;
}
} else {
9 node_modules/hls.js/src/hls.ts (generated, vendored)
@@ -140,12 +140,15 @@ export default class Hls implements HlsEventEmitter {
// fpsController uses streamController to switch when frames are being dropped
fpsController.setStreamController(streamController);

const networkControllers = [levelController, streamController];
const networkControllers = [
playListLoader,
keyLoader,
levelController,
streamController,
];

this.networkControllers = networkControllers;
const coreComponents = [
playListLoader,
keyLoader,
abrController,
bufferController,
capLevelController,
24 node_modules/hls.js/src/loader/key-loader.ts (generated, vendored)
@@ -15,14 +15,14 @@ import {
Loader,
FragmentLoaderContext,
} from '../types/loader';
import type { ComponentAPI } from '../types/component-api';
import type { NetworkComponentAPI } from '../types/component-api';
import type { KeyLoadingData } from '../types/events';

interface KeyLoaderContext extends LoaderContext {
frag: Fragment;
}

export default class KeyLoader implements ComponentAPI {
export default class KeyLoader implements NetworkComponentAPI {
private hls: Hls;
public loaders = {};
public decryptkey: Uint8Array | null = null;
@@ -31,19 +31,24 @@ export default class KeyLoader implements ComponentAPI {
constructor(hls: Hls) {
this.hls = hls;

this._registerListeners();
this.registerListeners();
}

private _registerListeners() {
public startLoad(startPosition: number): void {}

public stopLoad(): void {
this.destroyInternalLoaders();
}

private registerListeners() {
this.hls.on(Events.KEY_LOADING, this.onKeyLoading, this);
}

private _unregisterListeners() {
private unregisterListeners() {
this.hls.off(Events.KEY_LOADING, this.onKeyLoading);
}

destroy(): void {
this._unregisterListeners();
private destroyInternalLoaders(): void {
for (const loaderName in this.loaders) {
const loader = this.loaders[loaderName];
if (loader) {
@@ -53,6 +58,11 @@ export default class KeyLoader implements ComponentAPI {
this.loaders = {};
}

destroy(): void {
this.unregisterListeners();
this.destroyInternalLoaders();
}

onKeyLoading(event: Events.KEY_LOADING, data: KeyLoadingData) {
const { frag } = data;
const type = frag.type;
9 node_modules/hls.js/src/loader/playlist-loader.ts (generated, vendored)
@@ -33,6 +33,7 @@ import type {
ManifestLoadingData,
TrackLoadingData,
} from '../types/events';
import { NetworkComponentAPI } from '../types/component-api';

function mapContextToLevelType(
context: PlaylistLoaderContext
@@ -63,7 +64,7 @@ function getResponseUrl(
return url;
}

class PlaylistLoader {
class PlaylistLoader implements NetworkComponentAPI {
private readonly hls: Hls;
private readonly loaders: {
[key: string]: Loader<LoaderContext>;
@@ -74,6 +75,12 @@ class PlaylistLoader {
this.registerListeners();
}

public startLoad(startPosition: number): void {}

public stopLoad(): void {
this.destroyInternalLoaders();
}

private registerListeners() {
const { hls } = this;
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
2 node_modules/hls.js/src/remux/mp4-generator.ts (generated, vendored)
@@ -1084,7 +1084,7 @@ class MP4 {
offset += 8 + arraylen;
array.set(
[
0x00, // version 0
track.type === 'video' ? 0x01 : 0x00, // version 1 for video with signed-int sample_composition_time_offset
0x00,
0x0f,
0x01, // flags
124 node_modules/hls.js/src/remux/mp4-remuxer.ts (generated, vendored)
|
@ -33,7 +33,6 @@ const MPEG_AUDIO_SAMPLE_PER_FRAME = 1152;
|
|||
|
||||
let chromeVersion: number | null = null;
|
||||
let safariWebkitVersion: number | null = null;
|
||||
let requiresPositiveDts: boolean = false;
|
||||
|
||||
export default class MP4Remuxer implements Remuxer {
|
||||
private observer: HlsEventEmitter;
|
||||
|
@ -68,10 +67,6 @@ export default class MP4Remuxer implements Remuxer {
|
|||
const result = navigator.userAgent.match(/Safari\/(\d+)/i);
|
||||
safariWebkitVersion = result ? parseInt(result[1]) : 0;
|
||||
}
|
||||
requiresPositiveDts = !(
|
||||
(!!chromeVersion && chromeVersion >= 75) ||
|
||||
(!!safariWebkitVersion && safariWebkitVersion >= 600)
|
||||
);
|
||||
}
|
||||
|
||||
destroy() {}
|
||||
|
@ -392,7 +387,6 @@ export default class MP4Remuxer implements Remuxer {
|
|||
let lastDTS;
|
||||
let minPTS: number = Number.POSITIVE_INFINITY;
|
||||
let maxPTS: number = Number.NEGATIVE_INFINITY;
|
||||
let ptsDtsShift = 0;
|
||||
let sortSamples = false;
|
||||
|
||||
// if parsed fragment is contiguous with last one, let's use last DTS value as reference
|
||||
|
@@ -411,13 +405,6 @@ export default class MP4Remuxer implements Remuxer {
      const sample = inputSamples[i];
      sample.pts = normalizePts(sample.pts - initPTS, nextAvcDts);
      sample.dts = normalizePts(sample.dts - initPTS, nextAvcDts);
      if (sample.dts > sample.pts) {
        const PTS_DTS_SHIFT_TOLERANCE_90KHZ = 90000 * 0.2;
        ptsDtsShift = Math.max(
          Math.min(ptsDtsShift, sample.pts - sample.dts),
          -1 * PTS_DTS_SHIFT_TOLERANCE_90KHZ
        );
      }
      if (sample.dts < inputSamples[i > 0 ? i - 1 : i].dts) {
        sortSamples = true;
      }
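The loop above tracks the worst PTS < DTS violation seen in the fragment, but clamps it to at most 0.2 s on the 90 kHz MPEG-TS clock (18 000 ticks). A worked example of that clamp in isolation, mirroring the expression above on plain numbers:

```ts
// Mirror of the clamp above, in isolation. All values are 90 kHz ticks.
const PTS_DTS_SHIFT_TOLERANCE_90KHZ = 90000 * 0.2; // 18_000 ticks = 200 ms

function accumulateShift(ptsDtsShift: number, pts: number, dts: number): number {
  if (dts <= pts) {
    return ptsDtsShift; // only PTS < DTS violations contribute
  }
  return Math.max(
    Math.min(ptsDtsShift, pts - dts), // keep the most negative shift seen so far
    -1 * PTS_DTS_SHIFT_TOLERANCE_90KHZ // but never worse than -200 ms
  );
}

// pts - dts = -9_000 (-100 ms): within tolerance, shift becomes -9_000
accumulateShift(0, 81_000, 90_000); // -9000
// pts - dts = -27_000 (-300 ms): clamped to -18_000 (-200 ms)
accumulateShift(-9_000, 63_000, 90_000); // -18000
```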
@@ -436,49 +423,13 @@ export default class MP4Remuxer implements Remuxer {
    firstDTS = inputSamples[0].dts;
    lastDTS = inputSamples[inputSamples.length - 1].dts;

    // on Safari let's signal the same sample duration for all samples
    // sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
    // Sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
    // set this constant duration as being the avg delta between consecutive DTS.
    const inputDuration = lastDTS - firstDTS;
    const averageSampleDuration = inputDuration
      ? Math.round(inputDuration / (nbSamples - 1))
      : mp4SampleDuration || track.inputTimeScale / 30;

    // handle broken streams with PTS < DTS, tolerance up 0.2 seconds
    if (ptsDtsShift < 0) {
      if (ptsDtsShift < averageSampleDuration * -2) {
        // Fix for "CNN special report, with CC" in test-streams (including Safari browser)
        // With large PTS < DTS errors such as this, we want to correct CTS while maintaining increasing DTS values
        logger.warn(
          `PTS < DTS detected in video samples, offsetting DTS from PTS by ${toMsFromMpegTsClock(
            -averageSampleDuration,
            true
          )} ms`
        );
        let lastDts = ptsDtsShift;
        for (let i = 0; i < nbSamples; i++) {
          inputSamples[i].dts = lastDts = Math.max(
            lastDts,
            inputSamples[i].pts - averageSampleDuration
          );
          inputSamples[i].pts = Math.max(lastDts, inputSamples[i].pts);
        }
      } else {
        // Fix for "Custom IV with bad PTS DTS" in test-streams
        // With smaller PTS < DTS errors we can simply move all DTS back. This increases CTS without causing buffer gaps or decode errors in Safari
        logger.warn(
          `PTS < DTS detected in video samples, shifting DTS by ${toMsFromMpegTsClock(
            ptsDtsShift,
            true
          )} ms to overcome this issue`
        );
        for (let i = 0; i < nbSamples; i++) {
          inputSamples[i].dts = inputSamples[i].dts + ptsDtsShift;
        }
      }
      firstDTS = inputSamples[0].dts;
    }

    // if fragment are contiguous, detect hole/overlapping between fragments
    if (contiguous) {
      // check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole)
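To make the duration math above concrete: 90 samples of 30 fps video on the 90 kHz clock span 89 DTS intervals of 3000 ticks, so inputDuration = 267 000 and averageSampleDuration = Math.round(267000 / (90 - 1)) = 3000. The branch taken then depends on how bad the accumulated shift is: a shift worse than two average frame durations rebuilds DTS from PTS frame by frame, while a smaller shift just slides every DTS back by the same amount. A standalone sketch of the simpler small-shift branch, on plain {pts, dts} pairs rather than hls.js sample objects:

```ts
// Small-shift branch from the hunk above, sketched on plain {pts, dts} pairs
// (not hls.js's sample type). Assumes ptsDtsShift is negative, no worse than
// -2 * averageSampleDuration, and was not clamped by the 0.2 s tolerance.
interface TsSample {
  pts: number;
  dts: number;
}

function shiftDtsBack(samples: TsSample[], ptsDtsShift: number): void {
  for (let i = 0; i < samples.length; i++) {
    // Sliding every DTS back by the same amount keeps DTS deltas intact and
    // raises CTS (pts - dts) to >= 0 for every sample covered by the shift.
    samples[i].dts = samples[i].dts + ptsDtsShift;
  }
}

const samples: TsSample[] = [
  { pts: 0, dts: 3000 },    // cts -3000: the worst violation
  { pts: 9000, dts: 6000 }, // fine
  { pts: 6000, dts: 9000 }, // cts -3000
];
shiftDtsBack(samples, -3000);
// dts are now 0, 3000, 6000 and every pts >= its dts (cts: 0, 6000, 0)
```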
@@ -517,9 +468,8 @@ export default class MP4Remuxer implements Remuxer {
      }
    }

    if (requiresPositiveDts) {
      firstDTS = Math.max(0, firstDTS);
    }
    firstDTS = Math.max(0, firstDTS);

    let nbNalu = 0;
    let naluLen = 0;
    for (let i = 0; i < nbSamples; i++) {
@@ -536,11 +486,9 @@ export default class MP4Remuxer implements Remuxer {
      nbNalu += nbUnits;
      sample.length = sampleLen;

      // normalize PTS/DTS
      // ensure sample monotonic DTS
      sample.dts = Math.max(sample.dts, firstDTS);
      // ensure that computed value is greater or equal than sample DTS
      sample.pts = Math.max(sample.pts, sample.dts, 0);

      minPTS = Math.min(sample.pts, minPTS);
      maxPTS = Math.max(sample.pts, maxPTS);
    }
@@ -567,6 +515,10 @@ export default class MP4Remuxer implements Remuxer {
    mdat.set(MP4.types.mdat, 4);

    let stretchedLastFrame = false;
    let minDtsDelta = Number.POSITIVE_INFINITY;
    let minPtsDelta = Number.POSITIVE_INFINITY;
    let maxDtsDelta = Number.NEGATIVE_INFINITY;
    let maxPtsDelta = Number.NEGATIVE_INFINITY;
    for (let i = 0; i < nbSamples; i++) {
      const avcSample = inputSamples[i];
      const avcSampleUnits = avcSample.units;
@@ -584,14 +536,20 @@ export default class MP4Remuxer implements Remuxer {
      }

      // expected sample duration is the Decoding Timestamp diff of consecutive samples
      let ptsDelta;
      if (i < nbSamples - 1) {
        mp4SampleDuration = inputSamples[i + 1].dts - avcSample.dts;
        ptsDelta = inputSamples[i + 1].pts - avcSample.pts;
      } else {
        const config = this.config;
        const lastFrameDuration =
          i > 0
            ? avcSample.dts - inputSamples[i - 1].dts
            : averageSampleDuration;
        ptsDelta =
          i > 0
            ? avcSample.pts - inputSamples[i - 1].pts
            : averageSampleDuration;
        if (config.stretchShortVideoTrack && this.nextAudioPts !== null) {
          // In some cases, a segment's audio track duration may exceed the video track duration.
          // Since we've already remuxed audio, and we know how long the audio track is, we look to
@@ -627,6 +585,10 @@ export default class MP4Remuxer implements Remuxer {
        }
      }
      const compositionTimeOffset = Math.round(avcSample.pts - avcSample.dts);
      minDtsDelta = Math.min(minDtsDelta, mp4SampleDuration);
      maxDtsDelta = Math.max(maxDtsDelta, mp4SampleDuration);
      minPtsDelta = Math.min(minPtsDelta, ptsDelta);
      maxPtsDelta = Math.max(maxPtsDelta, ptsDelta);

      outputSamples.push(
        new Mp4Sample(
@@ -638,12 +600,43 @@ export default class MP4Remuxer implements Remuxer {
      );
    }

    if (outputSamples.length && chromeVersion && chromeVersion < 70) {
      // Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue
      // https://code.google.com/p/chromium/issues/detail?id=229412
      const flags = outputSamples[0].flags;
      flags.dependsOn = 2;
      flags.isNonSync = 0;
    if (outputSamples.length) {
      if (chromeVersion) {
        if (chromeVersion < 70) {
          // Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue
          // https://code.google.com/p/chromium/issues/detail?id=229412
          const flags = outputSamples[0].flags;
          flags.dependsOn = 2;
          flags.isNonSync = 0;
        }
      } else if (safariWebkitVersion) {
        // Fix for "CNN special report, with CC" in test-streams (Safari browser only)
        // Ignore DTS when frame durations are irregular. Safari MSE does not handle this leading to gaps.
        if (
          maxPtsDelta - minPtsDelta < maxDtsDelta - minDtsDelta &&
          averageSampleDuration / maxDtsDelta < 0.025 &&
          outputSamples[0].cts === 0
        ) {
          logger.warn(
            'Found irregular gaps in sample duration. Using PTS instead of DTS to determine MP4 sample duration.'
          );
          let dts = firstDTS;
          for (let i = 0, len = outputSamples.length; i < len; i++) {
            const nextDts = dts + outputSamples[i].duration;
            const pts = dts + outputSamples[i].cts;
            if (i < len - 1) {
              const nextPts = nextDts + outputSamples[i + 1].cts;
              outputSamples[i].duration = nextPts - pts;
            } else {
              outputSamples[i].duration = i
                ? outputSamples[i - 1].duration
                : averageSampleDuration;
            }
            outputSamples[i].cts = 0;
            dts = nextDts;
          }
        }
      }
    }

    console.assert(
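The new Safari branch above only rewrites durations when PTS spacing is clearly more regular than DTS spacing. Reading the guard with assumed numbers: if minPtsDelta = 2970 and maxPtsDelta = 3030 (spread 60) while minDtsDelta = 1 and maxDtsDelta = 150 000 (spread ≈ 150 000), and averageSampleDuration = 3000, then 60 < 150 000 and 3000 / 150 000 = 0.02 < 0.025, so the rewrite runs provided the first sample's cts is 0. A standalone sketch of the duration rewrite itself, on a pared-down sample shape rather than the full Mp4Sample:

```ts
// Duration rewrite from the Safari branch above, on a pared-down sample shape.
// Durations become PTS deltas and CTS is zeroed, so Safari sees a gap-free
// timeline driven by presentation order.
interface SlimSample {
  duration: number; // DTS delta, in track timescale ticks
  cts: number;      // pts - dts
}

function usePtsForDurations(
  samples: SlimSample[],
  firstDTS: number,
  averageSampleDuration: number
): void {
  let dts = firstDTS;
  for (let i = 0, len = samples.length; i < len; i++) {
    const nextDts = dts + samples[i].duration;
    const pts = dts + samples[i].cts;
    if (i < len - 1) {
      const nextPts = nextDts + samples[i + 1].cts;
      samples[i].duration = nextPts - pts; // PTS delta to the next sample
    } else {
      samples[i].duration = i ? samples[i - 1].duration : averageSampleDuration;
    }
    samples[i].cts = 0;
    dts = nextDts;
  }
}
```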
@@ -1096,7 +1089,12 @@ class Mp4Sample {
  public cts: number;
  public flags: Mp4SampleFlags;

  constructor(isKeyframe: boolean, duration, size, cts) {
  constructor(
    isKeyframe: boolean,
    duration: number,
    size: number,
    cts: number
  ) {
    this.duration = duration;
    this.size = size;
    this.cts = cts;

37 node_modules/hls.js/src/remux/passthrough-remuxer.ts generated vendored

@@ -32,17 +32,17 @@ class PassThroughRemuxer implements Remuxer {
  private initData?: InitData;
  private initPTS?: number;
  private initTracks?: TrackSet;
  private lastEndDTS: number | null = null;
  private lastEndTime: number | null = null;

  public destroy() {}

  public resetTimeStamp(defaultInitPTS) {
    this.initPTS = defaultInitPTS;
    this.lastEndDTS = null;
    this.lastEndTime = null;
  }

  public resetNextTimestamp() {
    this.lastEndDTS = null;
    this.lastEndTime = null;
  }

  public resetInitSegment(
@@ -117,7 +117,7 @@ class PassThroughRemuxer implements Remuxer {
    textTrack: DemuxedUserdataTrack,
    timeOffset: number
  ): RemuxerResult {
    let { initPTS, lastEndDTS } = this;
    let { initPTS, lastEndTime } = this;
    const result: RemuxerResult = {
      audio: undefined,
      video: undefined,
@@ -129,8 +129,8 @@ class PassThroughRemuxer implements Remuxer {
    // If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the
    // lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update
    // the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
    if (!Number.isFinite(lastEndDTS!)) {
      lastEndDTS = this.lastEndDTS = timeOffset || 0;
    if (!Number.isFinite(lastEndTime!)) {
      lastEndTime = this.lastEndTime = timeOffset || 0;
    }

    // The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
@@ -159,20 +159,20 @@ class PassThroughRemuxer implements Remuxer {
      this.emitInitSegment = false;
    }

    const startDTS = getStartDTS(initData, data);
    if (!Number.isFinite(initPTS!)) {
      this.initPTS =
        initSegment.initPTS =
        initPTS =
          computeInitPTS(initData, data, lastEndDTS);
      this.initPTS = initSegment.initPTS = initPTS = startDTS - timeOffset;
    }

    const duration = getDuration(data, initData);
    const startDTS = lastEndDTS as number;
    const endDTS = duration + startDTS;
    const startTime = audioTrack
      ? startDTS - (initPTS as number)
      : (lastEndTime as number);
    const endTime = startTime + duration;
    offsetStartDTS(initData, data, initPTS as number);

    if (duration > 0) {
      this.lastEndDTS = endDTS;
      this.lastEndTime = endTime;
    } else {
      logger.warn('Duration parsed from mp4 should be greater than zero');
      this.resetNextTimestamp();
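The rewritten block above derives initPTS directly from the segment's own start: initPTS = startDTS - timeOffset. When the segment carries audio, playlist time is then startTime = startDTS - initPTS, with endTime = startTime + duration carried forward as lastEndTime; otherwise the previous lastEndTime is reused. A worked example with assumed round numbers (not taken from a real stream):

```ts
// Worked example of the timeline mapping above, using assumed round numbers.
// All values are in seconds; startDTS stands for the decode start parsed from
// the fragment's fMP4 boxes.
const timeOffset = 0; // where the first chunk should land on the media timeline

// First fragment: its media timeline starts at 10 s.
const startDTS1 = 10.0;
const initPTS = startDTS1 - timeOffset; // 10.0, fixed for the whole stream
const startTime1 = startDTS1 - initPTS; // 0.0
const endTime1 = startTime1 + 6.0; // 6.0 -> stored as lastEndTime

// Second, contiguous fragment: its media timeline starts at 16 s.
const startDTS2 = 16.0;
const startTime2 = startDTS2 - initPTS; // 6.0 -> lines up with endTime1
const endTime2 = startTime2 + 6.0; // 12.0
```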
@@ -192,10 +192,10 @@ class PassThroughRemuxer implements Remuxer {

    const track: RemuxedTrack = {
      data1: data,
      startPTS: startDTS,
      startDTS,
      endPTS: endDTS,
      endDTS,
      startPTS: startTime,
      startDTS: startTime,
      endPTS: endTime,
      endDTS: endTime,
      type,
      hasAudio,
      hasVideo,
@@ -226,9 +226,6 @@ class PassThroughRemuxer implements Remuxer {
    }
  }

const computeInitPTS = (initData, data, timeOffset) =>
  getStartDTS(initData, data) - timeOffset;

function getParsedTrackCodec(
  track: InitDataTrack | undefined,
  type: ElementaryStreamTypes.AUDIO | ElementaryStreamTypes.VIDEO