
New updates and modules

This commit is contained in:
DanieL 2022-07-15 11:08:01 -03:00
parent 4d5d408898
commit 0abf0f90f6
959 changed files with 364301 additions and 17493 deletions

372
node_modules/hls.js/src/config.ts generated vendored Normal file

@@ -0,0 +1,372 @@
import AbrController from './controller/abr-controller';
import AudioStreamController from './controller/audio-stream-controller';
import AudioTrackController from './controller/audio-track-controller';
import { SubtitleStreamController } from './controller/subtitle-stream-controller';
import SubtitleTrackController from './controller/subtitle-track-controller';
import BufferController from './controller/buffer-controller';
import { TimelineController } from './controller/timeline-controller';
import CapLevelController from './controller/cap-level-controller';
import FPSController from './controller/fps-controller';
import EMEController from './controller/eme-controller';
import CMCDController from './controller/cmcd-controller';
import XhrLoader from './utils/xhr-loader';
import FetchLoader, { fetchSupported } from './utils/fetch-loader';
import Cues from './utils/cues';
import { requestMediaKeySystemAccess } from './utils/mediakeys-helper';
import { ILogger, logger } from './utils/logger';
import type { CuesInterface } from './utils/cues';
import type { MediaKeyFunc } from './utils/mediakeys-helper';
import type {
FragmentLoaderContext,
Loader,
LoaderContext,
PlaylistLoaderContext,
} from './types/loader';
export type ABRControllerConfig = {
abrEwmaFastLive: number;
abrEwmaSlowLive: number;
abrEwmaFastVoD: number;
abrEwmaSlowVoD: number;
abrEwmaDefaultEstimate: number;
abrBandWidthFactor: number;
abrBandWidthUpFactor: number;
abrMaxWithRealBitrate: boolean;
maxStarvationDelay: number;
maxLoadingDelay: number;
};
export type BufferControllerConfig = {
appendErrorMaxRetry: number;
backBufferLength: number;
liveDurationInfinity: boolean;
liveBackBufferLength: number | null;
};
export type CapLevelControllerConfig = {
capLevelToPlayerSize: boolean;
};
export type CMCDControllerConfig = {
sessionId?: string;
contentId?: string;
useHeaders?: boolean;
};
export type DRMSystemOptions = {
audioRobustness?: string;
videoRobustness?: string;
};
export type EMEControllerConfig = {
licenseXhrSetup?: (xhr: XMLHttpRequest, url: string) => void;
licenseResponseCallback?: (xhr: XMLHttpRequest, url: string) => ArrayBuffer;
emeEnabled: boolean;
widevineLicenseUrl?: string;
drmSystemOptions: DRMSystemOptions;
requestMediaKeySystemAccessFunc: MediaKeyFunc | null;
};
export interface FragmentLoaderConstructor {
new (config: HlsConfig): Loader<FragmentLoaderContext>;
}
export type FragmentLoaderConfig = {
fLoader?: FragmentLoaderConstructor;
fragLoadingTimeOut: number;
fragLoadingMaxRetry: number;
fragLoadingRetryDelay: number;
fragLoadingMaxRetryTimeout: number;
};
export type FPSControllerConfig = {
capLevelOnFPSDrop: boolean;
fpsDroppedMonitoringPeriod: number;
fpsDroppedMonitoringThreshold: number;
};
export type LevelControllerConfig = {
startLevel?: number;
};
export type MP4RemuxerConfig = {
stretchShortVideoTrack: boolean;
maxAudioFramesDrift: number;
};
export interface PlaylistLoaderConstructor {
new (config: HlsConfig): Loader<PlaylistLoaderContext>;
}
export type PlaylistLoaderConfig = {
pLoader?: PlaylistLoaderConstructor;
manifestLoadingTimeOut: number;
manifestLoadingMaxRetry: number;
manifestLoadingRetryDelay: number;
manifestLoadingMaxRetryTimeout: number;
levelLoadingTimeOut: number;
levelLoadingMaxRetry: number;
levelLoadingRetryDelay: number;
levelLoadingMaxRetryTimeout: number;
};
export type StreamControllerConfig = {
autoStartLoad: boolean;
startPosition: number;
defaultAudioCodec?: string;
initialLiveManifestSize: number;
maxBufferLength: number;
maxBufferSize: number;
maxBufferHole: number;
highBufferWatchdogPeriod: number;
nudgeOffset: number;
nudgeMaxRetry: number;
maxFragLookUpTolerance: number;
maxMaxBufferLength: number;
startFragPrefetch: boolean;
testBandwidth: boolean;
};
export type LatencyControllerConfig = {
liveSyncDurationCount: number;
liveMaxLatencyDurationCount: number;
liveSyncDuration?: number;
liveMaxLatencyDuration?: number;
maxLiveSyncPlaybackRate: number;
};
export type TimelineControllerConfig = {
cueHandler: CuesInterface;
enableCEA708Captions: boolean;
enableWebVTT: boolean;
enableIMSC1: boolean;
captionsTextTrack1Label: string;
captionsTextTrack1LanguageCode: string;
captionsTextTrack2Label: string;
captionsTextTrack2LanguageCode: string;
captionsTextTrack3Label: string;
captionsTextTrack3LanguageCode: string;
captionsTextTrack4Label: string;
captionsTextTrack4LanguageCode: string;
renderTextTracksNatively: boolean;
};
export type TSDemuxerConfig = {
forceKeyFrameOnDiscontinuity: boolean;
};
export type HlsConfig = {
debug: boolean | ILogger;
enableWorker: boolean;
enableSoftwareAES: boolean;
minAutoBitrate: number;
loader: { new (config: HlsConfig): Loader<LoaderContext> };
fetchSetup?: (context: LoaderContext, initParams: any) => Request;
xhrSetup?: (xhr: XMLHttpRequest, url: string) => void;
// Alt Audio
audioStreamController?: typeof AudioStreamController;
audioTrackController?: typeof AudioTrackController;
// Subtitle
subtitleStreamController?: typeof SubtitleStreamController;
subtitleTrackController?: typeof SubtitleTrackController;
timelineController?: typeof TimelineController;
// EME
emeController?: typeof EMEController;
// CMCD
cmcd?: CMCDControllerConfig;
cmcdController?: typeof CMCDController;
abrController: typeof AbrController;
bufferController: typeof BufferController;
capLevelController: typeof CapLevelController;
fpsController: typeof FPSController;
progressive: boolean;
lowLatencyMode: boolean;
} & ABRControllerConfig &
BufferControllerConfig &
CapLevelControllerConfig &
EMEControllerConfig &
FPSControllerConfig &
FragmentLoaderConfig &
LevelControllerConfig &
MP4RemuxerConfig &
PlaylistLoaderConfig &
StreamControllerConfig &
LatencyControllerConfig &
TimelineControllerConfig &
TSDemuxerConfig;
// If possible, keep hlsDefaultConfig shallow
// It is cloned whenever a new Hls instance is created; by keeping the config
// shallow, the properties are copied and we don't end up mutating the default
export const hlsDefaultConfig: HlsConfig = {
autoStartLoad: true, // used by stream-controller
startPosition: -1, // used by stream-controller
defaultAudioCodec: undefined, // used by stream-controller
debug: false, // used by logger
capLevelOnFPSDrop: false, // used by fps-controller
capLevelToPlayerSize: false, // used by cap-level-controller
initialLiveManifestSize: 1, // used by stream-controller
maxBufferLength: 30, // used by stream-controller
backBufferLength: Infinity, // used by buffer-controller
maxBufferSize: 60 * 1000 * 1000, // used by stream-controller
maxBufferHole: 0.1, // used by stream-controller
highBufferWatchdogPeriod: 2, // used by stream-controller
nudgeOffset: 0.1, // used by stream-controller
nudgeMaxRetry: 3, // used by stream-controller
maxFragLookUpTolerance: 0.25, // used by stream-controller
liveSyncDurationCount: 3, // used by latency-controller
liveMaxLatencyDurationCount: Infinity, // used by latency-controller
liveSyncDuration: undefined, // used by latency-controller
liveMaxLatencyDuration: undefined, // used by latency-controller
maxLiveSyncPlaybackRate: 1, // used by latency-controller
liveDurationInfinity: false, // used by buffer-controller
liveBackBufferLength: null, // used by buffer-controller
maxMaxBufferLength: 600, // used by stream-controller
enableWorker: true, // used by demuxer
enableSoftwareAES: true, // used by decrypter
manifestLoadingTimeOut: 10000, // used by playlist-loader
manifestLoadingMaxRetry: 1, // used by playlist-loader
manifestLoadingRetryDelay: 1000, // used by playlist-loader
manifestLoadingMaxRetryTimeout: 64000, // used by playlist-loader
startLevel: undefined, // used by level-controller
levelLoadingTimeOut: 10000, // used by playlist-loader
levelLoadingMaxRetry: 4, // used by playlist-loader
levelLoadingRetryDelay: 1000, // used by playlist-loader
levelLoadingMaxRetryTimeout: 64000, // used by playlist-loader
fragLoadingTimeOut: 20000, // used by fragment-loader
fragLoadingMaxRetry: 6, // used by fragment-loader
fragLoadingRetryDelay: 1000, // used by fragment-loader
fragLoadingMaxRetryTimeout: 64000, // used by fragment-loader
startFragPrefetch: false, // used by stream-controller
fpsDroppedMonitoringPeriod: 5000, // used by fps-controller
fpsDroppedMonitoringThreshold: 0.2, // used by fps-controller
appendErrorMaxRetry: 3, // used by buffer-controller
loader: XhrLoader,
// loader: FetchLoader,
fLoader: undefined, // used by fragment-loader
pLoader: undefined, // used by playlist-loader
xhrSetup: undefined, // used by xhr-loader
licenseXhrSetup: undefined, // used by eme-controller
licenseResponseCallback: undefined, // used by eme-controller
abrController: AbrController,
bufferController: BufferController,
capLevelController: CapLevelController,
fpsController: FPSController,
stretchShortVideoTrack: false, // used by mp4-remuxer
maxAudioFramesDrift: 1, // used by mp4-remuxer
forceKeyFrameOnDiscontinuity: true, // used by ts-demuxer
abrEwmaFastLive: 3, // used by abr-controller
abrEwmaSlowLive: 9, // used by abr-controller
abrEwmaFastVoD: 3, // used by abr-controller
abrEwmaSlowVoD: 9, // used by abr-controller
abrEwmaDefaultEstimate: 5e5, // 500 kbps // used by abr-controller
abrBandWidthFactor: 0.95, // used by abr-controller
abrBandWidthUpFactor: 0.7, // used by abr-controller
abrMaxWithRealBitrate: false, // used by abr-controller
maxStarvationDelay: 4, // used by abr-controller
maxLoadingDelay: 4, // used by abr-controller
minAutoBitrate: 0, // used by hls
emeEnabled: false, // used by eme-controller
widevineLicenseUrl: undefined, // used by eme-controller
drmSystemOptions: {}, // used by eme-controller
requestMediaKeySystemAccessFunc: requestMediaKeySystemAccess, // used by eme-controller
testBandwidth: true,
progressive: false,
lowLatencyMode: true,
cmcd: undefined,
// Dynamic Modules
...timelineConfig(),
subtitleStreamController: __USE_SUBTITLES__
? SubtitleStreamController
: undefined,
subtitleTrackController: __USE_SUBTITLES__
? SubtitleTrackController
: undefined,
timelineController: __USE_SUBTITLES__ ? TimelineController : undefined,
audioStreamController: __USE_ALT_AUDIO__ ? AudioStreamController : undefined,
audioTrackController: __USE_ALT_AUDIO__ ? AudioTrackController : undefined,
emeController: __USE_EME_DRM__ ? EMEController : undefined,
cmcdController: __USE_CMCD__ ? CMCDController : undefined,
};
function timelineConfig(): TimelineControllerConfig {
return {
cueHandler: Cues, // used by timeline-controller
enableCEA708Captions: __USE_SUBTITLES__, // used by timeline-controller
enableWebVTT: __USE_SUBTITLES__, // used by timeline-controller
enableIMSC1: __USE_SUBTITLES__, // used by timeline-controller
captionsTextTrack1Label: 'English', // used by timeline-controller
captionsTextTrack1LanguageCode: 'en', // used by timeline-controller
captionsTextTrack2Label: 'Spanish', // used by timeline-controller
captionsTextTrack2LanguageCode: 'es', // used by timeline-controller
captionsTextTrack3Label: 'Unknown CC', // used by timeline-controller
captionsTextTrack3LanguageCode: '', // used by timeline-controller
captionsTextTrack4Label: 'Unknown CC', // used by timeline-controller
captionsTextTrack4LanguageCode: '', // used by timeline-controller
renderTextTracksNatively: true,
};
}
export function mergeConfig(
defaultConfig: HlsConfig,
userConfig: Partial<HlsConfig>
): HlsConfig {
if (
(userConfig.liveSyncDurationCount ||
userConfig.liveMaxLatencyDurationCount) &&
(userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)
) {
throw new Error(
"Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration"
);
}
if (
userConfig.liveMaxLatencyDurationCount !== undefined &&
(userConfig.liveSyncDurationCount === undefined ||
userConfig.liveMaxLatencyDurationCount <=
userConfig.liveSyncDurationCount)
) {
throw new Error(
'Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"'
);
}
if (
userConfig.liveMaxLatencyDuration !== undefined &&
(userConfig.liveSyncDuration === undefined ||
userConfig.liveMaxLatencyDuration <= userConfig.liveSyncDuration)
) {
throw new Error(
'Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"'
);
}
return Object.assign({}, defaultConfig, userConfig);
}
export function enableStreamingMode(config) {
const currentLoader = config.loader;
if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
// If a developer has configured their own loader, respect that choice
logger.log(
'[config]: Custom loader detected, cannot enable progressive streaming'
);
config.progressive = false;
} else {
const canStreamProgressively = fetchSupported();
if (canStreamProgressively) {
config.loader = FetchLoader;
config.progressive = true;
config.enableSoftwareAES = true;
logger.log('[config]: Progressive streaming enabled, using FetchLoader');
}
}
}
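
A minimal usage sketch of the exported mergeConfig helper above (not part of this commit; the option values are illustrative):

import { hlsDefaultConfig, mergeConfig } from './config';

// Override a couple of defaults; everything else falls back to hlsDefaultConfig.
const config = mergeConfig(hlsDefaultConfig, {
  maxBufferLength: 60,
  lowLatencyMode: false,
});

// Mixing count-based and duration-based live-sync options throws:
// mergeConfig(hlsDefaultConfig, { liveSyncDurationCount: 3, liveSyncDuration: 9 });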

481
node_modules/hls.js/src/controller/abr-controller.ts generated vendored Normal file

@@ -0,0 +1,481 @@
import EwmaBandWidthEstimator from '../utils/ewma-bandwidth-estimator';
import { Events } from '../events';
import { BufferHelper } from '../utils/buffer-helper';
import { ErrorDetails } from '../errors';
import { PlaylistLevelType } from '../types/loader';
import { logger } from '../utils/logger';
import type { Bufferable } from '../utils/buffer-helper';
import type { Fragment } from '../loader/fragment';
import type { Part } from '../loader/fragment';
import type { LoaderStats } from '../types/loader';
import type Hls from '../hls';
import type {
FragLoadingData,
FragLoadedData,
FragBufferedData,
ErrorData,
LevelLoadedData,
} from '../types/events';
import type { ComponentAPI } from '../types/component-api';
class AbrController implements ComponentAPI {
protected hls: Hls;
private lastLoadedFragLevel: number = 0;
private _nextAutoLevel: number = -1;
private timer?: number;
private onCheck: Function = this._abandonRulesCheck.bind(this);
private fragCurrent: Fragment | null = null;
private partCurrent: Part | null = null;
private bitrateTestDelay: number = 0;
public readonly bwEstimator: EwmaBandWidthEstimator;
constructor(hls: Hls) {
this.hls = hls;
const config = hls.config;
this.bwEstimator = new EwmaBandWidthEstimator(
config.abrEwmaSlowVoD,
config.abrEwmaFastVoD,
config.abrEwmaDefaultEstimate
);
this.registerListeners();
}
protected registerListeners() {
const { hls } = this;
hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.on(Events.ERROR, this.onError, this);
}
protected unregisterListeners() {
const { hls } = this;
hls.off(Events.FRAG_LOADING, this.onFragLoading, this);
hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.off(Events.ERROR, this.onError, this);
}
public destroy() {
this.unregisterListeners();
this.clearTimer();
// @ts-ignore
this.hls = this.onCheck = null;
this.fragCurrent = this.partCurrent = null;
}
protected onFragLoading(event: Events.FRAG_LOADING, data: FragLoadingData) {
const frag = data.frag;
if (frag.type === PlaylistLevelType.MAIN) {
if (!this.timer) {
this.fragCurrent = frag;
this.partCurrent = data.part ?? null;
this.timer = self.setInterval(this.onCheck, 100);
}
}
}
protected onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
const config = this.hls.config;
if (data.details.live) {
this.bwEstimator.update(config.abrEwmaSlowLive, config.abrEwmaFastLive);
} else {
this.bwEstimator.update(config.abrEwmaSlowVoD, config.abrEwmaFastVoD);
}
}
/*
This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load
quickly enough to prevent underbuffering
*/
private _abandonRulesCheck() {
const { fragCurrent: frag, partCurrent: part, hls } = this;
const { autoLevelEnabled, config, media } = hls;
if (!frag || !media) {
return;
}
const stats: LoaderStats = part ? part.stats : frag.stats;
const duration = part ? part.duration : frag.duration;
// If loading has been aborted, stop the timer and return
if (stats.aborted) {
logger.warn('frag loader destroy or aborted, disarm abandonRules');
this.clearTimer();
// reset forced auto level value so that next level will be selected
this._nextAutoLevel = -1;
return;
}
// This check only runs if we're in ABR mode and actually playing
if (
!autoLevelEnabled ||
media.paused ||
!media.playbackRate ||
!media.readyState
) {
return;
}
const requestDelay = performance.now() - stats.loading.start;
const playbackRate = Math.abs(media.playbackRate);
// In order to work with a stable bandwidth, only begin monitoring bandwidth once half of the fragment's duration has elapsed
if (requestDelay <= (500 * duration) / playbackRate) {
return;
}
const { levels, minAutoLevel } = hls;
const level = levels[frag.level];
const expectedLen =
stats.total ||
Math.max(stats.loaded, Math.round((duration * level.maxBitrate) / 8));
const loadRate = Math.max(
1,
stats.bwEstimate
? stats.bwEstimate / 8
: (stats.loaded * 1000) / requestDelay
);
// fragLoadedDelay is an estimate of the time (in seconds) it will take to finish loading the fragment
const fragLoadedDelay = (expectedLen - stats.loaded) / loadRate;
const pos = media.currentTime;
// bufferStarvationDelay is an estimate of the amount of time (in seconds) it will take to exhaust the buffer
const bufferStarvationDelay =
(BufferHelper.bufferInfo(media, pos, config.maxBufferHole).end - pos) /
playbackRate;
// Attempt an emergency downswitch only if less than 2 fragment lengths are buffered, and the time to finish loading
// the current fragment is greater than the amount of buffer we have left
if (
bufferStarvationDelay >= (2 * duration) / playbackRate ||
fragLoadedDelay <= bufferStarvationDelay
) {
return;
}
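// Illustrative numbers (not from the source): with duration = 4 s and playbackRate = 1,
// expectedLen = 2_000_000 B, stats.loaded = 500_000 B and loadRate = 250_000 B/s give
// fragLoadedDelay = (2_000_000 - 500_000) / 250_000 = 6 s; with only 3 s buffered
// (bufferStarvationDelay = 3 < 2 * duration = 8, and 6 > 3) we fall through to the
// emergency down-switch search below.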
let fragLevelNextLoadedDelay: number = Number.POSITIVE_INFINITY;
let nextLoadLevel: number;
// Iterate through lower levels and try to find the highest one that avoids rebuffering
for (
nextLoadLevel = frag.level - 1;
nextLoadLevel > minAutoLevel;
nextLoadLevel--
) {
// compute time to load next fragment at lower level
// 0.8 : consider only 80% of current bw to be conservative
// 8 = bits per byte (bps/Bps)
const levelNextBitrate = levels[nextLoadLevel].maxBitrate;
fragLevelNextLoadedDelay =
(duration * levelNextBitrate) / (8 * 0.8 * loadRate);
if (fragLevelNextLoadedDelay < bufferStarvationDelay) {
break;
}
}
// Only emergency-switch down if it takes less time to load a new fragment at a lower level than to continue
// loading the current one
if (fragLevelNextLoadedDelay >= fragLoadedDelay) {
return;
}
const bwEstimate: number = this.bwEstimator.getEstimate();
logger.warn(`Fragment ${frag.sn}${
part ? ' part ' + part.index : ''
} of level ${
frag.level
} is loading too slowly and will cause an underbuffer; aborting and switching to level ${nextLoadLevel}
Current BW estimate: ${
Number.isFinite(bwEstimate) ? (bwEstimate / 1024).toFixed(3) : 'Unknown'
} Kb/s
Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
Estimated load time for the next fragment: ${fragLevelNextLoadedDelay.toFixed(
3
)} s
Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s`);
hls.nextLoadLevel = nextLoadLevel;
this.bwEstimator.sample(requestDelay, stats.loaded);
this.clearTimer();
if (frag.loader) {
this.fragCurrent = this.partCurrent = null;
frag.loader.abort();
}
hls.trigger(Events.FRAG_LOAD_EMERGENCY_ABORTED, { frag, part, stats });
}
protected onFragLoaded(
event: Events.FRAG_LOADED,
{ frag, part }: FragLoadedData
) {
if (
frag.type === PlaylistLevelType.MAIN &&
Number.isFinite(frag.sn as number)
) {
const stats = part ? part.stats : frag.stats;
const duration = part ? part.duration : frag.duration;
// stop monitoring bw once frag loaded
this.clearTimer();
// store level id after successful fragment load
this.lastLoadedFragLevel = frag.level;
// reset forced auto level value so that next level will be selected
this._nextAutoLevel = -1;
// compute level average bitrate
if (this.hls.config.abrMaxWithRealBitrate) {
const level = this.hls.levels[frag.level];
const loadedBytes =
(level.loaded ? level.loaded.bytes : 0) + stats.loaded;
const loadedDuration =
(level.loaded ? level.loaded.duration : 0) + duration;
level.loaded = { bytes: loadedBytes, duration: loadedDuration };
level.realBitrate = Math.round((8 * loadedBytes) / loadedDuration);
}
if (frag.bitrateTest) {
const fragBufferedData: FragBufferedData = {
stats,
frag,
part,
id: frag.type,
};
this.onFragBuffered(Events.FRAG_BUFFERED, fragBufferedData);
frag.bitrateTest = false;
}
}
}
protected onFragBuffered(
event: Events.FRAG_BUFFERED,
data: FragBufferedData
) {
const { frag, part } = data;
const stats = part ? part.stats : frag.stats;
if (stats.aborted) {
return;
}
// Only count non-alt-audio frags which were actually buffered in our BW calculations
if (frag.type !== PlaylistLevelType.MAIN || frag.sn === 'initSegment') {
return;
}
// Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
// rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
// is used. If we used buffering in that case, our BW estimate sample will be very large.
const processingMs = stats.parsing.end - stats.loading.start;
this.bwEstimator.sample(processingMs, stats.loaded);
stats.bwEstimate = this.bwEstimator.getEstimate();
if (frag.bitrateTest) {
this.bitrateTestDelay = processingMs / 1000;
} else {
this.bitrateTestDelay = 0;
}
}
protected onError(event: Events.ERROR, data: ErrorData) {
// stop timer in case of frag loading error
switch (data.details) {
case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT:
this.clearTimer();
break;
default:
break;
}
}
clearTimer() {
self.clearInterval(this.timer);
this.timer = undefined;
}
// return next auto level
get nextAutoLevel() {
const forcedAutoLevel = this._nextAutoLevel;
const bwEstimator = this.bwEstimator;
// in case next auto level has been forced, and bw not available or not reliable, return forced value
if (
forcedAutoLevel !== -1 &&
(!bwEstimator || !bwEstimator.canEstimate())
) {
return forcedAutoLevel;
}
// compute next level using ABR logic
let nextABRAutoLevel = this.getNextABRAutoLevel();
// if forced auto level has been defined, use it to cap ABR computed quality level
if (forcedAutoLevel !== -1) {
nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
}
return nextABRAutoLevel;
}
private getNextABRAutoLevel() {
const { fragCurrent, partCurrent, hls } = this;
const { maxAutoLevel, config, minAutoLevel, media } = hls;
const currentFragDuration = partCurrent
? partCurrent.duration
: fragCurrent
? fragCurrent.duration
: 0;
const pos = media ? media.currentTime : 0;
// playbackRate is the absolute value of the playback rate; if media.playbackRate is 0, we use 1 to load as
// if we're playing back at the normal rate.
const playbackRate =
media && media.playbackRate !== 0 ? Math.abs(media.playbackRate) : 1.0;
const avgbw = this.bwEstimator
? this.bwEstimator.getEstimate()
: config.abrEwmaDefaultEstimate;
// bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
const bufferStarvationDelay =
(BufferHelper.bufferInfo(media as Bufferable, pos, config.maxBufferHole)
.end -
pos) /
playbackRate;
// First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
let bestLevel = this.findBestLevel(
avgbw,
minAutoLevel,
maxAutoLevel,
bufferStarvationDelay,
config.abrBandWidthFactor,
config.abrBandWidthUpFactor
);
if (bestLevel >= 0) {
return bestLevel;
}
logger.trace(
`${
bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'
}, finding optimal quality level`
);
// not possible to get rid of rebuffering ... try to find a level that will guarantee less than maxStarvationDelay of rebuffering
// if no matching level is found, the logic will return 0
let maxStarvationDelay = currentFragDuration
? Math.min(currentFragDuration, config.maxStarvationDelay)
: config.maxStarvationDelay;
let bwFactor = config.abrBandWidthFactor;
let bwUpFactor = config.abrBandWidthUpFactor;
if (!bufferStarvationDelay) {
// in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
const bitrateTestDelay = this.bitrateTestDelay;
if (bitrateTestDelay) {
// if so, adjust our max starvation delay using the maxLoadingDelay config value
// (the max video loading delay used in automatic start level selection):
// in that mode the ABR controller ensures that the total video loading time (i.e. the time to fetch the first fragment at the lowest quality level
// plus the time to fetch the fragment at the appropriate quality level) is less than maxLoadingDelay.
// Also cap maxLoadingDelay so it is not bigger than the 'bitrate test' frag duration
const maxLoadingDelay = currentFragDuration
? Math.min(currentFragDuration, config.maxLoadingDelay)
: config.maxLoadingDelay;
maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
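// e.g. with currentFragDuration = 4 s, config.maxLoadingDelay = 4 s and a 1.2 s
// bitrate test, maxStarvationDelay becomes 4 - 1.2 = 2.8 s (illustrative numbers).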
logger.trace(
`bitrate test took ${Math.round(
1000 * bitrateTestDelay
)}ms, set first fragment max fetchDuration to ${Math.round(
1000 * maxStarvationDelay
)} ms`
);
// don't use conservative factor on bitrate test
bwFactor = bwUpFactor = 1;
}
}
bestLevel = this.findBestLevel(
avgbw,
minAutoLevel,
maxAutoLevel,
bufferStarvationDelay + maxStarvationDelay,
bwFactor,
bwUpFactor
);
return Math.max(bestLevel, 0);
}
private findBestLevel(
currentBw: number,
minAutoLevel: number,
maxAutoLevel: number,
maxFetchDuration: number,
bwFactor: number,
bwUpFactor: number
): number {
const {
fragCurrent,
partCurrent,
lastLoadedFragLevel: currentLevel,
} = this;
const { levels } = this.hls;
const level = levels[currentLevel];
const live = !!level?.details?.live;
const currentCodecSet = level?.codecSet;
const currentFragDuration = partCurrent
? partCurrent.duration
: fragCurrent
? fragCurrent.duration
: 0;
for (let i = maxAutoLevel; i >= minAutoLevel; i--) {
const levelInfo = levels[i];
if (
!levelInfo ||
(currentCodecSet && levelInfo.codecSet !== currentCodecSet)
) {
continue;
}
const levelDetails = levelInfo.details;
const avgDuration =
(partCurrent
? levelDetails?.partTarget
: levelDetails?.averagetargetduration) || currentFragDuration;
let adjustedbw: number;
// follow algorithm captured from stagefright :
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
// Pick the highest bandwidth stream below or equal to estimated bandwidth.
// consider only 80% of the available bandwidth, but if we are switching up,
// be even more conservative (70%) to avoid overestimating and immediately
// switching back.
if (i <= currentLevel) {
adjustedbw = bwFactor * currentBw;
} else {
adjustedbw = bwUpFactor * currentBw;
}
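// Illustrative numbers: with currentBw = 5_000_000 bps, same-or-lower levels are
// tested against 0.95 * 5_000_000 = 4_750_000, while switching up uses
// 0.7 * 5_000_000 = 3_500_000 (the default abrBandWidthFactor/abrBandWidthUpFactor).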
const bitrate: number = levels[i].maxBitrate;
const fetchDuration: number = (bitrate * avgDuration) / adjustedbw;
logger.trace(
`level/adjustedbw/bitrate/avgDuration/maxFetchDuration/fetchDuration: ${i}/${Math.round(
adjustedbw
)}/${bitrate}/${avgDuration}/${maxFetchDuration}/${fetchDuration}`
);
// if adjusted bw is greater than level bitrate AND
if (
adjustedbw > bitrate &&
// fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
// we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
// special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1
(!fetchDuration ||
(live && !this.bitrateTestDelay) ||
fetchDuration < maxFetchDuration)
) {
// as we are looping from highest to lowest, this will return the best achievable quality level
return i;
}
}
// not enough time budget even with quality level 0 ... rebuffering might happen
return -1;
}
set nextAutoLevel(nextLevel) {
this._nextAutoLevel = nextLevel;
}
}
export default AbrController;
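
A hedged sketch of observing this controller's emergency aborts from embedding code, using the public hls.js API (not part of this commit):

import Hls from 'hls.js';

const hls = new Hls({ abrEwmaDefaultEstimate: 1_000_000 }); // assume ~1 Mbps until measured
hls.on(Hls.Events.FRAG_LOAD_EMERGENCY_ABORTED, (event, data) => {
  // Fired by _abandonRulesCheck above after it aborts a too-slow fragment load.
  console.log(`Aborted fragment ${data.frag.sn} after ${data.stats.loaded} bytes`);
});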

819
node_modules/hls.js/src/controller/audio-stream-controller.ts generated vendored Normal file

@@ -0,0 +1,819 @@
import BaseStreamController, { State } from './base-stream-controller';
import { Events } from '../events';
import { BufferHelper } from '../utils/buffer-helper';
import { FragmentState } from './fragment-tracker';
import { Level } from '../types/level';
import { PlaylistLevelType } from '../types/loader';
import { Fragment, ElementaryStreamTypes, Part } from '../loader/fragment';
import ChunkCache from '../demux/chunk-cache';
import TransmuxerInterface from '../demux/transmuxer-interface';
import { ChunkMetadata } from '../types/transmuxer';
import { fragmentWithinToleranceTest } from './fragment-finders';
import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
import { ErrorDetails } from '../errors';
import { logger } from '../utils/logger';
import type { NetworkComponentAPI } from '../types/component-api';
import type { FragmentTracker } from './fragment-tracker';
import type { TransmuxerResult } from '../types/transmuxer';
import type Hls from '../hls';
import type { LevelDetails } from '../loader/level-details';
import type { TrackSet } from '../types/track';
import type {
BufferCreatedData,
AudioTracksUpdatedData,
AudioTrackSwitchingData,
LevelLoadedData,
TrackLoadedData,
BufferAppendingData,
BufferFlushedData,
InitPTSFoundData,
FragLoadedData,
FragParsingMetadataData,
FragParsingUserdataData,
FragBufferedData,
ErrorData,
} from '../types/events';
const TICK_INTERVAL = 100; // how often to tick in ms
type WaitingForPTSData = {
frag: Fragment;
part: Part | null;
cache: ChunkCache;
complete: boolean;
};
class AudioStreamController
extends BaseStreamController
implements NetworkComponentAPI
{
private videoBuffer: any | null = null;
private videoTrackCC: number = -1;
private waitingVideoCC: number = -1;
private audioSwitch: boolean = false;
private trackId: number = -1;
private waitingData: WaitingForPTSData | null = null;
private mainDetails: LevelDetails | null = null;
private bufferFlushed: boolean = false;
constructor(hls: Hls, fragmentTracker: FragmentTracker) {
super(hls, fragmentTracker, '[audio-stream-controller]');
this._registerListeners();
}
protected onHandlerDestroying() {
this._unregisterListeners();
this.mainDetails = null;
}
private _registerListeners() {
const { hls } = this;
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
hls.on(Events.ERROR, this.onError, this);
hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
hls.off(Events.ERROR, this.onError, this);
hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
// INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value
onInitPtsFound(
event: Events.INIT_PTS_FOUND,
{ frag, id, initPTS }: InitPTSFoundData
) {
// Always update the new INIT PTS
// Can change due to a level switch
if (id === 'main') {
const cc = frag.cc;
this.initPTS[frag.cc] = initPTS;
this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`);
this.videoTrackCC = cc;
// If we are waiting, tick immediately to unblock audio fragment transmuxing
if (this.state === State.WAITING_INIT_PTS) {
this.tick();
}
}
}
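// e.g. once the stream-controller parses video for cc 2 and emits INIT_PTS_FOUND,
// this.initPTS[2] is set and any audio fragment from cc 2 parked in
// WAITING_INIT_PTS (see doTick below) can be transmuxed.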
startLoad(startPosition: number) {
if (!this.levels) {
this.startPosition = startPosition;
this.state = State.STOPPED;
return;
}
const lastCurrentTime = this.lastCurrentTime;
this.stopLoad();
this.setInterval(TICK_INTERVAL);
this.fragLoadError = 0;
if (lastCurrentTime > 0 && startPosition === -1) {
this.log(
`Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
3
)}`
);
this.state = State.IDLE;
} else {
this.loadedmetadata = false;
this.state = State.WAITING_TRACK;
}
this.nextLoadPosition =
this.startPosition =
this.lastCurrentTime =
startPosition;
this.tick();
}
doTick() {
switch (this.state) {
case State.IDLE:
this.doTickIdle();
break;
case State.WAITING_TRACK: {
const { levels, trackId } = this;
const details = levels?.[trackId]?.details;
if (details) {
if (this.waitForCdnTuneIn(details)) {
break;
}
this.state = State.WAITING_INIT_PTS;
}
break;
}
case State.FRAG_LOADING_WAITING_RETRY: {
const now = performance.now();
const retryDate = this.retryDate;
// if current time is greater than retryDate, or if media is seeking, switch to IDLE state to retry loading
if (!retryDate || now >= retryDate || this.media?.seeking) {
this.log('RetryDate reached, switch back to IDLE state');
this.state = State.IDLE;
}
break;
}
case State.WAITING_INIT_PTS: {
// Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
const waitingData = this.waitingData;
if (waitingData) {
const { frag, part, cache, complete } = waitingData;
if (this.initPTS[frag.cc] !== undefined) {
this.waitingData = null;
this.waitingVideoCC = -1;
this.state = State.FRAG_LOADING;
const payload = cache.flush();
const data: FragLoadedData = {
frag,
part,
payload,
networkDetails: null,
};
this._handleFragmentLoadProgress(data);
if (complete) {
super._handleFragmentLoadComplete(data);
}
} else if (this.videoTrackCC !== this.waitingVideoCC) {
// Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
logger.log(
`Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`
);
this.clearWaitingFragment();
} else {
// Drop waiting fragment if an earlier fragment is needed
const pos = this.getLoadPosition();
const bufferInfo = BufferHelper.bufferInfo(
this.mediaBuffer,
pos,
this.config.maxBufferHole
);
const waitingFragmentAtPosition = fragmentWithinToleranceTest(
bufferInfo.end,
this.config.maxFragLookUpTolerance,
frag
);
if (waitingFragmentAtPosition < 0) {
logger.log(
`Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`
);
this.clearWaitingFragment();
}
}
} else {
this.state = State.IDLE;
}
}
}
this.onTickEnd();
}
clearWaitingFragment() {
const waitingData = this.waitingData;
if (waitingData) {
this.fragmentTracker.removeFragment(waitingData.frag);
this.waitingData = null;
this.waitingVideoCC = -1;
this.state = State.IDLE;
}
}
protected onTickEnd() {
const { media } = this;
if (!media || !media.readyState) {
// Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
return;
}
const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
const buffered = mediaBuffer.buffered;
if (!this.loadedmetadata && buffered.length) {
this.loadedmetadata = true;
}
this.lastCurrentTime = media.currentTime;
}
private doTickIdle() {
const { hls, levels, media, trackId } = this;
const config = hls.config;
if (!levels || !levels[trackId]) {
return;
}
// if video not attached AND
// start fragment already requested OR start frag prefetch not enabled
// exit loop
// => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
if (!media && (this.startFragRequested || !config.startFragPrefetch)) {
return;
}
const levelInfo = levels[trackId];
const trackDetails = levelInfo.details;
if (
!trackDetails ||
(trackDetails.live && this.levelLastLoaded !== trackId) ||
this.waitForCdnTuneIn(trackDetails)
) {
this.state = State.WAITING_TRACK;
return;
}
if (this.bufferFlushed) {
this.bufferFlushed = false;
this.afterBufferFlushed(
this.mediaBuffer ? this.mediaBuffer : this.media,
ElementaryStreamTypes.AUDIO,
PlaylistLevelType.AUDIO
);
}
const bufferInfo = this.getFwdBufferInfo(
this.mediaBuffer ? this.mediaBuffer : this.media,
PlaylistLevelType.AUDIO
);
if (bufferInfo === null) {
return;
}
const bufferLen = bufferInfo.len;
const maxBufLen = this.getMaxBufferLength();
const audioSwitch = this.audioSwitch;
// if buffer length is less than maxBufLen try to load a new fragment
if (bufferLen >= maxBufLen && !audioSwitch) {
return;
}
if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
this.state = State.ENDED;
return;
}
const fragments = trackDetails.fragments;
const start = fragments[0].start;
let targetBufferTime = bufferInfo.end;
if (audioSwitch) {
const pos = this.getLoadPosition();
targetBufferTime = pos;
// if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
if (trackDetails.PTSKnown && pos < start) {
// if everything is buffered from pos to start, or if audio is buffered ahead, seek to start
if (bufferInfo.end > start || bufferInfo.nextStart) {
this.log(
'Alt audio track ahead of main track, seek to start of alt audio track'
);
media.currentTime = start + 0.05;
}
}
}
const frag = this.getNextFragment(targetBufferTime, trackDetails);
if (!frag) {
this.bufferFlushed = true;
return;
}
if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
this.loadKey(frag, trackDetails);
} else {
this.loadFragment(frag, trackDetails, targetBufferTime);
}
}
protected getMaxBufferLength(): number {
const maxConfigBuffer = super.getMaxBufferLength();
const mainBufferInfo = this.getFwdBufferInfo(
this.videoBuffer ? this.videoBuffer : this.media,
PlaylistLevelType.MAIN
);
if (mainBufferInfo === null) {
return maxConfigBuffer;
}
return Math.max(maxConfigBuffer, mainBufferInfo.len);
}
onMediaDetaching() {
this.videoBuffer = null;
super.onMediaDetaching();
}
onAudioTracksUpdated(
event: Events.AUDIO_TRACKS_UPDATED,
{ audioTracks }: AudioTracksUpdatedData
) {
this.resetTransmuxer();
this.levels = audioTracks.map((mediaPlaylist) => new Level(mediaPlaylist));
}
onAudioTrackSwitching(
event: Events.AUDIO_TRACK_SWITCHING,
data: AudioTrackSwitchingData
) {
// if any URL found on new audio track, it is an alternate audio track
const altAudio = !!data.url;
this.trackId = data.id;
const { fragCurrent } = this;
if (fragCurrent?.loader) {
fragCurrent.loader.abort();
}
this.fragCurrent = null;
this.clearWaitingFragment();
// destroy useless transmuxer when switching audio to main
if (!altAudio) {
this.resetTransmuxer();
} else {
// switching to audio track, start timer if not already started
this.setInterval(TICK_INTERVAL);
}
// should we switch tracks?
if (altAudio) {
this.audioSwitch = true;
// the main audio track is handled by the stream-controller; only act when switching to an alt audio track
this.state = State.IDLE;
} else {
this.state = State.STOPPED;
}
this.tick();
}
onManifestLoading() {
this.mainDetails = null;
this.fragmentTracker.removeAllFragments();
this.startPosition = this.lastCurrentTime = 0;
this.bufferFlushed = false;
}
onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
this.mainDetails = data.details;
}
onAudioTrackLoaded(event: Events.AUDIO_TRACK_LOADED, data: TrackLoadedData) {
const { levels } = this;
const { details: newDetails, id: trackId } = data;
if (!levels) {
this.warn(`Audio tracks were reset while loading level ${trackId}`);
return;
}
this.log(
`Track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}],duration:${newDetails.totalduration}`
);
const track = levels[trackId];
let sliding = 0;
if (newDetails.live || track.details?.live) {
const mainDetails = this.mainDetails;
if (!newDetails.fragments[0]) {
newDetails.deltaUpdateFailed = true;
}
if (newDetails.deltaUpdateFailed || !mainDetails) {
return;
}
if (
!track.details &&
newDetails.hasProgramDateTime &&
mainDetails.hasProgramDateTime
) {
// Make sure our audio rendition is aligned with the "main" rendition, using
// pdt as our reference times.
alignMediaPlaylistByPDT(newDetails, mainDetails);
sliding = newDetails.fragments[0].start;
} else {
sliding = this.alignPlaylists(newDetails, track.details);
}
}
track.details = newDetails;
this.levelLastLoaded = trackId;
// compute start position if we are aligned with the main playlist
if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
this.setStartPosition(track.details, sliding);
}
// only switch back to IDLE state if we were waiting for track to start downloading a new fragment
if (
this.state === State.WAITING_TRACK &&
!this.waitForCdnTuneIn(newDetails)
) {
this.state = State.IDLE;
}
// trigger handler right now
this.tick();
}
_handleFragmentLoadProgress(data: FragLoadedData) {
const { frag, part, payload } = data;
const { config, trackId, levels } = this;
if (!levels) {
this.warn(
`Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
);
return;
}
const track = levels[trackId] as Level;
console.assert(track, 'Audio track is defined on fragment load progress');
const details = track.details as LevelDetails;
console.assert(
details,
'Audio track details are defined on fragment load progress'
);
const audioCodec =
config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';
let transmuxer = this.transmuxer;
if (!transmuxer) {
transmuxer = this.transmuxer = new TransmuxerInterface(
this.hls,
PlaylistLevelType.AUDIO,
this._handleTransmuxComplete.bind(this),
this._handleTransmuxerFlush.bind(this)
);
}
// Check if we have video initPTS
// If not we need to wait for it
const initPTS = this.initPTS[frag.cc];
const initSegmentData = frag.initSegment?.data;
if (initPTS !== undefined) {
// this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
const accurateTimeOffset = false; // details.PTSKnown || !details.live;
const partIndex = part ? part.index : -1;
const partial = partIndex !== -1;
const chunkMeta = new ChunkMetadata(
frag.level,
frag.sn as number,
frag.stats.chunkCount,
payload.byteLength,
partIndex,
partial
);
transmuxer.push(
payload,
initSegmentData,
audioCodec,
'',
frag,
part,
details.totalduration,
accurateTimeOffset,
chunkMeta,
initPTS
);
} else {
logger.log(
`Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`
);
const { cache } = (this.waitingData = this.waitingData || {
frag,
part,
cache: new ChunkCache(),
complete: false,
});
cache.push(new Uint8Array(payload));
this.waitingVideoCC = this.videoTrackCC;
this.state = State.WAITING_INIT_PTS;
}
}
protected _handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
if (this.waitingData) {
this.waitingData.complete = true;
return;
}
super._handleFragmentLoadComplete(fragLoadedData);
}
onBufferReset(/* event: Events.BUFFER_RESET */) {
// reset reference to sourcebuffers
this.mediaBuffer = this.videoBuffer = null;
this.loadedmetadata = false;
}
onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
const audioTrack = data.tracks.audio;
if (audioTrack) {
this.mediaBuffer = audioTrack.buffer;
}
if (data.tracks.video) {
this.videoBuffer = data.tracks.video.buffer;
}
}
onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
const { frag, part } = data;
if (frag.type !== PlaylistLevelType.AUDIO) {
return;
}
if (this.fragContextChanged(frag)) {
// If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
// Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
this.warn(
`Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
frag.level
} finished buffering, but was aborted. state: ${
this.state
}, audioSwitch: ${this.audioSwitch}`
);
return;
}
if (frag.sn !== 'initSegment') {
this.fragPrevious = frag;
if (this.audioSwitch) {
this.audioSwitch = false;
this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: this.trackId });
}
}
this.fragBufferedComplete(frag, part);
}
private onError(event: Events.ERROR, data: ErrorData) {
switch (data.details) {
case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT:
case ErrorDetails.KEY_LOAD_ERROR:
case ErrorDetails.KEY_LOAD_TIMEOUT:
// TODO: Skip fragments that do not belong to this.fragCurrent audio-group id
this.onFragmentOrKeyLoadError(PlaylistLevelType.AUDIO, data);
break;
case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
// when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
if (this.state !== State.ERROR && this.state !== State.STOPPED) {
// if fatal error, stop processing, otherwise move to IDLE to retry loading
this.state = data.fatal ? State.ERROR : State.IDLE;
this.warn(
`${data.details} while loading frag, switching to ${this.state} state`
);
}
break;
case ErrorDetails.BUFFER_FULL_ERROR:
// if in appending state
if (
data.parent === 'audio' &&
(this.state === State.PARSING || this.state === State.PARSED)
) {
let flushBuffer = true;
const bufferedInfo = this.getFwdBufferInfo(
this.mediaBuffer,
PlaylistLevelType.AUDIO
);
// 0.5 : tolerance needed as some browsers stall playback before reaching buffered end
// reduce max buf len if current position is buffered
if (bufferedInfo && bufferedInfo.len > 0.5) {
flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
}
if (flushBuffer) {
// current position is not buffered, but browser is still complaining about buffer full error
// this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
// in that case flush the whole audio buffer to recover
this.warn(
'Buffer full error also media.currentTime is not buffered, flush audio buffer'
);
this.fragCurrent = null;
super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio');
}
this.resetLoadingState();
}
break;
default:
break;
}
}
private onBufferFlushed(
event: Events.BUFFER_FLUSHED,
{ type }: BufferFlushedData
) {
if (type === ElementaryStreamTypes.AUDIO) {
this.bufferFlushed = true;
}
}
private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
const id = 'audio';
const { hls } = this;
const { remuxResult, chunkMeta } = transmuxResult;
const context = this.getCurrentContext(chunkMeta);
if (!context) {
this.warn(
`The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
);
this.resetLiveStartWhenNotLoaded(chunkMeta.level);
return;
}
const { frag, part } = context;
const { audio, text, id3, initSegment } = remuxResult;
// Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
// If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
if (this.fragContextChanged(frag)) {
return;
}
this.state = State.PARSING;
if (this.audioSwitch && audio) {
this.completeAudioSwitch();
}
if (initSegment?.tracks) {
this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
frag,
id,
tracks: initSegment.tracks,
});
// Only flush audio from old audio tracks when PTS is known on new audio track
}
if (audio) {
const { startPTS, endPTS, startDTS, endDTS } = audio;
if (part) {
part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
startPTS,
endPTS,
startDTS,
endDTS,
};
}
frag.setElementaryStreamInfo(
ElementaryStreamTypes.AUDIO,
startPTS,
endPTS,
startDTS,
endDTS
);
this.bufferFragmentData(audio, frag, part, chunkMeta);
}
if (id3?.samples?.length) {
const emittedID3: FragParsingMetadataData = Object.assign(
{
frag,
id,
},
id3
);
hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
}
if (text) {
const emittedText: FragParsingUserdataData = Object.assign(
{
frag,
id,
},
text
);
hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
}
}
private _bufferInitSegment(
tracks: TrackSet,
frag: Fragment,
chunkMeta: ChunkMetadata
) {
if (this.state !== State.PARSING) {
return;
}
// delete any video track found on audio transmuxer
if (tracks.video) {
delete tracks.video;
}
// include levelCodec in the audio track
const track = tracks.audio;
if (!track) {
return;
}
track.levelCodec = track.codec;
track.id = 'audio';
this.log(
`Init audio buffer, container:${track.container}, codecs[parsed]=[${track.codec}]`
);
this.hls.trigger(Events.BUFFER_CODECS, tracks);
const initSegment = track.initSegment;
if (initSegment?.byteLength) {
const segment: BufferAppendingData = {
type: 'audio',
frag,
part: null,
chunkMeta,
parent: frag.type,
data: initSegment,
};
this.hls.trigger(Events.BUFFER_APPENDING, segment);
}
// trigger handler right now
this.tick();
}
protected loadFragment(
frag: Fragment,
trackDetails: LevelDetails,
targetBufferTime: number
) {
// only load if fragment is not loaded or if in audio switch
const fragState = this.fragmentTracker.getState(frag);
this.fragCurrent = frag;
// force frag loading during an audio switch, as the fragment tracker might not have evicted previous frags after a quick audio switch
if (
this.audioSwitch ||
fragState === FragmentState.NOT_LOADED ||
fragState === FragmentState.PARTIAL
) {
if (frag.sn === 'initSegment') {
this._loadInitSegment(frag);
} else if (trackDetails.live && !Number.isFinite(this.initPTS[frag.cc])) {
this.log(
`Waiting for video PTS in continuity counter ${frag.cc} of live stream before loading audio fragment ${frag.sn} of level ${this.trackId}`
);
this.state = State.WAITING_INIT_PTS;
} else {
this.startFragRequested = true;
super.loadFragment(frag, trackDetails, targetBufferTime);
}
}
}
private completeAudioSwitch() {
const { hls, media, trackId } = this;
if (media) {
this.log('Switching audio track : flushing all audio');
super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio');
}
this.audioSwitch = false;
hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: trackId });
}
}
export default AudioStreamController;
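
A hedged sketch (not part of this commit): the audio switch this controller performs completes asynchronously, so embedding code typically listens for the AUDIO_TRACK_SWITCHED event it triggers:

hls.on(Hls.Events.AUDIO_TRACK_SWITCHED, (event, { id }) => {
  // Fired from onFragBuffered/completeAudioSwitch above once the new track buffers.
  console.log(`Audio track ${id} is now active`);
});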

270
node_modules/hls.js/src/controller/audio-track-controller.ts generated vendored Normal file

@@ -0,0 +1,270 @@
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import {
ManifestParsedData,
AudioTracksUpdatedData,
ErrorData,
LevelLoadingData,
AudioTrackLoadedData,
LevelSwitchingData,
} from '../types/events';
import BasePlaylistController from './base-playlist-controller';
import { PlaylistContextType } from '../types/loader';
import type Hls from '../hls';
import type { HlsUrlParameters } from '../types/level';
import type { MediaPlaylist } from '../types/media-playlist';
class AudioTrackController extends BasePlaylistController {
private tracks: MediaPlaylist[] = [];
private groupId: string | null = null;
private tracksInGroup: MediaPlaylist[] = [];
private trackId: number = -1;
private trackName: string = '';
private selectDefaultTrack: boolean = true;
constructor(hls: Hls) {
super(hls, '[audio-track-controller]');
this.registerListeners();
}
private registerListeners() {
const { hls } = this;
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
hls.on(Events.ERROR, this.onError, this);
}
private unregisterListeners() {
const { hls } = this;
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
hls.off(Events.ERROR, this.onError, this);
}
public destroy() {
this.unregisterListeners();
this.tracks.length = 0;
this.tracksInGroup.length = 0;
super.destroy();
}
protected onManifestLoading(): void {
this.tracks = [];
this.groupId = null;
this.tracksInGroup = [];
this.trackId = -1;
this.trackName = '';
this.selectDefaultTrack = true;
}
protected onManifestParsed(
event: Events.MANIFEST_PARSED,
data: ManifestParsedData
): void {
this.tracks = data.audioTracks || [];
}
protected onAudioTrackLoaded(
event: Events.AUDIO_TRACK_LOADED,
data: AudioTrackLoadedData
): void {
const { id, details } = data;
const currentTrack = this.tracksInGroup[id];
if (!currentTrack) {
this.warn(`Invalid audio track id ${id}`);
return;
}
const curDetails = currentTrack.details;
currentTrack.details = data.details;
this.log(`audioTrack ${id} loaded [${details.startSN}-${details.endSN}]`);
if (id === this.trackId) {
this.retryCount = 0;
this.playlistLoaded(id, data, curDetails);
}
}
protected onLevelLoading(
event: Events.LEVEL_LOADING,
data: LevelLoadingData
): void {
this.switchLevel(data.level);
}
protected onLevelSwitching(
event: Events.LEVEL_SWITCHING,
data: LevelSwitchingData
): void {
this.switchLevel(data.level);
}
private switchLevel(levelIndex: number) {
const levelInfo = this.hls.levels[levelIndex];
if (!levelInfo?.audioGroupIds) {
return;
}
const audioGroupId = levelInfo.audioGroupIds[levelInfo.urlId];
if (this.groupId !== audioGroupId) {
this.groupId = audioGroupId;
const audioTracks = this.tracks.filter(
(track): boolean => !audioGroupId || track.groupId === audioGroupId
);
// Disable selectDefaultTrack if there are no default tracks
if (
this.selectDefaultTrack &&
!audioTracks.some((track) => track.default)
) {
this.selectDefaultTrack = false;
}
this.tracksInGroup = audioTracks;
const audioTracksUpdated: AudioTracksUpdatedData = { audioTracks };
this.log(
`Updating audio tracks, ${audioTracks.length} track(s) found in "${audioGroupId}" group-id`
);
this.hls.trigger(Events.AUDIO_TRACKS_UPDATED, audioTracksUpdated);
this.selectInitialTrack();
}
}
protected onError(event: Events.ERROR, data: ErrorData): void {
super.onError(event, data);
if (data.fatal || !data.context) {
return;
}
if (
data.context.type === PlaylistContextType.AUDIO_TRACK &&
data.context.id === this.trackId &&
data.context.groupId === this.groupId
) {
this.retryLoadingOrFail(data);
}
}
get audioTracks(): MediaPlaylist[] {
return this.tracksInGroup;
}
get audioTrack(): number {
return this.trackId;
}
set audioTrack(newId: number) {
// If the audio track is selected through the API, don't fall back to the manifest default track
this.selectDefaultTrack = false;
this.setAudioTrack(newId);
}
private setAudioTrack(newId: number): void {
const tracks = this.tracksInGroup;
// check if the track index is valid
if (newId < 0 || newId >= tracks.length) {
this.warn('Invalid id passed to audio-track controller');
return;
}
// stop the live reloading timer, if any
this.clearTimer();
const lastTrack = tracks[this.trackId];
this.log(`Now switching to audio-track index ${newId}`);
const track = tracks[newId];
const { id, groupId = '', name, type, url } = track;
this.trackId = newId;
this.trackName = name;
this.selectDefaultTrack = false;
this.hls.trigger(Events.AUDIO_TRACK_SWITCHING, {
id,
groupId,
name,
type,
url,
});
// Do not reload track unless live
if (track.details && !track.details.live) {
return;
}
const hlsUrlParameters = this.switchParams(track.url, lastTrack?.details);
this.loadPlaylist(hlsUrlParameters);
}
private selectInitialTrack(): void {
const audioTracks = this.tracksInGroup;
console.assert(
audioTracks.length,
'Initial audio track should be selected when tracks are known'
);
const currentAudioTrackName = this.trackName;
const trackId =
this.findTrackId(currentAudioTrackName) || this.findTrackId();
if (trackId !== -1) {
this.setAudioTrack(trackId);
} else {
this.warn(`No track found for running audio group-ID: ${this.groupId}`);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.AUDIO_TRACK_LOAD_ERROR,
fatal: true,
});
}
}
private findTrackId(name?: string): number {
const audioTracks = this.tracksInGroup;
for (let i = 0; i < audioTracks.length; i++) {
const track = audioTracks[i];
if (!this.selectDefaultTrack || track.default) {
if (!name || name === track.name) {
return track.id;
}
}
}
return -1;
}
protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters): void {
const audioTrack = this.tracksInGroup[this.trackId];
if (this.shouldLoadTrack(audioTrack)) {
const id = audioTrack.id;
const groupId = audioTrack.groupId as string;
let url = audioTrack.url;
if (hlsUrlParameters) {
try {
url = hlsUrlParameters.addDirectives(url);
} catch (error) {
this.warn(
`Could not construct new URL with HLS Delivery Directives: ${error}`
);
}
}
// playlist not retrieved yet, or live playlist that needs to be (re)loaded
this.log(`loading audio-track playlist for id: ${id}`);
this.clearTimer();
this.hls.trigger(Events.AUDIO_TRACK_LOADING, {
url,
id,
groupId,
deliveryDirectives: hlsUrlParameters || null,
});
}
}
}
export default AudioTrackController;
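
A minimal usage sketch (not part of the vendored source): the audioTracks getter and audioTrack setter above back hls.js's public audio-track API. The manifest URL, element id and language choice below are hypothetical.

import Hls from 'hls.js';

const video = document.getElementById('video') as HTMLVideoElement;
const hls = new Hls();
hls.loadSource('https://example.com/master.m3u8'); // placeholder URL
hls.attachMedia(video);
hls.on(Hls.Events.AUDIO_TRACKS_UPDATED, (event, data) => {
  // Prefer a French rendition when the group offers one; otherwise the
  // controller keeps the track chosen by selectInitialTrack().
  const fr = data.audioTracks.find((track) => track.lang === 'fr');
  if (fr) {
    hls.audioTrack = fr.id; // routes through the setAudioTrack() shown above
  }
});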

273
node_modules/hls.js/src/controller/base-playlist-controller.ts generated vendored Normal file

@ -0,0 +1,273 @@
import type Hls from '../hls';
import type { NetworkComponentAPI } from '../types/component-api';
import { getSkipValue, HlsSkip, HlsUrlParameters } from '../types/level';
import { computeReloadInterval, mergeDetails } from './level-helper';
import { logger } from '../utils/logger';
import type { LevelDetails } from '../loader/level-details';
import type { MediaPlaylist } from '../types/media-playlist';
import type {
AudioTrackLoadedData,
LevelLoadedData,
TrackLoadedData,
} from '../types/events';
import { ErrorData } from '../types/events';
import { Events } from '../events';
import { ErrorTypes } from '../errors';
export default class BasePlaylistController implements NetworkComponentAPI {
protected hls: Hls;
protected timer: number = -1;
protected canLoad: boolean = false;
protected retryCount: number = 0;
protected log: (msg: any) => void;
protected warn: (msg: any) => void;
constructor(hls: Hls, logPrefix: string) {
this.log = logger.log.bind(logger, `${logPrefix}:`);
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
this.hls = hls;
}
public destroy(): void {
this.clearTimer();
// @ts-ignore
this.hls = this.log = this.warn = null;
}
protected onError(event: Events.ERROR, data: ErrorData): void {
if (data.fatal && data.type === ErrorTypes.NETWORK_ERROR) {
this.clearTimer();
}
}
protected clearTimer(): void {
clearTimeout(this.timer);
this.timer = -1;
}
public startLoad(): void {
this.canLoad = true;
this.retryCount = 0;
this.loadPlaylist();
}
public stopLoad(): void {
this.canLoad = false;
this.clearTimer();
}
protected switchParams(
playlistUri: string,
previous?: LevelDetails
): HlsUrlParameters | undefined {
const renditionReports = previous?.renditionReports;
if (renditionReports) {
for (let i = 0; i < renditionReports.length; i++) {
const attr = renditionReports[i];
const uri = '' + attr.URI;
if (uri === playlistUri.substr(-uri.length)) {
const msn = parseInt(attr['LAST-MSN']);
let part = parseInt(attr['LAST-PART']);
if (previous && this.hls.config.lowLatencyMode) {
const currentGoal = Math.min(
previous.age - previous.partTarget,
previous.targetduration
);
if (part !== undefined && currentGoal > previous.partTarget) {
part += 1;
}
}
if (Number.isFinite(msn)) {
return new HlsUrlParameters(
msn,
Number.isFinite(part) ? part : undefined,
HlsSkip.No
);
}
}
}
}
}
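// Illustrative example (hypothetical values): if the previous playlist carried
//   #EXT-X-RENDITION-REPORT:URI="lo/audio.m3u8",LAST-MSN=138,LAST-PART=3
// then a request for a playlist URI ending in "lo/audio.m3u8" matches the
// report and switchParams() returns HlsUrlParameters(138, 3, HlsSkip.No);
// in low-latency mode, when the playlist age indicates the CDN is more than
// one part ahead (currentGoal > partTarget), the part advances to 4.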
protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters): void {}
protected shouldLoadTrack(track: MediaPlaylist): boolean {
return (
this.canLoad &&
track &&
!!track.url &&
(!track.details || track.details.live)
);
}
protected playlistLoaded(
index: number,
data: LevelLoadedData | AudioTrackLoadedData | TrackLoadedData,
previousDetails?: LevelDetails
) {
const { details, stats } = data;
// Set last updated date-time
const elapsed = stats.loading.end
? Math.max(0, self.performance.now() - stats.loading.end)
: 0;
details.advancedDateTime = Date.now() - elapsed;
// if current playlist is a live playlist, arm a timer to reload it
if (details.live || previousDetails?.live) {
details.reloaded(previousDetails);
if (previousDetails) {
this.log(
`live playlist ${index} ${
details.advanced
? 'REFRESHED ' + details.lastPartSn + '-' + details.lastPartIndex
: 'MISSED'
}`
);
}
// Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments
if (previousDetails && details.fragments.length > 0) {
mergeDetails(previousDetails, details);
}
if (!this.canLoad || !details.live) {
return;
}
let deliveryDirectives: HlsUrlParameters;
let msn: number | undefined = undefined;
let part: number | undefined = undefined;
if (details.canBlockReload && details.endSN && details.advanced) {
// Load level with LL-HLS delivery directives
const lowLatencyMode = this.hls.config.lowLatencyMode;
const lastPartSn = details.lastPartSn;
const endSn = details.endSN;
const lastPartIndex = details.lastPartIndex;
const hasParts = lastPartIndex !== -1;
const lastPart = lastPartSn === endSn;
// When low latency mode is disabled, we'll skip part requests once the last part index is found
const nextSnStartIndex = lowLatencyMode ? 0 : lastPartIndex;
if (hasParts) {
msn = lastPart ? endSn + 1 : lastPartSn;
part = lastPart ? nextSnStartIndex : lastPartIndex + 1;
} else {
msn = endSn + 1;
}
// Low-Latency CDN Tune-in: the "age" header and time since load indicate we're behind by more than one part.
// Update directives to obtain the playlist that has the estimated additional duration of media
const lastAdvanced = details.age;
const cdnAge = lastAdvanced + details.ageHeader;
let currentGoal = Math.min(
cdnAge - details.partTarget,
details.targetduration * 1.5
);
if (currentGoal > 0) {
if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
// If we attempted to get the next or latest playlist update, but currentGoal increased,
// then we either can't catchup, or the "age" header cannot be trusted.
this.warn(
`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`
);
currentGoal = 0;
} else {
const segments = Math.floor(currentGoal / details.targetduration);
msn += segments;
if (part !== undefined) {
const parts = Math.round(
(currentGoal % details.targetduration) / details.partTarget
);
part += parts;
}
this.log(
`CDN Tune-in age: ${
details.ageHeader
}s last advanced ${lastAdvanced.toFixed(
2
)}s goal: ${currentGoal} skip sn ${segments} to part ${part}`
);
}
details.tuneInGoal = currentGoal;
}
deliveryDirectives = this.getDeliveryDirectives(
details,
data.deliveryDirectives,
msn,
part
);
if (lowLatencyMode || !lastPart) {
this.loadPlaylist(deliveryDirectives);
return;
}
} else {
deliveryDirectives = this.getDeliveryDirectives(
details,
data.deliveryDirectives,
msn,
part
);
}
let reloadInterval = computeReloadInterval(details, stats);
if (msn !== undefined && details.canBlockReload) {
reloadInterval -= details.partTarget || 1;
}
this.log(
`reload live playlist ${index} in ${Math.round(reloadInterval)} ms`
);
this.timer = self.setTimeout(
() => this.loadPlaylist(deliveryDirectives),
reloadInterval
);
} else {
this.clearTimer();
}
}
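// Worked example (hypothetical numbers): with endSN=100, lastPartSn=100,
// lastPartIndex=2 and lowLatencyMode enabled, hasParts and lastPart both hold,
// so the next blocking request asks for msn=101, part=0 (nextSnStartIndex).
// Had the playlist stopped at lastPartSn=99 instead, the request would be
// msn=99, part=3, i.e. the next part of the still-incomplete segment.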
private getDeliveryDirectives(
details: LevelDetails,
previousDeliveryDirectives: HlsUrlParameters | null,
msn?: number,
part?: number
): HlsUrlParameters {
let skip = getSkipValue(details, msn);
if (previousDeliveryDirectives?.skip && details.deltaUpdateFailed) {
msn = previousDeliveryDirectives.msn;
part = previousDeliveryDirectives.part;
skip = HlsSkip.No;
}
return new HlsUrlParameters(msn, part, skip);
}
protected retryLoadingOrFail(errorEvent: ErrorData): boolean {
const { config } = this.hls;
const retry = this.retryCount < config.levelLoadingMaxRetry;
if (retry) {
this.retryCount++;
if (
errorEvent.details.indexOf('LoadTimeOut') > -1 &&
errorEvent.context?.deliveryDirectives
) {
// The LL-HLS request already timed out so retry immediately
this.warn(
`retry playlist loading #${this.retryCount} after "${errorEvent.details}"`
);
this.loadPlaylist();
} else {
// exponential backoff capped to max retry timeout
const delay = Math.min(
Math.pow(2, this.retryCount) * config.levelLoadingRetryDelay,
config.levelLoadingMaxRetryTimeout
);
// Schedule level/track reload
this.timer = self.setTimeout(() => this.loadPlaylist(), delay);
this.warn(
`retry playlist loading #${this.retryCount} in ${delay} ms after "${errorEvent.details}"`
);
}
} else {
this.warn(`cannot recover from error "${errorEvent.details}"`);
// stopping live reloading timer if any
this.clearTimer();
// switch error to fatal
errorEvent.fatal = true;
}
return retry;
}
}
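
A small sketch of the exponential backoff implemented in retryLoadingOrFail() above. The default delay and cap values are hls.js's documented defaults; treat them as assumptions here.

// Exponential backoff capped at a maximum, mirroring retryLoadingOrFail().
function retryDelay(
  retryCount: number,
  levelLoadingRetryDelay = 1000, // assumed default (ms)
  levelLoadingMaxRetryTimeout = 64000 // assumed default (ms)
): number {
  return Math.min(
    Math.pow(2, retryCount) * levelLoadingRetryDelay,
    levelLoadingMaxRetryTimeout
  );
}
// retryDelay(1) === 2000, retryDelay(2) === 4000, and from retryDelay(6)
// onward the 64000 ms cap applies.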

File diff suppressed because it is too large

906
node_modules/hls.js/src/controller/buffer-controller.ts generated vendored Normal file

@ -0,0 +1,906 @@
import { Events } from '../events';
import { logger } from '../utils/logger';
import { ErrorDetails, ErrorTypes } from '../errors';
import { BufferHelper } from '../utils/buffer-helper';
import { getMediaSource } from '../utils/mediasource-helper';
import { ElementaryStreamTypes } from '../loader/fragment';
import type { TrackSet } from '../types/track';
import BufferOperationQueue from './buffer-operation-queue';
import {
BufferOperation,
SourceBuffers,
SourceBufferName,
SourceBufferListeners,
} from '../types/buffer';
import type {
LevelUpdatedData,
BufferAppendingData,
MediaAttachingData,
ManifestParsedData,
BufferCodecsData,
BufferEOSData,
BufferFlushingData,
FragParsedData,
FragChangedData,
} from '../types/events';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import { LevelDetails } from '../loader/level-details';
const MediaSource = getMediaSource();
const VIDEO_CODEC_PROFILE_REPLACE = /([ha]vc.)(?:\.[^.,]+)+/;
export default class BufferController implements ComponentAPI {
// The level details used to determine duration, target-duration and live
private details: LevelDetails | null = null;
// cache the self generated object url to detect hijack of video tag
private _objectUrl: string | null = null;
// A queue of buffer operations which require the SourceBuffer to not be updating upon execution
private operationQueue!: BufferOperationQueue;
// References to event listeners for each SourceBuffer, so that they can be referenced for event removal
private listeners!: SourceBufferListeners;
private hls: Hls;
// The number of BUFFER_CODEC events received before any sourceBuffers are created
public bufferCodecEventsExpected: number = 0;
// The total number of BUFFER_CODEC events received
private _bufferCodecEventsTotal: number = 0;
// A reference to the attached media element
public media: HTMLMediaElement | null = null;
// A reference to the active media source
public mediaSource: MediaSource | null = null;
// counters
public appendError: number = 0;
public tracks: TrackSet = {};
public pendingTracks: TrackSet = {};
public sourceBuffer!: SourceBuffers;
constructor(hls: Hls) {
this.hls = hls;
this._initSourceBuffer();
this.registerListeners();
}
public hasSourceTypes(): boolean {
return (
this.getSourceBufferTypes().length > 0 ||
Object.keys(this.pendingTracks).length > 0
);
}
public destroy() {
this.unregisterListeners();
this.details = null;
}
protected registerListeners() {
const { hls } = this;
hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this);
hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
hls.on(Events.BUFFER_EOS, this.onBufferEos, this);
hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
}
protected unregisterListeners() {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this);
hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
hls.off(Events.BUFFER_EOS, this.onBufferEos, this);
hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
}
private _initSourceBuffer() {
this.sourceBuffer = {};
this.operationQueue = new BufferOperationQueue(this.sourceBuffer);
this.listeners = {
audio: [],
video: [],
audiovideo: [],
};
}
protected onManifestParsed(
event: Events.MANIFEST_PARSED,
data: ManifestParsedData
) {
// with alt audio, two BUFFER_CODECS events are triggered, one per stream controller;
// source buffers are created all at once, when the expected number of tracks is reached
// without alt audio, a single BUFFER_CODECS event is fired from the main stream controller;
// it carries the expected number of source buffers, so there is no need to compute it
let codecEvents: number = 2;
if ((data.audio && !data.video) || !data.altAudio) {
codecEvents = 1;
}
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
this.details = null;
logger.log(
`${this.bufferCodecEventsExpected} bufferCodec event(s) expected`
);
}
protected onMediaAttaching(
event: Events.MEDIA_ATTACHING,
data: MediaAttachingData
) {
const media = (this.media = data.media);
if (media && MediaSource) {
const ms = (this.mediaSource = new MediaSource());
// MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
ms.addEventListener('sourceopen', this._onMediaSourceOpen);
ms.addEventListener('sourceended', this._onMediaSourceEnded);
ms.addEventListener('sourceclose', this._onMediaSourceClose);
// link video and media Source
media.src = self.URL.createObjectURL(ms);
// cache the locally generated object url
this._objectUrl = media.src;
}
}
protected onMediaDetaching() {
const { media, mediaSource, _objectUrl } = this;
if (mediaSource) {
logger.log('[buffer-controller]: media source detaching');
if (mediaSource.readyState === 'open') {
try {
// endOfStream could trigger exception if any sourcebuffer is in updating state
// we don't really care about checking sourcebuffer state here,
// as we are anyway detaching the MediaSource
// let's just avoid this exception to propagate
mediaSource.endOfStream();
} catch (err) {
logger.warn(
`[buffer-controller]: onMediaDetaching: ${err.message} while calling endOfStream`
);
}
}
// Clean up the SourceBuffers by invoking onBufferReset
this.onBufferReset();
mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose);
// Detach properly the MediaSource from the HTMLMediaElement as
// suggested in https://github.com/w3c/media-source/issues/53.
if (media) {
if (_objectUrl) {
self.URL.revokeObjectURL(_objectUrl);
}
// clean up video tag src only if it's our own url. some external libraries might
// hijack the video tag and change its 'src' without destroying the Hls instance first
if (media.src === _objectUrl) {
media.removeAttribute('src');
media.load();
} else {
logger.warn(
'[buffer-controller]: media.src was changed by a third party - skip cleanup'
);
}
}
this.mediaSource = null;
this.media = null;
this._objectUrl = null;
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
this.pendingTracks = {};
this.tracks = {};
}
this.hls.trigger(Events.MEDIA_DETACHED, undefined);
}
protected onBufferReset() {
this.getSourceBufferTypes().forEach((type) => {
const sb = this.sourceBuffer[type];
try {
if (sb) {
this.removeBufferListeners(type);
if (this.mediaSource) {
this.mediaSource.removeSourceBuffer(sb);
}
// Synchronously remove the SB from the map before the next call in order to prevent an async function from
// accessing it
this.sourceBuffer[type] = undefined;
}
} catch (err) {
logger.warn(
`[buffer-controller]: Failed to reset the ${type} buffer`,
err
);
}
});
this._initSourceBuffer();
}
protected onBufferCodecs(
event: Events.BUFFER_CODECS,
data: BufferCodecsData
) {
const sourceBufferCount = this.getSourceBufferTypes().length;
Object.keys(data).forEach((trackName) => {
if (sourceBufferCount) {
// check if SourceBuffer codec needs to change
const track = this.tracks[trackName];
if (track && typeof track.buffer.changeType === 'function') {
const { codec, levelCodec, container } = data[trackName];
const currentCodec = (track.levelCodec || track.codec).replace(
VIDEO_CODEC_PROFILE_REPLACE,
'$1'
);
const nextCodec = (levelCodec || codec).replace(
VIDEO_CODEC_PROFILE_REPLACE,
'$1'
);
if (currentCodec !== nextCodec) {
const mimeType = `${container};codecs=${levelCodec || codec}`;
this.appendChangeType(trackName, mimeType);
}
}
} else {
// if source buffer(s) not created yet, store the pending tracks in this.pendingTracks
this.pendingTracks[trackName] = data[trackName];
}
});
// if sourcebuffers already created, do nothing ...
if (sourceBufferCount) {
return;
}
this.bufferCodecEventsExpected = Math.max(
this.bufferCodecEventsExpected - 1,
0
);
if (this.mediaSource && this.mediaSource.readyState === 'open') {
this.checkPendingTracks();
}
}
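// Worked example (hypothetical codec strings): VIDEO_CODEC_PROFILE_REPLACE
// reduces 'avc1.64001f' and 'avc1.42e01e' to the same base 'avc1', so a
// switch between those profiles does not call changeType(); switching from
// 'avc1.64001f' to 'hvc1.1.6.L93.B0' does, with a mimeType such as
// 'video/mp4;codecs=hvc1.1.6.L93.B0'.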
protected appendChangeType(type, mimeType) {
const { operationQueue } = this;
const operation: BufferOperation = {
execute: () => {
const sb = this.sourceBuffer[type];
if (sb) {
logger.log(
`[buffer-controller]: changing ${type} sourceBuffer type to ${mimeType}`
);
sb.changeType(mimeType);
}
operationQueue.shiftAndExecuteNext(type);
},
onStart: () => {},
onComplete: () => {},
onError: (e) => {
logger.warn(
`[buffer-controller]: Failed to change ${type} SourceBuffer type`,
e
);
},
};
operationQueue.append(operation, type);
}
protected onBufferAppending(
event: Events.BUFFER_APPENDING,
eventData: BufferAppendingData
) {
const { hls, operationQueue, tracks } = this;
const { data, type, frag, part, chunkMeta } = eventData;
const chunkStats = chunkMeta.buffering[type];
const bufferAppendingStart = self.performance.now();
chunkStats.start = bufferAppendingStart;
const fragBuffering = frag.stats.buffering;
const partBuffering = part ? part.stats.buffering : null;
if (fragBuffering.start === 0) {
fragBuffering.start = bufferAppendingStart;
}
if (partBuffering && partBuffering.start === 0) {
partBuffering.start = bufferAppendingStart;
}
// TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
// Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
// in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
// is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos).
// More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
const audioTrack = tracks.audio;
const checkTimestampOffset =
type === 'audio' &&
chunkMeta.id === 1 &&
audioTrack?.container === 'audio/mpeg';
const operation: BufferOperation = {
execute: () => {
chunkStats.executeStart = self.performance.now();
if (checkTimestampOffset) {
const sb = this.sourceBuffer[type];
if (sb) {
const delta = frag.start - sb.timestampOffset;
if (Math.abs(delta) >= 0.1) {
logger.log(
`[buffer-controller]: Updating audio SourceBuffer timestampOffset to ${frag.start} (delta: ${delta}) sn: ${frag.sn})`
);
sb.timestampOffset = frag.start;
}
}
}
this.appendExecutor(data, type);
},
onStart: () => {
// logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
},
onComplete: () => {
// logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
const end = self.performance.now();
chunkStats.executeEnd = chunkStats.end = end;
if (fragBuffering.first === 0) {
fragBuffering.first = end;
}
if (partBuffering && partBuffering.first === 0) {
partBuffering.first = end;
}
const { sourceBuffer } = this;
const timeRanges = {};
for (const type in sourceBuffer) {
timeRanges[type] = BufferHelper.getBuffered(sourceBuffer[type]);
}
this.appendError = 0;
this.hls.trigger(Events.BUFFER_APPENDED, {
type,
frag,
part,
chunkMeta,
parent: frag.type,
timeRanges,
});
},
onError: (err) => {
// in case any error occurred while appending, put the segment back in the segments table
logger.error(
`[buffer-controller]: Error encountered while trying to append to the ${type} SourceBuffer`,
err
);
const event = {
type: ErrorTypes.MEDIA_ERROR,
parent: frag.type,
details: ErrorDetails.BUFFER_APPEND_ERROR,
err,
fatal: false,
};
if (err.code === DOMException.QUOTA_EXCEEDED_ERR) {
// QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
// let's stop appending any segments, and report BUFFER_FULL_ERROR error
event.details = ErrorDetails.BUFFER_FULL_ERROR;
} else {
this.appendError++;
event.details = ErrorDetails.BUFFER_APPEND_ERROR;
/* with UHD content, we could get a loop of QuotaExceededError until the
browser is able to evict some data from the SourceBuffer. Retrying can help recover.
*/
if (this.appendError > hls.config.appendErrorMaxRetry) {
logger.error(
`[buffer-controller]: Failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`
);
event.fatal = true;
}
}
hls.trigger(Events.ERROR, event);
},
};
operationQueue.append(operation, type);
}
protected onBufferFlushing(
event: Events.BUFFER_FLUSHING,
data: BufferFlushingData
) {
const { operationQueue } = this;
const flushOperation = (type: SourceBufferName): BufferOperation => ({
execute: this.removeExecutor.bind(
this,
type,
data.startOffset,
data.endOffset
),
onStart: () => {
// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
},
onComplete: () => {
// logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
this.hls.trigger(Events.BUFFER_FLUSHED, { type });
},
onError: (e) => {
logger.warn(
`[buffer-controller]: Failed to remove from ${type} SourceBuffer`,
e
);
},
});
if (data.type) {
operationQueue.append(flushOperation(data.type), data.type);
} else {
this.getSourceBufferTypes().forEach((type: SourceBufferName) => {
operationQueue.append(flushOperation(type), type);
});
}
}
protected onFragParsed(event: Events.FRAG_PARSED, data: FragParsedData) {
const { frag, part } = data;
const buffersAppendedTo: Array<SourceBufferName> = [];
const elementaryStreams = part
? part.elementaryStreams
: frag.elementaryStreams;
if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) {
buffersAppendedTo.push('audiovideo');
} else {
if (elementaryStreams[ElementaryStreamTypes.AUDIO]) {
buffersAppendedTo.push('audio');
}
if (elementaryStreams[ElementaryStreamTypes.VIDEO]) {
buffersAppendedTo.push('video');
}
}
const onUnblocked = () => {
const now = self.performance.now();
frag.stats.buffering.end = now;
if (part) {
part.stats.buffering.end = now;
}
const stats = part ? part.stats : frag.stats;
this.hls.trigger(Events.FRAG_BUFFERED, {
frag,
part,
stats,
id: frag.type,
});
};
if (buffersAppendedTo.length === 0) {
logger.warn(
`Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`
);
}
this.blockBuffers(onUnblocked, buffersAppendedTo);
}
private onFragChanged(event: Events.FRAG_CHANGED, data: FragChangedData) {
this.flushBackBuffer();
}
// on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
// an undefined data.type will mark all buffers as EOS.
protected onBufferEos(event: Events.BUFFER_EOS, data: BufferEOSData) {
const ended = this.getSourceBufferTypes().reduce((acc, type) => {
const sb = this.sourceBuffer[type];
if (!data.type || data.type === type) {
if (sb && !sb.ended) {
sb.ended = true;
logger.log(`[buffer-controller]: ${type} sourceBuffer now EOS`);
}
}
return acc && !!(!sb || sb.ended);
}, true);
if (ended) {
this.blockBuffers(() => {
const { mediaSource } = this;
if (!mediaSource || mediaSource.readyState !== 'open') {
return;
}
// Allow this to throw and be caught by the enqueueing function
mediaSource.endOfStream();
});
}
}
protected onLevelUpdated(
event: Events.LEVEL_UPDATED,
{ details }: LevelUpdatedData
) {
if (!details.fragments.length) {
return;
}
this.details = details;
if (this.getSourceBufferTypes().length) {
this.blockBuffers(this.updateMediaElementDuration.bind(this));
} else {
this.updateMediaElementDuration();
}
}
flushBackBuffer() {
const { hls, details, media, sourceBuffer } = this;
if (!media || details === null) {
return;
}
const sourceBufferTypes = this.getSourceBufferTypes();
if (!sourceBufferTypes.length) {
return;
}
// Support for deprecated liveBackBufferLength
const backBufferLength =
details.live && hls.config.liveBackBufferLength !== null
? hls.config.liveBackBufferLength
: hls.config.backBufferLength;
if (!Number.isFinite(backBufferLength) || backBufferLength < 0) {
return;
}
const currentTime = media.currentTime;
const targetDuration = details.levelTargetDuration;
const maxBackBufferLength = Math.max(backBufferLength, targetDuration);
const targetBackBufferPosition =
Math.floor(currentTime / targetDuration) * targetDuration -
maxBackBufferLength;
sourceBufferTypes.forEach((type: SourceBufferName) => {
const sb = sourceBuffer[type];
if (sb) {
const buffered = BufferHelper.getBuffered(sb);
// when target buffer start exceeds actual buffer start
if (
buffered.length > 0 &&
targetBackBufferPosition > buffered.start(0)
) {
hls.trigger(Events.BACK_BUFFER_REACHED, {
bufferEnd: targetBackBufferPosition,
});
// Support for deprecated event:
if (details.live) {
hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
bufferEnd: targetBackBufferPosition,
});
}
hls.trigger(Events.BUFFER_FLUSHING, {
startOffset: 0,
endOffset: targetBackBufferPosition,
type,
});
}
}
});
}
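// Worked example (hypothetical numbers): with currentTime = 125s,
// levelTargetDuration = 6s and backBufferLength = 90s,
// targetBackBufferPosition = floor(125 / 6) * 6 - 90 = 120 - 90 = 30, so any
// media buffered before t=30s is scheduled for flushing.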
/**
* Update Media Source duration to current level duration or override to Infinity if configuration parameter
* `liveDurationInfinity` is set to `true`
* More details: https://github.com/video-dev/hls.js/issues/355
*/
private updateMediaElementDuration() {
if (
!this.details ||
!this.media ||
!this.mediaSource ||
this.mediaSource.readyState !== 'open'
) {
return;
}
const { details, hls, media, mediaSource } = this;
const levelDuration = details.fragments[0].start + details.totalduration;
const mediaDuration = media.duration;
const msDuration = Number.isFinite(mediaSource.duration)
? mediaSource.duration
: 0;
if (details.live && hls.config.liveDurationInfinity) {
// Override duration to Infinity
logger.log(
'[buffer-controller]: Media Source duration is set to Infinity'
);
mediaSource.duration = Infinity;
this.updateSeekableRange(details);
} else if (
(levelDuration > msDuration && levelDuration > mediaDuration) ||
!Number.isFinite(mediaDuration)
) {
// levelDuration was the last value we set;
// not using mediaSource.duration as the browser may tweak this value.
// Only update the Media Source duration if its value increases; this avoids
// flushing the already buffered portion when switching between quality levels.
logger.log(
`[buffer-controller]: Updating Media Source duration to ${levelDuration.toFixed(
3
)}`
);
mediaSource.duration = levelDuration;
}
}
updateSeekableRange(levelDetails) {
const mediaSource = this.mediaSource;
const fragments = levelDetails.fragments;
const len = fragments.length;
if (len && levelDetails.live && mediaSource?.setLiveSeekableRange) {
const start = Math.max(0, fragments[0].start);
const end = Math.max(start, start + levelDetails.totalduration);
mediaSource.setLiveSeekableRange(start, end);
}
}
protected checkPendingTracks() {
const { bufferCodecEventsExpected, operationQueue, pendingTracks } = this;
// Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
// This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
// data has been appended to existing ones.
// 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
const pendingTracksCount = Object.keys(pendingTracks).length;
if (
(pendingTracksCount && !bufferCodecEventsExpected) ||
pendingTracksCount === 2
) {
// ok, let's create them now !
this.createSourceBuffers(pendingTracks);
this.pendingTracks = {};
// append any pending segments now !
const buffers = this.getSourceBufferTypes();
if (buffers.length === 0) {
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,
fatal: true,
reason: 'could not create source buffer for media codec(s)',
});
return;
}
buffers.forEach((type: SourceBufferName) => {
operationQueue.executeNext(type);
});
}
}
protected createSourceBuffers(tracks: TrackSet) {
const { sourceBuffer, mediaSource } = this;
if (!mediaSource) {
throw Error('createSourceBuffers called when mediaSource was null');
}
let tracksCreated = 0;
for (const trackName in tracks) {
if (!sourceBuffer[trackName]) {
const track = tracks[trackName as keyof TrackSet];
if (!track) {
throw Error(
`source buffer exists for track ${trackName}, however track does not`
);
}
// use levelCodec as first priority
const codec = track.levelCodec || track.codec;
const mimeType = `${track.container};codecs=${codec}`;
logger.log(`[buffer-controller]: creating sourceBuffer(${mimeType})`);
try {
const sb = (sourceBuffer[trackName] =
mediaSource.addSourceBuffer(mimeType));
const sbName = trackName as SourceBufferName;
this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
this.addBufferListener(sbName, 'error', this._onSBUpdateError);
this.tracks[trackName] = {
buffer: sb,
codec: codec,
container: track.container,
levelCodec: track.levelCodec,
id: track.id,
};
tracksCreated++;
} catch (err) {
logger.error(
`[buffer-controller]: error while trying to add sourceBuffer: ${err.message}`
);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
fatal: false,
error: err,
mimeType: mimeType,
});
}
}
}
if (tracksCreated) {
this.hls.trigger(Events.BUFFER_CREATED, { tracks: this.tracks });
}
}
// Keep as arrow functions so that we can reference these functions directly as event listeners
private _onMediaSourceOpen = () => {
const { hls, media, mediaSource } = this;
logger.log('[buffer-controller]: Media source opened');
if (media) {
this.updateMediaElementDuration();
hls.trigger(Events.MEDIA_ATTACHED, { media });
}
if (mediaSource) {
// once received, don't listen anymore to sourceopen event
mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
}
this.checkPendingTracks();
};
private _onMediaSourceClose = () => {
logger.log('[buffer-controller]: Media source closed');
};
private _onMediaSourceEnded = () => {
logger.log('[buffer-controller]: Media source ended');
};
private _onSBUpdateStart(type: SourceBufferName) {
const { operationQueue } = this;
const operation = operationQueue.current(type);
operation.onStart();
}
private _onSBUpdateEnd(type: SourceBufferName) {
const { operationQueue } = this;
const operation = operationQueue.current(type);
operation.onComplete();
operationQueue.shiftAndExecuteNext(type);
}
private _onSBUpdateError(type: SourceBufferName, event: Event) {
logger.error(`[buffer-controller]: ${type} SourceBuffer error`, event);
// according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
// SourceBuffer errors are not necessarily fatal; if so, the HTMLMediaElement will fire an error event
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_APPENDING_ERROR,
fatal: false,
});
// updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
const operation = this.operationQueue.current(type);
if (operation) {
operation.onError(event);
}
}
// This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
private removeExecutor(
type: SourceBufferName,
startOffset: number,
endOffset: number
) {
const { media, mediaSource, operationQueue, sourceBuffer } = this;
const sb = sourceBuffer[type];
if (!media || !mediaSource || !sb) {
logger.warn(
`[buffer-controller]: Attempting to remove from the ${type} SourceBuffer, but it does not exist`
);
operationQueue.shiftAndExecuteNext(type);
return;
}
const mediaDuration = Number.isFinite(media.duration)
? media.duration
: Infinity;
const msDuration = Number.isFinite(mediaSource.duration)
? mediaSource.duration
: Infinity;
const removeStart = Math.max(0, startOffset);
const removeEnd = Math.min(endOffset, mediaDuration, msDuration);
if (removeEnd > removeStart) {
logger.log(
`[buffer-controller]: Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`
);
console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
sb.remove(removeStart, removeEnd);
} else {
// Cycle the queue
operationQueue.shiftAndExecuteNext(type);
}
}
// This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
private appendExecutor(data: Uint8Array, type: SourceBufferName) {
const { operationQueue, sourceBuffer } = this;
const sb = sourceBuffer[type];
if (!sb) {
logger.warn(
`[buffer-controller]: Attempting to append to the ${type} SourceBuffer, but it does not exist`
);
operationQueue.shiftAndExecuteNext(type);
return;
}
sb.ended = false;
console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
sb.appendBuffer(data);
}
// Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
// resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
// upon completion, since we already do it here
private blockBuffers(
onUnblocked: Function,
buffers: Array<SourceBufferName> = this.getSourceBufferTypes()
) {
if (!buffers.length) {
logger.log(
'[buffer-controller]: Blocking operation requested, but no SourceBuffers exist'
);
Promise.resolve().then(onUnblocked);
return;
}
const { operationQueue } = this;
// logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
const blockingOperations = buffers.map((type) =>
operationQueue.appendBlocker(type as SourceBufferName)
);
Promise.all(blockingOperations).then(() => {
// logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
onUnblocked();
buffers.forEach((type) => {
const sb = this.sourceBuffer[type];
// Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
// true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
// While this is a workaround, it's probably useful to have around
if (!sb || !sb.updating) {
operationQueue.shiftAndExecuteNext(type);
}
});
});
}
private getSourceBufferTypes(): Array<SourceBufferName> {
return Object.keys(this.sourceBuffer) as Array<SourceBufferName>;
}
private addBufferListener(
type: SourceBufferName,
event: string,
fn: Function
) {
const buffer = this.sourceBuffer[type];
if (!buffer) {
return;
}
const listener = fn.bind(this, type);
this.listeners[type].push({ event, listener });
buffer.addEventListener(event, listener);
}
private removeBufferListeners(type: SourceBufferName) {
const buffer = this.sourceBuffer[type];
if (!buffer) {
return;
}
this.listeners[type].forEach((l) => {
buffer.removeEventListener(l.event, l.listener);
});
}
}
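
An application-side sketch (not part of the vendored source) of reacting to the QuotaExceededError path in onBufferAppending() above, which surfaces as a non-fatal BUFFER_FULL_ERROR. Halving the forward buffer is one plausible mitigation, not prescribed by hls.js; hls is an existing instance as in the earlier sketch.

hls.on(Hls.Events.ERROR, (event, data) => {
  if (data.details === Hls.ErrorDetails.BUFFER_FULL_ERROR && !data.fatal) {
    // Keep less media buffered ahead of playback so appends fit in quota.
    hls.config.maxMaxBufferLength = Math.max(
      30,
      hls.config.maxMaxBufferLength / 2
    );
  }
});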

84
node_modules/hls.js/src/controller/buffer-operation-queue.ts generated vendored Normal file

@ -0,0 +1,84 @@
import { logger } from '../utils/logger';
import type {
BufferOperation,
BufferOperationQueues,
SourceBuffers,
SourceBufferName,
} from '../types/buffer';
export default class BufferOperationQueue {
private buffers: SourceBuffers;
private queues: BufferOperationQueues = {
video: [],
audio: [],
audiovideo: [],
};
constructor(sourceBufferReference: SourceBuffers) {
this.buffers = sourceBufferReference;
}
public append(operation: BufferOperation, type: SourceBufferName) {
const queue = this.queues[type];
queue.push(operation);
if (queue.length === 1 && this.buffers[type]) {
this.executeNext(type);
}
}
public insertAbort(operation: BufferOperation, type: SourceBufferName) {
const queue = this.queues[type];
queue.unshift(operation);
this.executeNext(type);
}
public appendBlocker(type: SourceBufferName): Promise<{}> {
let execute;
const promise: Promise<{}> = new Promise((resolve) => {
execute = resolve;
});
const operation: BufferOperation = {
execute,
onStart: () => {},
onComplete: () => {},
onError: () => {},
};
this.append(operation, type);
return promise;
}
public executeNext(type: SourceBufferName) {
const { buffers, queues } = this;
const sb = buffers[type];
const queue = queues[type];
if (queue.length) {
const operation: BufferOperation = queue[0];
try {
// Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. Operations
// which do not end with this event must call _onSBUpdateEnd manually
operation.execute();
} catch (e) {
logger.warn(
'[buffer-operation-queue]: Unhandled exception executing the current operation'
);
operation.onError(e);
// Only shift the current operation off, otherwise the updateend handler will do this for us
if (!sb || !sb.updating) {
queue.shift();
this.executeNext(type);
}
}
}
}
public shiftAndExecuteNext(type: SourceBufferName) {
this.queues[type].shift();
this.executeNext(type);
}
public current(type: SourceBufferName) {
return this.queues[type][0];
}
}
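
A minimal sketch of the queue contract (not part of the vendored source): one operation runs at a time per buffer type, and shiftAndExecuteNext(), normally called from BufferController's 'updateend' handler, advances the queue. The stub SourceBuffer is hypothetical.

import BufferOperationQueue from './buffer-operation-queue';

const stubSb = { updating: false } as any; // hypothetical SourceBuffer stand-in
const queue = new BufferOperationQueue({ video: stubSb });
queue.append(
  {
    execute: () => {
      // A real operation ends with 'updateend'; the stub advances the queue
      // itself since no SourceBuffer event will fire.
      console.log('append running');
      queue.shiftAndExecuteNext('video');
    },
    onStart: () => {},
    onComplete: () => {}, // invoked by BufferController on 'updateend'
    onError: (e) => console.warn(e),
  },
  'video'
);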

282
node_modules/hls.js/src/controller/cap-level-controller.ts generated vendored Normal file

@ -0,0 +1,282 @@
/*
* cap stream level to media size dimension controller
*/
import { Events } from '../events';
import type { Level } from '../types/level';
import type {
ManifestParsedData,
BufferCodecsData,
MediaAttachingData,
FPSDropLevelCappingData,
} from '../types/events';
import StreamController from './stream-controller';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
class CapLevelController implements ComponentAPI {
public autoLevelCapping: number;
public firstLevel: number;
public media: HTMLVideoElement | null;
public restrictedLevels: Array<number>;
public timer: number | undefined;
private hls: Hls;
private streamController?: StreamController;
public clientRect: { width: number; height: number } | null;
constructor(hls: Hls) {
this.hls = hls;
this.autoLevelCapping = Number.POSITIVE_INFINITY;
this.firstLevel = -1;
this.media = null;
this.restrictedLevels = [];
this.timer = undefined;
this.clientRect = null;
this.registerListeners();
}
public setStreamController(streamController: StreamController) {
this.streamController = streamController;
}
public destroy() {
this.unregisterListener();
if (this.hls.config.capLevelToPlayerSize) {
this.stopCapping();
}
this.media = null;
this.clientRect = null;
// @ts-ignore
this.hls = this.streamController = null;
}
protected registerListeners() {
const { hls } = this;
hls.on(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
}
protected unregisterListener() {
const { hls } = this;
hls.off(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
}
protected onFpsDropLevelCapping(
event: Events.FPS_DROP_LEVEL_CAPPING,
data: FPSDropLevelCappingData
) {
// Don't add a restricted level more than once
if (
CapLevelController.isLevelAllowed(
data.droppedLevel,
this.restrictedLevels
)
) {
this.restrictedLevels.push(data.droppedLevel);
}
}
protected onMediaAttaching(
event: Events.MEDIA_ATTACHING,
data: MediaAttachingData
) {
this.media = data.media instanceof HTMLVideoElement ? data.media : null;
}
protected onManifestParsed(
event: Events.MANIFEST_PARSED,
data: ManifestParsedData
) {
const hls = this.hls;
this.restrictedLevels = [];
this.firstLevel = data.firstLevel;
if (hls.config.capLevelToPlayerSize && data.video) {
// Start capping immediately if the manifest has signaled video codecs
this.startCapping();
}
}
// Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
// to the first level
protected onBufferCodecs(
event: Events.BUFFER_CODECS,
data: BufferCodecsData
) {
const hls = this.hls;
if (hls.config.capLevelToPlayerSize && data.video) {
// If the manifest did not signal a video codec, capping has been deferred until we're certain video is present
this.startCapping();
}
}
protected onMediaDetaching() {
this.stopCapping();
}
detectPlayerSize() {
if (this.media && this.mediaHeight > 0 && this.mediaWidth > 0) {
const levels = this.hls.levels;
if (levels.length) {
const hls = this.hls;
hls.autoLevelCapping = this.getMaxLevel(levels.length - 1);
if (
hls.autoLevelCapping > this.autoLevelCapping &&
this.streamController
) {
// if the new auto level capping is higher than the previous one, flush the buffer using nextLevelSwitch;
// this usually happens when the user goes to fullscreen mode.
this.streamController.nextLevelSwitch();
}
this.autoLevelCapping = hls.autoLevelCapping;
}
}
}
/*
* the returned level should be the one whose dimensions are equal to or greater than the media (player) dimensions (so the video will be downscaled)
*/
getMaxLevel(capLevelIndex: number): number {
const levels = this.hls.levels;
if (!levels.length) {
return -1;
}
const validLevels = levels.filter(
(level, index) =>
CapLevelController.isLevelAllowed(index, this.restrictedLevels) &&
index <= capLevelIndex
);
this.clientRect = null;
return CapLevelController.getMaxLevelByMediaSize(
validLevels,
this.mediaWidth,
this.mediaHeight
);
}
startCapping() {
if (this.timer) {
// Don't reset capping if started twice; this can happen if the manifest signals a video codec
return;
}
this.autoLevelCapping = Number.POSITIVE_INFINITY;
this.hls.firstLevel = this.getMaxLevel(this.firstLevel);
self.clearInterval(this.timer);
this.timer = self.setInterval(this.detectPlayerSize.bind(this), 1000);
this.detectPlayerSize();
}
stopCapping() {
this.restrictedLevels = [];
this.firstLevel = -1;
this.autoLevelCapping = Number.POSITIVE_INFINITY;
if (this.timer) {
self.clearInterval(this.timer);
this.timer = undefined;
}
}
getDimensions(): { width: number; height: number } {
if (this.clientRect) {
return this.clientRect;
}
const media = this.media;
const boundsRect = {
width: 0,
height: 0,
};
if (media) {
const clientRect = media.getBoundingClientRect();
boundsRect.width = clientRect.width;
boundsRect.height = clientRect.height;
if (!boundsRect.width && !boundsRect.height) {
// When the media element has no width or height (equivalent to not being in the DOM),
// then use its width and height attributes (media.width, media.height)
boundsRect.width =
clientRect.right - clientRect.left || media.width || 0;
boundsRect.height =
clientRect.bottom - clientRect.top || media.height || 0;
}
}
this.clientRect = boundsRect;
return boundsRect;
}
get mediaWidth(): number {
return this.getDimensions().width * CapLevelController.contentScaleFactor;
}
get mediaHeight(): number {
return this.getDimensions().height * CapLevelController.contentScaleFactor;
}
static get contentScaleFactor(): number {
let pixelRatio = 1;
try {
pixelRatio = self.devicePixelRatio;
} catch (e) {
/* no-op */
}
return pixelRatio;
}
static isLevelAllowed(
level: number,
restrictedLevels: Array<number> = []
): boolean {
return restrictedLevels.indexOf(level) === -1;
}
static getMaxLevelByMediaSize(
levels: Array<Level>,
width: number,
height: number
): number {
if (!levels || !levels.length) {
return -1;
}
// Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
// to determine whether we've chosen the greatest bandwidth for the media's dimensions
const atGreatestBandwidth = (curLevel, nextLevel) => {
if (!nextLevel) {
return true;
}
return (
curLevel.width !== nextLevel.width ||
curLevel.height !== nextLevel.height
);
};
// If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
// the max level
let maxLevelIndex = levels.length - 1;
for (let i = 0; i < levels.length; i += 1) {
const level = levels[i];
if (
(level.width >= width || level.height >= height) &&
atGreatestBandwidth(level, levels[i + 1])
) {
maxLevelIndex = i;
break;
}
}
return maxLevelIndex;
}
}
export default CapLevelController;
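
A worked example (not part of the vendored source) of the static capping helper above, with hypothetical levels. Note that mediaWidth and mediaHeight already include contentScaleFactor, so a 960x540 player on a devicePixelRatio-2 display is measured as 1920x1080.

import CapLevelController from './cap-level-controller';

const levels: any[] = [
  { width: 640, height: 360 },
  { width: 1280, height: 720 },
  { width: 1920, height: 1080 },
]; // hypothetical Level objects trimmed to the fields used here

CapLevelController.getMaxLevelByMediaSize(levels, 960, 540); // -> 1
CapLevelController.getMaxLevelByMediaSize(levels, 1920, 1080); // -> 2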

538
node_modules/hls.js/src/controller/cmcd-controller.ts generated vendored Normal file

@ -0,0 +1,538 @@
import {
FragmentLoaderConstructor,
HlsConfig,
PlaylistLoaderConstructor,
} from '../config';
import { Events } from '../events';
import Hls, { Fragment } from '../hls';
import {
CMCD,
CMCDHeaders,
CMCDObjectType,
CMCDStreamingFormat,
CMCDVersion,
} from '../types/cmcd';
import { ComponentAPI } from '../types/component-api';
import { BufferCreatedData, MediaAttachedData } from '../types/events';
import {
FragmentLoaderContext,
Loader,
LoaderCallbacks,
LoaderConfiguration,
LoaderContext,
PlaylistLoaderContext,
} from '../types/loader';
import { BufferHelper } from '../utils/buffer-helper';
import { logger } from '../utils/logger';
/**
* Controller to deal with Common Media Client Data (CMCD)
* @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf
*/
export default class CMCDController implements ComponentAPI {
private hls: Hls;
private config: HlsConfig;
private media?: HTMLMediaElement;
private sid?: string;
private cid?: string;
private useHeaders: boolean = false;
private initialized: boolean = false;
private starved: boolean = false;
private buffering: boolean = true;
private audioBuffer?: SourceBuffer; // eslint-disable-line no-restricted-globals
private videoBuffer?: SourceBuffer; // eslint-disable-line no-restricted-globals
constructor(hls: Hls) {
this.hls = hls;
const config = (this.config = hls.config);
const { cmcd } = config;
if (cmcd != null) {
config.pLoader = this.createPlaylistLoader();
config.fLoader = this.createFragmentLoader();
this.sid = cmcd.sessionId || CMCDController.uuid();
this.cid = cmcd.contentId;
this.useHeaders = cmcd.useHeaders === true;
this.registerListeners();
}
}
private registerListeners() {
const hls = this.hls;
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(Events.MEDIA_DETACHED, this.onMediaDetached, this);
hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
}
private unregisterListeners() {
const hls = this.hls;
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(Events.MEDIA_DETACHED, this.onMediaDetached, this);
hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
this.onMediaDetached();
}
destroy() {
this.unregisterListeners();
// @ts-ignore
this.hls = this.config = this.audioBuffer = this.videoBuffer = null;
}
private onMediaAttached(
event: Events.MEDIA_ATTACHED,
data: MediaAttachedData
) {
this.media = data.media;
this.media.addEventListener('waiting', this.onWaiting);
this.media.addEventListener('playing', this.onPlaying);
}
private onMediaDetached() {
if (!this.media) {
return;
}
this.media.removeEventListener('waiting', this.onWaiting);
this.media.removeEventListener('playing', this.onPlaying);
// @ts-ignore
this.media = null;
}
private onBufferCreated(
event: Events.BUFFER_CREATED,
data: BufferCreatedData
) {
this.audioBuffer = data.tracks.audio?.buffer;
this.videoBuffer = data.tracks.video?.buffer;
}
private onWaiting = () => {
if (this.initialized) {
this.starved = true;
}
this.buffering = true;
};
private onPlaying = () => {
if (!this.initialized) {
this.initialized = true;
}
this.buffering = false;
};
/**
* Create baseline CMCD data
*/
private createData(): CMCD {
return {
v: CMCDVersion,
sf: CMCDStreamingFormat.HLS,
sid: this.sid,
cid: this.cid,
pr: this.media?.playbackRate,
mtp: this.hls.bandwidthEstimate / 1000,
};
}
/**
* Apply CMCD data to a request.
*/
private apply(context: LoaderContext, data: CMCD = {}) {
// apply baseline data
Object.assign(data, this.createData());
const isVideo =
data.ot === CMCDObjectType.INIT ||
data.ot === CMCDObjectType.VIDEO ||
data.ot === CMCDObjectType.MUXED;
if (this.starved && isVideo) {
data.bs = true;
data.su = true;
this.starved = false;
}
if (data.su == null) {
data.su = this.buffering;
}
// TODO: Implement rtp, nrr, nor, dl
if (this.useHeaders) {
const headers = CMCDController.toHeaders(data);
if (!Object.keys(headers).length) {
return;
}
if (!context.headers) {
context.headers = {};
}
Object.assign(context.headers, headers);
} else {
const query = CMCDController.toQuery(data);
if (!query) {
return;
}
context.url = CMCDController.appendQueryToUri(context.url, query);
}
}
/**
* Apply CMCD data to a manifest request.
*/
private applyPlaylistData = (context: PlaylistLoaderContext) => {
try {
this.apply(context, {
ot: CMCDObjectType.MANIFEST,
su: !this.initialized,
});
} catch (error) {
logger.warn('Could not generate manifest CMCD data.', error);
}
};
/**
* Apply CMCD data to a segment request
*/
private applyFragmentData = (context: FragmentLoaderContext) => {
try {
const fragment = context.frag;
const level = this.hls.levels[fragment.level];
const ot = this.getObjectType(fragment);
const data: CMCD = {
d: fragment.duration * 1000,
ot,
};
if (
ot === CMCDObjectType.VIDEO ||
ot === CMCDObjectType.AUDIO ||
ot == CMCDObjectType.MUXED
) {
data.br = level.bitrate / 1000;
data.tb = this.getTopBandwidth(ot) / 1000;
data.bl = this.getBufferLength(ot);
}
this.apply(context, data);
} catch (error) {
logger.warn('Could not generate segment CMCD data.', error);
}
};
/**
* The CMCD object type.
*/
private getObjectType(fragment: Fragment): CMCDObjectType | undefined {
const { type } = fragment;
if (type === 'subtitle') {
return CMCDObjectType.TIMED_TEXT;
}
if (fragment.sn === 'initSegment') {
return CMCDObjectType.INIT;
}
if (type === 'audio') {
return CMCDObjectType.AUDIO;
}
if (type === 'main') {
if (!this.hls.audioTracks.length) {
return CMCDObjectType.MUXED;
}
return CMCDObjectType.VIDEO;
}
return undefined;
}
/**
* Get the highest bitrate.
*/
private getTopBandwidth(type: CMCDObjectType) {
let bitrate: number = 0;
let levels;
const hls = this.hls;
if (type === CMCDObjectType.AUDIO) {
levels = hls.audioTracks;
} else {
const max = hls.maxAutoLevel;
const len = max > -1 ? max + 1 : hls.levels.length;
levels = hls.levels.slice(0, len);
}
for (const level of levels) {
if (level.bitrate > bitrate) {
bitrate = level.bitrate;
}
}
return bitrate > 0 ? bitrate : NaN;
}
/**
* Get the buffer length for a media type in milliseconds
*/
private getBufferLength(type: CMCDObjectType) {
const media = this.hls.media;
const buffer =
type === CMCDObjectType.AUDIO ? this.audioBuffer : this.videoBuffer;
if (!buffer || !media) {
return NaN;
}
const info = BufferHelper.bufferInfo(
buffer,
media.currentTime,
this.config.maxBufferHole
);
return info.len * 1000;
}
/**
* Create a playlist loader
*/
private createPlaylistLoader(): PlaylistLoaderConstructor | undefined {
const { pLoader } = this.config;
const apply = this.applyPlaylistData;
const Ctor = pLoader || (this.config.loader as PlaylistLoaderConstructor);
return class CmcdPlaylistLoader {
private loader: Loader<PlaylistLoaderContext>;
constructor(config: HlsConfig) {
this.loader = new Ctor(config);
}
get stats() {
return this.loader.stats;
}
get context() {
return this.loader.context;
}
destroy() {
this.loader.destroy();
}
abort() {
this.loader.abort();
}
load(
context: PlaylistLoaderContext,
config: LoaderConfiguration,
callbacks: LoaderCallbacks<PlaylistLoaderContext>
) {
apply(context);
this.loader.load(context, config, callbacks);
}
};
}
/**
* Create a fragment loader
*/
private createFragmentLoader(): FragmentLoaderConstructor | undefined {
const { fLoader } = this.config;
const apply = this.applyFragmentData;
const Ctor = fLoader || (this.config.loader as FragmentLoaderConstructor);
return class CmcdFragmentLoader {
private loader: Loader<FragmentLoaderContext>;
constructor(config: HlsConfig) {
this.loader = new Ctor(config);
}
get stats() {
return this.loader.stats;
}
get context() {
return this.loader.context;
}
destroy() {
this.loader.destroy();
}
abort() {
this.loader.abort();
}
load(
context: FragmentLoaderContext,
config: LoaderConfiguration,
callbacks: LoaderCallbacks<FragmentLoaderContext>
) {
apply(context);
this.loader.load(context, config, callbacks);
}
};
}
/**
* Generate a random v4 UUID
*
* @returns {string}
*/
static uuid(): string {
const url = URL.createObjectURL(new Blob());
const uuid = url.toString();
URL.revokeObjectURL(url);
return uuid.substr(uuid.lastIndexOf('/') + 1);
}
/**
* Serialize a CMCD data object according to the rules defined in
* section 3.2 of
* [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
*/
static serialize(data: CMCD): string {
const results: string[] = [];
const isValid = (value: any) =>
!Number.isNaN(value) && value != null && value !== '' && value !== false;
const toRounded = (value: number) => Math.round(value);
const toHundred = (value: number) => toRounded(value / 100) * 100;
const toUrlSafe = (value: string) => encodeURIComponent(value);
const formatters = {
br: toRounded,
d: toRounded,
bl: toHundred,
dl: toHundred,
mtp: toHundred,
nor: toUrlSafe,
rtp: toHundred,
tb: toRounded,
};
const keys = Object.keys(data || {}).sort();
for (const key of keys) {
let value = data[key];
// ignore invalid values
if (!isValid(value)) {
continue;
}
// Version should only be reported if not equal to 1.
if (key === 'v' && value === 1) {
continue;
}
// Playback rate should only be sent if not equal to 1.
if (key == 'pr' && value === 1) {
continue;
}
// Certain values require special formatting
const formatter = formatters[key];
if (formatter) {
value = formatter(value);
}
// Serialize the key/value pair
const type = typeof value;
let result: string;
if (key === 'ot' || key === 'sf' || key === 'st') {
result = `${key}=${value}`;
} else if (type === 'boolean') {
result = key;
} else if (type === 'number') {
result = `${key}=${value}`;
} else {
result = `${key}=${JSON.stringify(value)}`;
}
results.push(result);
}
return results.join(',');
}
/**
* Convert a CMCD data object to request headers according to the rules
* defined in sections 2.1 and 3.2 of
* [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
*/
static toHeaders(data: CMCD): Partial<CMCDHeaders> {
const keys = Object.keys(data);
const headers = {};
const headerNames = ['Object', 'Request', 'Session', 'Status'];
const headerGroups = [{}, {}, {}, {}];
const headerMap = {
br: 0,
d: 0,
ot: 0,
tb: 0,
bl: 1,
dl: 1,
mtp: 1,
nor: 1,
nrr: 1,
su: 1,
cid: 2,
pr: 2,
sf: 2,
sid: 2,
st: 2,
v: 2,
bs: 3,
rtp: 3,
};
for (const key of keys) {
// Unmapped fields are mapped to the Request header
const index = headerMap[key] != null ? headerMap[key] : 1;
headerGroups[index][key] = data[key];
}
for (let i = 0; i < headerGroups.length; i++) {
const value = CMCDController.serialize(headerGroups[i]);
if (value) {
headers[`CMCD-${headerNames[i]}`] = value;
}
}
return headers;
}
/**
* Convert a CMCD data object to query args according to the rules
* defined in sections 2.2 and 3.2 of
* [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
*/
static toQuery(data: CMCD): string {
return `CMCD=${encodeURIComponent(CMCDController.serialize(data))}`;
}
/**
* Append query args to a uri.
*/
static appendQueryToUri(uri, query) {
if (!query) {
return uri;
}
const separator = uri.includes('?') ? '&' : '?';
return `${uri}${separator}${query}`;
}
}
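
A worked example (not part of the vendored source) of the serialization rules above, with hypothetical values: keys are sorted, v=1 and pr=1 are omitted, booleans become bare keys, token fields (ot, sf, st) stay unquoted, and other strings are JSON-quoted.

// CMCDController.serialize({ v: 1, br: 3200.5, bs: true, ot: 'v', sid: 'abc' } as any)
// -> 'br=3201,bs,ot=v,sid="abc"'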

707
node_modules/hls.js/src/controller/eme-controller.ts generated vendored Normal file

@ -0,0 +1,707 @@
/**
* @author Stephan Hesse <disparat@gmail.com> | <tchakabam@gmail.com>
*
* DRM support for Hls.js
*/
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import { logger } from '../utils/logger';
import type { DRMSystemOptions, EMEControllerConfig } from '../config';
import type { MediaKeyFunc } from '../utils/mediakeys-helper';
import { KeySystems } from '../utils/mediakeys-helper';
import type Hls from '../hls';
import type { ComponentAPI } from '../types/component-api';
import type { MediaAttachedData, ManifestParsedData } from '../types/events';
const MAX_LICENSE_REQUEST_FAILURES = 3;
/**
* @see https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemConfiguration
* @param {Array<string>} audioCodecs List of required audio codecs to support
* @param {Array<string>} videoCodecs List of required video codecs to support
* @param {object} drmSystemOptions Optional parameters/requirements for the key-system
* @returns {Array<MediaSystemConfiguration>} An array of supported configurations
*/
const createWidevineMediaKeySystemConfigurations = function (
audioCodecs: string[],
videoCodecs: string[],
drmSystemOptions: DRMSystemOptions
): MediaKeySystemConfiguration[] {
/* jshint ignore:line */
const baseConfig: MediaKeySystemConfiguration = {
// initDataTypes: ['keyids', 'mp4'],
// label: "",
// persistentState: "not-allowed", // or "required" ?
// distinctiveIdentifier: "not-allowed", // or "required" ?
// sessionTypes: ['temporary'],
audioCapabilities: [], // { contentType: 'audio/mp4; codecs="mp4a.40.2"' }
videoCapabilities: [], // { contentType: 'video/mp4; codecs="avc1.42E01E"' }
};
audioCodecs.forEach((codec) => {
baseConfig.audioCapabilities!.push({
contentType: `audio/mp4; codecs="${codec}"`,
robustness: drmSystemOptions.audioRobustness || '',
});
});
videoCodecs.forEach((codec) => {
baseConfig.videoCapabilities!.push({
contentType: `video/mp4; codecs="${codec}"`,
robustness: drmSystemOptions.videoRobustness || '',
});
});
return [baseConfig];
};
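// A worked sketch of the function above (assumed inputs, no robustness options):
// createWidevineMediaKeySystemConfigurations(['mp4a.40.2'], ['avc1.42E01E'], {})
// returns a single-entry array equivalent to:
// [{
//   audioCapabilities: [{ contentType: 'audio/mp4; codecs="mp4a.40.2"', robustness: '' }],
//   videoCapabilities: [{ contentType: 'video/mp4; codecs="avc1.42E01E"', robustness: '' }],
// }]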
/**
* The idea here is to handle key-system-specific (and platform-specific) configuration differences
* so that they work with the local requestMediaKeySystemAccess method.
*
* We can also rule out platform-related key-system support at this point by throwing an error.
*
* @param {string} keySystem Identifier for the key-system, see `KeySystems` enum
* @param {Array<string>} audioCodecs List of required audio codecs to support
* @param {Array<string>} videoCodecs List of required video codecs to support
* @throws will throw an error if an unknown key-system is passed
* @returns {Array<MediaSystemConfiguration>} A non-empty Array of MediaKeySystemConfiguration objects
*/
const getSupportedMediaKeySystemConfigurations = function (
keySystem: KeySystems,
audioCodecs: string[],
videoCodecs: string[],
drmSystemOptions: DRMSystemOptions
): MediaKeySystemConfiguration[] {
switch (keySystem) {
case KeySystems.WIDEVINE:
return createWidevineMediaKeySystemConfigurations(
audioCodecs,
videoCodecs,
drmSystemOptions
);
default:
throw new Error(`Unknown key-system: ${keySystem}`);
}
};
interface MediaKeysListItem {
mediaKeys?: MediaKeys;
mediaKeysSession?: MediaKeySession;
mediaKeysSessionInitialized: boolean;
mediaKeySystemAccess: MediaKeySystemAccess;
mediaKeySystemDomain: KeySystems;
}
/**
* Controller to deal with encrypted media extensions (EME)
* @see https://developer.mozilla.org/en-US/docs/Web/API/Encrypted_Media_Extensions_API
*
* @class
* @constructor
*/
class EMEController implements ComponentAPI {
private hls: Hls;
private _widevineLicenseUrl?: string;
private _licenseXhrSetup?: (xhr: XMLHttpRequest, url: string) => void;
private _licenseResponseCallback?: (
xhr: XMLHttpRequest,
url: string
) => ArrayBuffer;
private _emeEnabled: boolean;
private _requestMediaKeySystemAccess: MediaKeyFunc | null;
private _drmSystemOptions: DRMSystemOptions;
private _config: EMEControllerConfig;
private _mediaKeysList: MediaKeysListItem[] = [];
private _media: HTMLMediaElement | null = null;
private _hasSetMediaKeys: boolean = false;
private _requestLicenseFailureCount: number = 0;
private mediaKeysPromise: Promise<MediaKeys> | null = null;
private _onMediaEncrypted = this.onMediaEncrypted.bind(this);
/**
* @constructs
* @param {Hls} hls Our Hls.js instance
*/
constructor(hls: Hls) {
this.hls = hls;
this._config = hls.config;
this._widevineLicenseUrl = this._config.widevineLicenseUrl;
this._licenseXhrSetup = this._config.licenseXhrSetup;
this._licenseResponseCallback = this._config.licenseResponseCallback;
this._emeEnabled = this._config.emeEnabled;
this._requestMediaKeySystemAccess =
this._config.requestMediaKeySystemAccessFunc;
this._drmSystemOptions = this._config.drmSystemOptions;
this._registerListeners();
}
public destroy() {
this._unregisterListeners();
// @ts-ignore
this.hls = this._onMediaEncrypted = null;
this._requestMediaKeySystemAccess = null;
}
private _registerListeners() {
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
this.hls.on(Events.MEDIA_DETACHED, this.onMediaDetached, this);
this.hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
}
private _unregisterListeners() {
this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
this.hls.off(Events.MEDIA_DETACHED, this.onMediaDetached, this);
this.hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
}
/**
* @param {string} keySystem Identifier for the key-system, see `KeySystems` enum
* @returns {string} License server URL for the key-system (if one is configured; otherwise an error is thrown)
* @throws if an unsupported key-system is passed
*/
getLicenseServerUrl(keySystem: KeySystems): string {
switch (keySystem) {
case KeySystems.WIDEVINE:
if (!this._widevineLicenseUrl) {
break;
}
return this._widevineLicenseUrl;
}
throw new Error(
`no license server URL configured for key-system "${keySystem}"`
);
}
/**
* Requests access object and adds it to our list upon success
* @private
* @param {string} keySystem System ID (see `KeySystems`)
* @param {Array<string>} audioCodecs List of required audio codecs to support
* @param {Array<string>} videoCodecs List of required video codecs to support
* @throws When an unsupported KeySystem is passed
*/
private _attemptKeySystemAccess(
keySystem: KeySystems,
audioCodecs: string[],
videoCodecs: string[]
) {
// This can throw, but is caught in event handler callpath
const mediaKeySystemConfigs = getSupportedMediaKeySystemConfigurations(
keySystem,
audioCodecs,
videoCodecs,
this._drmSystemOptions
);
logger.log('Requesting encrypted media key-system access');
// expecting interface like window.navigator.requestMediaKeySystemAccess
const keySystemAccessPromise = this.requestMediaKeySystemAccess(
keySystem,
mediaKeySystemConfigs
);
this.mediaKeysPromise = keySystemAccessPromise.then(
(mediaKeySystemAccess) =>
this._onMediaKeySystemAccessObtained(keySystem, mediaKeySystemAccess)
);
keySystemAccessPromise.catch((err) => {
logger.error(`Failed to obtain key-system "${keySystem}" access:`, err);
});
}
get requestMediaKeySystemAccess() {
if (!this._requestMediaKeySystemAccess) {
throw new Error('No requestMediaKeySystemAccess function configured');
}
return this._requestMediaKeySystemAccess;
}
/**
* Handles obtaining access to a key-system
* @private
* @param {string} keySystem
* @param {MediaKeySystemAccess} mediaKeySystemAccess https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess
*/
private _onMediaKeySystemAccessObtained(
keySystem: KeySystems,
mediaKeySystemAccess: MediaKeySystemAccess
): Promise<MediaKeys> {
logger.log(`Access for key-system "${keySystem}" obtained`);
const mediaKeysListItem: MediaKeysListItem = {
mediaKeysSessionInitialized: false,
mediaKeySystemAccess: mediaKeySystemAccess,
mediaKeySystemDomain: keySystem,
};
this._mediaKeysList.push(mediaKeysListItem);
const mediaKeysPromise = Promise.resolve()
.then(() => mediaKeySystemAccess.createMediaKeys())
.then((mediaKeys) => {
mediaKeysListItem.mediaKeys = mediaKeys;
logger.log(`Media-keys created for key-system "${keySystem}"`);
this._onMediaKeysCreated();
return mediaKeys;
});
mediaKeysPromise.catch((err) => {
logger.error('Failed to create media-keys:', err);
});
return mediaKeysPromise;
}
/**
* Handles key creation (which represents access to the CDM). We then create key-sessions
* for all existing keys where no session exists yet.
*
* @private
*/
private _onMediaKeysCreated() {
// check for all key-list items if a session exists, otherwise, create one
this._mediaKeysList.forEach((mediaKeysListItem) => {
if (!mediaKeysListItem.mediaKeysSession) {
// mediaKeys is definitely initialized here
mediaKeysListItem.mediaKeysSession =
mediaKeysListItem.mediaKeys!.createSession();
this._onNewMediaKeySession(mediaKeysListItem.mediaKeysSession);
}
});
}
/**
* @private
* @param {*} keySession
*/
private _onNewMediaKeySession(keySession: MediaKeySession) {
logger.log(`New key-system session ${keySession.sessionId}`);
keySession.addEventListener(
'message',
(event: MediaKeyMessageEvent) => {
this._onKeySessionMessage(keySession, event.message);
},
false
);
}
/**
* @private
* @param {MediaKeySession} keySession
* @param {ArrayBuffer} message
*/
private _onKeySessionMessage(
keySession: MediaKeySession,
message: ArrayBuffer
) {
logger.log('Got EME message event, creating license request');
this._requestLicense(message, (data: ArrayBuffer) => {
logger.log(
`Received license data (length: ${
data ? data.byteLength : data
}), updating key-session`
);
keySession.update(data);
});
}
/**
* @private
* @param e {MediaEncryptedEvent}
*/
private onMediaEncrypted(e: MediaEncryptedEvent) {
logger.log(`Media is encrypted using "${e.initDataType}" init data type`);
if (!this.mediaKeysPromise) {
logger.error(
'Fatal: Media is encrypted but no CDM access or no keys have been requested'
);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_KEYS,
fatal: true,
});
return;
}
const finallySetKeyAndStartSession = (mediaKeys) => {
if (!this._media) {
return;
}
this._attemptSetMediaKeys(mediaKeys);
this._generateRequestWithPreferredKeySession(e.initDataType, e.initData);
};
// Could use `Promise.finally` but some Promise polyfills are missing it
this.mediaKeysPromise
.then(finallySetKeyAndStartSession)
.catch(finallySetKeyAndStartSession);
}
/**
* @private
*/
private _attemptSetMediaKeys(mediaKeys?: MediaKeys) {
if (!this._media) {
throw new Error(
'Attempted to set mediaKeys without first attaching a media element'
);
}
if (!this._hasSetMediaKeys) {
// FIXME: see if we can/want/need-to really to deal with several potential key-sessions?
const keysListItem = this._mediaKeysList[0];
if (!keysListItem || !keysListItem.mediaKeys) {
logger.error(
'Fatal: Media is encrypted but no CDM access or no keys have been obtained yet'
);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_KEYS,
fatal: true,
});
return;
}
logger.log('Setting keys for encrypted media');
this._media.setMediaKeys(keysListItem.mediaKeys);
this._hasSetMediaKeys = true;
}
}
/**
* @private
*/
private _generateRequestWithPreferredKeySession(
initDataType: string,
initData: ArrayBuffer | null
) {
// FIXME: see if we can/want/need-to really to deal with several potential key-sessions?
const keysListItem = this._mediaKeysList[0];
if (!keysListItem) {
logger.error(
'Fatal: Media is encrypted but no key-system access has been obtained yet'
);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_ACCESS,
fatal: true,
});
return;
}
if (keysListItem.mediaKeysSessionInitialized) {
logger.warn('Key-Session already initialized but requested again');
return;
}
const keySession = keysListItem.mediaKeysSession;
if (!keySession) {
logger.error('Fatal: Media is encrypted but no key-session exists');
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_SESSION,
fatal: true,
});
return;
}
// initData is null if the media is not CORS-same-origin
if (!initData) {
logger.warn(
'Fatal: initData required for generating a key session is null'
);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_INIT_DATA,
fatal: true,
});
return;
}
logger.log(
`Generating key-session request for "${initDataType}" init data type`
);
keysListItem.mediaKeysSessionInitialized = true;
keySession
.generateRequest(initDataType, initData)
.then(() => {
logger.debug('Key-session generation succeeded');
})
.catch((err) => {
logger.error('Error generating key-session request:', err);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_SESSION,
fatal: false,
});
});
}
/**
* @private
* @param {string} url License server URL
* @param {ArrayBuffer} keyMessage Message data issued by key-system
* @param {function} callback Called when XHR has succeeded
* @returns {XMLHttpRequest} Unsent (but opened) XHR object
* @throws if XMLHttpRequest construction failed
*/
private _createLicenseXhr(
url: string,
keyMessage: ArrayBuffer,
callback: (data: ArrayBuffer) => void
): XMLHttpRequest {
const xhr = new XMLHttpRequest();
xhr.responseType = 'arraybuffer';
xhr.onreadystatechange = this._onLicenseRequestReadyStateChange.bind(
this,
xhr,
url,
keyMessage,
callback
);
let licenseXhrSetup = this._licenseXhrSetup;
if (licenseXhrSetup) {
try {
licenseXhrSetup.call(this.hls, xhr, url);
licenseXhrSetup = undefined;
} catch (e) {
logger.error(e);
}
}
try {
// if licenseXhrSetup did not yet call open, let's do it now
if (!xhr.readyState) {
xhr.open('POST', url, true);
}
if (licenseXhrSetup) {
licenseXhrSetup.call(this.hls, xhr, url);
}
} catch (e) {
// IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
throw new Error(`issue setting up KeySystem license XHR ${e}`);
}
return xhr;
}
/**
* @private
* @param {XMLHttpRequest} xhr
* @param {string} url License server URL
* @param {ArrayBuffer} keyMessage Message data issued by key-system
* @param {function} callback Called when XHR has succeeded
*/
private _onLicenseRequestReadyStateChange(
xhr: XMLHttpRequest,
url: string,
keyMessage: ArrayBuffer,
callback: (data: ArrayBuffer) => void
) {
switch (xhr.readyState) {
case 4:
if (xhr.status === 200) {
this._requestLicenseFailureCount = 0;
logger.log('License request succeeded');
let data: ArrayBuffer = xhr.response;
const licenseResponseCallback = this._licenseResponseCallback;
if (licenseResponseCallback) {
try {
data = licenseResponseCallback.call(this.hls, xhr, url);
} catch (e) {
logger.error(e);
}
}
callback(data);
} else {
logger.error(
`License Request XHR failed (${url}). Status: ${xhr.status} (${xhr.statusText})`
);
this._requestLicenseFailureCount++;
if (this._requestLicenseFailureCount > MAX_LICENSE_REQUEST_FAILURES) {
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_LICENSE_REQUEST_FAILED,
fatal: true,
});
return;
}
const attemptsLeft =
MAX_LICENSE_REQUEST_FAILURES - this._requestLicenseFailureCount + 1;
logger.warn(
`Retrying license request, ${attemptsLeft} attempts left`
);
this._requestLicense(keyMessage, callback);
}
break;
}
}
/**
* @private
* @param {MediaKeysListItem} keysListItem
* @param {ArrayBuffer} keyMessage
* @returns {ArrayBuffer} Challenge data posted to license server
* @throws if KeySystem is unsupported
*/
private _generateLicenseRequestChallenge(
keysListItem: MediaKeysListItem,
keyMessage: ArrayBuffer
): ArrayBuffer {
switch (keysListItem.mediaKeySystemDomain) {
// case KeySystems.PLAYREADY:
// from https://github.com/MicrosoftEdge/Demos/blob/master/eme/scripts/demo.js
/*
if (this.licenseType !== this.LICENSE_TYPE_WIDEVINE) {
// For PlayReady CDMs, we need to dig the Challenge out of the XML.
var keyMessageXml = new DOMParser().parseFromString(String.fromCharCode.apply(null, new Uint16Array(keyMessage)), 'application/xml');
if (keyMessageXml.getElementsByTagName('Challenge')[0]) {
challenge = atob(keyMessageXml.getElementsByTagName('Challenge')[0].childNodes[0].nodeValue);
} else {
throw 'Cannot find <Challenge> in key message';
}
var headerNames = keyMessageXml.getElementsByTagName('name');
var headerValues = keyMessageXml.getElementsByTagName('value');
if (headerNames.length !== headerValues.length) {
throw 'Mismatched header <name>/<value> pair in key message';
}
for (var i = 0; i < headerNames.length; i++) {
xhr.setRequestHeader(headerNames[i].childNodes[0].nodeValue, headerValues[i].childNodes[0].nodeValue);
}
}
break;
*/
case KeySystems.WIDEVINE:
// For Widevine CDMs, the challenge is the keyMessage.
return keyMessage;
}
throw new Error(
`unsupported key-system: ${keysListItem.mediaKeySystemDomain}`
);
}
/**
* @private
* @param keyMessage
* @param callback
*/
private _requestLicense(
keyMessage: ArrayBuffer,
callback: (data: ArrayBuffer) => void
) {
logger.log('Requesting content license for key-system');
const keysListItem = this._mediaKeysList[0];
if (!keysListItem) {
logger.error(
'Fatal error: Media is encrypted but no key-system access has been obtained yet'
);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_NO_ACCESS,
fatal: true,
});
return;
}
try {
const url = this.getLicenseServerUrl(keysListItem.mediaKeySystemDomain);
const xhr = this._createLicenseXhr(url, keyMessage, callback);
logger.log(`Sending license request to URL: ${url}`);
const challenge = this._generateLicenseRequestChallenge(
keysListItem,
keyMessage
);
xhr.send(challenge);
} catch (e) {
logger.error(`Failure requesting DRM license: ${e}`);
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.KEY_SYSTEM_ERROR,
details: ErrorDetails.KEY_SYSTEM_LICENSE_REQUEST_FAILED,
fatal: true,
});
}
}
onMediaAttached(event: Events.MEDIA_ATTACHED, data: MediaAttachedData) {
if (!this._emeEnabled) {
return;
}
const media = data.media;
// keep reference of media
this._media = media;
media.addEventListener('encrypted', this._onMediaEncrypted);
}
onMediaDetached() {
const media = this._media;
const mediaKeysList = this._mediaKeysList;
if (!media) {
return;
}
media.removeEventListener('encrypted', this._onMediaEncrypted);
this._media = null;
this._mediaKeysList = [];
// Close all sessions and remove media keys from the video element.
Promise.all(
mediaKeysList.map((mediaKeysListItem) => {
if (mediaKeysListItem.mediaKeysSession) {
return mediaKeysListItem.mediaKeysSession.close().catch(() => {
// Ignore errors when closing the sessions. Closing a session that
// generated no key requests will throw an error.
});
}
})
)
.then(() => {
return media.setMediaKeys(null);
})
.catch(() => {
// Ignore any failures while removing media keys from the video element.
});
}
onManifestParsed(event: Events.MANIFEST_PARSED, data: ManifestParsedData) {
if (!this._emeEnabled) {
return;
}
const audioCodecs = data.levels
.map((level) => level.audioCodec)
.filter(
(audioCodec: string | undefined): audioCodec is string => !!audioCodec
);
const videoCodecs = data.levels
.map((level) => level.videoCodec)
.filter(
(videoCodec: string | undefined): videoCodec is string => !!videoCodec
);
this._attemptKeySystemAccess(KeySystems.WIDEVINE, audioCodecs, videoCodecs);
}
}
export default EMEController;
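A minimal wiring sketch for the controller above, not a definitive setup: the license URL and auth header are placeholders, and requestMediaKeySystemAccessFunc simply delegates to the browser's EME entry point (hls.js ships a helper of the same shape in utils/mediakeys-helper).

import Hls from 'hls.js';

const hls = new Hls({
emeEnabled: true,
widevineLicenseUrl: 'https://license.example.com/widevine', // placeholder URL
drmSystemOptions: { videoRobustness: 'SW_SECURE_DECODE' },
requestMediaKeySystemAccessFunc: (keySystem, supportedConfigurations) =>
navigator.requestMediaKeySystemAccess(keySystem, supportedConfigurations),
licenseXhrSetup: (xhr, url) => {
// _createLicenseXhr opens the request if the setup callback did not;
// opening here first lets us attach a hypothetical auth header.
xhr.open('POST', url, true);
xhr.setRequestHeader('X-Auth-Token', 'example-token');
},
});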

141
node_modules/hls.js/src/controller/fps-controller.ts generated vendored Normal file
View file

@ -0,0 +1,141 @@
import { Events } from '../events';
import { logger } from '../utils/logger';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { MediaAttachingData } from '../types/events';
import StreamController from './stream-controller';
class FPSController implements ComponentAPI {
private hls: Hls;
private isVideoPlaybackQualityAvailable: boolean = false;
private timer?: number;
private media: HTMLVideoElement | null = null;
private lastTime: any;
private lastDroppedFrames: number = 0;
private lastDecodedFrames: number = 0;
// stream controller must be provided as a dependency!
private streamController!: StreamController;
constructor(hls: Hls) {
this.hls = hls;
this.registerListeners();
}
public setStreamController(streamController: StreamController) {
this.streamController = streamController;
}
protected registerListeners() {
this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
}
protected unregisterListeners() {
this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching);
}
destroy() {
if (this.timer) {
clearInterval(this.timer);
}
this.unregisterListeners();
this.isVideoPlaybackQualityAvailable = false;
this.media = null;
}
protected onMediaAttaching(
event: Events.MEDIA_ATTACHING,
data: MediaAttachingData
) {
const config = this.hls.config;
if (config.capLevelOnFPSDrop) {
const media =
data.media instanceof self.HTMLVideoElement ? data.media : null;
this.media = media;
if (media && typeof media.getVideoPlaybackQuality === 'function') {
this.isVideoPlaybackQualityAvailable = true;
}
self.clearInterval(this.timer);
this.timer = self.setInterval(
this.checkFPSInterval.bind(this),
config.fpsDroppedMonitoringPeriod
);
}
}
checkFPS(
video: HTMLVideoElement,
decodedFrames: number,
droppedFrames: number
) {
const currentTime = performance.now();
if (decodedFrames) {
if (this.lastTime) {
const currentPeriod = currentTime - this.lastTime;
const currentDropped = droppedFrames - this.lastDroppedFrames;
const currentDecoded = decodedFrames - this.lastDecodedFrames;
const droppedFPS = (1000 * currentDropped) / currentPeriod;
const hls = this.hls;
hls.trigger(Events.FPS_DROP, {
currentDropped: currentDropped,
currentDecoded: currentDecoded,
totalDroppedFrames: droppedFrames,
});
if (droppedFPS > 0) {
// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
if (
currentDropped >
hls.config.fpsDroppedMonitoringThreshold * currentDecoded
) {
let currentLevel = hls.currentLevel;
logger.warn(
'drop FPS ratio greater than max allowed value for currentLevel: ' +
currentLevel
);
if (
currentLevel > 0 &&
(hls.autoLevelCapping === -1 ||
hls.autoLevelCapping >= currentLevel)
) {
currentLevel = currentLevel - 1;
hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
level: currentLevel,
droppedLevel: hls.currentLevel,
});
hls.autoLevelCapping = currentLevel;
this.streamController.nextLevelSwitch();
}
}
}
}
this.lastTime = currentTime;
this.lastDroppedFrames = droppedFrames;
this.lastDecodedFrames = decodedFrames;
}
}
checkFPSInterval() {
const video = this.media;
if (video) {
if (this.isVideoPlaybackQualityAvailable) {
const videoPlaybackQuality = video.getVideoPlaybackQuality();
this.checkFPS(
video,
videoPlaybackQuality.totalVideoFrames,
videoPlaybackQuality.droppedVideoFrames
);
} else {
// HTMLVideoElement doesn't include the webkit types
this.checkFPS(
video,
(video as any).webkitDecodedFrameCount as number,
(video as any).webkitDroppedFrameCount as number
);
}
}
}
}
export default FPSController;
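To make the capping condition in checkFPS concrete, a worked sketch with invented numbers and an assumed fpsDroppedMonitoringThreshold of 0.2:

// One monitoring period of 5000 ms between polls:
const currentPeriod = 5000;
const currentDecoded = 300; // frames decoded during the period
const currentDropped = 75; // frames dropped during the period
const droppedFPS = (1000 * currentDropped) / currentPeriod; // 15 drops per second
// Level capping triggers because 75 > 0.2 * 300 (= 60):
const shouldCapLevel = droppedFPS > 0 && currentDropped > 0.2 * currentDecoded; // true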

175
node_modules/hls.js/src/controller/fragment-finders.ts generated vendored Normal file
View file

@ -0,0 +1,175 @@
import BinarySearch from '../utils/binary-search';
import { Fragment } from '../loader/fragment';
/**
* Returns first fragment whose endPdt value exceeds the given PDT.
* @param {Array<Fragment>} fragments - The array of candidate fragments
* @param {number|null} [PDTValue = null] - The PDT value which must be exceeded
* @param {number} [maxFragLookUpTolerance = 0] - The tolerance, in seconds, within which a fragment's start/end may fall and still be considered contiguous
* @returns {*|null} fragment - The best matching fragment
*/
export function findFragmentByPDT(
fragments: Array<Fragment>,
PDTValue: number | null,
maxFragLookUpTolerance: number
): Fragment | null {
if (
PDTValue === null ||
!Array.isArray(fragments) ||
!fragments.length ||
!Number.isFinite(PDTValue)
) {
return null;
}
// if less than start
const startPDT = fragments[0].programDateTime;
if (PDTValue < (startPDT || 0)) {
return null;
}
const endPDT = fragments[fragments.length - 1].endProgramDateTime;
if (PDTValue >= (endPDT || 0)) {
return null;
}
maxFragLookUpTolerance = maxFragLookUpTolerance || 0;
for (let seg = 0; seg < fragments.length; ++seg) {
const frag = fragments[seg];
if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) {
return frag;
}
}
return null;
}
/**
* Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
* This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
* breaking any traps which would cause the same fragment to be continuously selected within a small range.
* @param {*} fragPrevious - The last frag successfully appended
* @param {Array} fragments - The array of candidate fragments
* @param {number} [bufferEnd = 0] - The end of the contiguous buffered range the playhead is currently within
* @param {number} maxFragLookUpTolerance - The tolerance, in seconds, within which a fragment's start/end may fall and still be considered contiguous
* @returns {*} foundFrag - The best matching fragment
*/
export function findFragmentByPTS(
fragPrevious: Fragment | null,
fragments: Array<Fragment>,
bufferEnd: number = 0,
maxFragLookUpTolerance: number = 0
): Fragment | null {
let fragNext: Fragment | null = null;
if (fragPrevious) {
fragNext =
fragments[
(fragPrevious.sn as number) - (fragments[0].sn as number) + 1
] || null;
} else if (bufferEnd === 0 && fragments[0].start === 0) {
fragNext = fragments[0];
}
// Prefer the next fragment if it's within tolerance
if (
fragNext &&
fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext) ===
0
) {
return fragNext;
}
// We might be seeking past the tolerance so find the best match
const foundFragment = BinarySearch.search(
fragments,
fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance)
);
if (foundFragment) {
return foundFragment;
}
// If no match was found return the next fragment after fragPrevious, or null
return fragNext;
}
/**
* The test function used by findFragmentByPTS's BinarySearch to look for the best match to the current buffer conditions.
* @param {*} candidate - The fragment to test
* @param {number} [bufferEnd = 0] - The end of the current buffered range the playhead is currently within
* @param {number} [maxFragLookUpTolerance = 0] - The tolerance, in seconds, within which a fragment's start may fall and still be considered contiguous
* @returns {number} - 0 if it matches, 1 if too low, -1 if too high
*/
export function fragmentWithinToleranceTest(
bufferEnd = 0,
maxFragLookUpTolerance = 0,
candidate: Fragment
) {
// offset should be within fragment boundary - config.maxFragLookUpTolerance
// this is to cope with situations like
// bufferEnd = 9.991
// frag[0] : [0,10]
// frag[1] : [10,20]
// bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
// frag start frag start+duration
// |-----------------------------|
// <---> <--->
// ...--------><-----------------------------><---------....
// previous frag matching fragment next frag
// return -1 return 0 return 1
// logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
// Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
const candidateLookupTolerance = Math.min(
maxFragLookUpTolerance,
candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)
);
if (
candidate.start + candidate.duration - candidateLookupTolerance <=
bufferEnd
) {
return 1;
} else if (
candidate.start - candidateLookupTolerance > bufferEnd &&
candidate.start
) {
// the candidate.start check avoids returning -1 for the first fragment, whose lower bound would otherwise go negative once the tolerance is applied
return -1;
}
return 0;
}
/**
* The test function used by findFragmentByPDT's BinarySearch to look for the best match to the current buffer conditions.
* This function tests the candidate's program date time values, as represented in Unix time
* @param {*} candidate - The fragment to test
* @param {number} [pdtBufferEnd = 0] - The Unix time representing the end of the current buffered range
* @param {number} [maxFragLookUpTolerance = 0] - The tolerance, in seconds, within which a fragment's start may fall and still be considered contiguous
* @returns {boolean} True if contiguous, false otherwise
*/
export function pdtWithinToleranceTest(
pdtBufferEnd: number,
maxFragLookUpTolerance: number,
candidate: Fragment
): boolean {
const candidateLookupTolerance =
Math.min(
maxFragLookUpTolerance,
candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)
) * 1000;
// endProgramDateTime can be null, default to zero
const endProgramDateTime = candidate.endProgramDateTime || 0;
return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd;
}
export function findFragWithCC(
fragments: Fragment[],
cc: number
): Fragment | null {
return BinarySearch.search(fragments, (candidate) => {
if (candidate.cc < cc) {
return 1;
} else if (candidate.cc > cc) {
return -1;
} else {
return 0;
}
});
}
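The buffer-hole scenario sketched in the comments above, traced through fragmentWithinToleranceTest with plain objects standing in for Fragment instances (a sketch only; production code passes real loader/fragment objects):

const frag0 = { start: 0, duration: 10, deltaPTS: 0 } as unknown as Fragment;
const frag1 = { start: 10, duration: 10, deltaPTS: 0 } as unknown as Fragment;
// bufferEnd = 9.991 with a 0.25 s tolerance:
fragmentWithinToleranceTest(9.991, 0.25, frag0);
// -> 1: frag0 ends at 10 and 10 - 0.25 <= 9.991, so search further ahead
fragmentWithinToleranceTest(9.991, 0.25, frag1);
// -> 0: frag1 starts at 10, and 10 - 0.25 <= 9.991 < 20 - 0.25, so it matches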

470
node_modules/hls.js/src/controller/fragment-tracker.ts generated vendored Normal file
View file

@ -0,0 +1,470 @@
import { Events } from '../events';
import { Fragment, Part } from '../loader/fragment';
import { PlaylistLevelType } from '../types/loader';
import type { SourceBufferName } from '../types/buffer';
import type {
FragmentBufferedRange,
FragmentEntity,
FragmentTimeRange,
} from '../types/fragment-tracker';
import type { ComponentAPI } from '../types/component-api';
import type {
BufferAppendedData,
FragBufferedData,
FragLoadedData,
} from '../types/events';
import type Hls from '../hls';
export enum FragmentState {
NOT_LOADED = 'NOT_LOADED',
BACKTRACKED = 'BACKTRACKED',
APPENDING = 'APPENDING',
PARTIAL = 'PARTIAL',
OK = 'OK',
}
export class FragmentTracker implements ComponentAPI {
private activeFragment: Fragment | null = null;
private activeParts: Part[] | null = null;
private fragments: Partial<Record<string, FragmentEntity>> =
Object.create(null);
private timeRanges:
| {
[key in SourceBufferName]: TimeRanges;
}
| null = Object.create(null);
private bufferPadding: number = 0.2;
private hls: Hls;
constructor(hls: Hls) {
this.hls = hls;
this._registerListeners();
}
private _registerListeners() {
const { hls } = this;
hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this);
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this);
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
}
public destroy() {
this._unregisterListeners();
// @ts-ignore
this.fragments = this.timeRanges = null;
}
/**
* Return a Fragment with an appended range that matches the position and levelType.
* If no matching Fragment is found, return null
*/
public getAppendedFrag(
position: number,
levelType: PlaylistLevelType
): Fragment | Part | null {
if (levelType === PlaylistLevelType.MAIN) {
const { activeFragment, activeParts } = this;
if (!activeFragment) {
return null;
}
if (activeParts) {
for (let i = activeParts.length; i--; ) {
const activePart = activeParts[i];
const appendedPTS = activePart
? activePart.end
: activeFragment.appendedPTS;
if (
activePart.start <= position &&
appendedPTS !== undefined &&
position <= appendedPTS
) {
// 9 is a magic number: remove parts from the lookup after a match, but keep a few around for short seeks back.
if (i > 9) {
this.activeParts = activeParts.slice(i - 9);
}
return activePart;
}
}
} else if (
activeFragment.start <= position &&
activeFragment.appendedPTS !== undefined &&
position <= activeFragment.appendedPTS
) {
return activeFragment;
}
}
return this.getBufferedFrag(position, levelType);
}
/**
* Return a buffered Fragment that matches the position and levelType.
* A buffered Fragment is one whose loading, parsing and appending is done (completed or "partial" meaning aborted).
* If not found any Fragment, return null
*/
public getBufferedFrag(
position: number,
levelType: PlaylistLevelType
): Fragment | null {
const { fragments } = this;
const keys = Object.keys(fragments);
for (let i = keys.length; i--; ) {
const fragmentEntity = fragments[keys[i]];
if (fragmentEntity?.body.type === levelType && fragmentEntity.buffered) {
const frag = fragmentEntity.body;
if (frag.start <= position && position <= frag.end) {
return frag;
}
}
}
return null;
}
/**
* Partial fragments affected by coded-frame eviction will be removed.
* The browser will unload parts of the buffer to free up memory for new buffer data.
* Fragments will need to be reloaded when the buffer is freed up; removing partial fragments allows them to reload (since there might be parts that are still playable).
*/
public detectEvictedFragments(
elementaryStream: SourceBufferName,
timeRange: TimeRanges,
playlistType?: PlaylistLevelType
) {
// Check if any flagged fragments have been unloaded
Object.keys(this.fragments).forEach((key) => {
const fragmentEntity = this.fragments[key];
if (!fragmentEntity) {
return;
}
if (!fragmentEntity.buffered) {
if (fragmentEntity.body.type === playlistType) {
this.removeFragment(fragmentEntity.body);
}
return;
}
const esData = fragmentEntity.range[elementaryStream];
if (!esData) {
return;
}
esData.time.some((time: FragmentTimeRange) => {
const isNotBuffered = !this.isTimeBuffered(
time.startPTS,
time.endPTS,
timeRange
);
if (isNotBuffered) {
// Unregister partial fragment as it needs to load again to be reused
this.removeFragment(fragmentEntity.body);
}
return isNotBuffered;
});
});
}
/**
* Checks if the fragment passed in is loaded in the buffer properly
* Partially loaded fragments will be registered as a partial fragment
*/
private detectPartialFragments(data: FragBufferedData) {
const timeRanges = this.timeRanges;
const { frag, part } = data;
if (!timeRanges || frag.sn === 'initSegment') {
return;
}
const fragKey = getFragmentKey(frag);
const fragmentEntity = this.fragments[fragKey];
if (!fragmentEntity) {
return;
}
Object.keys(timeRanges).forEach((elementaryStream) => {
const streamInfo = frag.elementaryStreams[elementaryStream];
if (!streamInfo) {
return;
}
const timeRange = timeRanges[elementaryStream];
const partial = part !== null || streamInfo.partial === true;
fragmentEntity.range[elementaryStream] = this.getBufferedTimes(
frag,
part,
partial,
timeRange
);
});
fragmentEntity.backtrack = fragmentEntity.loaded = null;
if (Object.keys(fragmentEntity.range).length) {
fragmentEntity.buffered = true;
} else {
// remove fragment if nothing was appended
this.removeFragment(fragmentEntity.body);
}
}
public fragBuffered(frag: Fragment) {
const fragKey = getFragmentKey(frag);
const fragmentEntity = this.fragments[fragKey];
if (fragmentEntity) {
fragmentEntity.backtrack = fragmentEntity.loaded = null;
fragmentEntity.buffered = true;
}
}
private getBufferedTimes(
fragment: Fragment,
part: Part | null,
partial: boolean,
timeRange: TimeRanges
): FragmentBufferedRange {
const buffered: FragmentBufferedRange = {
time: [],
partial,
};
const startPTS = part ? part.start : fragment.start;
const endPTS = part ? part.end : fragment.end;
const minEndPTS = fragment.minEndPTS || endPTS;
const maxStartPTS = fragment.maxStartPTS || startPTS;
for (let i = 0; i < timeRange.length; i++) {
const startTime = timeRange.start(i) - this.bufferPadding;
const endTime = timeRange.end(i) + this.bufferPadding;
if (maxStartPTS >= startTime && minEndPTS <= endTime) {
// Fragment is entirely contained in buffer
// No need to check the other timeRange times since it's completely playable
buffered.time.push({
startPTS: Math.max(startPTS, timeRange.start(i)),
endPTS: Math.min(endPTS, timeRange.end(i)),
});
break;
} else if (startPTS < endTime && endPTS > startTime) {
buffered.partial = true;
// Check for intersection with buffer
// Get playable sections of the fragment
buffered.time.push({
startPTS: Math.max(startPTS, timeRange.start(i)),
endPTS: Math.min(endPTS, timeRange.end(i)),
});
} else if (endPTS <= startTime) {
// No need to check the rest of the timeRange as it is in order
break;
}
}
return buffered;
}
/**
* Gets the partial fragment for a certain time
*/
public getPartialFragment(time: number): Fragment | null {
let bestFragment: Fragment | null = null;
let timePadding: number;
let startTime: number;
let endTime: number;
let bestOverlap: number = 0;
const { bufferPadding, fragments } = this;
Object.keys(fragments).forEach((key) => {
const fragmentEntity = fragments[key];
if (!fragmentEntity) {
return;
}
if (isPartial(fragmentEntity)) {
startTime = fragmentEntity.body.start - bufferPadding;
endTime = fragmentEntity.body.end + bufferPadding;
if (time >= startTime && time <= endTime) {
// Use the fragment that has the most padding from start and end time
timePadding = Math.min(time - startTime, endTime - time);
if (bestOverlap <= timePadding) {
bestFragment = fragmentEntity.body;
bestOverlap = timePadding;
}
}
}
});
return bestFragment;
}
public getState(fragment: Fragment): FragmentState {
const fragKey = getFragmentKey(fragment);
const fragmentEntity = this.fragments[fragKey];
if (fragmentEntity) {
if (!fragmentEntity.buffered) {
if (fragmentEntity.backtrack) {
return FragmentState.BACKTRACKED;
}
return FragmentState.APPENDING;
} else if (isPartial(fragmentEntity)) {
return FragmentState.PARTIAL;
} else {
return FragmentState.OK;
}
}
return FragmentState.NOT_LOADED;
}
public backtrack(
frag: Fragment,
data?: FragLoadedData
): FragLoadedData | null {
const fragKey = getFragmentKey(frag);
const fragmentEntity = this.fragments[fragKey];
if (!fragmentEntity || fragmentEntity.backtrack) {
return null;
}
const backtrack = (fragmentEntity.backtrack = data
? data
: fragmentEntity.loaded);
fragmentEntity.loaded = null;
return backtrack;
}
public getBacktrackData(fragment: Fragment): FragLoadedData | null {
const fragKey = getFragmentKey(fragment);
const fragmentEntity = this.fragments[fragKey];
if (fragmentEntity) {
const { backtrack } = fragmentEntity;
// If the data was already sent to the Worker it is detached and no longer available
if (backtrack?.payload?.byteLength) {
return backtrack;
} else {
this.removeFragment(fragment);
}
}
return null;
}
private isTimeBuffered(
startPTS: number,
endPTS: number,
timeRange: TimeRanges
): boolean {
let startTime;
let endTime;
for (let i = 0; i < timeRange.length; i++) {
startTime = timeRange.start(i) - this.bufferPadding;
endTime = timeRange.end(i) + this.bufferPadding;
if (startPTS >= startTime && endPTS <= endTime) {
return true;
}
if (endPTS <= startTime) {
// No need to check the rest of the timeRange as it is in order
return false;
}
}
return false;
}
private onFragLoaded(event: Events.FRAG_LOADED, data: FragLoadedData) {
const { frag, part } = data;
// don't track initsegment (for which sn is not a number)
// don't track frags used for bitrateTest, they're irrelevant.
// don't track parts for memory efficiency
if (frag.sn === 'initSegment' || frag.bitrateTest || part) {
return;
}
const fragKey = getFragmentKey(frag);
this.fragments[fragKey] = {
body: frag,
loaded: data,
backtrack: null,
buffered: false,
range: Object.create(null),
};
}
private onBufferAppended(
event: Events.BUFFER_APPENDED,
data: BufferAppendedData
) {
const { frag, part, timeRanges } = data;
if (frag.type === PlaylistLevelType.MAIN) {
this.activeFragment = frag;
if (part) {
let activeParts = this.activeParts;
if (!activeParts) {
this.activeParts = activeParts = [];
}
activeParts.push(part);
} else {
this.activeParts = null;
}
}
// Store the latest timeRanges loaded in the buffer
this.timeRanges = timeRanges as { [key in SourceBufferName]: TimeRanges };
Object.keys(timeRanges).forEach((elementaryStream: SourceBufferName) => {
const timeRange = timeRanges[elementaryStream] as TimeRanges;
this.detectEvictedFragments(elementaryStream, timeRange);
if (!part) {
for (let i = 0; i < timeRange.length; i++) {
frag.appendedPTS = Math.max(timeRange.end(i), frag.appendedPTS || 0);
}
}
});
}
private onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
this.detectPartialFragments(data);
}
private hasFragment(fragment: Fragment): boolean {
const fragKey = getFragmentKey(fragment);
return !!this.fragments[fragKey];
}
public removeFragmentsInRange(
start: number,
end: number,
playlistType: PlaylistLevelType
) {
Object.keys(this.fragments).forEach((key) => {
const fragmentEntity = this.fragments[key];
if (!fragmentEntity) {
return;
}
if (fragmentEntity.buffered) {
const frag = fragmentEntity.body;
if (
frag.type === playlistType &&
frag.start < end &&
frag.end > start
) {
this.removeFragment(frag);
}
}
});
}
public removeFragment(fragment: Fragment) {
const fragKey = getFragmentKey(fragment);
fragment.stats.loaded = 0;
fragment.clearElementaryStreamInfo();
delete this.fragments[fragKey];
}
public removeAllFragments() {
this.fragments = Object.create(null);
this.activeFragment = null;
this.activeParts = null;
}
}
function isPartial(fragmentEntity: FragmentEntity): boolean {
return (
fragmentEntity.buffered &&
(fragmentEntity.range.video?.partial || fragmentEntity.range.audio?.partial)
);
}
function getFragmentKey(fragment: Fragment): string {
return `${fragment.type}_${fragment.level}_${fragment.urlId}_${fragment.sn}`;
}
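A small sketch of how a caller might consult the tracker above; fragmentTracker and frag are assumed to be in scope, and loadFragment is a hypothetical loader call:

// Keys follow getFragmentKey: e.g. a main-playlist fragment at level 2,
// urlId 0, sn 41 is tracked under 'main_2_0_41'.
if (fragmentTracker.getState(frag) === FragmentState.NOT_LOADED) {
// Nothing appended or partially buffered for this fragment yet:
loadFragment(frag); // hypothetical
}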

294
node_modules/hls.js/src/controller/gap-controller.ts generated vendored Normal file
View file

@ -0,0 +1,294 @@
import type { BufferInfo } from '../utils/buffer-helper';
import { BufferHelper } from '../utils/buffer-helper';
import { ErrorTypes, ErrorDetails } from '../errors';
import { Events } from '../events';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import type { HlsConfig } from '../config';
import type { FragmentTracker } from './fragment-tracker';
import { Fragment } from '../loader/fragment';
export const STALL_MINIMUM_DURATION_MS = 250;
export const MAX_START_GAP_JUMP = 2.0;
export const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
export const SKIP_BUFFER_RANGE_START = 0.05;
export default class GapController {
private config: HlsConfig;
private media: HTMLMediaElement;
private fragmentTracker: FragmentTracker;
private hls: Hls;
private nudgeRetry: number = 0;
private stallReported: boolean = false;
private stalled: number | null = null;
private moved: boolean = false;
private seeking: boolean = false;
constructor(
config: HlsConfig,
media: HTMLMediaElement,
fragmentTracker: FragmentTracker,
hls: Hls
) {
this.config = config;
this.media = media;
this.fragmentTracker = fragmentTracker;
this.hls = hls;
}
public destroy() {
// @ts-ignore
this.hls = this.fragmentTracker = this.media = null;
}
/**
* Checks if the playhead is stuck within a gap, and if so, attempts to free it.
* A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
*
* @param {number} lastCurrentTime Previously read playhead position
*/
public poll(lastCurrentTime: number) {
const { config, media, stalled } = this;
const { currentTime, seeking } = media;
const seeked = this.seeking && !seeking;
const beginSeek = !this.seeking && seeking;
this.seeking = seeking;
// The playhead is moving, no-op
if (currentTime !== lastCurrentTime) {
this.moved = true;
if (stalled !== null) {
// The playhead is now moving, but was previously stalled
if (this.stallReported) {
const stalledDuration = self.performance.now() - stalled;
logger.warn(
`playback not stuck anymore @${currentTime}, after ${Math.round(
stalledDuration
)}ms`
);
this.stallReported = false;
}
this.stalled = null;
this.nudgeRetry = 0;
}
return;
}
// Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
if (beginSeek || seeked) {
this.stalled = null;
}
// The playhead should not be moving
if (
media.paused ||
media.ended ||
media.playbackRate === 0 ||
!BufferHelper.getBuffered(media).length
) {
return;
}
const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
const isBuffered = bufferInfo.len > 0;
const nextStart = bufferInfo.nextStart || 0;
// There is no playable buffer (seeked, waiting for buffer)
if (!isBuffered && !nextStart) {
return;
}
if (seeking) {
// Waiting for seeking in a buffered range to complete
const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
// Next buffered range is too far ahead to jump to while still seeking
const noBufferGap =
!nextStart ||
(nextStart - currentTime > MAX_START_GAP_JUMP &&
!this.fragmentTracker.getPartialFragment(currentTime));
if (hasEnoughBuffer || noBufferGap) {
return;
}
// Reset moved state when seeking to a point in or before a gap
this.moved = false;
}
// Skip start gaps if we haven't played, but the last poll detected the start of a stall
// The additional poll gives the browser a chance to jump the gap for us
if (!this.moved && this.stalled !== null) {
// Jump start gaps within jump threshold
const startJump =
Math.max(nextStart, bufferInfo.start || 0) - currentTime;
// When joining a live stream with audio tracks, account for live playlist window sliding by allowing
// a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
// that begins over 1 target duration after the video start position.
const level = this.hls.levels
? this.hls.levels[this.hls.currentLevel]
: null;
const isLive = level?.details?.live;
const maxStartGapJump = isLive
? level!.details!.targetduration * 2
: MAX_START_GAP_JUMP;
if (startJump > 0 && startJump <= maxStartGapJump) {
this._trySkipBufferHole(null);
return;
}
}
// Start tracking stall time
const tnow = self.performance.now();
if (stalled === null) {
this.stalled = tnow;
return;
}
const stalledDuration = tnow - stalled;
if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
// Report stalling after trying to fix
this._reportStall(bufferInfo.len);
}
const bufferedWithHoles = BufferHelper.bufferInfo(
media,
currentTime,
config.maxBufferHole
);
this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
}
/**
* Detects and attempts to fix known buffer stalling issues.
* @param bufferInfo - The properties of the current buffer.
* @param stalledDurationMs - The amount of time Hls.js has been stalling for.
* @private
*/
private _tryFixBufferStall(
bufferInfo: BufferInfo,
stalledDurationMs: number
) {
const { config, fragmentTracker, media } = this;
const currentTime = media.currentTime;
const partial = fragmentTracker.getPartialFragment(currentTime);
if (partial) {
// Try to skip over the buffer hole caused by a partial fragment
// This method isn't limited by the size of the gap between buffered ranges
const targetTime = this._trySkipBufferHole(partial);
// we return here in this case, meaning
// the branch below only executes when we don't handle a partial fragment
if (targetTime) {
return;
}
}
// if we haven't had to skip over a buffer hole of a partial fragment
// we may just have to "nudge" the playlist as the browser decoding/rendering engine
// needs to cross some sort of threshold covering all source-buffers content
// to start playing properly.
if (
bufferInfo.len > config.maxBufferHole &&
stalledDurationMs > config.highBufferWatchdogPeriod * 1000
) {
logger.warn('Trying to nudge playhead over buffer-hole');
// Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
// We only try to jump the hole if it's under the configured size
// Reset stalled so to rearm watchdog timer
this.stalled = null;
this._tryNudgeBuffer();
}
}
/**
* Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
* @param bufferLen - The playhead distance from the end of the current buffer segment.
* @private
*/
private _reportStall(bufferLen: number) {
const { hls, media, stallReported } = this;
if (!stallReported) {
// Report stalled error once
this.stallReported = true;
logger.warn(
`Playback stalling at @${media.currentTime} due to low buffer (buffer=${bufferLen})`
);
hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_STALLED_ERROR,
fatal: false,
buffer: bufferLen,
});
}
}
/**
* Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
* @param partial - The partial fragment found at the current time (where playback is stalling).
* @private
*/
private _trySkipBufferHole(partial: Fragment | null): number {
const { config, hls, media } = this;
const currentTime = media.currentTime;
let lastEndTime = 0;
// Check if currentTime is between unbuffered regions of partial fragments
const buffered = BufferHelper.getBuffered(media);
for (let i = 0; i < buffered.length; i++) {
const startTime = buffered.start(i);
if (
currentTime + config.maxBufferHole >= lastEndTime &&
currentTime < startTime
) {
const targetTime = Math.max(
startTime + SKIP_BUFFER_RANGE_START,
media.currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS
);
logger.warn(
`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`
);
this.moved = true;
this.stalled = null;
media.currentTime = targetTime;
if (partial) {
hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
fatal: false,
reason: `fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`,
frag: partial,
});
}
return targetTime;
}
lastEndTime = buffered.end(i);
}
return 0;
}
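// Worked sketch of the loop above, with an assumed maxBufferHole of 0.5:
// the buffered ranges are [0, 9.99] and [10.2, 20] and currentTime = 9.99.
// On the second range, 9.99 + 0.5 >= 9.99 (the previous range's end) and
// 9.99 < 10.2 both hold, so the playhead jumps to
// Math.max(10.2 + SKIP_BUFFER_RANGE_START, 9.99 + SKIP_BUFFER_HOLE_STEP_SECONDS) = 10.25.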
/**
* Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
* @private
*/
private _tryNudgeBuffer() {
const { config, hls, media } = this;
const currentTime = media.currentTime;
const nudgeRetry = (this.nudgeRetry || 0) + 1;
this.nudgeRetry = nudgeRetry;
if (nudgeRetry < config.nudgeMaxRetry) {
const targetTime = currentTime + nudgeRetry * config.nudgeOffset;
// playback stalled in buffered area ... let's nudge currentTime to try to overcome this
logger.warn(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
media.currentTime = targetTime;
hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
fatal: false,
});
} else {
logger.error(
`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`
);
hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.BUFFER_STALLED_ERROR,
fatal: true,
});
}
}
}
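And the nudge escalation from _tryNudgeBuffer above, traced with assumed config values nudgeOffset = 0.1 and nudgeMaxRetry = 3:

// Each poll that still finds the playhead stuck bumps nudgeRetry:
// retry 1: media.currentTime = currentTime + 1 * 0.1 (100 ms forward)
// retry 2: media.currentTime = currentTime + 2 * 0.1 (200 ms forward)
// retry 3: nudgeRetry === nudgeMaxRetry, so a fatal BUFFER_STALLED_ERROR
// is raised instead of another nudge.
const nudgeTarget = (currentTime: number, nudgeRetry: number): number =>
currentTime + nudgeRetry * 0.1;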

148
node_modules/hls.js/src/controller/id3-track-controller.ts generated vendored Normal file
View file

@ -0,0 +1,148 @@
import { Events } from '../events';
import {
sendAddTrackEvent,
clearCurrentCues,
removeCuesInRange,
} from '../utils/texttrack-utils';
import * as ID3 from '../demux/id3';
import type {
BufferFlushingData,
FragParsingMetadataData,
MediaAttachedData,
} from '../types/events';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
declare global {
interface Window {
WebKitDataCue: VTTCue | void;
}
}
const MIN_CUE_DURATION = 0.25;
class ID3TrackController implements ComponentAPI {
private hls: Hls;
private id3Track: TextTrack | null = null;
private media: HTMLMediaElement | null = null;
constructor(hls) {
this.hls = hls;
this._registerListeners();
}
destroy() {
this._unregisterListeners();
}
private _registerListeners() {
const { hls } = this;
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
}
// Add ID3 metadata text track.
protected onMediaAttached(
event: Events.MEDIA_ATTACHED,
data: MediaAttachedData
): void {
this.media = data.media;
}
protected onMediaDetaching(): void {
if (!this.id3Track) {
return;
}
clearCurrentCues(this.id3Track);
this.id3Track = null;
this.media = null;
}
getID3Track(textTracks: TextTrackList): TextTrack | void {
if (!this.media) {
return;
}
for (let i = 0; i < textTracks.length; i++) {
const textTrack: TextTrack = textTracks[i];
if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
// send 'addtrack' when reusing the textTrack for metadata,
// same as what we do for captions
sendAddTrackEvent(textTrack, this.media);
return textTrack;
}
}
return this.media.addTextTrack('metadata', 'id3');
}
onFragParsingMetadata(
event: Events.FRAG_PARSING_METADATA,
data: FragParsingMetadataData
) {
if (!this.media) {
return;
}
const fragment = data.frag;
const samples = data.samples;
// create track dynamically
if (!this.id3Track) {
this.id3Track = this.getID3Track(this.media.textTracks) as TextTrack;
this.id3Track.mode = 'hidden';
}
// Attempt to recreate Safari functionality by creating
// WebKitDataCue objects when available, storing the decoded
// ID3 data in the value property of the cue
const Cue = (self.WebKitDataCue || self.VTTCue || self.TextTrackCue) as any;
for (let i = 0; i < samples.length; i++) {
const frames = ID3.getID3Frames(samples[i].data);
if (frames) {
const startTime = samples[i].pts;
let endTime: number =
i < samples.length - 1 ? samples[i + 1].pts : fragment.end;
const timeDiff = endTime - startTime;
if (timeDiff <= 0) {
endTime = startTime + MIN_CUE_DURATION;
}
for (let j = 0; j < frames.length; j++) {
const frame = frames[j];
// Safari doesn't put the timestamp frame in the TextTrack
if (!ID3.isTimeStampFrame(frame)) {
const cue = new Cue(startTime, endTime, '');
cue.value = frame;
this.id3Track.addCue(cue);
}
}
}
}
}
onBufferFlushing(
event: Events.BUFFER_FLUSHING,
{ startOffset, endOffset, type }: BufferFlushingData
) {
if (!type || type === 'audio') {
// ID3 cues come from parsed audio; only remove cues when the audio buffer is cleared
const { id3Track } = this;
if (id3Track) {
removeCuesInRange(id3Track, startOffset, endOffset);
}
}
}
}
export default ID3TrackController;
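A consumer-side sketch for the hidden 'id3' track the controller above creates; video is assumed to be the attached HTMLMediaElement, and cue.value carries the decoded frame:

const id3Track = Array.from(video.textTracks).find(
(track) => track.kind === 'metadata' && track.label === 'id3'
);
if (id3Track) {
id3Track.addEventListener('cuechange', () => {
for (const cue of Array.from(id3Track.activeCues ?? [])) {
// e.g. a TXXX frame object, depending on the ID3 payload
console.log((cue as any).value);
}
});
}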

254
node_modules/hls.js/src/controller/latency-controller.ts generated vendored Normal file
View file

@ -0,0 +1,254 @@
import { LevelDetails } from '../loader/level-details';
import { ErrorDetails } from '../errors';
import { Events } from '../events';
import type {
ErrorData,
LevelUpdatedData,
MediaAttachingData,
} from '../types/events';
import { logger } from '../utils/logger';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { HlsConfig } from '../config';
export default class LatencyController implements ComponentAPI {
private hls: Hls;
private readonly config: HlsConfig;
private media: HTMLMediaElement | null = null;
private levelDetails: LevelDetails | null = null;
private currentTime: number = 0;
private stallCount: number = 0;
private _latency: number | null = null;
private timeupdateHandler = () => this.timeupdate();
constructor(hls: Hls) {
this.hls = hls;
this.config = hls.config;
this.registerListeners();
}
get latency(): number {
return this._latency || 0;
}
get maxLatency(): number {
const { config, levelDetails } = this;
if (config.liveMaxLatencyDuration !== undefined) {
return config.liveMaxLatencyDuration;
}
return levelDetails
? config.liveMaxLatencyDurationCount * levelDetails.targetduration
: 0;
}
get targetLatency(): number | null {
const { levelDetails } = this;
if (levelDetails === null) {
return null;
}
const { holdBack, partHoldBack, targetduration } = levelDetails;
const { liveSyncDuration, liveSyncDurationCount, lowLatencyMode } =
this.config;
const userConfig = this.hls.userConfig;
let targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
if (
userConfig.liveSyncDuration ||
userConfig.liveSyncDurationCount ||
targetLatency === 0
) {
targetLatency =
liveSyncDuration !== undefined
? liveSyncDuration
: liveSyncDurationCount * targetduration;
}
const maxLiveSyncOnStallIncrease = targetduration;
const liveSyncOnStallIncrease = 1.0;
return (
targetLatency +
Math.min(
this.stallCount * liveSyncOnStallIncrease,
maxLiveSyncOnStallIncrease
)
);
}
get liveSyncPosition(): number | null {
const liveEdge = this.estimateLiveEdge();
const targetLatency = this.targetLatency;
const levelDetails = this.levelDetails;
if (liveEdge === null || targetLatency === null || levelDetails === null) {
return null;
}
const edge = levelDetails.edge;
const syncPosition = liveEdge - targetLatency - this.edgeStalled;
const min = edge - levelDetails.totalduration;
const max =
edge -
((this.config.lowLatencyMode && levelDetails.partTarget) ||
levelDetails.targetduration);
return Math.min(Math.max(min, syncPosition), max);
}
get drift(): number {
const { levelDetails } = this;
if (levelDetails === null) {
return 1;
}
return levelDetails.drift;
}
get edgeStalled(): number {
const { levelDetails } = this;
if (levelDetails === null) {
return 0;
}
const maxLevelUpdateAge =
((this.config.lowLatencyMode && levelDetails.partTarget) ||
levelDetails.targetduration) * 3;
return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
}
private get forwardBufferLength(): number {
const { media, levelDetails } = this;
if (!media || !levelDetails) {
return 0;
}
const bufferedRanges = media.buffered.length;
return bufferedRanges
? media.buffered.end(bufferedRanges - 1)
: levelDetails.edge - this.currentTime;
}
public destroy(): void {
this.unregisterListeners();
this.onMediaDetaching();
this.levelDetails = null;
// @ts-ignore
this.hls = this.timeupdateHandler = null;
}
private registerListeners() {
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
this.hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
this.hls.on(Events.ERROR, this.onError, this);
}
private unregisterListeners() {
this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached);
this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching);
this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading);
this.hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated);
this.hls.off(Events.ERROR, this.onError);
}
private onMediaAttached(
event: Events.MEDIA_ATTACHED,
data: MediaAttachingData
) {
this.media = data.media;
this.media.addEventListener('timeupdate', this.timeupdateHandler);
}
private onMediaDetaching() {
if (this.media) {
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
this.media = null;
}
}
private onManifestLoading() {
this.levelDetails = null;
this._latency = null;
this.stallCount = 0;
}
private onLevelUpdated(
event: Events.LEVEL_UPDATED,
{ details }: LevelUpdatedData
) {
this.levelDetails = details;
if (details.advanced) {
this.timeupdate();
}
if (!details.live && this.media) {
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
}
}
private onError(event: Events.ERROR, data: ErrorData) {
if (data.details !== ErrorDetails.BUFFER_STALLED_ERROR) {
return;
}
this.stallCount++;
logger.warn(
'[playback-rate-controller]: Stall detected, adjusting target latency'
);
}
private timeupdate() {
const { media, levelDetails } = this;
if (!media || !levelDetails) {
return;
}
this.currentTime = media.currentTime;
const latency = this.computeLatency();
if (latency === null) {
return;
}
this._latency = latency;
// Adapt playbackRate to meet target latency in low-latency mode
const { lowLatencyMode, maxLiveSyncPlaybackRate } = this.config;
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1) {
return;
}
const targetLatency = this.targetLatency;
if (targetLatency === null) {
return;
}
const distanceFromTarget = latency - targetLatency;
// Only adjust playbackRate when within one target duration of targetLatency
// and more than one second from under-buffering.
// Playback further than one target duration from target can be considered DVR playback.
const liveMinLatencyDuration = Math.min(
this.maxLatency,
targetLatency + levelDetails.targetduration
);
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
if (
levelDetails.live &&
inLiveRange &&
distanceFromTarget > 0.05 &&
this.forwardBufferLength > 1
) {
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
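// Logistic ease toward the target: the raw rate approaches 2 as the latency gap grows, then is quantized to 0.05 steps and clamped to [1, max].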
const rate =
Math.round(
(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled))) *
20
) / 20;
media.playbackRate = Math.min(max, Math.max(1, rate));
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
media.playbackRate = 1;
}
}
private estimateLiveEdge(): number | null {
const { levelDetails } = this;
if (levelDetails === null) {
return null;
}
return levelDetails.edge + levelDetails.age;
}
private computeLatency(): number | null {
const liveEdge = this.estimateLiveEdge();
if (liveEdge === null) {
return null;
}
return liveEdge - this.currentTime;
}
}
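To make the rate adaptation above concrete, here is a standalone sketch of the same logistic curve, assuming edgeStalled = 0 and a 2x ceiling (the helper name and sample values are illustrative):

function liveSyncRate(distanceFromTarget: number, edgeStalled = 0): number {
  // Same curve as timeupdate(): logistic in the latency gap, quantized to 0.05 steps.
  const raw = 2 / (1 + Math.exp(-0.75 * distanceFromTarget - edgeStalled));
  const rate = Math.round(raw * 20) / 20;
  return Math.min(2, Math.max(1, rate));
}
// liveSyncRate(0) === 1     -> at target latency, play at normal speed
// liveSyncRate(1) === 1.35  -> one second behind, speed up moderately
// liveSyncRate(2) === 1.65  -> two seconds behind, approach the ceiling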

600
node_modules/hls.js/src/controller/level-controller.ts generated vendored Normal file
View file

@ -0,0 +1,600 @@
/*
* Level Controller
*/
import {
ManifestLoadedData,
ManifestParsedData,
LevelLoadedData,
TrackSwitchedData,
FragLoadedData,
ErrorData,
LevelSwitchingData,
} from '../types/events';
import { Level } from '../types/level';
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import { isCodecSupportedInMp4 } from '../utils/codecs';
import { addGroupId, assignTrackIdsByGroup } from './level-helper';
import BasePlaylistController from './base-playlist-controller';
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
import type Hls from '../hls';
import type { HlsUrlParameters, LevelParsed } from '../types/level';
import type { MediaPlaylist } from '../types/media-playlist';
const chromeOrFirefox: boolean = /chrome|firefox/.test(
navigator.userAgent.toLowerCase()
);
export default class LevelController extends BasePlaylistController {
private _levels: Level[] = [];
private _firstLevel: number = -1;
private _startLevel?: number;
private currentLevelIndex: number = -1;
private manualLevelIndex: number = -1;
public onParsedComplete!: Function;
constructor(hls: Hls) {
super(hls, '[level-controller]');
this._registerListeners();
}
private _registerListeners() {
const { hls } = this;
hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
hls.on(Events.ERROR, this.onError, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
hls.off(Events.ERROR, this.onError, this);
}
public destroy() {
this._unregisterListeners();
this.manualLevelIndex = -1;
this._levels.length = 0;
super.destroy();
}
public startLoad(): void {
const levels = this._levels;
// clean up live level details to force reload them, and reset load errors
levels.forEach((level) => {
level.loadError = 0;
});
super.startLoad();
}
protected onManifestLoaded(
event: Events.MANIFEST_LOADED,
data: ManifestLoadedData
): void {
let levels: Level[] = [];
let audioTracks: MediaPlaylist[] = [];
let subtitleTracks: MediaPlaylist[] = [];
let bitrateStart: number | undefined;
const levelSet: { [key: string]: Level } = {};
let levelFromSet: Level;
let resolutionFound = false;
let videoCodecFound = false;
let audioCodecFound = false;
// regroup redundant levels together
data.levels.forEach((levelParsed: LevelParsed) => {
const attributes = levelParsed.attrs;
resolutionFound =
resolutionFound || !!(levelParsed.width && levelParsed.height);
videoCodecFound = videoCodecFound || !!levelParsed.videoCodec;
audioCodecFound = audioCodecFound || !!levelParsed.audioCodec;
// erase audio codec info if browser does not support mp4a.40.34.
// demuxer will autodetect the codec and fall back to mpeg/audio
if (
chromeOrFirefox &&
levelParsed.audioCodec &&
levelParsed.audioCodec.indexOf('mp4a.40.34') !== -1
) {
levelParsed.audioCodec = undefined;
}
const levelKey = `${levelParsed.bitrate}-${levelParsed.attrs.RESOLUTION}-${levelParsed.attrs.CODECS}`;
levelFromSet = levelSet[levelKey];
if (!levelFromSet) {
levelFromSet = new Level(levelParsed);
levelSet[levelKey] = levelFromSet;
levels.push(levelFromSet);
} else {
levelFromSet.url.push(levelParsed.url);
}
if (attributes) {
if (attributes.AUDIO) {
addGroupId(levelFromSet, 'audio', attributes.AUDIO);
}
if (attributes.SUBTITLES) {
addGroupId(levelFromSet, 'text', attributes.SUBTITLES);
}
}
});
// remove audio-only level if we also have levels with video codecs or RESOLUTION signalled
if ((resolutionFound || videoCodecFound) && audioCodecFound) {
levels = levels.filter(
({ videoCodec, width, height }) => !!videoCodec || !!(width && height)
);
}
// only keep levels with supported audio/video codecs
levels = levels.filter(({ audioCodec, videoCodec }) => {
return (
(!audioCodec || isCodecSupportedInMp4(audioCodec, 'audio')) &&
(!videoCodec || isCodecSupportedInMp4(videoCodec, 'video'))
);
});
if (data.audioTracks) {
audioTracks = data.audioTracks.filter(
(track) =>
!track.audioCodec || isCodecSupportedInMp4(track.audioCodec, 'audio')
);
// Assign ids after filtering as array indices by group-id
assignTrackIdsByGroup(audioTracks);
}
if (data.subtitles) {
subtitleTracks = data.subtitles;
assignTrackIdsByGroup(subtitleTracks);
}
if (levels.length > 0) {
// start bitrate is the first bitrate of the manifest
bitrateStart = levels[0].bitrate;
// sort levels by bitrate
levels.sort((a, b) => a.bitrate - b.bitrate);
this._levels = levels;
// find index of first level in sorted levels
for (let i = 0; i < levels.length; i++) {
if (levels[i].bitrate === bitrateStart) {
this._firstLevel = i;
this.log(
`manifest loaded, ${levels.length} level(s) found, first bitrate: ${bitrateStart}`
);
break;
}
}
// Audio is only alternate if the manifest includes a URI along with the audio group tag,
// and this is not an audio-only stream where levels contain only audio
const audioOnly = audioCodecFound && !videoCodecFound;
const edata: ManifestParsedData = {
levels,
audioTracks,
subtitleTracks,
firstLevel: this._firstLevel,
stats: data.stats,
audio: audioCodecFound,
video: videoCodecFound,
altAudio: !audioOnly && audioTracks.some((t) => !!t.url),
};
this.hls.trigger(Events.MANIFEST_PARSED, edata);
// Initiate loading after all controllers have received MANIFEST_PARSED
if (this.hls.config.autoStartLoad || this.hls.forceStartLoad) {
this.hls.startLoad(this.hls.config.startPosition);
}
} else {
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,
fatal: true,
url: data.url,
reason: 'no level with compatible codecs found in manifest',
});
}
}
get levels(): Level[] | null {
if (this._levels.length === 0) {
return null;
}
return this._levels;
}
get level(): number {
return this.currentLevelIndex;
}
set level(newLevel: number) {
const levels = this._levels;
if (levels.length === 0) {
return;
}
if (this.currentLevelIndex === newLevel && levels[newLevel]?.details) {
return;
}
// check if level idx is valid
if (newLevel < 0 || newLevel >= levels.length) {
// invalid level id given, trigger error
const fatal = newLevel < 0;
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.OTHER_ERROR,
details: ErrorDetails.LEVEL_SWITCH_ERROR,
level: newLevel,
fatal,
reason: 'invalid level idx',
});
if (fatal) {
return;
}
newLevel = Math.min(newLevel, levels.length - 1);
}
// stopping live reloading timer if any
this.clearTimer();
const lastLevelIndex = this.currentLevelIndex;
const lastLevel = levels[lastLevelIndex];
const level = levels[newLevel];
this.log(`switching to level ${newLevel} from ${lastLevelIndex}`);
this.currentLevelIndex = newLevel;
const levelSwitchingData: LevelSwitchingData = Object.assign({}, level, {
level: newLevel,
maxBitrate: level.maxBitrate,
uri: level.uri,
urlId: level.urlId,
});
// @ts-ignore
delete levelSwitchingData._urlId;
this.hls.trigger(Events.LEVEL_SWITCHING, levelSwitchingData);
// check if we need to load playlist for this level
const levelDetails = level.details;
if (!levelDetails || levelDetails.live) {
// level details not retrieved yet, or live playlist: we need to (re)load it
const hlsUrlParameters = this.switchParams(level.uri, lastLevel?.details);
this.loadPlaylist(hlsUrlParameters);
}
}
get manualLevel(): number {
return this.manualLevelIndex;
}
set manualLevel(newLevel) {
this.manualLevelIndex = newLevel;
if (this._startLevel === undefined) {
this._startLevel = newLevel;
}
if (newLevel !== -1) {
this.level = newLevel;
}
}
get firstLevel(): number {
return this._firstLevel;
}
set firstLevel(newLevel) {
this._firstLevel = newLevel;
}
get startLevel() {
// hls.startLevel takes precedence over config.startLevel
// if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest)
if (this._startLevel === undefined) {
const configStartLevel = this.hls.config.startLevel;
if (configStartLevel !== undefined) {
return configStartLevel;
} else {
return this._firstLevel;
}
} else {
return this._startLevel;
}
}
set startLevel(newLevel) {
this._startLevel = newLevel;
}
protected onError(event: Events.ERROR, data: ErrorData) {
super.onError(event, data);
if (data.fatal) {
return;
}
// Switch to redundant level when track fails to load
const context = data.context;
const level = this._levels[this.currentLevelIndex];
if (
context &&
((context.type === PlaylistContextType.AUDIO_TRACK &&
level.audioGroupIds &&
context.groupId === level.audioGroupIds[level.urlId]) ||
(context.type === PlaylistContextType.SUBTITLE_TRACK &&
level.textGroupIds &&
context.groupId === level.textGroupIds[level.urlId]))
) {
this.redundantFailover(this.currentLevelIndex);
return;
}
let levelError = false;
let levelSwitch = true;
let levelIndex;
// try to recover non-fatal errors
switch (data.details) {
case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT:
case ErrorDetails.KEY_LOAD_ERROR:
case ErrorDetails.KEY_LOAD_TIMEOUT:
if (data.frag) {
const level = this._levels[data.frag.level];
// Set levelIndex when we're out of fragment retries
if (level) {
level.fragmentError++;
if (level.fragmentError > this.hls.config.fragLoadingMaxRetry) {
levelIndex = data.frag.level;
}
} else {
levelIndex = data.frag.level;
}
}
break;
case ErrorDetails.LEVEL_LOAD_ERROR:
case ErrorDetails.LEVEL_LOAD_TIMEOUT:
// Do not perform level switch if an error occurred using delivery directives
// Attempt to reload level without directives first
if (context) {
if (context.deliveryDirectives) {
levelSwitch = false;
}
levelIndex = context.level;
}
levelError = true;
break;
case ErrorDetails.REMUX_ALLOC_ERROR:
levelIndex = data.level;
levelError = true;
break;
}
if (levelIndex !== undefined) {
this.recoverLevel(data, levelIndex, levelError, levelSwitch);
}
}
/**
* Switch to a redundant stream if one is available.
* Otherwise, perform an emergency switch down if ABR mode is enabled.
*/
private recoverLevel(
errorEvent: ErrorData,
levelIndex: number,
levelError: boolean,
levelSwitch: boolean
): void {
const { details: errorDetails } = errorEvent;
const level = this._levels[levelIndex];
level.loadError++;
if (levelError) {
const retrying = this.retryLoadingOrFail(errorEvent);
if (retrying) {
// boolean used to inform stream controller not to switch back to IDLE on non-fatal error
errorEvent.levelRetry = true;
} else {
this.currentLevelIndex = -1;
return;
}
}
if (levelSwitch) {
const redundantLevels = level.url.length;
// Try redundant fail-over until level.loadError reaches redundantLevels
if (redundantLevels > 1 && level.loadError < redundantLevels) {
errorEvent.levelRetry = true;
this.redundantFailover(levelIndex);
} else if (this.manualLevelIndex === -1) {
// Search for available level in auto level selection mode, cycling from highest to lowest bitrate
const nextLevel =
levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
if (
this.currentLevelIndex !== nextLevel &&
this._levels[nextLevel].loadError === 0
) {
this.warn(`${errorDetails}: switch to ${nextLevel}`);
errorEvent.levelRetry = true;
this.hls.nextAutoLevel = nextLevel;
}
}
}
}
private redundantFailover(levelIndex: number) {
const level = this._levels[levelIndex];
const redundantLevels = level.url.length;
if (redundantLevels > 1) {
// Update the url id of all levels so that we stay on the same set of variants when level switching
const newUrlId = (level.urlId + 1) % redundantLevels;
this.warn(`Switching to redundant URL-id ${newUrlId}`);
this._levels.forEach((level) => {
level.urlId = newUrlId;
});
this.level = levelIndex;
}
}
// reset errors on the successful load of a fragment
protected onFragLoaded(event: Events.FRAG_LOADED, { frag }: FragLoadedData) {
if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
const level = this._levels[frag.level];
if (level !== undefined) {
level.fragmentError = 0;
level.loadError = 0;
}
}
}
protected onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
const { level, details } = data;
const curLevel = this._levels[level];
if (!curLevel) {
this.warn(`Invalid level index ${level}`);
if (data.deliveryDirectives?.skip) {
details.deltaUpdateFailed = true;
}
return;
}
// only process level loaded events matching the expected level
if (level === this.currentLevelIndex) {
// reset level load error counter on successful level load, but only if there are no issues with fragments
if (curLevel.fragmentError === 0) {
curLevel.loadError = 0;
this.retryCount = 0;
}
this.playlistLoaded(level, data, curLevel.details);
} else if (data.deliveryDirectives?.skip) {
// received a delta playlist update that cannot be merged
details.deltaUpdateFailed = true;
}
}
protected onAudioTrackSwitched(
event: Events.AUDIO_TRACK_SWITCHED,
data: TrackSwitchedData
) {
const currentLevel = this.hls.levels[this.currentLevelIndex];
if (!currentLevel) {
return;
}
if (currentLevel.audioGroupIds) {
let urlId = -1;
const audioGroupId = this.hls.audioTracks[data.id].groupId;
for (let i = 0; i < currentLevel.audioGroupIds.length; i++) {
if (currentLevel.audioGroupIds[i] === audioGroupId) {
urlId = i;
break;
}
}
if (urlId !== currentLevel.urlId) {
currentLevel.urlId = urlId;
this.startLoad();
}
}
}
protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters) {
const level = this.currentLevelIndex;
const currentLevel = this._levels[level];
if (this.canLoad && currentLevel && currentLevel.url.length > 0) {
const id = currentLevel.urlId;
let url = currentLevel.url[id];
if (hlsUrlParameters) {
try {
url = hlsUrlParameters.addDirectives(url);
} catch (error) {
this.warn(
`Could not construct new URL with HLS Delivery Directives: ${error}`
);
}
}
this.log(
`Attempt loading level index ${level}${
hlsUrlParameters
? ' at sn ' +
hlsUrlParameters.msn +
' part ' +
hlsUrlParameters.part
: ''
} with URL-id ${id} ${url}`
);
// console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
// console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
this.clearTimer();
this.hls.trigger(Events.LEVEL_LOADING, {
url,
level,
id,
deliveryDirectives: hlsUrlParameters || null,
});
}
}
get nextLoadLevel() {
if (this.manualLevelIndex !== -1) {
return this.manualLevelIndex;
} else {
return this.hls.nextAutoLevel;
}
}
set nextLoadLevel(nextLevel) {
this.level = nextLevel;
if (this.manualLevelIndex === -1) {
this.hls.nextAutoLevel = nextLevel;
}
}
removeLevel(levelIndex, urlId) {
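// Drops the given urlId from a level's redundant URL list (keeping the level), or removes the level outright; surviving fragments are re-indexed below.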
const filterLevelAndGroupByIdIndex = (url, id) => id !== urlId;
const levels = this._levels
.filter((level, index) => {
if (index !== levelIndex) {
return true;
}
if (level.url.length > 1 && urlId !== undefined) {
level.url = level.url.filter(filterLevelAndGroupByIdIndex);
if (level.audioGroupIds) {
level.audioGroupIds = level.audioGroupIds.filter(
filterLevelAndGroupByIdIndex
);
}
if (level.textGroupIds) {
level.textGroupIds = level.textGroupIds.filter(
filterLevelAndGroupByIdIndex
);
}
level.urlId = 0;
return true;
}
return false;
})
.map((level, index) => {
const { details } = level;
if (details?.fragments) {
details.fragments.forEach((fragment) => {
fragment.level = index;
});
}
return level;
});
this._levels = levels;
this.hls.trigger(Events.LEVELS_UPDATED, { levels });
}
}
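As a quick illustration of redundantFailover() above: every level keeps a parallel url[] of backup playlists, and rotating a shared urlId moves the whole variant set to the next backup. A minimal sketch with hypothetical URLs:

const backups: string[] = [
  'https://cdn-a.example.com/level0.m3u8', // hypothetical primary
  'https://cdn-b.example.com/level0.m3u8', // hypothetical backup
];
let urlId = 0;
function failover(): string {
  urlId = (urlId + 1) % backups.length; // wrap around, as redundantFailover() does
  return backups[urlId];
}
// First failure: failover() -> the cdn-b URL; a second failure rotates back to cdn-a.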

503
node_modules/hls.js/src/controller/level-helper.ts generated vendored Normal file
View file

@ -0,0 +1,503 @@
/**
* @module LevelHelper
* Providing methods dealing with playlist sliding and drift
* */
import { logger } from '../utils/logger';
import { Fragment, Part } from '../loader/fragment';
import { LevelDetails } from '../loader/level-details';
import type { Level } from '../types/level';
import type { LoaderStats } from '../types/loader';
import type { MediaPlaylist } from '../types/media-playlist';
type FragmentIntersection = (oldFrag: Fragment, newFrag: Fragment) => void;
type PartIntersection = (oldPart: Part, newPart: Part) => void;
export function addGroupId(level: Level, type: string, id: string): void {
switch (type) {
case 'audio':
if (!level.audioGroupIds) {
level.audioGroupIds = [];
}
level.audioGroupIds.push(id);
break;
case 'text':
if (!level.textGroupIds) {
level.textGroupIds = [];
}
level.textGroupIds.push(id);
break;
}
}
export function assignTrackIdsByGroup(tracks: MediaPlaylist[]): void {
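// Ids are assigned per group: the first track in each group-id gets 0, the next 1, and so on.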
const groups = {};
tracks.forEach((track) => {
const groupId = track.groupId || '';
track.id = groups[groupId] = groups[groupId] || 0;
groups[groupId]++;
});
}
export function updatePTS(
fragments: Fragment[],
fromIdx: number,
toIdx: number
): void {
const fragFrom = fragments[fromIdx];
const fragTo = fragments[toIdx];
updateFromToPTS(fragFrom, fragTo);
}
function updateFromToPTS(fragFrom: Fragment, fragTo: Fragment) {
const fragToPTS = fragTo.startPTS as number;
// if we know startPTS[toIdx]
if (Number.isFinite(fragToPTS)) {
// update fragment duration.
// it helps to fix drifts between playlist reported duration and fragment real duration
let duration: number = 0;
let frag: Fragment;
if (fragTo.sn > fragFrom.sn) {
duration = fragToPTS - fragFrom.start;
frag = fragFrom;
} else {
duration = fragFrom.start - fragToPTS;
frag = fragTo;
}
// TODO? Drift can go either way, or the playlist could be completely accurate
// console.assert(duration > 0,
// `duration of ${duration} computed for frag ${frag.sn}, level ${frag.level}, there should be some duration drift between playlist and fragment!`);
if (frag.duration !== duration) {
frag.duration = duration;
}
// we don't know startPTS[toIdx]
} else if (fragTo.sn > fragFrom.sn) {
const contiguous = fragFrom.cc === fragTo.cc;
// TODO: With part-loading end/durations we need to confirm the whole fragment is loaded before using (or setting) minEndPTS
if (contiguous && fragFrom.minEndPTS) {
fragTo.start = fragFrom.start + (fragFrom.minEndPTS - fragFrom.start);
} else {
fragTo.start = fragFrom.start + fragFrom.duration;
}
} else {
fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
}
}
export function updateFragPTSDTS(
details: LevelDetails | undefined,
frag: Fragment,
startPTS: number,
endPTS: number,
startDTS: number,
endDTS: number
): number {
const parsedMediaDuration = endPTS - startPTS;
if (parsedMediaDuration <= 0) {
logger.warn('Fragment should have a positive duration', frag);
endPTS = startPTS + frag.duration;
endDTS = startDTS + frag.duration;
}
let maxStartPTS = startPTS;
let minEndPTS = endPTS;
const fragStartPts = frag.startPTS as number;
const fragEndPts = frag.endPTS as number;
if (Number.isFinite(fragStartPts)) {
// delta PTS between audio and video
const deltaPTS = Math.abs(fragStartPts - startPTS);
if (!Number.isFinite(frag.deltaPTS as number)) {
frag.deltaPTS = deltaPTS;
} else {
frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS as number);
}
maxStartPTS = Math.max(startPTS, fragStartPts);
startPTS = Math.min(startPTS, fragStartPts);
startDTS = Math.min(startDTS, frag.startDTS);
minEndPTS = Math.min(endPTS, fragEndPts);
endPTS = Math.max(endPTS, fragEndPts);
endDTS = Math.max(endDTS, frag.endDTS);
}
frag.duration = endPTS - startPTS;
const drift = startPTS - frag.start;
frag.appendedPTS = endPTS;
frag.start = frag.startPTS = startPTS;
frag.maxStartPTS = maxStartPTS;
frag.startDTS = startDTS;
frag.endPTS = endPTS;
frag.minEndPTS = minEndPTS;
frag.endDTS = endDTS;
const sn = frag.sn as number; // 'initSegment'
// exit if sn out of range
if (!details || sn < details.startSN || sn > details.endSN) {
return 0;
}
let i;
const fragIdx = sn - details.startSN;
const fragments = details.fragments;
// update frag reference in fragments array
// rationale is that fragments array might not contain this frag object.
// this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
// if we don't update frag, we won't be able to propagate PTS info on the playlist
// resulting in invalid sliding computation
fragments[fragIdx] = frag;
// adjust fragment PTS/duration from seqnum-1 to frag 0
for (i = fragIdx; i > 0; i--) {
updateFromToPTS(fragments[i], fragments[i - 1]);
}
// adjust fragment PTS/duration from seqnum to last frag
for (i = fragIdx; i < fragments.length - 1; i++) {
updateFromToPTS(fragments[i], fragments[i + 1]);
}
if (details.fragmentHint) {
updateFromToPTS(fragments[fragments.length - 1], details.fragmentHint);
}
details.PTSKnown = details.alignedSliding = true;
return drift;
}
export function mergeDetails(
oldDetails: LevelDetails,
newDetails: LevelDetails
): void {
// Track the last initSegment processed. Initialize it to the last one on the timeline.
let currentInitSegment: Fragment | null = null;
const oldFragments = oldDetails.fragments;
for (let i = oldFragments.length - 1; i >= 0; i--) {
const oldInit = oldFragments[i].initSegment;
if (oldInit) {
currentInitSegment = oldInit;
break;
}
}
if (oldDetails.fragmentHint) {
// prevent PTS and duration from being adjusted on the next hint
delete oldDetails.fragmentHint.endPTS;
}
// check if old/new playlists have fragments in common
// loop through overlapping SN and update startPTS, cc, and duration if any found
let ccOffset = 0;
let PTSFrag;
mapFragmentIntersection(
oldDetails,
newDetails,
(oldFrag: Fragment, newFrag: Fragment) => {
if (oldFrag.relurl) {
// Do not compare CC if the old fragment has no url. This is a level.fragmentHint used by LL-HLS parts.
// It may be off by 1 if it was created before any parts or discontinuity tags were appended to the end
// of the playlist.
ccOffset = oldFrag.cc - newFrag.cc;
}
if (
Number.isFinite(oldFrag.startPTS) &&
Number.isFinite(oldFrag.endPTS)
) {
newFrag.start = newFrag.startPTS = oldFrag.startPTS as number;
newFrag.startDTS = oldFrag.startDTS;
newFrag.appendedPTS = oldFrag.appendedPTS;
newFrag.maxStartPTS = oldFrag.maxStartPTS;
newFrag.endPTS = oldFrag.endPTS;
newFrag.endDTS = oldFrag.endDTS;
newFrag.minEndPTS = oldFrag.minEndPTS;
newFrag.duration =
(oldFrag.endPTS as number) - (oldFrag.startPTS as number);
if (newFrag.duration) {
PTSFrag = newFrag;
}
// PTS is known when any segment has startPTS and endPTS
newDetails.PTSKnown = newDetails.alignedSliding = true;
}
newFrag.elementaryStreams = oldFrag.elementaryStreams;
newFrag.loader = oldFrag.loader;
newFrag.stats = oldFrag.stats;
newFrag.urlId = oldFrag.urlId;
if (oldFrag.initSegment) {
newFrag.initSegment = oldFrag.initSegment;
currentInitSegment = oldFrag.initSegment;
}
}
);
if (currentInitSegment) {
const fragmentsToCheck = newDetails.fragmentHint
? newDetails.fragments.concat(newDetails.fragmentHint)
: newDetails.fragments;
fragmentsToCheck.forEach((frag) => {
if (
!frag.initSegment ||
frag.initSegment.relurl === currentInitSegment?.relurl
) {
frag.initSegment = currentInitSegment;
}
});
}
if (newDetails.skippedSegments) {
newDetails.deltaUpdateFailed = newDetails.fragments.some((frag) => !frag);
if (newDetails.deltaUpdateFailed) {
logger.warn(
'[level-helper] Previous playlist missing segments skipped in delta playlist'
);
for (let i = newDetails.skippedSegments; i--; ) {
newDetails.fragments.shift();
}
newDetails.startSN = newDetails.fragments[0].sn as number;
newDetails.startCC = newDetails.fragments[0].cc;
}
}
const newFragments = newDetails.fragments;
if (ccOffset) {
logger.warn('discontinuity sliding from playlist, take drift into account');
for (let i = 0; i < newFragments.length; i++) {
newFragments[i].cc += ccOffset;
}
}
if (newDetails.skippedSegments) {
newDetails.startCC = newDetails.fragments[0].cc;
}
// Merge parts
mapPartIntersection(
oldDetails.partList,
newDetails.partList,
(oldPart: Part, newPart: Part) => {
newPart.elementaryStreams = oldPart.elementaryStreams;
newPart.stats = oldPart.stats;
}
);
// if at least one fragment contains PTS info, recompute PTS information for all fragments
if (PTSFrag) {
updateFragPTSDTS(
newDetails,
PTSFrag,
PTSFrag.startPTS,
PTSFrag.endPTS,
PTSFrag.startDTS,
PTSFrag.endDTS
);
} else {
// ensure that delta is within oldFragments range
// also adjust sliding in case delta is 0 (we could have old=[50-60] and new=[50-61])
// in that case we also need to adjust start offset of all fragments
adjustSliding(oldDetails, newDetails);
}
if (newFragments.length) {
newDetails.totalduration = newDetails.edge - newFragments[0].start;
}
newDetails.driftStartTime = oldDetails.driftStartTime;
newDetails.driftStart = oldDetails.driftStart;
const advancedDateTime = newDetails.advancedDateTime;
if (newDetails.advanced && advancedDateTime) {
const edge = newDetails.edge;
if (!newDetails.driftStart) {
newDetails.driftStartTime = advancedDateTime;
newDetails.driftStart = edge;
}
newDetails.driftEndTime = advancedDateTime;
newDetails.driftEnd = edge;
} else {
newDetails.driftEndTime = oldDetails.driftEndTime;
newDetails.driftEnd = oldDetails.driftEnd;
newDetails.advancedDateTime = oldDetails.advancedDateTime;
}
}
export function mapPartIntersection(
oldParts: Part[] | null,
newParts: Part[] | null,
intersectionFn: PartIntersection
) {
if (oldParts && newParts) {
let delta = 0;
for (let i = 0, len = oldParts.length; i <= len; i++) {
const oldPart = oldParts[i];
const newPart = newParts[i + delta];
if (
oldPart &&
newPart &&
oldPart.index === newPart.index &&
oldPart.fragment.sn === newPart.fragment.sn
) {
intersectionFn(oldPart, newPart);
} else {
delta--;
}
}
}
}
export function mapFragmentIntersection(
oldDetails: LevelDetails,
newDetails: LevelDetails,
intersectionFn: FragmentIntersection
): void {
const skippedSegments = newDetails.skippedSegments;
const start =
Math.max(oldDetails.startSN, newDetails.startSN) - newDetails.startSN;
const end =
(oldDetails.fragmentHint ? 1 : 0) +
(skippedSegments
? newDetails.endSN
: Math.min(oldDetails.endSN, newDetails.endSN)) -
newDetails.startSN;
const delta = newDetails.startSN - oldDetails.startSN;
const newFrags = newDetails.fragmentHint
? newDetails.fragments.concat(newDetails.fragmentHint)
: newDetails.fragments;
const oldFrags = oldDetails.fragmentHint
? oldDetails.fragments.concat(oldDetails.fragmentHint)
: oldDetails.fragments;
for (let i = start; i <= end; i++) {
const oldFrag = oldFrags[delta + i];
let newFrag = newFrags[i];
if (skippedSegments && !newFrag && i < skippedSegments) {
// Fill in skipped segments in delta playlist
newFrag = newDetails.fragments[i] = oldFrag;
}
if (oldFrag && newFrag) {
intersectionFn(oldFrag, newFrag);
}
}
}
export function adjustSliding(
oldDetails: LevelDetails,
newDetails: LevelDetails
): void {
const delta =
newDetails.startSN + newDetails.skippedSegments - oldDetails.startSN;
const oldFragments = oldDetails.fragments;
if (delta < 0 || delta >= oldFragments.length) {
return;
}
addSliding(newDetails, oldFragments[delta].start);
}
export function addSliding(details: LevelDetails, start: number) {
if (start) {
const fragments = details.fragments;
for (let i = details.skippedSegments; i < fragments.length; i++) {
fragments[i].start += start;
}
if (details.fragmentHint) {
details.fragmentHint.start += start;
}
}
}
export function computeReloadInterval(
newDetails: LevelDetails,
stats: LoaderStats
): number {
const reloadInterval = 1000 * newDetails.levelTargetDuration;
const reloadIntervalAfterMiss = reloadInterval / 2;
const timeSinceLastModified = newDetails.age;
const useLastModified =
timeSinceLastModified > 0 && timeSinceLastModified < reloadInterval * 3;
const roundTrip = stats.loading.end - stats.loading.start;
let estimatedTimeUntilUpdate;
let availabilityDelay = newDetails.availabilityDelay;
// let estimate = 'average';
if (newDetails.updated === false) {
if (useLastModified) {
// estimate = 'miss round trip';
// We should have had a hit so try again in the time it takes to get a response,
// but no less than 1/3 second.
const minRetry = 333 * newDetails.misses;
estimatedTimeUntilUpdate = Math.max(
Math.min(reloadIntervalAfterMiss, roundTrip * 2),
minRetry
);
newDetails.availabilityDelay =
(newDetails.availabilityDelay || 0) + estimatedTimeUntilUpdate;
} else {
// estimate = 'miss half average';
// follow the HLS spec: if the client reloads a Playlist file and finds that it has
// not changed, then it MUST wait for a period of one-half the target
// duration before retrying.
estimatedTimeUntilUpdate = reloadIntervalAfterMiss;
}
} else if (useLastModified) {
// estimate = 'next modified date';
// Get the closest we've been to timeSinceLastModified on update
availabilityDelay = Math.min(
availabilityDelay || reloadInterval / 2,
timeSinceLastModified
);
newDetails.availabilityDelay = availabilityDelay;
estimatedTimeUntilUpdate =
availabilityDelay + reloadInterval - timeSinceLastModified;
} else {
estimatedTimeUntilUpdate = reloadInterval - roundTrip;
}
// console.log(`[computeReloadInterval] live reload ${newDetails.updated ? 'REFRESHED' : 'MISSED'}`,
// '\n method', estimate,
// '\n estimated time until update =>', estimatedTimeUntilUpdate,
// '\n average target duration', reloadInterval,
// '\n time since modified', timeSinceLastModified,
// '\n time round trip', roundTrip,
// '\n availability delay', availabilityDelay);
return Math.round(estimatedTimeUntilUpdate);
}
export function getFragmentWithSN(
level: Level,
sn: number,
fragCurrent: Fragment | null
): Fragment | null {
if (!level || !level.details) {
return null;
}
const levelDetails = level.details;
let fragment: Fragment | undefined =
levelDetails.fragments[sn - levelDetails.startSN];
if (fragment) {
return fragment;
}
fragment = levelDetails.fragmentHint;
if (fragment && fragment.sn === sn) {
return fragment;
}
if (sn < levelDetails.startSN && fragCurrent && fragCurrent.sn === sn) {
return fragCurrent;
}
return null;
}
export function getPartWith(
level: Level,
sn: number,
partIndex: number
): Part | null {
if (!level || !level.details) {
return null;
}
const partList = level.details.partList;
if (partList) {
for (let i = partList.length; i--; ) {
const part = partList[i];
if (part.index === partIndex && part.fragment.sn === sn) {
return part;
}
}
}
return null;
}
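A worked sketch of the reload scheduling in computeReloadInterval() above, ignoring the availabilityDelay refinement (the helper name and numbers are hypothetical):

function reloadDelay(updated: boolean, useLastModified: boolean, reloadInterval: number, roundTrip: number, misses: number): number {
  const half = reloadInterval / 2;
  if (!updated) {
    // Miss: retry quickly when response dates are usable, else wait half (per the HLS spec).
    return useLastModified ? Math.max(Math.min(half, roundTrip * 2), 333 * misses) : half;
  }
  // Hit: aim for roughly one target duration after the last update.
  return reloadInterval - roundTrip;
}
// reloadDelay(true, false, 4000, 250, 0)  -> 3750 (fresh playlist)
// reloadDelay(false, true, 4000, 250, 1)  -> 500  (miss with usable dates)
// reloadDelay(false, false, 4000, 250, 1) -> 2000 (miss, no dates)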

1356
node_modules/hls.js/src/controller/stream-controller.ts generated vendored Normal file

File diff suppressed because it is too large

420
node_modules/hls.js/src/controller/subtitle-stream-controller.ts generated vendored Normal file
View file

@ -0,0 +1,420 @@
import { Events } from '../events';
import { logger } from '../utils/logger';
import { BufferHelper } from '../utils/buffer-helper';
import { findFragmentByPTS } from './fragment-finders';
import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
import { addSliding } from './level-helper';
import { FragmentState } from './fragment-tracker';
import BaseStreamController, { State } from './base-stream-controller';
import { PlaylistLevelType } from '../types/loader';
import { Level } from '../types/level';
import type { FragmentTracker } from './fragment-tracker';
import type { NetworkComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import type { LevelDetails } from '../loader/level-details';
import type { Fragment } from '../loader/fragment';
import type {
ErrorData,
FragLoadedData,
SubtitleFragProcessed,
SubtitleTracksUpdatedData,
TrackLoadedData,
TrackSwitchedData,
BufferFlushingData,
LevelLoadedData,
} from '../types/events';
const TICK_INTERVAL = 500; // how often to tick in ms
interface TimeRange {
start: number;
end: number;
}
export class SubtitleStreamController
extends BaseStreamController
implements NetworkComponentAPI
{
protected levels: Array<Level> = [];
private currentTrackId: number = -1;
private tracksBuffered: Array<TimeRange[]> = [];
private mainDetails: LevelDetails | null = null;
constructor(hls: Hls, fragmentTracker: FragmentTracker) {
super(hls, fragmentTracker, '[subtitle-stream-controller]');
this._registerListeners();
}
protected onHandlerDestroying() {
this._unregisterListeners();
this.mainDetails = null;
}
private _registerListeners() {
const { hls } = this;
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.on(Events.ERROR, this.onError, this);
hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
hls.on(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
hls.on(Events.SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
}
private _unregisterListeners() {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
hls.off(Events.ERROR, this.onError, this);
hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
hls.off(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
hls.off(Events.SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
}
startLoad() {
this.stopLoad();
this.state = State.IDLE;
this.setInterval(TICK_INTERVAL);
this.tick();
}
onManifestLoading() {
this.mainDetails = null;
this.fragmentTracker.removeAllFragments();
}
onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
this.mainDetails = data.details;
}
onSubtitleFragProcessed(
event: Events.SUBTITLE_FRAG_PROCESSED,
data: SubtitleFragProcessed
) {
const { frag, success } = data;
this.fragPrevious = frag;
this.state = State.IDLE;
if (!success) {
return;
}
const buffered = this.tracksBuffered[this.currentTrackId];
if (!buffered) {
return;
}
// Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo
// so we can re-use the logic used to detect how much has been buffered
let timeRange: TimeRange | undefined;
const fragStart = frag.start;
for (let i = 0; i < buffered.length; i++) {
if (fragStart >= buffered[i].start && fragStart <= buffered[i].end) {
timeRange = buffered[i];
break;
}
}
const fragEnd = frag.start + frag.duration;
if (timeRange) {
timeRange.end = fragEnd;
} else {
timeRange = {
start: fragStart,
end: fragEnd,
};
buffered.push(timeRange);
}
this.fragmentTracker.fragBuffered(frag);
}
onBufferFlushing(event: Events.BUFFER_FLUSHING, data: BufferFlushingData) {
const { startOffset, endOffset } = data;
if (startOffset === 0 && endOffset !== Number.POSITIVE_INFINITY) {
const { currentTrackId, levels } = this;
if (
!levels.length ||
!levels[currentTrackId] ||
!levels[currentTrackId].details
) {
return;
}
const trackDetails = levels[currentTrackId].details as LevelDetails;
const targetDuration = trackDetails.targetduration;
const endOffsetSubtitles = endOffset - targetDuration;
if (endOffsetSubtitles <= 0) {
return;
}
data.endOffsetSubtitles = Math.max(0, endOffsetSubtitles);
this.tracksBuffered.forEach((buffered) => {
for (let i = 0; i < buffered.length; ) {
if (buffered[i].end <= endOffsetSubtitles) {
buffered.shift();
continue;
} else if (buffered[i].start < endOffsetSubtitles) {
buffered[i].start = endOffsetSubtitles;
} else {
break;
}
i++;
}
});
this.fragmentTracker.removeFragmentsInRange(
startOffset,
endOffsetSubtitles,
PlaylistLevelType.SUBTITLE
);
}
}
// If something goes wrong, proceed to next frag, if we were processing one.
onError(event: Events.ERROR, data: ErrorData) {
const frag = data.frag;
// ignore errors not related to a subtitle fragment
if (!frag || frag.type !== PlaylistLevelType.SUBTITLE) {
return;
}
if (this.fragCurrent?.loader) {
this.fragCurrent.loader.abort();
}
this.state = State.IDLE;
}
// Got all new subtitle levels.
onSubtitleTracksUpdated(
event: Events.SUBTITLE_TRACKS_UPDATED,
{ subtitleTracks }: SubtitleTracksUpdatedData
) {
this.tracksBuffered = [];
this.levels = subtitleTracks.map(
(mediaPlaylist) => new Level(mediaPlaylist)
);
this.fragmentTracker.removeAllFragments();
this.fragPrevious = null;
this.levels.forEach((level: Level) => {
this.tracksBuffered[level.id] = [];
});
this.mediaBuffer = null;
}
onSubtitleTrackSwitch(
event: Events.SUBTITLE_TRACK_SWITCH,
data: TrackSwitchedData
) {
this.currentTrackId = data.id;
if (!this.levels.length || this.currentTrackId === -1) {
this.clearInterval();
return;
}
// Check if track has the necessary details to load fragments
const currentTrack = this.levels[this.currentTrackId];
if (currentTrack?.details) {
this.mediaBuffer = this.mediaBufferTimeRanges;
} else {
this.mediaBuffer = null;
}
if (currentTrack) {
this.setInterval(TICK_INTERVAL);
}
}
// Got a new set of subtitle fragments.
onSubtitleTrackLoaded(
event: Events.SUBTITLE_TRACK_LOADED,
data: TrackLoadedData
) {
const { details: newDetails, id: trackId } = data;
const { currentTrackId, levels } = this;
if (!levels.length) {
return;
}
const track: Level = levels[currentTrackId];
if (trackId >= levels.length || trackId !== currentTrackId || !track) {
return;
}
this.mediaBuffer = this.mediaBufferTimeRanges;
if (newDetails.live || track.details?.live) {
const mainDetails = this.mainDetails;
if (newDetails.deltaUpdateFailed || !mainDetails) {
return;
}
const mainSlidingStartFragment = mainDetails.fragments[0];
if (!track.details) {
if (newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) {
alignMediaPlaylistByPDT(newDetails, mainDetails);
} else if (mainSlidingStartFragment) {
// line up live playlist with main so that fragments in range are loaded
addSliding(newDetails, mainSlidingStartFragment.start);
}
} else {
const sliding = this.alignPlaylists(newDetails, track.details);
if (sliding === 0 && mainSlidingStartFragment) {
// realign with main when there is no overlap with last refresh
addSliding(newDetails, mainSlidingStartFragment.start);
}
}
}
track.details = newDetails;
this.levelLastLoaded = trackId;
// trigger handler right now
this.tick();
// If playlist is misaligned because of bad PDT or drift, delete details to resync with main on reload
if (
newDetails.live &&
!this.fragCurrent &&
this.media &&
this.state === State.IDLE
) {
const foundFrag = findFragmentByPTS(
null,
newDetails.fragments,
this.media.currentTime,
0
);
if (!foundFrag) {
this.warn('Subtitle playlist not aligned with playback');
track.details = undefined;
}
}
}
_handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
const { frag, payload } = fragLoadedData;
const decryptData = frag.decryptdata;
const hls = this.hls;
if (this.fragContextChanged(frag)) {
return;
}
// check to see if the payload needs to be decrypted
if (
payload &&
payload.byteLength > 0 &&
decryptData &&
decryptData.key &&
decryptData.iv &&
decryptData.method === 'AES-128'
) {
const startTime = performance.now();
// decrypt the subtitles
this.decrypter
.webCryptoDecrypt(
new Uint8Array(payload),
decryptData.key.buffer,
decryptData.iv.buffer
)
.then((decryptedData) => {
const endTime = performance.now();
hls.trigger(Events.FRAG_DECRYPTED, {
frag,
payload: decryptedData,
stats: {
tstart: startTime,
tdecrypt: endTime,
},
});
});
}
}
doTick() {
if (!this.media) {
this.state = State.IDLE;
return;
}
if (this.state === State.IDLE) {
const { currentTrackId, levels } = this;
if (
!levels.length ||
!levels[currentTrackId] ||
!levels[currentTrackId].details
) {
return;
}
// Expand range of subs loaded by one target-duration in either direction to make up for misaligned playlists
const trackDetails = levels[currentTrackId].details as LevelDetails;
const targetDuration = trackDetails.targetduration;
const { config, media } = this;
const bufferedInfo = BufferHelper.bufferedInfo(
this.mediaBufferTimeRanges,
media.currentTime - targetDuration,
config.maxBufferHole
);
const { end: targetBufferTime, len: bufferLen } = bufferedInfo;
const maxBufLen = this.getMaxBufferLength() + targetDuration;
if (bufferLen > maxBufLen) {
return;
}
console.assert(
trackDetails,
'Subtitle track details are defined on idle subtitle stream controller tick'
);
const fragments = trackDetails.fragments;
const fragLen = fragments.length;
const end = trackDetails.edge;
let foundFrag;
const fragPrevious = this.fragPrevious;
if (targetBufferTime < end) {
const { maxFragLookUpTolerance } = config;
foundFrag = findFragmentByPTS(
fragPrevious,
fragments,
targetBufferTime,
maxFragLookUpTolerance
);
if (
!foundFrag &&
fragPrevious &&
fragPrevious.start < fragments[0].start
) {
foundFrag = fragments[0];
}
} else {
foundFrag = fragments[fragLen - 1];
}
if (foundFrag?.encrypted) {
this.loadKey(foundFrag, trackDetails);
} else if (
foundFrag &&
this.fragmentTracker.getState(foundFrag) === FragmentState.NOT_LOADED
) {
// only load if fragment is not loaded
this.loadFragment(foundFrag, trackDetails, targetBufferTime);
}
}
}
protected loadFragment(
frag: Fragment,
levelDetails: LevelDetails,
targetBufferTime: number
) {
this.fragCurrent = frag;
super.loadFragment(frag, levelDetails, targetBufferTime);
}
get mediaBufferTimeRanges(): TimeRange[] {
return this.tracksBuffered[this.currentTrackId] || [];
}
}
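The buffered-range bookkeeping in onSubtitleFragProcessed() above reduces to a small merge step; a condensed sketch:

interface BufferedRange { start: number; end: number; }
function markSubtitleBuffered(buffered: BufferedRange[], fragStart: number, fragEnd: number): void {
  // Extend the range that already contains the fragment start...
  for (const range of buffered) {
    if (fragStart >= range.start && fragStart <= range.end) {
      range.end = fragEnd;
      return;
    }
  }
  // ...otherwise record a brand new range.
  buffered.push({ start: fragStart, end: fragEnd });
}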

419
node_modules/hls.js/src/controller/subtitle-track-controller.ts generated vendored Normal file
View file

@ -0,0 +1,419 @@
import { Events } from '../events';
import { clearCurrentCues } from '../utils/texttrack-utils';
import BasePlaylistController from './base-playlist-controller';
import type { HlsUrlParameters } from '../types/level';
import type Hls from '../hls';
import type {
TrackLoadedData,
MediaAttachedData,
SubtitleTracksUpdatedData,
ManifestParsedData,
LevelSwitchingData,
} from '../types/events';
import type { MediaPlaylist } from '../types/media-playlist';
import { ErrorData, LevelLoadingData } from '../types/events';
import { PlaylistContextType } from '../types/loader';
class SubtitleTrackController extends BasePlaylistController {
private media: HTMLMediaElement | null = null;
private tracks: MediaPlaylist[] = [];
private groupId: string | null = null;
private tracksInGroup: MediaPlaylist[] = [];
private trackId: number = -1;
private selectDefaultTrack: boolean = true;
private queuedDefaultTrack: number = -1;
private trackChangeListener: () => void = () => this.onTextTracksChanged();
private asyncPollTrackChange: () => void = () => this.pollTrackChange(0);
private useTextTrackPolling: boolean = false;
private subtitlePollingInterval: number = -1;
public subtitleDisplay: boolean = true; // Enable/disable subtitle display rendering
constructor(hls: Hls) {
super(hls, '[subtitle-track-controller]');
this.registerListeners();
}
public destroy() {
this.unregisterListeners();
this.tracks.length = 0;
this.tracksInGroup.length = 0;
this.trackChangeListener = this.asyncPollTrackChange = null as any;
super.destroy();
}
private registerListeners() {
const { hls } = this;
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
hls.on(Events.ERROR, this.onError, this);
}
private unregisterListeners() {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
hls.off(Events.ERROR, this.onError, this);
}
// Listen for subtitle track change, then extract the current track ID.
protected onMediaAttached(
event: Events.MEDIA_ATTACHED,
data: MediaAttachedData
): void {
this.media = data.media;
if (!this.media) {
return;
}
if (this.queuedDefaultTrack > -1) {
this.subtitleTrack = this.queuedDefaultTrack;
this.queuedDefaultTrack = -1;
}
this.useTextTrackPolling = !(
this.media.textTracks && 'onchange' in this.media.textTracks
);
if (this.useTextTrackPolling) {
this.pollTrackChange(500);
} else {
this.media.textTracks.addEventListener(
'change',
this.asyncPollTrackChange
);
}
}
private pollTrackChange(timeout: number) {
self.clearInterval(this.subtitlePollingInterval);
this.subtitlePollingInterval = self.setInterval(
this.trackChangeListener,
timeout
);
}
protected onMediaDetaching(): void {
if (!this.media) {
return;
}
self.clearInterval(this.subtitlePollingInterval);
if (!this.useTextTrackPolling) {
this.media.textTracks.removeEventListener(
'change',
this.asyncPollTrackChange
);
}
if (this.trackId > -1) {
this.queuedDefaultTrack = this.trackId;
}
const textTracks = filterSubtitleTracks(this.media.textTracks);
// Clear loaded cues from the tracks on media detachment
textTracks.forEach((track) => {
clearCurrentCues(track);
});
// Disable all subtitle tracks before detachment so when reattached only tracks in that content are enabled.
this.subtitleTrack = -1;
this.media = null;
}
protected onManifestLoading(): void {
this.tracks = [];
this.groupId = null;
this.tracksInGroup = [];
this.trackId = -1;
this.selectDefaultTrack = true;
}
// Fired whenever a new manifest is loaded.
protected onManifestParsed(
event: Events.MANIFEST_PARSED,
data: ManifestParsedData
): void {
this.tracks = data.subtitleTracks;
}
protected onSubtitleTrackLoaded(
event: Events.SUBTITLE_TRACK_LOADED,
data: TrackLoadedData
): void {
const { id, details } = data;
const { trackId } = this;
const currentTrack = this.tracksInGroup[trackId];
if (!currentTrack) {
this.warn(`Invalid subtitle track id ${id}`);
return;
}
const curDetails = currentTrack.details;
currentTrack.details = data.details;
this.log(
`subtitle track ${id} loaded [${details.startSN}-${details.endSN}]`
);
if (id === this.trackId) {
this.retryCount = 0;
this.playlistLoaded(id, data, curDetails);
}
}
protected onLevelLoading(
event: Events.LEVEL_LOADING,
data: LevelLoadingData
): void {
this.switchLevel(data.level);
}
protected onLevelSwitching(
event: Events.LEVEL_SWITCHING,
data: LevelSwitchingData
): void {
this.switchLevel(data.level);
}
private switchLevel(levelIndex: number) {
const levelInfo = this.hls.levels[levelIndex];
if (!levelInfo?.textGroupIds) {
return;
}
const textGroupId = levelInfo.textGroupIds[levelInfo.urlId];
if (this.groupId !== textGroupId) {
const lastTrack = this.tracksInGroup
? this.tracksInGroup[this.trackId]
: undefined;
const subtitleTracks = this.tracks.filter(
(track): boolean => !textGroupId || track.groupId === textGroupId
);
this.tracksInGroup = subtitleTracks;
const initialTrackId =
this.findTrackId(lastTrack?.name) || this.findTrackId();
this.groupId = textGroupId;
const subtitleTracksUpdated: SubtitleTracksUpdatedData = {
subtitleTracks,
};
this.log(
`Updating subtitle tracks, ${subtitleTracks.length} track(s) found in "${textGroupId}" group-id`
);
this.hls.trigger(Events.SUBTITLE_TRACKS_UPDATED, subtitleTracksUpdated);
if (initialTrackId !== -1) {
this.setSubtitleTrack(initialTrackId, lastTrack);
}
}
}
private findTrackId(name?: string): number {
const textTracks = this.tracksInGroup;
for (let i = 0; i < textTracks.length; i++) {
const track = textTracks[i];
if (!this.selectDefaultTrack || track.default) {
if (!name || name === track.name) {
return track.id;
}
}
}
return -1;
}
protected onError(event: Events.ERROR, data: ErrorData): void {
super.onError(event, data);
if (data.fatal || !data.context) {
return;
}
if (
data.context.type === PlaylistContextType.SUBTITLE_TRACK &&
data.context.id === this.trackId &&
data.context.groupId === this.groupId
) {
this.retryLoadingOrFail(data);
}
}
/** get alternate subtitle tracks list from playlist **/
get subtitleTracks(): MediaPlaylist[] {
return this.tracksInGroup;
}
/** get/set index of the selected subtitle track (based on index in subtitle track lists) **/
get subtitleTrack(): number {
return this.trackId;
}
set subtitleTrack(newId: number) {
this.selectDefaultTrack = false;
const lastTrack = this.tracksInGroup
? this.tracksInGroup[this.trackId]
: undefined;
this.setSubtitleTrack(newId, lastTrack);
}
protected loadPlaylist(hlsUrlParameters?: HlsUrlParameters): void {
const currentTrack = this.tracksInGroup[this.trackId];
if (this.shouldLoadTrack(currentTrack)) {
const id = currentTrack.id;
const groupId = currentTrack.groupId as string;
let url = currentTrack.url;
if (hlsUrlParameters) {
try {
url = hlsUrlParameters.addDirectives(url);
} catch (error) {
this.warn(
`Could not construct new URL with HLS Delivery Directives: ${error}`
);
}
}
this.log(`Loading subtitle playlist for id ${id}`);
this.hls.trigger(Events.SUBTITLE_TRACK_LOADING, {
url,
id,
groupId,
deliveryDirectives: hlsUrlParameters || null,
});
}
}
/**
* Disables the old subtitleTrack and sets current mode on the next subtitleTrack.
* This operates on the DOM textTracks.
* A value of -1 will disable all subtitle tracks.
*/
private toggleTrackModes(newId: number): void {
const { media, subtitleDisplay, trackId } = this;
if (!media) {
return;
}
const textTracks = filterSubtitleTracks(media.textTracks);
const groupTracks = textTracks.filter(
(track) => (track as any).groupId === this.groupId
);
if (newId === -1) {
[].slice.call(textTracks).forEach((track) => {
track.mode = 'disabled';
});
} else {
const oldTrack = groupTracks[trackId];
if (oldTrack) {
oldTrack.mode = 'disabled';
}
}
const nextTrack = groupTracks[newId];
if (nextTrack) {
nextTrack.mode = subtitleDisplay ? 'showing' : 'hidden';
}
}
/**
* This method is responsible for validating the subtitle index and periodically reloading if live.
* Dispatches the SUBTITLE_TRACK_SWITCH event, which instructs the subtitle-stream-controller to load the selected track.
*/
private setSubtitleTrack(
newId: number,
lastTrack: MediaPlaylist | undefined
): void {
const tracks = this.tracksInGroup;
// setting this.subtitleTrack will trigger internal logic
// if media has not been attached yet, it will fail
// we keep a reference to the default track id
// and we'll set subtitleTrack when onMediaAttached is triggered
if (!this.media) {
this.queuedDefaultTrack = newId;
return;
}
if (this.trackId !== newId) {
this.toggleTrackModes(newId);
}
// exit if track id is already set or invalid
if (
(this.trackId === newId && (newId === -1 || tracks[newId]?.details)) ||
newId < -1 ||
newId >= tracks.length
) {
return;
}
// stopping live reloading timer if any
this.clearTimer();
const track = tracks[newId];
this.log(`Switching to subtitle track ${newId}`);
this.trackId = newId;
if (track) {
const { id, groupId = '', name, type, url } = track;
this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH, {
id,
groupId,
name,
type,
url,
});
const hlsUrlParameters = this.switchParams(track.url, lastTrack?.details);
this.loadPlaylist(hlsUrlParameters);
} else {
// switch to -1
this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH, { id: newId });
}
}
private onTextTracksChanged(): void {
if (!this.useTextTrackPolling) {
self.clearInterval(this.subtitlePollingInterval);
}
// Media is undefined when switching streams via loadSource()
if (!this.media || !this.hls.config.renderTextTracksNatively) {
return;
}
let trackId: number = -1;
const tracks = filterSubtitleTracks(this.media.textTracks);
for (let id = 0; id < tracks.length; id++) {
if (tracks[id].mode === 'hidden') {
// Do not break, in case a following track is in 'showing' mode.
trackId = id;
} else if (tracks[id].mode === 'showing') {
trackId = id;
break;
}
}
// Assigning subtitleTrack will invoke the setter logic.
if (this.subtitleTrack !== trackId) {
this.subtitleTrack = trackId;
}
}
}
function filterSubtitleTracks(textTrackList: TextTrackList): TextTrack[] {
const tracks: TextTrack[] = [];
for (let i = 0; i < textTrackList.length; i++) {
const track = textTrackList[i];
// Edge adds a track without a label; we don't want to use it
if (track.kind === 'subtitles' && track.label) {
tracks.push(textTrackList[i]);
}
}
return tracks;
}
export default SubtitleTrackController;
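A condensed sketch of the mode rules that toggleTrackModes() applies to DOM text tracks. This collapses the old-track/new-track bookkeeping into one pass, and the helper name is illustrative:

function applyTrackModes(tracks: TextTrack[], newId: number, subtitleDisplay: boolean): void {
  tracks.forEach((track, id) => {
    if (id === newId) {
      // Selected track renders natively, or stays hidden but still fires cue events.
      track.mode = subtitleDisplay ? 'showing' : 'hidden';
    } else {
      track.mode = 'disabled'; // newId === -1 selects nothing, so everything ends up disabled
    }
  });
}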

721
node_modules/hls.js/src/controller/timeline-controller.ts generated vendored Normal file
View file

@ -0,0 +1,721 @@
import { Events } from '../events';
import Cea608Parser, { CaptionScreen } from '../utils/cea-608-parser';
import OutputFilter from '../utils/output-filter';
import { parseWebVTT } from '../utils/webvtt-parser';
import {
sendAddTrackEvent,
clearCurrentCues,
addCueToTrack,
removeCuesInRange,
} from '../utils/texttrack-utils';
import { parseIMSC1, IMSC1_CODEC } from '../utils/imsc1-ttml-parser';
import { PlaylistLevelType } from '../types/loader';
import { Fragment } from '../loader/fragment';
import {
FragParsingUserdataData,
FragLoadedData,
FragDecryptedData,
MediaAttachingData,
ManifestLoadedData,
InitPTSFoundData,
SubtitleTracksUpdatedData,
BufferFlushingData,
FragLoadingData,
} from '../types/events';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import type { ComponentAPI } from '../types/component-api';
import type { HlsConfig } from '../config';
import type { CuesInterface } from '../utils/cues';
import type { MediaPlaylist } from '../types/media-playlist';
import type { VTTCCs } from '../types/vtt';
type TrackProperties = {
label: string;
languageCode: string;
media?: MediaPlaylist;
};
type NonNativeCaptionsTrack = {
_id?: string;
label: string;
kind: string;
default: boolean;
closedCaptions?: MediaPlaylist;
subtitleTrack?: MediaPlaylist;
};
export class TimelineController implements ComponentAPI {
private hls: Hls;
private media: HTMLMediaElement | null = null;
private config: HlsConfig;
private enabled: boolean = true;
private Cues: CuesInterface;
private textTracks: Array<TextTrack> = [];
private tracks: Array<MediaPlaylist> = [];
private initPTS: Array<number> = [];
private timescale: Array<number> = [];
private unparsedVttFrags: Array<FragLoadedData | FragDecryptedData> = [];
private captionsTracks: Record<string, TextTrack> = {};
private nonNativeCaptionsTracks: Record<string, NonNativeCaptionsTrack> = {};
private cea608Parser1!: Cea608Parser;
private cea608Parser2!: Cea608Parser;
private lastSn: number = -1;
private lastPartIndex: number = -1;
private prevCC: number = -1;
private vttCCs: VTTCCs = newVTTCCs();
private captionsProperties: {
textTrack1: TrackProperties;
textTrack2: TrackProperties;
textTrack3: TrackProperties;
textTrack4: TrackProperties;
};
constructor(hls: Hls) {
this.hls = hls;
this.config = hls.config;
this.Cues = hls.config.cueHandler;
this.captionsProperties = {
textTrack1: {
label: this.config.captionsTextTrack1Label,
languageCode: this.config.captionsTextTrack1LanguageCode,
},
textTrack2: {
label: this.config.captionsTextTrack2Label,
languageCode: this.config.captionsTextTrack2LanguageCode,
},
textTrack3: {
label: this.config.captionsTextTrack3Label,
languageCode: this.config.captionsTextTrack3LanguageCode,
},
textTrack4: {
label: this.config.captionsTextTrack4Label,
languageCode: this.config.captionsTextTrack4LanguageCode,
},
};
if (this.config.enableCEA708Captions) {
const channel1 = new OutputFilter(this, 'textTrack1');
const channel2 = new OutputFilter(this, 'textTrack2');
const channel3 = new OutputFilter(this, 'textTrack3');
const channel4 = new OutputFilter(this, 'textTrack4');
this.cea608Parser1 = new Cea608Parser(1, channel1, channel2);
this.cea608Parser2 = new Cea608Parser(3, channel3, channel4);
}
hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
hls.on(Events.FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
hls.on(Events.FRAG_DECRYPTED, this.onFragDecrypted, this);
hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
hls.on(Events.SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
}
public destroy(): void {
const { hls } = this;
hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
hls.off(Events.FRAG_LOADING, this.onFragLoading, this);
hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
hls.off(Events.FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
hls.off(Events.FRAG_DECRYPTED, this.onFragDecrypted, this);
hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
hls.off(Events.SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
// @ts-ignore
this.hls = this.config = this.cea608Parser1 = this.cea608Parser2 = null;
}
public addCues(
trackName: string,
startTime: number,
endTime: number,
screen: CaptionScreen,
cueRanges: Array<[number, number]>
) {
// skip cues which overlap more than 50% with previously parsed time ranges
let merged = false;
for (let i = cueRanges.length; i--; ) {
const cueRange = cueRanges[i];
const overlap = intersection(
cueRange[0],
cueRange[1],
startTime,
endTime
);
if (overlap >= 0) {
cueRange[0] = Math.min(cueRange[0], startTime);
cueRange[1] = Math.max(cueRange[1], endTime);
merged = true;
if (overlap / (endTime - startTime) > 0.5) {
return;
}
}
}
if (!merged) {
cueRanges.push([startTime, endTime]);
}
if (this.config.renderTextTracksNatively) {
const track = this.captionsTracks[trackName];
this.Cues.newCue(track, startTime, endTime, screen);
} else {
const cues = this.Cues.newCue(null, startTime, endTime, screen);
this.hls.trigger(Events.CUES_PARSED, {
type: 'captions',
cues,
track: trackName,
});
}
}
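// Worked example of the overlap rule in addCues: with cueRanges = [[0, 10]],
// a new cue [4, 12] intersects for 6 of its 8 seconds (75% > 50%), so the
// range widens to [0, 12] and the cue is dropped; a new cue [9, 20] overlaps
// for only 1 of its 11 seconds, so the range widens and the cue is appended.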
// Triggered when an initial PTS is found; used for synchronisation of WebVTT.
private onInitPtsFound(
event: Events.INIT_PTS_FOUND,
{ frag, id, initPTS, timescale }: InitPTSFoundData
) {
const { unparsedVttFrags } = this;
if (id === 'main') {
this.initPTS[frag.cc] = initPTS;
this.timescale[frag.cc] = timescale;
}
// Due to asynchronous processing, initial PTS may arrive later than the first VTT fragments are loaded.
// Parse any unparsed fragments upon receiving the initial PTS.
if (unparsedVttFrags.length) {
this.unparsedVttFrags = [];
unparsedVttFrags.forEach((frag) => {
this.onFragLoaded(Events.FRAG_LOADED, frag as FragLoadedData);
});
}
}
private getExistingTrack(trackName: string): TextTrack | null {
const { media } = this;
if (media) {
for (let i = 0; i < media.textTracks.length; i++) {
const textTrack = media.textTracks[i];
if (textTrack[trackName]) {
return textTrack;
}
}
}
return null;
}
public createCaptionsTrack(trackName: string) {
if (this.config.renderTextTracksNatively) {
this.createNativeTrack(trackName);
} else {
this.createNonNativeTrack(trackName);
}
}
private createNativeTrack(trackName: string) {
if (this.captionsTracks[trackName]) {
return;
}
const { captionsProperties, captionsTracks, media } = this;
const { label, languageCode } = captionsProperties[trackName];
// Enable reuse of existing text track.
const existingTrack = this.getExistingTrack(trackName);
if (!existingTrack) {
const textTrack = this.createTextTrack('captions', label, languageCode);
if (textTrack) {
// Set a special property on the track so we know it's managed by Hls.js
textTrack[trackName] = true;
captionsTracks[trackName] = textTrack;
}
} else {
captionsTracks[trackName] = existingTrack;
clearCurrentCues(captionsTracks[trackName]);
sendAddTrackEvent(captionsTracks[trackName], media as HTMLMediaElement);
}
}
private createNonNativeTrack(trackName: string) {
if (this.nonNativeCaptionsTracks[trackName]) {
return;
}
// Create a list of a single track for the provider to consume
const trackProperties: TrackProperties = this.captionsProperties[trackName];
if (!trackProperties) {
return;
}
const label = trackProperties.label as string;
const track = {
_id: trackName,
label,
kind: 'captions',
default: trackProperties.media ? !!trackProperties.media.default : false,
closedCaptions: trackProperties.media,
};
this.nonNativeCaptionsTracks[trackName] = track;
this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND, { tracks: [track] });
}
private createTextTrack(
kind: TextTrackKind,
label: string,
lang?: string
): TextTrack | undefined {
const media = this.media;
if (!media) {
return;
}
return media.addTextTrack(kind, label, lang);
}
private onMediaAttaching(
event: Events.MEDIA_ATTACHING,
data: MediaAttachingData
) {
this.media = data.media;
this._cleanTracks();
}
private onMediaDetaching() {
const { captionsTracks } = this;
Object.keys(captionsTracks).forEach((trackName) => {
clearCurrentCues(captionsTracks[trackName]);
delete captionsTracks[trackName];
});
this.nonNativeCaptionsTracks = {};
}
private onManifestLoading() {
this.lastSn = -1; // Detect discontinuity in fragment parsing
this.lastPartIndex = -1;
this.prevCC = -1;
this.vttCCs = newVTTCCs(); // Detect discontinuity in subtitle manifests
this._cleanTracks();
this.tracks = [];
this.captionsTracks = {};
this.nonNativeCaptionsTracks = {};
this.textTracks = [];
this.unparsedVttFrags = this.unparsedVttFrags || [];
this.initPTS = [];
this.timescale = [];
if (this.cea608Parser1 && this.cea608Parser2) {
this.cea608Parser1.reset();
this.cea608Parser2.reset();
}
}
private _cleanTracks() {
// clear outdated subtitles
const { media } = this;
if (!media) {
return;
}
const textTracks = media.textTracks;
if (textTracks) {
for (let i = 0; i < textTracks.length; i++) {
clearCurrentCues(textTracks[i]);
}
}
}
private onSubtitleTracksUpdated(
event: Events.SUBTITLE_TRACKS_UPDATED,
data: SubtitleTracksUpdatedData
) {
this.textTracks = [];
const tracks: Array<MediaPlaylist> = data.subtitleTracks || [];
const hasIMSC1 = tracks.some((track) => track.textCodec === IMSC1_CODEC);
if (this.config.enableWebVTT || (hasIMSC1 && this.config.enableIMSC1)) {
const sameTracks =
this.tracks && tracks && this.tracks.length === tracks.length;
this.tracks = tracks || [];
if (this.config.renderTextTracksNatively) {
const inUseTracks = this.media ? this.media.textTracks : [];
this.tracks.forEach((track, index) => {
let textTrack: TextTrack | undefined;
if (index < inUseTracks.length) {
let inUseTrack: TextTrack | null = null;
for (let i = 0; i < inUseTracks.length; i++) {
if (canReuseVttTextTrack(inUseTracks[i], track)) {
inUseTrack = inUseTracks[i];
break;
}
}
// Reuse tracks with the same label, but do not reuse 608/708 tracks
if (inUseTrack) {
textTrack = inUseTrack;
}
}
if (textTrack) {
clearCurrentCues(textTrack);
} else {
textTrack = this.createTextTrack(
'subtitles',
track.name,
track.lang
);
if (textTrack) {
textTrack.mode = 'disabled';
}
}
if (textTrack) {
(textTrack as any).groupId = track.groupId;
this.textTracks.push(textTrack);
}
});
} else if (!sameTracks && this.tracks && this.tracks.length) {
// Create a list of tracks for the provider to consume
const tracksList = this.tracks.map((track) => {
return {
label: track.name,
kind: track.type.toLowerCase(),
default: track.default,
subtitleTrack: track,
};
});
this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND, {
tracks: tracksList,
});
}
}
}
private onManifestLoaded(
event: Events.MANIFEST_LOADED,
data: ManifestLoadedData
) {
if (this.config.enableCEA708Captions && data.captions) {
data.captions.forEach((captionsTrack) => {
const instreamIdMatch = /(?:CC|SERVICE)([1-4])/.exec(
captionsTrack.instreamId as string
);
if (!instreamIdMatch) {
return;
}
const trackName = `textTrack${instreamIdMatch[1]}`;
const trackProperties: TrackProperties =
this.captionsProperties[trackName];
if (!trackProperties) {
return;
}
trackProperties.label = captionsTrack.name;
if (captionsTrack.lang) {
// optional attribute
trackProperties.languageCode = captionsTrack.lang;
}
trackProperties.media = captionsTrack;
});
}
}
private onFragLoading(event: Events.FRAG_LOADING, data: FragLoadingData) {
const { cea608Parser1, cea608Parser2, lastSn, lastPartIndex } = this;
if (!this.enabled || !(cea608Parser1 && cea608Parser2)) {
return;
}
// if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack
if (data.frag.type === PlaylistLevelType.MAIN) {
const sn = data.frag.sn;
const partIndex = data?.part?.index ?? -1;
if (
!(
sn === lastSn + 1 ||
(sn === lastSn && partIndex === lastPartIndex + 1)
)
) {
cea608Parser1.reset();
cea608Parser2.reset();
}
this.lastSn = sn as number;
this.lastPartIndex = partIndex;
}
}
private onFragLoaded(
event: Events.FRAG_LOADED,
data: FragDecryptedData | FragLoadedData
) {
const { frag, payload } = data;
const { initPTS, unparsedVttFrags } = this;
if (frag.type === PlaylistLevelType.SUBTITLE) {
// If fragment is subtitle type, parse as WebVTT.
if (payload.byteLength) {
// We need an initial synchronisation PTS. Store fragments as long as none has arrived.
if (!Number.isFinite(initPTS[frag.cc])) {
unparsedVttFrags.push(data);
if (initPTS.length) {
// finish unsuccessfully, otherwise the subtitle-stream-controller could be blocked from loading new frags.
this.hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
success: false,
frag,
error: new Error('Missing initial subtitle PTS'),
});
}
return;
}
const decryptData = frag.decryptdata;
// fragment after decryption has a stats object
const decrypted = 'stats' in data;
// If the subtitles are not encrypted, parse VTTs now. Otherwise, we need to wait.
if (
decryptData == null ||
decryptData.key == null ||
decryptData.method !== 'AES-128' ||
decrypted
) {
const trackPlaylistMedia = this.tracks[frag.level];
const vttCCs = this.vttCCs;
if (!vttCCs[frag.cc]) {
vttCCs[frag.cc] = {
start: frag.start,
prevCC: this.prevCC,
new: true,
};
this.prevCC = frag.cc;
}
if (
trackPlaylistMedia &&
trackPlaylistMedia.textCodec === IMSC1_CODEC
) {
this._parseIMSC1(frag, payload);
} else {
this._parseVTTs(frag, payload, vttCCs);
}
}
} else {
// In case there is no payload, finish unsuccessfully.
this.hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
success: false,
frag,
error: new Error('Empty subtitle payload'),
});
}
}
}
private _parseIMSC1(frag: Fragment, payload: ArrayBuffer) {
const hls = this.hls;
parseIMSC1(
payload,
this.initPTS[frag.cc],
this.timescale[frag.cc],
(cues) => {
this._appendCues(cues, frag.level);
hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
success: true,
frag: frag,
});
},
(error) => {
logger.log(`Failed to parse IMSC1: ${error}`);
hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
success: false,
frag: frag,
error,
});
}
);
}
private _parseVTTs(frag: Fragment, payload: ArrayBuffer, vttCCs: any) {
const hls = this.hls;
// Parse the WebVTT file contents.
parseWebVTT(
payload,
this.initPTS[frag.cc],
this.timescale[frag.cc],
vttCCs,
frag.cc,
frag.start,
(cues) => {
this._appendCues(cues, frag.level);
hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
success: true,
frag: frag,
});
},
(error) => {
this._fallbackToIMSC1(frag, payload);
// Something went wrong while parsing. Trigger event with success false.
logger.log(`Failed to parse VTT cue: ${error}`);
hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
success: false,
frag: frag,
error,
});
}
);
}
private _fallbackToIMSC1(frag: Fragment, payload: ArrayBuffer) {
// If textCodec is unknown, try parsing as IMSC1. Set textCodec based on the result
const trackPlaylistMedia = this.tracks[frag.level];
if (!trackPlaylistMedia.textCodec) {
parseIMSC1(
payload,
this.initPTS[frag.cc],
this.timescale[frag.cc],
() => {
trackPlaylistMedia.textCodec = IMSC1_CODEC;
this._parseIMSC1(frag, payload);
},
() => {
trackPlaylistMedia.textCodec = 'wvtt';
}
);
}
}
private _appendCues(cues: VTTCue[], fragLevel: number) {
const hls = this.hls;
if (this.config.renderTextTracksNatively) {
const textTrack = this.textTracks[fragLevel];
// WebVTTParser.parse is an async method, and if the currently selected text track's mode
// is set to "disabled" before parsing finishes, currentTrack.cues will be null and
// calling cues.getCueById() would throw an exception.
// Because we bail out when the mode is "disabled", the `cues` accessed below can safely
// be assumed non-null.
if (textTrack.mode === 'disabled') {
return;
}
cues.forEach((cue) => addCueToTrack(textTrack, cue));
} else {
const currentTrack = this.tracks[fragLevel];
const track = currentTrack.default ? 'default' : 'subtitles' + fragLevel;
hls.trigger(Events.CUES_PARSED, { type: 'subtitles', cues, track });
}
}
private onFragDecrypted(
event: Events.FRAG_DECRYPTED,
data: FragDecryptedData
) {
const { frag } = data;
if (frag.type === PlaylistLevelType.SUBTITLE) {
if (!Number.isFinite(this.initPTS[frag.cc])) {
this.unparsedVttFrags.push(data as unknown as FragLoadedData);
return;
}
this.onFragLoaded(Events.FRAG_LOADED, data as unknown as FragLoadedData);
}
}
private onSubtitleTracksCleared() {
this.tracks = [];
this.captionsTracks = {};
}
private onFragParsingUserdata(
event: Events.FRAG_PARSING_USERDATA,
data: FragParsingUserdataData
) {
const { cea608Parser1, cea608Parser2 } = this;
if (!this.enabled || !(cea608Parser1 && cea608Parser2)) {
return;
}
// If the event contains captions (found in the bytes property), push all bytes into the parser immediately
// It will create the proper timestamps based on the PTS value
for (let i = 0; i < data.samples.length; i++) {
const ccBytes = data.samples[i].bytes;
if (ccBytes) {
const ccdatas = this.extractCea608Data(ccBytes);
cea608Parser1.addData(data.samples[i].pts, ccdatas[0]);
cea608Parser2.addData(data.samples[i].pts, ccdatas[1]);
}
}
}
onBufferFlushing(
event: Events.BUFFER_FLUSHING,
{ startOffset, endOffset, endOffsetSubtitles, type }: BufferFlushingData
) {
const { media } = this;
if (!media || media.currentTime < endOffset) {
return;
}
// Clear 608 caption cues from the captions TextTracks when the video back buffer is flushed
// Forward cues are never removed because we could lose streamed 608 content from recent fragments
if (!type || type === 'video') {
const { captionsTracks } = this;
Object.keys(captionsTracks).forEach((trackName) =>
removeCuesInRange(captionsTracks[trackName], startOffset, endOffset)
);
}
if (this.config.renderTextTracksNatively) {
// Clear VTT/IMSC1 subtitle cues from the subtitle TextTracks when the back buffer is flushed
if (startOffset === 0 && endOffsetSubtitles !== undefined) {
const { textTracks } = this;
Object.keys(textTracks).forEach((trackName) =>
removeCuesInRange(
textTracks[trackName],
startOffset,
endOffsetSubtitles
)
);
}
}
}
private extractCea608Data(byteArray: Uint8Array): number[][] {
const count = byteArray[0] & 31;
let position = 2;
const actualCCBytes: number[][] = [[], []];
for (let j = 0; j < count; j++) {
const tmpByte = byteArray[position++];
const ccbyte1 = 0x7f & byteArray[position++];
const ccbyte2 = 0x7f & byteArray[position++];
const ccValid = (4 & tmpByte) !== 0;
const ccType = 3 & tmpByte;
if (ccbyte1 === 0 && ccbyte2 === 0) {
continue;
}
if (ccValid) {
if (ccType === 0 || ccType === 1) {
actualCCBytes[ccType].push(ccbyte1);
actualCCBytes[ccType].push(ccbyte2);
}
}
}
return actualCCBytes;
}
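// Example: for byteArray = [0x42, 0x00, 0xfc, 0x94, 0x2c, 0xfd, 0x00, 0x00],
// count = 0x42 & 31 = 2. The first triplet (0xfc, 0x94, 0x2c) is valid with
// ccType 0 and yields the field-1 pair (0x14, 0x2c), the CEA-608 "erase
// displayed memory" command; the second triplet carries (0, 0) and is skipped.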
}
function canReuseVttTextTrack(inUseTrack, manifestTrack): boolean {
return (
inUseTrack &&
inUseTrack.label === manifestTrack.name &&
!(inUseTrack.textTrack1 || inUseTrack.textTrack2)
);
}
function intersection(x1: number, x2: number, y1: number, y2: number): number {
return Math.min(x2, y2) - Math.max(x1, y1);
}
function newVTTCCs(): VTTCCs {
return {
ccOffset: 0,
presentationOffset: 0,
0: {
start: 0,
prevCC: -1,
new: false,
},
};
}

13
node_modules/hls.js/src/crypt/aes-crypto.ts generated vendored Normal file

@ -0,0 +1,13 @@
export default class AESCrypto {
private subtle: SubtleCrypto;
private aesIV: ArrayBuffer;
constructor(subtle: SubtleCrypto, iv: ArrayBuffer) {
this.subtle = subtle;
this.aesIV = iv;
}
decrypt(data: ArrayBuffer, key: CryptoKey) {
return this.subtle.decrypt({ name: 'AES-CBC', iv: this.aesIV }, key, data);
}
}
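// A minimal usage sketch, assuming `rawKey`, `iv` and `encrypted` are
// ArrayBuffers obtained elsewhere; the raw key must first be imported as an
// AES-CBC CryptoKey:
//
//   async function decryptSegment(
//     rawKey: ArrayBuffer,
//     iv: ArrayBuffer,
//     encrypted: ArrayBuffer
//   ): Promise<ArrayBuffer> {
//     const key = await self.crypto.subtle.importKey(
//       'raw',
//       rawKey,
//       { name: 'AES-CBC' },
//       false,
//       ['decrypt']
//     );
//     return new AESCrypto(self.crypto.subtle, iv).decrypt(encrypted, key);
//   }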

337
node_modules/hls.js/src/crypt/aes-decryptor.ts generated vendored Normal file

@ -0,0 +1,337 @@
import { sliceUint8 } from '../utils/typed-array';
// PKCS7
export function removePadding(array: Uint8Array): Uint8Array {
const outputBytes = array.byteLength;
const paddingBytes =
outputBytes && new DataView(array.buffer).getUint8(outputBytes - 1);
if (paddingBytes) {
return sliceUint8(array, 0, outputBytes - paddingBytes);
}
return array;
}
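// Example: a decrypted block ending in ... 0x03 0x03 0x03 declares three
// bytes of PKCS7 padding, so removePadding() returns all but the last three
// bytes. Only the final byte is inspected; well-formed PKCS7 input is assumed.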
export default class AESDecryptor {
private rcon: Array<number> = [
0x0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
];
private subMix: Array<Uint32Array> = [
new Uint32Array(256),
new Uint32Array(256),
new Uint32Array(256),
new Uint32Array(256),
];
private invSubMix: Array<Uint32Array> = [
new Uint32Array(256),
new Uint32Array(256),
new Uint32Array(256),
new Uint32Array(256),
];
private sBox: Uint32Array = new Uint32Array(256);
private invSBox: Uint32Array = new Uint32Array(256);
private key: Uint32Array = new Uint32Array(0);
private ksRows: number = 0;
private keySize: number = 0;
private keySchedule!: Uint32Array;
private invKeySchedule!: Uint32Array;
constructor() {
this.initTable();
}
// Using view.getUint32() also swaps the byte order.
uint8ArrayToUint32Array_(arrayBuffer) {
const view = new DataView(arrayBuffer);
const newArray = new Uint32Array(4);
for (let i = 0; i < 4; i++) {
newArray[i] = view.getUint32(i * 4);
}
return newArray;
}
initTable() {
const sBox = this.sBox;
const invSBox = this.invSBox;
const subMix = this.subMix;
const subMix0 = subMix[0];
const subMix1 = subMix[1];
const subMix2 = subMix[2];
const subMix3 = subMix[3];
const invSubMix = this.invSubMix;
const invSubMix0 = invSubMix[0];
const invSubMix1 = invSubMix[1];
const invSubMix2 = invSubMix[2];
const invSubMix3 = invSubMix[3];
const d = new Uint32Array(256);
let x = 0;
let xi = 0;
let i = 0;
for (i = 0; i < 256; i++) {
if (i < 128) {
d[i] = i << 1;
} else {
d[i] = (i << 1) ^ 0x11b;
}
}
for (i = 0; i < 256; i++) {
let sx = xi ^ (xi << 1) ^ (xi << 2) ^ (xi << 3) ^ (xi << 4);
sx = (sx >>> 8) ^ (sx & 0xff) ^ 0x63;
sBox[x] = sx;
invSBox[sx] = x;
// Compute multiplication
const x2 = d[x];
const x4 = d[x2];
const x8 = d[x4];
// Compute sub/invSub bytes, mix columns tables
let t = (d[sx] * 0x101) ^ (sx * 0x1010100);
subMix0[x] = (t << 24) | (t >>> 8);
subMix1[x] = (t << 16) | (t >>> 16);
subMix2[x] = (t << 8) | (t >>> 24);
subMix3[x] = t;
// Compute inv sub bytes, inv mix columns tables
t = (x8 * 0x1010101) ^ (x4 * 0x10001) ^ (x2 * 0x101) ^ (x * 0x1010100);
invSubMix0[sx] = (t << 24) | (t >>> 8);
invSubMix1[sx] = (t << 16) | (t >>> 16);
invSubMix2[sx] = (t << 8) | (t >>> 24);
invSubMix3[sx] = t;
// Compute next counter
if (!x) {
x = xi = 1;
} else {
x = x2 ^ d[d[d[x8 ^ x2]]];
xi ^= d[d[xi]];
}
}
}
expandKey(keyBuffer: ArrayBuffer) {
// convert keyBuffer to Uint32Array
const key = this.uint8ArrayToUint32Array_(keyBuffer);
let sameKey = true;
let offset = 0;
while (offset < key.length && sameKey) {
sameKey = key[offset] === this.key[offset];
offset++;
}
if (sameKey) {
return;
}
this.key = key;
const keySize = (this.keySize = key.length);
if (keySize !== 4 && keySize !== 6 && keySize !== 8) {
throw new Error('Invalid aes key size=' + keySize);
}
const ksRows = (this.ksRows = (keySize + 6 + 1) * 4);
let ksRow;
let invKsRow;
const keySchedule = (this.keySchedule = new Uint32Array(ksRows));
const invKeySchedule = (this.invKeySchedule = new Uint32Array(ksRows));
const sbox = this.sBox;
const rcon = this.rcon;
const invSubMix = this.invSubMix;
const invSubMix0 = invSubMix[0];
const invSubMix1 = invSubMix[1];
const invSubMix2 = invSubMix[2];
const invSubMix3 = invSubMix[3];
let prev;
let t;
for (ksRow = 0; ksRow < ksRows; ksRow++) {
if (ksRow < keySize) {
prev = keySchedule[ksRow] = key[ksRow];
continue;
}
t = prev;
if (ksRow % keySize === 0) {
// Rot word
t = (t << 8) | (t >>> 24);
// Sub word
t =
(sbox[t >>> 24] << 24) |
(sbox[(t >>> 16) & 0xff] << 16) |
(sbox[(t >>> 8) & 0xff] << 8) |
sbox[t & 0xff];
// Mix Rcon
t ^= rcon[(ksRow / keySize) | 0] << 24;
} else if (keySize > 6 && ksRow % keySize === 4) {
// Sub word
t =
(sbox[t >>> 24] << 24) |
(sbox[(t >>> 16) & 0xff] << 16) |
(sbox[(t >>> 8) & 0xff] << 8) |
sbox[t & 0xff];
}
keySchedule[ksRow] = prev = (keySchedule[ksRow - keySize] ^ t) >>> 0;
}
for (invKsRow = 0; invKsRow < ksRows; invKsRow++) {
ksRow = ksRows - invKsRow;
if (invKsRow & 3) {
t = keySchedule[ksRow];
} else {
t = keySchedule[ksRow - 4];
}
if (invKsRow < 4 || ksRow <= 4) {
invKeySchedule[invKsRow] = t;
} else {
invKeySchedule[invKsRow] =
invSubMix0[sbox[t >>> 24]] ^
invSubMix1[sbox[(t >>> 16) & 0xff]] ^
invSubMix2[sbox[(t >>> 8) & 0xff]] ^
invSubMix3[sbox[t & 0xff]];
}
invKeySchedule[invKsRow] = invKeySchedule[invKsRow] >>> 0;
}
}
// Adding this as a method greatly improves performance.
networkToHostOrderSwap(word) {
return (
(word << 24) |
((word & 0xff00) << 8) |
((word & 0xff0000) >> 8) |
(word >>> 24)
);
}
decrypt(inputArrayBuffer: ArrayBuffer, offset: number, aesIV: ArrayBuffer) {
const nRounds = this.keySize + 6;
const invKeySchedule = this.invKeySchedule;
const invSBOX = this.invSBox;
const invSubMix = this.invSubMix;
const invSubMix0 = invSubMix[0];
const invSubMix1 = invSubMix[1];
const invSubMix2 = invSubMix[2];
const invSubMix3 = invSubMix[3];
const initVector = this.uint8ArrayToUint32Array_(aesIV);
let initVector0 = initVector[0];
let initVector1 = initVector[1];
let initVector2 = initVector[2];
let initVector3 = initVector[3];
const inputInt32 = new Int32Array(inputArrayBuffer);
const outputInt32 = new Int32Array(inputInt32.length);
let t0, t1, t2, t3;
let s0, s1, s2, s3;
let inputWords0, inputWords1, inputWords2, inputWords3;
let ksRow, i;
const swapWord = this.networkToHostOrderSwap;
while (offset < inputInt32.length) {
inputWords0 = swapWord(inputInt32[offset]);
inputWords1 = swapWord(inputInt32[offset + 1]);
inputWords2 = swapWord(inputInt32[offset + 2]);
inputWords3 = swapWord(inputInt32[offset + 3]);
s0 = inputWords0 ^ invKeySchedule[0];
s1 = inputWords3 ^ invKeySchedule[1];
s2 = inputWords2 ^ invKeySchedule[2];
s3 = inputWords1 ^ invKeySchedule[3];
ksRow = 4;
// Iterate through the rounds of decryption
for (i = 1; i < nRounds; i++) {
t0 =
invSubMix0[s0 >>> 24] ^
invSubMix1[(s1 >> 16) & 0xff] ^
invSubMix2[(s2 >> 8) & 0xff] ^
invSubMix3[s3 & 0xff] ^
invKeySchedule[ksRow];
t1 =
invSubMix0[s1 >>> 24] ^
invSubMix1[(s2 >> 16) & 0xff] ^
invSubMix2[(s3 >> 8) & 0xff] ^
invSubMix3[s0 & 0xff] ^
invKeySchedule[ksRow + 1];
t2 =
invSubMix0[s2 >>> 24] ^
invSubMix1[(s3 >> 16) & 0xff] ^
invSubMix2[(s0 >> 8) & 0xff] ^
invSubMix3[s1 & 0xff] ^
invKeySchedule[ksRow + 2];
t3 =
invSubMix0[s3 >>> 24] ^
invSubMix1[(s0 >> 16) & 0xff] ^
invSubMix2[(s1 >> 8) & 0xff] ^
invSubMix3[s2 & 0xff] ^
invKeySchedule[ksRow + 3];
// Update state
s0 = t0;
s1 = t1;
s2 = t2;
s3 = t3;
ksRow = ksRow + 4;
}
// Shift rows, sub bytes, add round key
t0 =
(invSBOX[s0 >>> 24] << 24) ^
(invSBOX[(s1 >> 16) & 0xff] << 16) ^
(invSBOX[(s2 >> 8) & 0xff] << 8) ^
invSBOX[s3 & 0xff] ^
invKeySchedule[ksRow];
t1 =
(invSBOX[s1 >>> 24] << 24) ^
(invSBOX[(s2 >> 16) & 0xff] << 16) ^
(invSBOX[(s3 >> 8) & 0xff] << 8) ^
invSBOX[s0 & 0xff] ^
invKeySchedule[ksRow + 1];
t2 =
(invSBOX[s2 >>> 24] << 24) ^
(invSBOX[(s3 >> 16) & 0xff] << 16) ^
(invSBOX[(s0 >> 8) & 0xff] << 8) ^
invSBOX[s1 & 0xff] ^
invKeySchedule[ksRow + 2];
t3 =
(invSBOX[s3 >>> 24] << 24) ^
(invSBOX[(s0 >> 16) & 0xff] << 16) ^
(invSBOX[(s1 >> 8) & 0xff] << 8) ^
invSBOX[s2 & 0xff] ^
invKeySchedule[ksRow + 3];
// Write
outputInt32[offset] = swapWord(t0 ^ initVector0);
outputInt32[offset + 1] = swapWord(t3 ^ initVector1);
outputInt32[offset + 2] = swapWord(t2 ^ initVector2);
outputInt32[offset + 3] = swapWord(t1 ^ initVector3);
// reset initVector to last 4 unsigned int
initVector0 = inputWords0;
initVector1 = inputWords1;
initVector2 = inputWords2;
initVector3 = inputWords3;
offset = offset + 4;
}
return outputInt32.buffer;
}
}
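// A minimal usage sketch for the software path, assuming `key128` and `iv`
// are 16-byte ArrayBuffers and `cipher` is an ArrayBuffer whose byte length
// is a multiple of 16. PKCS7 padding is not stripped here (see removePadding
// above):
//
//   const decryptor = new AESDecryptor();
//   decryptor.expandKey(key128);
//   const plain: ArrayBuffer = decryptor.decrypt(cipher, 0, iv);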

194
node_modules/hls.js/src/crypt/decrypter.ts generated vendored Normal file

@ -0,0 +1,194 @@
import AESCrypto from './aes-crypto';
import FastAESKey from './fast-aes-key';
import AESDecryptor, { removePadding } from './aes-decryptor';
import { logger } from '../utils/logger';
import { appendUint8Array } from '../utils/mp4-tools';
import { sliceUint8 } from '../utils/typed-array';
import type { HlsConfig } from '../config';
import type { HlsEventEmitter } from '../events';
const CHUNK_SIZE = 16; // 16 bytes, 128 bits
export default class Decrypter {
private logEnabled: boolean = true;
private observer: HlsEventEmitter;
private config: HlsConfig;
private removePKCS7Padding: boolean;
private subtle: SubtleCrypto | null = null;
private softwareDecrypter: AESDecryptor | null = null;
private key: ArrayBuffer | null = null;
private fastAesKey: FastAESKey | null = null;
private remainderData: Uint8Array | null = null;
private currentIV: ArrayBuffer | null = null;
private currentResult: ArrayBuffer | null = null;
constructor(
observer: HlsEventEmitter,
config: HlsConfig,
{ removePKCS7Padding = true } = {}
) {
this.observer = observer;
this.config = config;
this.removePKCS7Padding = removePKCS7Padding;
// the built-in decryptor expects PKCS7 padding
if (removePKCS7Padding) {
try {
const browserCrypto = self.crypto;
if (browserCrypto) {
this.subtle =
browserCrypto.subtle ||
((browserCrypto as any).webkitSubtle as SubtleCrypto);
}
} catch (e) {
/* no-op */
}
}
if (this.subtle === null) {
this.config.enableSoftwareAES = true;
}
}
destroy() {
// @ts-ignore
this.observer = null;
}
public isSync() {
return this.config.enableSoftwareAES;
}
public flush(): Uint8Array | void {
const { currentResult } = this;
if (!currentResult) {
this.reset();
return;
}
const data = new Uint8Array(currentResult);
this.reset();
if (this.removePKCS7Padding) {
return removePadding(data);
}
return data;
}
public reset() {
this.currentResult = null;
this.currentIV = null;
this.remainderData = null;
if (this.softwareDecrypter) {
this.softwareDecrypter = null;
}
}
public decrypt(
data: Uint8Array | ArrayBuffer,
key: ArrayBuffer,
iv: ArrayBuffer,
callback: (decryptedData: ArrayBuffer) => void
) {
if (this.config.enableSoftwareAES) {
this.softwareDecrypt(new Uint8Array(data), key, iv);
const decryptResult = this.flush();
if (decryptResult) {
callback(decryptResult.buffer);
}
} else {
this.webCryptoDecrypt(new Uint8Array(data), key, iv).then(callback);
}
}
public softwareDecrypt(
data: Uint8Array,
key: ArrayBuffer,
iv: ArrayBuffer
): ArrayBuffer | null {
const { currentIV, currentResult, remainderData } = this;
this.logOnce('JS AES decrypt');
// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
// the end on flush(), but by that time we have already received all bytes for the segment.
// Progressive decryption does not work with WebCrypto
if (remainderData) {
data = appendUint8Array(remainderData, data);
this.remainderData = null;
}
// Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes)
const currentChunk = this.getValidChunk(data);
if (!currentChunk.length) {
return null;
}
if (currentIV) {
iv = currentIV;
}
let softwareDecrypter = this.softwareDecrypter;
if (!softwareDecrypter) {
softwareDecrypter = this.softwareDecrypter = new AESDecryptor();
}
softwareDecrypter.expandKey(key);
const result = currentResult;
this.currentResult = softwareDecrypter.decrypt(currentChunk.buffer, 0, iv);
this.currentIV = sliceUint8(currentChunk, -16).buffer;
if (!result) {
return null;
}
return result;
}
public webCryptoDecrypt(
data: Uint8Array,
key: ArrayBuffer,
iv: ArrayBuffer
): Promise<ArrayBuffer> {
const subtle = this.subtle;
if (this.key !== key || !this.fastAesKey) {
this.key = key;
this.fastAesKey = new FastAESKey(subtle, key);
}
return this.fastAesKey
.expandKey()
.then((aesKey) => {
// decrypt using web crypto
if (!subtle) {
return Promise.reject(new Error('web crypto not initialized'));
}
const crypto = new AESCrypto(subtle, iv);
return crypto.decrypt(data.buffer, aesKey);
})
.catch((err) => {
return this.onWebCryptoError(err, data, key, iv) as ArrayBuffer;
});
}
private onWebCryptoError(err, data, key, iv): ArrayBuffer | null {
logger.warn('[decrypter.ts]: WebCrypto Error, disable WebCrypto API:', err);
this.config.enableSoftwareAES = true;
this.logEnabled = true;
return this.softwareDecrypt(data, key, iv);
}
private getValidChunk(data: Uint8Array): Uint8Array {
let currentChunk = data;
const splitPoint = data.length - (data.length % CHUNK_SIZE);
if (splitPoint !== data.length) {
currentChunk = sliceUint8(data, 0, splitPoint);
this.remainderData = sliceUint8(data, splitPoint);
}
return currentChunk;
}
private logOnce(msg: string) {
if (!this.logEnabled) {
return;
}
logger.log(`[decrypter.ts]: ${msg}`);
this.logEnabled = false;
}
}
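// A minimal usage sketch, assuming `observer` (HlsEventEmitter) and `config`
// (HlsConfig) come from a live Hls instance and the buffers are placeholders:
//
//   const decrypter = new Decrypter(observer, config);
//   decrypter.decrypt(encryptedBytes, keyBuffer, ivBuffer, (plain) => {
//     // `plain` is the decrypted segment, PKCS7 padding already removed
//   });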

16
node_modules/hls.js/src/crypt/fast-aes-key.ts generated vendored Normal file

@ -0,0 +1,16 @@
export default class FastAESKey {
private subtle: any;
private key: ArrayBuffer;
constructor(subtle, key) {
this.subtle = subtle;
this.key = key;
}
expandKey() {
return this.subtle.importKey('raw', this.key, { name: 'AES-CBC' }, false, [
'encrypt',
'decrypt',
]);
}
}

7
node_modules/hls.js/src/define-plugin.d.ts generated vendored Normal file

@ -0,0 +1,7 @@
declare const __VERSION__: string;
// Dynamic Modules
declare const __USE_ALT_AUDIO__: boolean;
declare const __USE_EME_DRM__: boolean;
declare const __USE_SUBTITLES__: boolean;
declare const __USE_CMCD__: boolean;

86
node_modules/hls.js/src/demux/aacdemuxer.ts generated vendored Normal file

@ -0,0 +1,86 @@
/**
* AAC demuxer
*/
import BaseAudioDemuxer from './base-audio-demuxer';
import * as ADTS from './adts';
import { logger } from '../utils/logger';
import * as ID3 from '../demux/id3';
import type { HlsEventEmitter } from '../events';
import type { HlsConfig } from '../config';
class AACDemuxer extends BaseAudioDemuxer {
private readonly observer: HlsEventEmitter;
private readonly config: HlsConfig;
static readonly minProbeByteLength: number = 9;
constructor(observer, config) {
super();
this.observer = observer;
this.config = config;
}
resetInitSegment(audioCodec, videoCodec, duration) {
super.resetInitSegment(audioCodec, videoCodec, duration);
this._audioTrack = {
container: 'audio/adts',
type: 'audio',
id: 2,
pid: -1,
sequenceNumber: 0,
isAAC: true,
samples: [],
manifestCodec: audioCodec,
duration: duration,
inputTimeScale: 90000,
dropped: 0,
};
}
// Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS
static probe(data): boolean {
if (!data) {
return false;
}
// Check for the ADTS sync word
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
// More info https://wiki.multimedia.cx/index.php?title=ADTS
const id3Data = ID3.getID3Data(data, 0) || [];
let offset = id3Data.length;
for (let length = data.length; offset < length; offset++) {
if (ADTS.probe(data, offset)) {
logger.log('ADTS sync word found!');
return true;
}
}
return false;
}
canParse(data, offset) {
return ADTS.canParse(data, offset);
}
appendFrame(track, data, offset) {
ADTS.initTrackConfig(
track,
this.observer,
data,
offset,
track.manifestCodec
);
const frame = ADTS.appendFrame(
track,
data,
offset,
this.initPTS as number,
this.frameIndex
);
if (frame && frame.missing === 0) {
return frame;
}
}
}
export default AACDemuxer;

308
node_modules/hls.js/src/demux/adts.ts generated vendored Normal file

@ -0,0 +1,308 @@
/**
* ADTS parser helper
* @link https://wiki.multimedia.cx/index.php?title=ADTS
*/
import { logger } from '../utils/logger';
import { ErrorTypes, ErrorDetails } from '../errors';
import type { HlsEventEmitter } from '../events';
import { Events } from '../events';
import type {
DemuxedAudioTrack,
AudioFrame,
AudioSample,
} from '../types/demuxer';
type AudioConfig = {
config: number[];
samplerate: number;
channelCount: number;
codec: string;
manifestCodec: string;
};
type FrameHeader = {
headerLength: number;
frameLength: number;
stamp: number;
};
export function getAudioConfig(
observer,
data: Uint8Array,
offset: number,
audioCodec: string
): AudioConfig | void {
let adtsObjectType: number;
let adtsExtensionSamplingIndex: number;
let adtsChannelConfig: number;
let config: number[];
const userAgent = navigator.userAgent.toLowerCase();
const manifestCodec = audioCodec;
const adtsSamplingRates = [
96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025,
8000, 7350,
];
// byte 2
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
observer.trigger(Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.FRAG_PARSING_ERROR,
fatal: true,
reason: `invalid ADTS sampling index:${adtsSamplingIndex}`,
});
return;
}
adtsChannelConfig = (data[offset + 2] & 0x01) << 2;
// byte 3
adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
logger.log(
`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`
);
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if (/firefox/i.test(userAgent)) {
if (adtsSamplingIndex >= 6) {
adtsObjectType = 5;
config = new Array(4);
// HE-AAC uses SBR (Spectral Band Replication): high frequencies are reconstructed from low frequencies,
// so there is a factor of 2 between the frame sample rate and the output sample rate.
// Multiply the frequency by 2 (see the table below; equivalent to subtracting 3 from the sampling index).
adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
} else {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSamplingIndex = adtsSamplingIndex;
}
// Android : always use AAC
} else if (userAgent.indexOf('android') !== -1) {
adtsObjectType = 2;
config = new Array(2);
adtsExtensionSamplingIndex = adtsSamplingIndex;
} else {
/* for other browsers (Chrome/Vivaldi/Opera ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/
adtsObjectType = 5;
config = new Array(4);
// if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
if (
(audioCodec &&
(audioCodec.indexOf('mp4a.40.29') !== -1 ||
audioCodec.indexOf('mp4a.40.5') !== -1)) ||
(!audioCodec && adtsSamplingIndex >= 6)
) {
// HE-AAC uses SBR (Spectral Band Replication): high frequencies are reconstructed from low frequencies,
// so there is a factor of 2 between the frame sample rate and the output sample rate.
// Multiply the frequency by 2 (see the table below; equivalent to subtracting 3 from the sampling index).
adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
} else {
// if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
// Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
if (
(audioCodec &&
audioCodec.indexOf('mp4a.40.2') !== -1 &&
((adtsSamplingIndex >= 6 && adtsChannelConfig === 1) ||
/vivaldi/i.test(userAgent))) ||
(!audioCodec && adtsChannelConfig === 1)
) {
adtsObjectType = 2;
config = new Array(2);
}
adtsExtensionSamplingIndex = adtsSamplingIndex;
}
}
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explicitly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specifc Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
*/
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0] = adtsObjectType << 3;
// samplingFrequencyIndex
config[0] |= (adtsSamplingIndex & 0x0e) >> 1;
config[1] |= (adtsSamplingIndex & 0x01) << 7;
// channelConfiguration
config[1] |= adtsChannelConfig << 3;
if (adtsObjectType === 5) {
// adtsExtensionSamplingIndex
config[1] |= (adtsExtensionSamplingIndex & 0x0e) >> 1;
config[2] = (adtsExtensionSamplingIndex & 0x01) << 7;
// adtsObjectType (forced to 2; Chrome checks that the object type is less than 5, see
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2] |= 2 << 2;
config[3] = 0;
}
return {
config,
samplerate: adtsSamplingRates[adtsSamplingIndex],
channelCount: adtsChannelConfig,
codec: 'mp4a.40.' + adtsObjectType,
manifestCodec,
};
}
export function isHeaderPattern(data: Uint8Array, offset: number): boolean {
return data[offset] === 0xff && (data[offset + 1] & 0xf6) === 0xf0;
}
export function getHeaderLength(data: Uint8Array, offset: number): number {
return data[offset + 1] & 0x01 ? 7 : 9;
}
export function getFullFrameLength(data: Uint8Array, offset: number): number {
return (
((data[offset + 3] & 0x03) << 11) |
(data[offset + 4] << 3) |
((data[offset + 5] & 0xe0) >>> 5)
);
}
export function canGetFrameLength(data: Uint8Array, offset: number): boolean {
return offset + 5 < data.length;
}
export function isHeader(data: Uint8Array, offset: number): boolean {
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
// More info https://wiki.multimedia.cx/index.php?title=ADTS
return offset + 1 < data.length && isHeaderPattern(data, offset);
}
export function canParse(data: Uint8Array, offset: number): boolean {
return (
canGetFrameLength(data, offset) &&
isHeaderPattern(data, offset) &&
getFullFrameLength(data, offset) <= data.length - offset
);
}
export function probe(data: Uint8Array, offset: number): boolean {
// same as isHeader but we also check that ADTS frame follows last ADTS frame
// or end of data is reached
if (isHeader(data, offset)) {
// ADTS header Length
const headerLength = getHeaderLength(data, offset);
if (offset + headerLength >= data.length) {
return false;
}
// ADTS frame Length
const frameLength = getFullFrameLength(data, offset);
if (frameLength <= headerLength) {
return false;
}
const newOffset = offset + frameLength;
return newOffset === data.length || isHeader(data, newOffset);
}
return false;
}
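// Example: a frame starting ff f1 ... satisfies isHeaderPattern
// (0xf1 & 0xf6 === 0xf0); getHeaderLength() returns 7 because the
// protection_absent bit (0xf1 & 0x01) is set (no CRC), and
// getFullFrameLength() assembles the 13-bit frame length from bytes 3-5.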
export function initTrackConfig(
track: DemuxedAudioTrack,
observer: HlsEventEmitter,
data: Uint8Array,
offset: number,
audioCodec: string
) {
if (!track.samplerate) {
const config = getAudioConfig(observer, data, offset, audioCodec);
if (!config) {
return;
}
track.config = config.config;
track.samplerate = config.samplerate;
track.channelCount = config.channelCount;
track.codec = config.codec;
track.manifestCodec = config.manifestCodec;
logger.log(
`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`
);
}
}
export function getFrameDuration(samplerate: number): number {
return (1024 * 90000) / samplerate;
}
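// Example: at 48000 Hz, one 1024-sample AAC frame lasts
// 1024 * 90000 / 48000 = 1920 ticks of the 90 kHz timescale (about 21.3 ms).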
export function parseFrameHeader(
data: Uint8Array,
offset: number,
pts: number,
frameIndex: number,
frameDuration: number
): FrameHeader | void {
// The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
const headerLength = getHeaderLength(data, offset);
// retrieve frame size
let frameLength = getFullFrameLength(data, offset);
frameLength -= headerLength;
if (frameLength > 0) {
const stamp = pts + frameIndex * frameDuration;
// logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}/${(stamp/90).toFixed(0)}`);
return { headerLength, frameLength, stamp };
}
}
export function appendFrame(
track: DemuxedAudioTrack,
data: Uint8Array,
offset: number,
pts: number,
frameIndex: number
): AudioFrame | void {
const frameDuration = getFrameDuration(track.samplerate as number);
const header = parseFrameHeader(data, offset, pts, frameIndex, frameDuration);
if (header) {
const { frameLength, headerLength, stamp } = header;
const length = headerLength + frameLength;
const missing = Math.max(0, offset + length - data.length);
// logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`);
let unit: Uint8Array;
if (missing) {
unit = new Uint8Array(length - headerLength);
unit.set(data.subarray(offset + headerLength, data.length), 0);
} else {
unit = data.subarray(offset + headerLength, offset + length);
}
const sample: AudioSample = {
unit,
pts: stamp,
};
if (!missing) {
track.samples.push(sample as AudioSample);
}
return { sample, length, missing };
}
}

163
node_modules/hls.js/src/demux/base-audio-demuxer.ts generated vendored Normal file

@ -0,0 +1,163 @@
import * as ID3 from '../demux/id3';
import type {
DemuxerResult,
Demuxer,
DemuxedAudioTrack,
AudioFrame,
DemuxedMetadataTrack,
DemuxedAvcTrack,
DemuxedUserdataTrack,
KeyData,
} from '../types/demuxer';
import { dummyTrack } from './dummy-demuxed-track';
import { appendUint8Array } from '../utils/mp4-tools';
import { sliceUint8 } from '../utils/typed-array';
class BaseAudioDemuxer implements Demuxer {
protected _audioTrack!: DemuxedAudioTrack;
protected _id3Track!: DemuxedMetadataTrack;
protected frameIndex: number = 0;
protected cachedData: Uint8Array | null = null;
protected initPTS: number | null = null;
resetInitSegment(audioCodec: string, videoCodec: string, duration: number) {
this._id3Track = {
type: 'id3',
id: 3,
pid: -1,
inputTimeScale: 90000,
sequenceNumber: 0,
samples: [],
dropped: 0,
};
}
resetTimeStamp() {}
resetContiguity(): void {}
canParse(data: Uint8Array, offset: number): boolean {
return false;
}
appendFrame(
track: DemuxedAudioTrack,
data: Uint8Array,
offset: number
): AudioFrame | void {}
// feed incoming data to the front of the parsing pipeline
demux(data: Uint8Array, timeOffset: number): DemuxerResult {
if (this.cachedData) {
data = appendUint8Array(this.cachedData, data);
this.cachedData = null;
}
let id3Data: Uint8Array | undefined = ID3.getID3Data(data, 0);
let offset = id3Data ? id3Data.length : 0;
let lastDataIndex;
let pts;
const track = this._audioTrack;
const id3Track = this._id3Track;
const timestamp = id3Data ? ID3.getTimeStamp(id3Data) : undefined;
const length = data.length;
if (this.frameIndex === 0 || this.initPTS === null) {
this.initPTS = initPTSFn(timestamp, timeOffset);
}
// more expressive than alternative: id3Data?.length
if (id3Data && id3Data.length > 0) {
id3Track.samples.push({
pts: this.initPTS,
dts: this.initPTS,
data: id3Data,
});
}
pts = this.initPTS;
while (offset < length) {
if (this.canParse(data, offset)) {
const frame = this.appendFrame(track, data, offset);
if (frame) {
this.frameIndex++;
pts = frame.sample.pts;
offset += frame.length;
lastDataIndex = offset;
} else {
offset = length;
}
} else if (ID3.canParse(data, offset)) {
// after ID3.canParse succeeds, a call to ID3.getID3Data *should* always return some data
id3Data = ID3.getID3Data(data, offset)!;
id3Track.samples.push({ pts: pts, dts: pts, data: id3Data });
offset += id3Data.length;
lastDataIndex = offset;
} else {
offset++;
}
if (offset === length && lastDataIndex !== length) {
const partialData = sliceUint8(data, lastDataIndex);
if (this.cachedData) {
this.cachedData = appendUint8Array(this.cachedData, partialData);
} else {
this.cachedData = partialData;
}
}
}
return {
audioTrack: track,
avcTrack: dummyTrack() as DemuxedAvcTrack,
id3Track,
textTrack: dummyTrack() as DemuxedUserdataTrack,
};
}
demuxSampleAes(
data: Uint8Array,
keyData: KeyData,
timeOffset: number
): Promise<DemuxerResult> {
return Promise.reject(
new Error(`[${this}] This demuxer does not support Sample-AES decryption`)
);
}
flush(timeOffset: number): DemuxerResult {
// Parse cache in case of remaining frames.
const cachedData = this.cachedData;
if (cachedData) {
this.cachedData = null;
this.demux(cachedData, 0);
}
this.frameIndex = 0;
return {
audioTrack: this._audioTrack,
avcTrack: dummyTrack() as DemuxedAvcTrack,
id3Track: this._id3Track,
textTrack: dummyTrack() as DemuxedUserdataTrack,
};
}
destroy() {}
}
/**
* Initialize PTS
* <p>
* use timestamp unless it is undefined, NaN or Infinity
* </p>
*/
export const initPTSFn = (
timestamp: number | undefined,
timeOffset: number
): number => {
return Number.isFinite(timestamp as number)
? timestamp! * 90
: timeOffset * 90000;
};
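// Example: initPTSFn(180000, 5) === 16200000 (an ID3 timestamp in ms, scaled
// by 90), while initPTSFn(undefined, 5) === 450000 (timeOffset * 90000).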
export default BaseAudioDemuxer;

42
node_modules/hls.js/src/demux/chunk-cache.ts generated vendored Normal file

@ -0,0 +1,42 @@
export default class ChunkCache {
private chunks: Array<Uint8Array> = [];
public dataLength: number = 0;
push(chunk: Uint8Array) {
this.chunks.push(chunk);
this.dataLength += chunk.length;
}
flush(): Uint8Array {
const { chunks, dataLength } = this;
let result;
if (!chunks.length) {
return new Uint8Array(0);
} else if (chunks.length === 1) {
result = chunks[0];
} else {
result = concatUint8Arrays(chunks, dataLength);
}
this.reset();
return result;
}
reset() {
this.chunks.length = 0;
this.dataLength = 0;
}
}
function concatUint8Arrays(
chunks: Array<Uint8Array>,
dataLength: number
): Uint8Array {
const result = new Uint8Array(dataLength);
let offset = 0;
for (let i = 0; i < chunks.length; i++) {
const chunk = chunks[i];
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
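// A minimal usage sketch (inputs are placeholders):
//
//   const cache = new ChunkCache();
//   cache.push(new Uint8Array([1, 2]));
//   cache.push(new Uint8Array([3]));
//   const merged = cache.flush(); // Uint8Array [1, 2, 3]; the cache is reset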

13
node_modules/hls.js/src/demux/dummy-demuxed-track.ts generated vendored Normal file

@ -0,0 +1,13 @@
import type { DemuxedTrack } from '../types/demuxer';
export function dummyTrack(): DemuxedTrack {
return {
type: '',
id: -1,
pid: -1,
inputTimeScale: 90000,
sequenceNumber: -1,
samples: [],
dropped: 0,
};
}

359
node_modules/hls.js/src/demux/exp-golomb.ts generated vendored Normal file

@ -0,0 +1,359 @@
/**
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
*/
import { logger } from '../utils/logger';
class ExpGolomb {
private data: Uint8Array;
public bytesAvailable: number;
private word: number;
private bitsAvailable: number;
constructor(data: Uint8Array) {
this.data = data;
// the number of bytes left to examine in this.data
this.bytesAvailable = data.byteLength;
// the current word being examined
this.word = 0; // :uint
// the number of bits left to examine in the current word
this.bitsAvailable = 0; // :uint
}
// ():void
loadWord(): void {
const data = this.data;
const bytesAvailable = this.bytesAvailable;
const position = data.byteLength - bytesAvailable;
const workingBytes = new Uint8Array(4);
const availableBytes = Math.min(4, bytesAvailable);
if (availableBytes === 0) {
throw new Error('no bytes available');
}
workingBytes.set(data.subarray(position, position + availableBytes));
this.word = new DataView(workingBytes.buffer).getUint32(0);
// track the amount of this.data that has been processed
this.bitsAvailable = availableBytes * 8;
this.bytesAvailable -= availableBytes;
}
// (count:int):void
skipBits(count: number): void {
let skipBytes; // :int
if (this.bitsAvailable > count) {
this.word <<= count;
this.bitsAvailable -= count;
} else {
count -= this.bitsAvailable;
skipBytes = count >> 3;
count -= skipBytes << 3; // remove the skipped whole bytes, expressed in bits
this.bytesAvailable -= skipBytes;
this.loadWord();
this.word <<= count;
this.bitsAvailable -= count;
}
}
// (size:int):uint
readBits(size: number): number {
let bits = Math.min(this.bitsAvailable, size); // :uint
const valu = this.word >>> (32 - bits); // :uint
if (size > 32) {
logger.error('Cannot read more than 32 bits at a time');
}
this.bitsAvailable -= bits;
if (this.bitsAvailable > 0) {
this.word <<= bits;
} else if (this.bytesAvailable > 0) {
this.loadWord();
}
bits = size - bits;
if (bits > 0 && this.bitsAvailable) {
return (valu << bits) | this.readBits(bits);
} else {
return valu;
}
}
// ():uint
skipLZ(): number {
let leadingZeroCount; // :uint
for (
leadingZeroCount = 0;
leadingZeroCount < this.bitsAvailable;
++leadingZeroCount
) {
if ((this.word & (0x80000000 >>> leadingZeroCount)) !== 0) {
// the first bit of working word is 1
this.word <<= leadingZeroCount;
this.bitsAvailable -= leadingZeroCount;
return leadingZeroCount;
}
}
// we exhausted word and still have not found a 1
this.loadWord();
return leadingZeroCount + this.skipLZ();
}
// ():void
skipUEG(): void {
this.skipBits(1 + this.skipLZ());
}
// ():void
skipEG(): void {
this.skipBits(1 + this.skipLZ());
}
// ():uint
readUEG(): number {
const clz = this.skipLZ(); // :uint
return this.readBits(clz + 1) - 1;
}
// ():int
readEG(): number {
const valu = this.readUEG(); // :int
if (0x01 & valu) {
// the number is odd if the low order bit is set
return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
} else {
return -1 * (valu >>> 1); // divide by two then make it negative
}
}
// Some convenience functions
// :Boolean
readBoolean(): boolean {
return this.readBits(1) === 1;
}
// ():int
readUByte(): number {
return this.readBits(8);
}
// ():int
readUShort(): number {
return this.readBits(16);
}
// ():int
readUInt(): number {
return this.readBits(32);
}
/**
* Advance the ExpGolomb decoder past a scaling list. The scaling
* list is optionally transmitted as part of a sequence parameter
* set and is not relevant to transmuxing.
* @param count the number of entries in this scaling list
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
*/
skipScalingList(count: number): void {
let lastScale = 8;
let nextScale = 8;
let deltaScale;
for (let j = 0; j < count; j++) {
if (nextScale !== 0) {
deltaScale = this.readEG();
nextScale = (lastScale + deltaScale + 256) % 256;
}
lastScale = nextScale === 0 ? lastScale : nextScale;
}
}
/**
* Read a sequence parameter set and return some interesting video
* properties. A sequence parameter set is the H264 metadata that
* describes the properties of upcoming video frames.
* @param data {Uint8Array} the bytes of a sequence parameter set
* @return {object} an object with configuration parsed from the
* sequence parameter set, including the dimensions of the
* associated video frames.
*/
readSPS(): {
width: number;
height: number;
pixelRatio: [number, number];
} {
let frameCropLeftOffset = 0;
let frameCropRightOffset = 0;
let frameCropTopOffset = 0;
let frameCropBottomOffset = 0;
let numRefFramesInPicOrderCntCycle;
let scalingListCount;
let i;
const readUByte = this.readUByte.bind(this);
const readBits = this.readBits.bind(this);
const readUEG = this.readUEG.bind(this);
const readBoolean = this.readBoolean.bind(this);
const skipBits = this.skipBits.bind(this);
const skipEG = this.skipEG.bind(this);
const skipUEG = this.skipUEG.bind(this);
const skipScalingList = this.skipScalingList.bind(this);
readUByte();
const profileIdc = readUByte(); // profile_idc
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
skipBits(3); // reserved_zero_3bits u(3),
readUByte(); // level_idc u(8)
skipUEG(); // seq_parameter_set_id
// some profiles have more optional data we don't need
if (
profileIdc === 100 ||
profileIdc === 110 ||
profileIdc === 122 ||
profileIdc === 244 ||
profileIdc === 44 ||
profileIdc === 83 ||
profileIdc === 86 ||
profileIdc === 118 ||
profileIdc === 128
) {
const chromaFormatIdc = readUEG();
if (chromaFormatIdc === 3) {
skipBits(1);
} // separate_colour_plane_flag
skipUEG(); // bit_depth_luma_minus8
skipUEG(); // bit_depth_chroma_minus8
skipBits(1); // qpprime_y_zero_transform_bypass_flag
if (readBoolean()) {
// seq_scaling_matrix_present_flag
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
for (i = 0; i < scalingListCount; i++) {
if (readBoolean()) {
// seq_scaling_list_present_flag[ i ]
if (i < 6) {
skipScalingList(16);
} else {
skipScalingList(64);
}
}
}
}
}
skipUEG(); // log2_max_frame_num_minus4
const picOrderCntType = readUEG();
if (picOrderCntType === 0) {
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
} else if (picOrderCntType === 1) {
skipBits(1); // delta_pic_order_always_zero_flag
skipEG(); // offset_for_non_ref_pic
skipEG(); // offset_for_top_to_bottom_field
numRefFramesInPicOrderCntCycle = readUEG();
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
skipEG();
} // offset_for_ref_frame[ i ]
}
skipUEG(); // max_num_ref_frames
skipBits(1); // gaps_in_frame_num_value_allowed_flag
const picWidthInMbsMinus1 = readUEG();
const picHeightInMapUnitsMinus1 = readUEG();
const frameMbsOnlyFlag = readBits(1);
if (frameMbsOnlyFlag === 0) {
skipBits(1);
} // mb_adaptive_frame_field_flag
skipBits(1); // direct_8x8_inference_flag
if (readBoolean()) {
// frame_cropping_flag
frameCropLeftOffset = readUEG();
frameCropRightOffset = readUEG();
frameCropTopOffset = readUEG();
frameCropBottomOffset = readUEG();
}
let pixelRatio: [number, number] = [1, 1];
if (readBoolean()) {
// vui_parameters_present_flag
if (readBoolean()) {
// aspect_ratio_info_present_flag
const aspectRatioIdc = readUByte();
switch (aspectRatioIdc) {
case 1:
pixelRatio = [1, 1];
break;
case 2:
pixelRatio = [12, 11];
break;
case 3:
pixelRatio = [10, 11];
break;
case 4:
pixelRatio = [16, 11];
break;
case 5:
pixelRatio = [40, 33];
break;
case 6:
pixelRatio = [24, 11];
break;
case 7:
pixelRatio = [20, 11];
break;
case 8:
pixelRatio = [32, 11];
break;
case 9:
pixelRatio = [80, 33];
break;
case 10:
pixelRatio = [18, 11];
break;
case 11:
pixelRatio = [15, 11];
break;
case 12:
pixelRatio = [64, 33];
break;
case 13:
pixelRatio = [160, 99];
break;
case 14:
pixelRatio = [4, 3];
break;
case 15:
pixelRatio = [3, 2];
break;
case 16:
pixelRatio = [2, 1];
break;
case 255: {
pixelRatio = [
(readUByte() << 8) | readUByte(),
(readUByte() << 8) | readUByte(),
];
break;
}
}
}
}
return {
width: Math.ceil(
(picWidthInMbsMinus1 + 1) * 16 -
frameCropLeftOffset * 2 -
frameCropRightOffset * 2
),
height:
(2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 -
(frameMbsOnlyFlag ? 2 : 4) *
(frameCropTopOffset + frameCropBottomOffset),
pixelRatio: pixelRatio,
};
}
readSliceType() {
// skip NALu type
this.readUByte();
// discard first_mb_in_slice
this.readUEG();
// return slice_type
return this.readUEG();
}
}
export default ExpGolomb;
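// Worked example: the Exp-Golomb codeword 00111 decodes as ue(v) = 6:
// skipLZ() counts 2 leading zeros, readBits(3) then consumes 111 (= 7),
// and readUEG() returns 7 - 1 = 6.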

410
node_modules/hls.js/src/demux/id3.ts generated vendored Normal file

@ -0,0 +1,410 @@
type RawFrame = { type: string; size: number; data: Uint8Array };
// breaking up those two types in order to clarify what is happening in the decoding path.
type DecodedFrame<T> = { key: string; data: T; info?: any };
export type Frame = DecodedFrame<ArrayBuffer | string>;
/**
* Returns true if an ID3 header can be found at offset in data
* @param {Uint8Array} data - The data to search in
* @param {number} offset - The offset at which to start searching
* @return {boolean} - True if an ID3 header is found
*/
export const isHeader = (data: Uint8Array, offset: number): boolean => {
/*
* http://id3.org/id3v2.3.0
* [0] = 'I'
* [1] = 'D'
* [2] = '3'
* [3,4] = {Version}
* [5] = {Flags}
* [6-9] = {ID3 Size}
*
* An ID3v2 tag can be detected with the following pattern:
* $49 44 33 yy yy xx zz zz zz zz
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
*/
if (offset + 10 <= data.length) {
// look for 'ID3' identifier
if (
data[offset] === 0x49 &&
data[offset + 1] === 0x44 &&
data[offset + 2] === 0x33
) {
// check version is within range
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
// check size is within range
if (
data[offset + 6] < 0x80 &&
data[offset + 7] < 0x80 &&
data[offset + 8] < 0x80 &&
data[offset + 9] < 0x80
) {
return true;
}
}
}
}
return false;
};
/**
* Returns true if an ID3 footer can be found at offset in data
* @param {Uint8Array} data - The data to search in
* @param {number} offset - The offset at which to start searching
* @return {boolean} - True if an ID3 footer is found
*/
export const isFooter = (data: Uint8Array, offset: number): boolean => {
/*
* The footer is a copy of the header, but with a different identifier
*/
if (offset + 10 <= data.length) {
// look for '3DI' identifier
if (
data[offset] === 0x33 &&
data[offset + 1] === 0x44 &&
data[offset + 2] === 0x49
) {
// check version is within range
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
// check size is within range
if (
data[offset + 6] < 0x80 &&
data[offset + 7] < 0x80 &&
data[offset + 8] < 0x80 &&
data[offset + 9] < 0x80
) {
return true;
}
}
}
}
return false;
};
/**
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
* @param {Uint8Array} data - The data to search in
* @param {number} offset - The offset at which to start searching
* @return {Uint8Array | undefined} - The block of data containing any ID3 tags found
* or *undefined* if no header is found at the starting offset
*/
export const getID3Data = (
data: Uint8Array,
offset: number
): Uint8Array | undefined => {
const front = offset;
let length = 0;
while (isHeader(data, offset)) {
// ID3 header is 10 bytes
length += 10;
const size = readSize(data, offset + 6);
length += size;
if (isFooter(data, offset + 10)) {
// ID3 footer is 10 bytes
length += 10;
}
offset += length;
}
if (length > 0) {
return data.subarray(front, front + length);
}
return undefined;
};
const readSize = (data: Uint8Array, offset: number): number => {
let size = 0;
size = (data[offset] & 0x7f) << 21;
size |= (data[offset + 1] & 0x7f) << 14;
size |= (data[offset + 2] & 0x7f) << 7;
size |= data[offset + 3] & 0x7f;
return size;
};
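// Worked example of the synchsafe decoding above (illustrative): the bytes
// 00 00 02 01 yield (0x00 << 21) | (0x00 << 14) | (0x02 << 7) | 0x01 = 257,
// i.e. a 257-byte tag body. Each byte carries only 7 bits, which is why
// isHeader and isFooter reject size bytes >= 0x80.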
export const canParse = (data: Uint8Array, offset: number): boolean => {
return (
isHeader(data, offset) &&
readSize(data, offset + 6) + 10 <= data.length - offset
);
};
/**
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
* @param {Uint8Array} data - Block of data containing one or more ID3 tags
* @return {number | undefined} - The timestamp
*/
export const getTimeStamp = (data: Uint8Array): number | undefined => {
const frames: Frame[] = getID3Frames(data);
for (let i = 0; i < frames.length; i++) {
const frame = frames[i];
if (isTimeStampFrame(frame)) {
return readTimeStamp(frame as DecodedFrame<ArrayBuffer>);
}
}
return undefined;
};
/**
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
 * @param {Frame} frame
*/
export const isTimeStampFrame = (frame: Frame): boolean => {
return (
frame &&
frame.key === 'PRIV' &&
frame.info === 'com.apple.streaming.transportStreamTimestamp'
);
};
const getFrameData = (data: Uint8Array): RawFrame => {
/*
Frame ID $xx xx xx xx (four characters)
Size $xx xx xx xx
Flags $xx xx
*/
const type: string = String.fromCharCode(data[0], data[1], data[2], data[3]);
const size: number = readSize(data, 4);
// skip frame id, size, and flags
const offset = 10;
return { type, size, data: data.subarray(offset, offset + size) };
};
/**
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
* @param {Uint8Array} id3Data - The ID3 data containing one or more ID3 tags
* @return {ID3.Frame[]} - Array of ID3 frame objects
*/
export const getID3Frames = (id3Data: Uint8Array): Frame[] => {
let offset = 0;
const frames: Frame[] = [];
while (isHeader(id3Data, offset)) {
const size = readSize(id3Data, offset + 6);
// skip past ID3 header
offset += 10;
const end = offset + size;
// loop through frames in the ID3 tag
while (offset + 8 < end) {
const frameData: RawFrame = getFrameData(id3Data.subarray(offset));
const frame: Frame | undefined = decodeFrame(frameData);
if (frame) {
frames.push(frame);
}
// skip frame header and frame data
offset += frameData.size + 10;
}
if (isFooter(id3Data, offset)) {
offset += 10;
}
}
return frames;
};
export const decodeFrame = (frame: RawFrame): Frame | undefined => {
if (frame.type === 'PRIV') {
return decodePrivFrame(frame);
} else if (frame.type[0] === 'W') {
return decodeURLFrame(frame);
}
return decodeTextFrame(frame);
};
const decodePrivFrame = (
frame: RawFrame
): DecodedFrame<ArrayBuffer> | undefined => {
/*
Format: <text string>\0<binary data>
*/
if (frame.size < 2) {
return undefined;
}
const owner = utf8ArrayToStr(frame.data, true);
const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
return { key: frame.type, info: owner, data: privateData.buffer };
};
const decodeTextFrame = (frame: RawFrame): DecodedFrame<string> | undefined => {
if (frame.size < 2) {
return undefined;
}
if (frame.type === 'TXXX') {
/*
Format:
[0] = {Text Encoding}
[1-?] = {Description}\0{Value}
*/
let index = 1;
const description = utf8ArrayToStr(frame.data.subarray(index), true);
index += description.length + 1;
const value = utf8ArrayToStr(frame.data.subarray(index));
return { key: frame.type, info: description, data: value };
}
/*
Format:
[0] = {Text Encoding}
[1-?] = {Value}
*/
const text = utf8ArrayToStr(frame.data.subarray(1));
return { key: frame.type, data: text };
};
const decodeURLFrame = (frame: RawFrame): DecodedFrame<string> | undefined => {
if (frame.type === 'WXXX') {
/*
Format:
[0] = {Text Encoding}
[1-?] = {Description}\0{URL}
*/
if (frame.size < 2) {
return undefined;
}
let index = 1;
const description: string = utf8ArrayToStr(
frame.data.subarray(index),
true
);
index += description.length + 1;
const value: string = utf8ArrayToStr(frame.data.subarray(index));
return { key: frame.type, info: description, data: value };
}
/*
Format:
[0-?] = {URL}
*/
const url: string = utf8ArrayToStr(frame.data);
return { key: frame.type, data: url };
};
const readTimeStamp = (
timeStampFrame: DecodedFrame<ArrayBuffer>
): number | undefined => {
if (timeStampFrame.data.byteLength === 8) {
const data = new Uint8Array(timeStampFrame.data);
// timestamp is 33 bit expressed as a big-endian eight-octet number,
// with the upper 31 bits set to zero.
const pts33Bit = data[3] & 0x1;
let timestamp =
(data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
timestamp /= 45;
if (pts33Bit) {
timestamp += 47721858.84; // 2^32 / 90
}
return Math.round(timestamp);
}
return undefined;
};
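// Worked example of the math above: bytes 4-7 approximate PTS/2 on the
// 90 kHz clock (note the 23/15/7-bit shifts, chosen to stay inside the JS
// 32-bit bitwise range), so dividing by 45 yields milliseconds, and a set
// 33rd bit contributes 2^32 / 90 = 47721858.84 ms before rounding.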
// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
/* utf.js - UTF-8 <=> UTF-16 conversion
*
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
* Version: 1.0
* LastModified: Dec 25 1999
* This library is free. You can redistribute it and/or modify it.
*/
export const utf8ArrayToStr = (
array: Uint8Array,
exitOnNull: boolean = false
): string => {
const decoder = getTextDecoder();
if (decoder) {
const decoded = decoder.decode(array);
if (exitOnNull) {
// grab up to the first null
const idx = decoded.indexOf('\0');
return idx !== -1 ? decoded.substring(0, idx) : decoded;
}
// remove any null characters
return decoded.replace(/\0/g, '');
}
const len = array.length;
let c;
let char2;
let char3;
let out = '';
let i = 0;
while (i < len) {
c = array[i++];
if (c === 0x00 && exitOnNull) {
return out;
} else if (c === 0x00 || c === 0x03) {
// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
continue;
}
switch (c >> 4) {
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 7:
// 0xxxxxxx
out += String.fromCharCode(c);
break;
case 12:
case 13:
// 110x xxxx 10xx xxxx
char2 = array[i++];
out += String.fromCharCode(((c & 0x1f) << 6) | (char2 & 0x3f));
break;
case 14:
// 1110 xxxx 10xx xxxx 10xx xxxx
char2 = array[i++];
char3 = array[i++];
out += String.fromCharCode(
((c & 0x0f) << 12) | ((char2 & 0x3f) << 6) | ((char3 & 0x3f) << 0)
);
break;
default:
}
}
return out;
};
export const testables = {
decodeTextFrame: decodeTextFrame,
};
let decoder: TextDecoder;
function getTextDecoder() {
if (!decoder && typeof self.TextDecoder !== 'undefined') {
decoder = new self.TextDecoder('utf-8');
}
return decoder;
}
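A minimal sketch of how these exports combine, assuming `payload` is a Uint8Array that starts with an ID3 tag (for example, the head of an audio segment); the names are illustrative:

const block = getID3Data(payload, 0); // adjacent ID3 tags as one block, or undefined
if (block) {
  const frames = getID3Frames(block); // decoded PRIV / text / URL frames
  const ptsMs = getTimeStamp(block); // ms from the Apple PRIV frame, if present
}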

68
node_modules/hls.js/src/demux/mp3demuxer.ts generated vendored Normal file

@ -0,0 +1,68 @@
/**
* MP3 demuxer
*/
import BaseAudioDemuxer from './base-audio-demuxer';
import * as ID3 from '../demux/id3';
import { logger } from '../utils/logger';
import * as MpegAudio from './mpegaudio';
class MP3Demuxer extends BaseAudioDemuxer {
static readonly minProbeByteLength: number = 4;
resetInitSegment(audioCodec, videoCodec, duration) {
super.resetInitSegment(audioCodec, videoCodec, duration);
this._audioTrack = {
container: 'audio/mpeg',
type: 'audio',
id: 2,
pid: -1,
sequenceNumber: 0,
isAAC: false,
samples: [],
manifestCodec: audioCodec,
duration: duration,
inputTimeScale: 90000,
dropped: 0,
};
}
static probe(data): boolean {
if (!data) {
return false;
}
// check if data contains ID3 timestamp and MPEG sync word
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
// More info http://www.mp3-tech.org/programmer/frame_header.html
const id3Data = ID3.getID3Data(data, 0) || [];
let offset = id3Data.length;
for (let length = data.length; offset < length; offset++) {
if (MpegAudio.probe(data, offset)) {
logger.log('MPEG Audio sync word found!');
return true;
}
}
return false;
}
canParse(data, offset) {
return MpegAudio.canParse(data, offset);
}
appendFrame(track, data, offset) {
if (this.initPTS === null) {
return;
}
return MpegAudio.appendFrame(
track,
data,
offset,
this.initPTS,
this.frameIndex
);
}
}
export default MP3Demuxer;

98
node_modules/hls.js/src/demux/mp4demuxer.ts generated vendored Normal file

@ -0,0 +1,98 @@
/**
* MP4 demuxer
*/
import {
Demuxer,
DemuxerResult,
PassthroughVideoTrack,
DemuxedAudioTrack,
DemuxedUserdataTrack,
DemuxedMetadataTrack,
KeyData,
} from '../types/demuxer';
import {
findBox,
segmentValidRange,
appendUint8Array,
} from '../utils/mp4-tools';
import { dummyTrack } from './dummy-demuxed-track';
import type { HlsEventEmitter } from '../events';
import type { HlsConfig } from '../config';
class MP4Demuxer implements Demuxer {
static readonly minProbeByteLength = 1024;
private remainderData: Uint8Array | null = null;
private config: HlsConfig;
constructor(observer: HlsEventEmitter, config: HlsConfig) {
this.config = config;
}
resetTimeStamp() {}
resetInitSegment() {}
resetContiguity(): void {}
static probe(data) {
// ensure we find a moof box in the first 16 kB
return (
findBox({ data: data, start: 0, end: Math.min(data.length, 16384) }, [
'moof',
]).length > 0
);
}
demux(data): DemuxerResult {
// Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter
let avcSamples = data;
const avcTrack = dummyTrack() as PassthroughVideoTrack;
if (this.config.progressive) {
// Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else.
// This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee
// that the fetch loader gives us whole moof+mdat pairs. If we push jagged data to MSE, it will throw an exception.
if (this.remainderData) {
avcSamples = appendUint8Array(this.remainderData, data);
}
const segmentedData = segmentValidRange(avcSamples);
this.remainderData = segmentedData.remainder;
avcTrack.samples = segmentedData.valid || new Uint8Array();
} else {
avcTrack.samples = avcSamples;
}
return {
audioTrack: dummyTrack() as DemuxedAudioTrack,
avcTrack,
id3Track: dummyTrack() as DemuxedMetadataTrack,
textTrack: dummyTrack() as DemuxedUserdataTrack,
};
}
flush() {
const avcTrack = dummyTrack() as PassthroughVideoTrack;
avcTrack.samples = this.remainderData || new Uint8Array();
this.remainderData = null;
return {
audioTrack: dummyTrack() as DemuxedAudioTrack,
avcTrack,
id3Track: dummyTrack() as DemuxedMetadataTrack,
textTrack: dummyTrack() as DemuxedUserdataTrack,
};
}
demuxSampleAes(
data: Uint8Array,
keyData: KeyData,
timeOffset: number
): Promise<DemuxerResult> {
return Promise.reject(
new Error('The MP4 demuxer does not support SAMPLE-AES decryption')
);
}
destroy() {}
}
export default MP4Demuxer;
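A hedged sketch of the probe contract above; the buffer is a hand-built, empty `moof` box (4-byte size, then the type in ASCII), which the box walker in findBox should accept:

// [0, 0, 0, 8] is the box size (8 bytes), followed by 'moof'.
const data = new Uint8Array([0, 0, 0, 8, 0x6d, 0x6f, 0x6f, 0x66]);
MP4Demuxer.probe(data); // true: a moof box sits within the first 16 kB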

177
node_modules/hls.js/src/demux/mpegaudio.ts generated vendored Normal file

@ -0,0 +1,177 @@
/**
* MPEG parser helper
*/
import { DemuxedAudioTrack } from '../types/demuxer';
let chromeVersion: number | null = null;
const BitratesMap = [
32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 32, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 32, 40, 48, 56, 64, 80,
96, 112, 128, 160, 192, 224, 256, 320, 32, 48, 56, 64, 80, 96, 112, 128, 144,
160, 176, 192, 224, 256, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144,
160,
];
const SamplingRateMap = [
44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000,
];
const SamplesCoefficients = [
// MPEG 2.5
[
0, // Reserved
72, // Layer3
144, // Layer2
12, // Layer1
],
// Reserved
[
0, // Reserved
0, // Layer3
0, // Layer2
0, // Layer1
],
// MPEG 2
[
0, // Reserved
72, // Layer3
144, // Layer2
12, // Layer1
],
// MPEG 1
[
0, // Reserved
144, // Layer3
144, // Layer2
12, // Layer1
],
];
const BytesInSlot = [
0, // Reserved
1, // Layer3
1, // Layer2
4, // Layer1
];
export function appendFrame(
track: DemuxedAudioTrack,
data: Uint8Array,
offset: number,
pts: number,
frameIndex: number
) {
// Using http://www.datavoyage.com/mpgscript/mpeghdr.htm as a reference
if (offset + 24 > data.length) {
return;
}
const header = parseHeader(data, offset);
if (header && offset + header.frameLength <= data.length) {
const frameDuration = (header.samplesPerFrame * 90000) / header.sampleRate;
const stamp = pts + frameIndex * frameDuration;
const sample = {
unit: data.subarray(offset, offset + header.frameLength),
pts: stamp,
dts: stamp,
};
track.config = [];
track.channelCount = header.channelCount;
track.samplerate = header.sampleRate;
track.samples.push(sample);
return { sample, length: header.frameLength, missing: 0 };
}
}
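// Worked example of the timing above (illustrative): an MPEG1 Layer III frame
// holds 1152 samples, so at 44100 Hz each frame spans
// 1152 * 90000 / 44100 = 2351.02 ticks of the 90 kHz clock (~26.12 ms),
// and frame N is stamped pts + N * 2351.02.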
export function parseHeader(data: Uint8Array, offset: number) {
const mpegVersion = (data[offset + 1] >> 3) & 3;
const mpegLayer = (data[offset + 1] >> 1) & 3;
const bitRateIndex = (data[offset + 2] >> 4) & 15;
const sampleRateIndex = (data[offset + 2] >> 2) & 3;
if (
mpegVersion !== 1 &&
bitRateIndex !== 0 &&
bitRateIndex !== 15 &&
sampleRateIndex !== 3
) {
const paddingBit = (data[offset + 2] >> 1) & 1;
const channelMode = data[offset + 3] >> 6;
const columnInBitrates =
mpegVersion === 3 ? 3 - mpegLayer : mpegLayer === 3 ? 3 : 4;
const bitRate =
BitratesMap[columnInBitrates * 14 + bitRateIndex - 1] * 1000;
const columnInSampleRates =
mpegVersion === 3 ? 0 : mpegVersion === 2 ? 1 : 2;
const sampleRate =
SamplingRateMap[columnInSampleRates * 3 + sampleRateIndex];
const channelCount = channelMode === 3 ? 1 : 2; // If bits of channel mode are `11` then it is a single channel (Mono)
const sampleCoefficient = SamplesCoefficients[mpegVersion][mpegLayer];
const bytesInSlot = BytesInSlot[mpegLayer];
const samplesPerFrame = sampleCoefficient * 8 * bytesInSlot;
const frameLength =
Math.floor((sampleCoefficient * bitRate) / sampleRate + paddingBit) *
bytesInSlot;
if (chromeVersion === null) {
const userAgent = navigator.userAgent || '';
const result = userAgent.match(/Chrome\/(\d+)/i);
chromeVersion = result ? parseInt(result[1]) : 0;
}
const needChromeFix = !!chromeVersion && chromeVersion <= 87;
if (
needChromeFix &&
mpegLayer === 2 &&
bitRate >= 224000 &&
channelMode === 0
) {
// Work around bug in Chromium by setting channelMode to dual-channel (01) instead of stereo (00)
data[offset + 3] = data[offset + 3] | 0x80;
}
return { sampleRate, channelCount, frameLength, samplesPerFrame };
}
}
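// Worked example (illustrative): header bytes FF FB 90 64 decode as
// mpegVersion = 3 (MPEG1), mpegLayer = 1 (Layer III), bitRateIndex = 9
// -> BitratesMap[2 * 14 + 9 - 1] = 128 kbps, sampleRateIndex = 0 -> 44100 Hz,
// paddingBit = 0, samplesPerFrame = 144 * 8 * 1 = 1152 and
// frameLength = floor(144 * 128000 / 44100 + 0) * 1 = 417 bytes.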
export function isHeaderPattern(data: Uint8Array, offset: number): boolean {
return (
data[offset] === 0xff &&
(data[offset + 1] & 0xe0) === 0xe0 &&
(data[offset + 1] & 0x06) !== 0x00
);
}
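// Example: 0xFF 0xFB matches the pattern above: the first byte is all sync
// bits, (0xFB & 0xe0) === 0xe0 holds, and (0xFB & 0x06) === 0x02 is non-zero,
// so the layer field is not the reserved value 00.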
export function isHeader(data: Uint8Array, offset: number): boolean {
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
// More info http://www.mp3-tech.org/programmer/frame_header.html
return offset + 1 < data.length && isHeaderPattern(data, offset);
}
export function canParse(data: Uint8Array, offset: number): boolean {
const headerSize = 4;
return isHeaderPattern(data, offset) && headerSize <= data.length - offset;
}
export function probe(data: Uint8Array, offset: number): boolean {
// same as isHeader but we also check that MPEG frame follows last MPEG frame
// or end of data is reached
if (offset + 1 < data.length && isHeaderPattern(data, offset)) {
// MPEG header Length
const headerLength = 4;
// MPEG frame Length
const header = parseHeader(data, offset);
let frameLength = headerLength;
if (header?.frameLength) {
frameLength = header.frameLength;
}
const newOffset = offset + frameLength;
return newOffset === data.length || isHeader(data, newOffset);
}
return false;
}

214
node_modules/hls.js/src/demux/sample-aes.ts generated vendored Normal file

@ -0,0 +1,214 @@
/**
* SAMPLE-AES decrypter
*/
import { HlsConfig } from '../config';
import Decrypter from '../crypt/decrypter';
import { HlsEventEmitter } from '../events';
import type {
AudioSample,
AvcSample,
AvcSampleUnit,
DemuxedVideoTrack,
KeyData,
} from '../types/demuxer';
import { discardEPB } from './tsdemuxer';
class SampleAesDecrypter {
private keyData: KeyData;
private decrypter: Decrypter;
constructor(observer: HlsEventEmitter, config: HlsConfig, keyData: KeyData) {
this.keyData = keyData;
this.decrypter = new Decrypter(observer, config, {
removePKCS7Padding: false,
});
}
decryptBuffer(
encryptedData: Uint8Array | ArrayBuffer,
callback: (decryptedData: ArrayBuffer) => void
) {
this.decrypter.decrypt(
encryptedData,
this.keyData.key.buffer,
this.keyData.iv.buffer,
callback
);
}
// AAC - encrypt all full 16 bytes blocks starting from offset 16
private decryptAacSample(
samples: AudioSample[],
sampleIndex: number,
callback: () => void,
sync: boolean
) {
const curUnit = samples[sampleIndex].unit;
const encryptedData = curUnit.subarray(
16,
curUnit.length - (curUnit.length % 16)
);
const encryptedBuffer = encryptedData.buffer.slice(
encryptedData.byteOffset,
encryptedData.byteOffset + encryptedData.length
);
const localthis = this;
this.decryptBuffer(encryptedBuffer, (decryptedBuffer: ArrayBuffer) => {
const decryptedData = new Uint8Array(decryptedBuffer);
curUnit.set(decryptedData, 16);
if (!sync) {
localthis.decryptAacSamples(samples, sampleIndex + 1, callback);
}
});
}
decryptAacSamples(
samples: AudioSample[],
sampleIndex: number,
callback: () => void
) {
for (; ; sampleIndex++) {
if (sampleIndex >= samples.length) {
callback();
return;
}
if (samples[sampleIndex].unit.length < 32) {
continue;
}
const sync = this.decrypter.isSync();
this.decryptAacSample(samples, sampleIndex, callback, sync);
if (!sync) {
return;
}
}
}
// AVC - encrypt one 16 bytes block out of ten, starting from offset 32
getAvcEncryptedData(decodedData: Uint8Array) {
const encryptedDataLen =
Math.floor((decodedData.length - 48) / 160) * 16 + 16;
const encryptedData = new Int8Array(encryptedDataLen);
let outputPos = 0;
for (
let inputPos = 32;
inputPos < decodedData.length - 16;
inputPos += 160, outputPos += 16
) {
encryptedData.set(
decodedData.subarray(inputPos, inputPos + 16),
outputPos
);
}
return encryptedData;
}
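// Worked example of the 1-in-10 pattern above (illustrative): for a 512-byte
// NAL body, encryptedDataLen = floor((512 - 48) / 160) * 16 + 16 = 48, and
// the loop gathers the 16-byte blocks at offsets 32-47, 192-207 and 352-367;
// every other byte stays in the clear.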
getAvcDecryptedUnit(
decodedData: Uint8Array,
decryptedData: ArrayLike<number> | ArrayBuffer | SharedArrayBuffer
) {
const uint8DecryptedData = new Uint8Array(decryptedData);
let inputPos = 0;
for (
let outputPos = 32;
outputPos < decodedData.length - 16;
outputPos += 160, inputPos += 16
) {
decodedData.set(
uint8DecryptedData.subarray(inputPos, inputPos + 16),
outputPos
);
}
return decodedData;
}
decryptAvcSample(
samples: AvcSample[],
sampleIndex: number,
unitIndex: number,
callback: () => void,
curUnit: AvcSampleUnit,
sync: boolean
) {
const decodedData = discardEPB(curUnit.data);
const encryptedData = this.getAvcEncryptedData(decodedData);
const localthis = this;
this.decryptBuffer(
encryptedData.buffer,
function (decryptedBuffer: ArrayBuffer) {
curUnit.data = localthis.getAvcDecryptedUnit(
decodedData,
decryptedBuffer
);
if (!sync) {
localthis.decryptAvcSamples(
samples,
sampleIndex,
unitIndex + 1,
callback
);
}
}
);
}
decryptAvcSamples(
samples: DemuxedVideoTrack['samples'],
sampleIndex: number,
unitIndex: number,
callback: () => void
) {
if (samples instanceof Uint8Array) {
throw new Error('Cannot decrypt samples of type Uint8Array');
}
for (; ; sampleIndex++, unitIndex = 0) {
if (sampleIndex >= samples.length) {
callback();
return;
}
const curUnits = samples[sampleIndex].units;
for (; ; unitIndex++) {
if (unitIndex >= curUnits.length) {
break;
}
const curUnit = curUnits[unitIndex];
if (
curUnit.data.length <= 48 ||
(curUnit.type !== 1 && curUnit.type !== 5)
) {
continue;
}
const sync = this.decrypter.isSync();
this.decryptAvcSample(
samples,
sampleIndex,
unitIndex,
callback,
curUnit,
sync
);
if (!sync) {
return;
}
}
}
}
}
export default SampleAesDecrypter;

317
node_modules/hls.js/src/demux/transmuxer-interface.ts generated vendored Normal file

@ -0,0 +1,317 @@
import * as work from 'webworkify-webpack';
import { Events } from '../events';
import Transmuxer, {
TransmuxConfig,
TransmuxState,
isPromise,
} from '../demux/transmuxer';
import { logger } from '../utils/logger';
import { ErrorTypes, ErrorDetails } from '../errors';
import { getMediaSource } from '../utils/mediasource-helper';
import { EventEmitter } from 'eventemitter3';
import { Fragment, Part } from '../loader/fragment';
import type { ChunkMetadata, TransmuxerResult } from '../types/transmuxer';
import type Hls from '../hls';
import type { HlsEventEmitter } from '../events';
import type { PlaylistLevelType } from '../types/loader';
import type { TypeSupported } from './tsdemuxer';
const MediaSource = getMediaSource() || { isTypeSupported: () => false };
export default class TransmuxerInterface {
private hls: Hls;
private id: PlaylistLevelType;
private observer: HlsEventEmitter;
private frag: Fragment | null = null;
private part: Part | null = null;
private worker: any;
private onwmsg?: Function;
private transmuxer: Transmuxer | null = null;
private onTransmuxComplete: (transmuxResult: TransmuxerResult) => void;
private onFlush: (chunkMeta: ChunkMetadata) => void;
constructor(
hls: Hls,
id: PlaylistLevelType,
onTransmuxComplete: (transmuxResult: TransmuxerResult) => void,
onFlush: (chunkMeta: ChunkMetadata) => void
) {
this.hls = hls;
this.id = id;
this.onTransmuxComplete = onTransmuxComplete;
this.onFlush = onFlush;
const config = hls.config;
const forwardMessage = (ev, data) => {
data = data || {};
data.frag = this.frag;
data.id = this.id;
hls.trigger(ev, data);
};
// forward events to main thread
this.observer = new EventEmitter() as HlsEventEmitter;
this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
this.observer.on(Events.ERROR, forwardMessage);
const typeSupported: TypeSupported = {
mp4: MediaSource.isTypeSupported('video/mp4'),
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
};
// navigator.vendor is not always available in Web Worker
// refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
const vendor = navigator.vendor;
if (config.enableWorker && typeof Worker !== 'undefined') {
logger.log('demuxing in webworker');
let worker;
try {
worker = this.worker = work(
require.resolve('../demux/transmuxer-worker.ts')
);
this.onwmsg = this.onWorkerMessage.bind(this);
worker.addEventListener('message', this.onwmsg);
worker.onerror = (event) => {
hls.trigger(Events.ERROR, {
type: ErrorTypes.OTHER_ERROR,
details: ErrorDetails.INTERNAL_EXCEPTION,
fatal: true,
event: 'demuxerWorker',
error: new Error(
`${event.message} (${event.filename}:${event.lineno})`
),
});
};
worker.postMessage({
cmd: 'init',
typeSupported: typeSupported,
vendor: vendor,
id: id,
config: JSON.stringify(config),
});
} catch (err) {
logger.warn('Error in worker:', err);
logger.error(
'Error while initializing DemuxerWorker, falling back to inline'
);
if (worker) {
// revoke the Object URL that was used to create transmuxer worker, so as not to leak it
self.URL.revokeObjectURL(worker.objectURL);
}
this.transmuxer = new Transmuxer(
this.observer,
typeSupported,
config,
vendor,
id
);
this.worker = null;
}
} else {
this.transmuxer = new Transmuxer(
this.observer,
typeSupported,
config,
vendor,
id
);
}
}
destroy(): void {
const w = this.worker;
if (w) {
w.removeEventListener('message', this.onwmsg);
w.terminate();
this.worker = null;
} else {
const transmuxer = this.transmuxer;
if (transmuxer) {
transmuxer.destroy();
this.transmuxer = null;
}
}
const observer = this.observer;
if (observer) {
observer.removeAllListeners();
}
// @ts-ignore
this.observer = null;
}
push(
data: ArrayBuffer,
initSegmentData: Uint8Array | undefined,
audioCodec: string | undefined,
videoCodec: string | undefined,
frag: Fragment,
part: Part | null,
duration: number,
accurateTimeOffset: boolean,
chunkMeta: ChunkMetadata,
defaultInitPTS?: number
): void {
chunkMeta.transmuxing.start = self.performance.now();
const { transmuxer, worker } = this;
const timeOffset = part ? part.start : frag.start;
const decryptdata = frag.decryptdata;
const lastFrag = this.frag;
const discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
const trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level);
const snDiff = lastFrag ? chunkMeta.sn - (lastFrag.sn as number) : -1;
const partDiff = this.part ? chunkMeta.part - this.part.index : 1;
const contiguous =
!trackSwitch && (snDiff === 1 || (snDiff === 0 && partDiff === 1));
const now = self.performance.now();
if (trackSwitch || snDiff || frag.stats.parsing.start === 0) {
frag.stats.parsing.start = now;
}
if (part && (partDiff || !contiguous)) {
part.stats.parsing.start = now;
}
const initSegmentChange = !(
lastFrag && frag.initSegment?.url === lastFrag.initSegment?.url
);
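// Example (illustrative): if the last pushed fragment was sn 5 on level 2 and
// this chunk is sn 6 on level 2, then trackSwitch is false, snDiff is 1 and
// contiguous is true; jumping to sn 9, or switching level, starts a new
// transmux session below.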
const state = new TransmuxState(
discontinuity,
contiguous,
accurateTimeOffset,
trackSwitch,
timeOffset,
initSegmentChange
);
if (!contiguous || discontinuity || initSegmentChange) {
logger.log(`[transmuxer-interface, ${frag.type}]: Starting new transmux session for sn: ${chunkMeta.sn} p: ${chunkMeta.part} level: ${chunkMeta.level} id: ${chunkMeta.id}
discontinuity: ${discontinuity}
trackSwitch: ${trackSwitch}
contiguous: ${contiguous}
accurateTimeOffset: ${accurateTimeOffset}
timeOffset: ${timeOffset}
initSegmentChange: ${initSegmentChange}`);
const config = new TransmuxConfig(
audioCodec,
videoCodec,
initSegmentData,
duration,
defaultInitPTS
);
this.configureTransmuxer(config);
}
this.frag = frag;
this.part = part;
// Frags with sn of 'initSegment' are not transmuxed
if (worker) {
// post fragment payload as transferable objects for ArrayBuffer (no copy)
worker.postMessage(
{
cmd: 'demux',
data,
decryptdata,
chunkMeta,
state,
},
data instanceof ArrayBuffer ? [data] : []
);
} else if (transmuxer) {
const transmuxResult = transmuxer.push(
data,
decryptdata,
chunkMeta,
state
);
if (isPromise(transmuxResult)) {
transmuxResult.then((data) => {
this.handleTransmuxComplete(data);
});
} else {
this.handleTransmuxComplete(transmuxResult as TransmuxerResult);
}
}
}
flush(chunkMeta: ChunkMetadata) {
chunkMeta.transmuxing.start = self.performance.now();
const { transmuxer, worker } = this;
if (worker) {
worker.postMessage({
cmd: 'flush',
chunkMeta,
});
} else if (transmuxer) {
const transmuxResult = transmuxer.flush(chunkMeta);
if (isPromise(transmuxResult)) {
transmuxResult.then((data) => {
this.handleFlushResult(data, chunkMeta);
});
} else {
this.handleFlushResult(
transmuxResult as Array<TransmuxerResult>,
chunkMeta
);
}
}
}
private handleFlushResult(
results: Array<TransmuxerResult>,
chunkMeta: ChunkMetadata
) {
results.forEach((result) => {
this.handleTransmuxComplete(result);
});
this.onFlush(chunkMeta);
}
private onWorkerMessage(ev: any): void {
const data = ev.data;
const hls = this.hls;
switch (data.event) {
case 'init': {
// revoke the Object URL that was used to create transmuxer worker, so as not to leak it
self.URL.revokeObjectURL(this.worker.objectURL);
break;
}
case 'transmuxComplete': {
this.handleTransmuxComplete(data.data);
break;
}
case 'flush': {
this.onFlush(data.data);
break;
}
/* falls through */
default: {
data.data = data.data || {};
data.data.frag = this.frag;
data.data.id = this.id;
hls.trigger(data.event, data.data);
break;
}
}
}
private configureTransmuxer(config: TransmuxConfig) {
const { worker, transmuxer } = this;
if (worker) {
worker.postMessage({
cmd: 'configure',
config,
});
} else if (transmuxer) {
transmuxer.configure(config);
}
}
private handleTransmuxComplete(result: TransmuxerResult) {
result.chunkMeta.transmuxing.end = self.performance.now();
this.onTransmuxComplete(result);
}
}

128
node_modules/hls.js/src/demux/transmuxer-worker.ts generated vendored Normal file

@ -0,0 +1,128 @@
import Transmuxer, { isPromise } from '../demux/transmuxer';
import { Events } from '../events';
import { enableLogs } from '../utils/logger';
import { EventEmitter } from 'eventemitter3';
import type { RemuxedTrack, RemuxerResult } from '../types/remuxer';
import type { TransmuxerResult, ChunkMetadata } from '../types/transmuxer';
export default function TransmuxerWorker(self) {
const observer = new EventEmitter();
const forwardMessage = (ev, data) => {
self.postMessage({ event: ev, data: data });
};
// forward events to main thread
observer.on(Events.FRAG_DECRYPTED, forwardMessage);
observer.on(Events.ERROR, forwardMessage);
self.addEventListener('message', (ev) => {
const data = ev.data;
switch (data.cmd) {
case 'init': {
const config = JSON.parse(data.config);
self.transmuxer = new Transmuxer(
observer,
data.typeSupported,
config,
data.vendor,
data.id
);
enableLogs(config.debug);
forwardMessage('init', null);
break;
}
case 'configure': {
self.transmuxer.configure(data.config);
break;
}
case 'demux': {
const transmuxResult: TransmuxerResult | Promise<TransmuxerResult> =
self.transmuxer.push(
data.data,
data.decryptdata,
data.chunkMeta,
data.state
);
if (isPromise(transmuxResult)) {
transmuxResult.then((data) => {
emitTransmuxComplete(self, data);
});
} else {
emitTransmuxComplete(self, transmuxResult);
}
break;
}
case 'flush': {
const id = data.chunkMeta;
const transmuxResult = self.transmuxer.flush(id);
if (isPromise(transmuxResult)) {
transmuxResult.then((results: Array<TransmuxerResult>) => {
handleFlushResult(self, results as Array<TransmuxerResult>, id);
});
} else {
handleFlushResult(
self,
transmuxResult as Array<TransmuxerResult>,
id
);
}
break;
}
default:
break;
}
});
}
function emitTransmuxComplete(self: any, transmuxResult: TransmuxerResult) {
if (isEmptyResult(transmuxResult.remuxResult)) {
return;
}
const transferable: Array<ArrayBuffer> = [];
const { audio, video } = transmuxResult.remuxResult;
if (audio) {
addToTransferable(transferable, audio);
}
if (video) {
addToTransferable(transferable, video);
}
self.postMessage(
{ event: 'transmuxComplete', data: transmuxResult },
transferable
);
}
// Converts data to a transferable object (https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast)
// in order to minimize message passing overhead
function addToTransferable(
transferable: Array<ArrayBuffer>,
track: RemuxedTrack
) {
if (track.data1) {
transferable.push(track.data1.buffer);
}
if (track.data2) {
transferable.push(track.data2.buffer);
}
}
function handleFlushResult(
self: any,
results: Array<TransmuxerResult>,
chunkMeta: ChunkMetadata
) {
results.forEach((result) => {
emitTransmuxComplete(self, result);
});
self.postMessage({ event: 'flush', data: chunkMeta });
}
function isEmptyResult(remuxResult: RemuxerResult) {
return (
!remuxResult.audio &&
!remuxResult.video &&
!remuxResult.text &&
!remuxResult.id3 &&
!remuxResult.initSegment
);
}
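A hedged sketch of the message protocol this worker speaks, seen from the main thread (TransmuxerInterface above is the real client; `worker`, `config` and the payload variables are illustrative):

// main thread -> worker
worker.postMessage({ cmd: 'init', typeSupported, vendor, id, config: JSON.stringify(config) });
worker.postMessage({ cmd: 'demux', data, decryptdata, chunkMeta, state }, [data]); // transfer, no copy
worker.postMessage({ cmd: 'flush', chunkMeta });
// worker -> main thread: { event: 'init' | 'transmuxComplete' | 'flush', data }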

532
node_modules/hls.js/src/demux/transmuxer.ts generated vendored Normal file

@ -0,0 +1,532 @@
import type { HlsEventEmitter } from '../events';
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import Decrypter from '../crypt/decrypter';
import AACDemuxer from '../demux/aacdemuxer';
import MP4Demuxer from '../demux/mp4demuxer';
import TSDemuxer, { TypeSupported } from '../demux/tsdemuxer';
import MP3Demuxer from '../demux/mp3demuxer';
import MP4Remuxer from '../remux/mp4-remuxer';
import PassThroughRemuxer from '../remux/passthrough-remuxer';
import ChunkCache from './chunk-cache';
import { appendUint8Array } from '../utils/mp4-tools';
import { logger } from '../utils/logger';
import type { Demuxer, KeyData } from '../types/demuxer';
import type { Remuxer } from '../types/remuxer';
import type { TransmuxerResult, ChunkMetadata } from '../types/transmuxer';
import type { HlsConfig } from '../config';
import type { LevelKey } from '../loader/level-key';
import type { PlaylistLevelType } from '../types/loader';
let now;
// performance.now() is not available in Web Workers, at least in desktop Safari
try {
now = self.performance.now.bind(self.performance);
} catch (err) {
logger.debug('Unable to use Performance API on this environment');
now = self.Date.now;
}
type MuxConfig =
| { demux: typeof TSDemuxer; remux: typeof MP4Remuxer }
| { demux: typeof MP4Demuxer; remux: typeof PassThroughRemuxer }
| { demux: typeof AACDemuxer; remux: typeof MP4Remuxer }
| { demux: typeof MP3Demuxer; remux: typeof MP4Remuxer };
const muxConfig: MuxConfig[] = [
{ demux: TSDemuxer, remux: MP4Remuxer },
{ demux: MP4Demuxer, remux: PassThroughRemuxer },
{ demux: AACDemuxer, remux: MP4Remuxer },
{ demux: MP3Demuxer, remux: MP4Remuxer },
];
let minProbeByteLength = 1024;
muxConfig.forEach(({ demux }) => {
minProbeByteLength = Math.max(minProbeByteLength, demux.minProbeByteLength);
});
export default class Transmuxer {
private observer: HlsEventEmitter;
private typeSupported: TypeSupported;
private config: HlsConfig;
private vendor: string;
private id: PlaylistLevelType;
private demuxer?: Demuxer;
private remuxer?: Remuxer;
private decrypter?: Decrypter;
private probe!: Function;
private decryptionPromise: Promise<TransmuxerResult> | null = null;
private transmuxConfig!: TransmuxConfig;
private currentTransmuxState!: TransmuxState;
private cache: ChunkCache = new ChunkCache();
constructor(
observer: HlsEventEmitter,
typeSupported: TypeSupported,
config: HlsConfig,
vendor: string,
id: PlaylistLevelType
) {
this.observer = observer;
this.typeSupported = typeSupported;
this.config = config;
this.vendor = vendor;
this.id = id;
}
configure(transmuxConfig: TransmuxConfig) {
this.transmuxConfig = transmuxConfig;
if (this.decrypter) {
this.decrypter.reset();
}
}
push(
data: ArrayBuffer,
decryptdata: LevelKey | null,
chunkMeta: ChunkMetadata,
state?: TransmuxState
): TransmuxerResult | Promise<TransmuxerResult> {
const stats = chunkMeta.transmuxing;
stats.executeStart = now();
let uintData: Uint8Array = new Uint8Array(data);
const { cache, config, currentTransmuxState, transmuxConfig } = this;
if (state) {
this.currentTransmuxState = state;
}
const keyData = getEncryptionType(uintData, decryptdata);
if (keyData && keyData.method === 'AES-128') {
const decrypter = this.getDecrypter();
// Software decryption is synchronous; webCrypto is not
if (config.enableSoftwareAES) {
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
// data is handled in the flush() call
const decryptedData = decrypter.softwareDecrypt(
uintData,
keyData.key.buffer,
keyData.iv.buffer
);
if (!decryptedData) {
stats.executeEnd = now();
return emptyResult(chunkMeta);
}
uintData = new Uint8Array(decryptedData);
} else {
this.decryptionPromise = decrypter
.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer)
.then((decryptedData): TransmuxerResult => {
// Calling push here is important; if flush() is called while this is still resolving, this ensures that
// the decrypted data has been transmuxed
const result = this.push(
decryptedData,
null,
chunkMeta
) as TransmuxerResult;
this.decryptionPromise = null;
return result;
});
return this.decryptionPromise!;
}
}
const {
contiguous,
discontinuity,
trackSwitch,
accurateTimeOffset,
timeOffset,
initSegmentChange,
} = state || currentTransmuxState;
const {
audioCodec,
videoCodec,
defaultInitPts,
duration,
initSegmentData,
} = transmuxConfig;
// Reset muxers before probing to ensure that their state is clean, even if flushing occurs before a successful probe
if (discontinuity || trackSwitch || initSegmentChange) {
this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
}
if (discontinuity || initSegmentChange) {
this.resetInitialTimestamp(defaultInitPts);
}
if (!contiguous) {
this.resetContiguity();
}
if (this.needsProbing(uintData, discontinuity, trackSwitch)) {
if (cache.dataLength) {
const cachedData = cache.flush();
uintData = appendUint8Array(cachedData, uintData);
}
this.configureTransmuxer(uintData, transmuxConfig);
}
const result = this.transmux(
uintData,
keyData,
timeOffset,
accurateTimeOffset,
chunkMeta
);
const currentState = this.currentTransmuxState;
currentState.contiguous = true;
currentState.discontinuity = false;
currentState.trackSwitch = false;
stats.executeEnd = now();
return result;
}
// Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type)
flush(
chunkMeta: ChunkMetadata
): TransmuxerResult[] | Promise<TransmuxerResult[]> {
const stats = chunkMeta.transmuxing;
stats.executeStart = now();
const { decrypter, cache, currentTransmuxState, decryptionPromise } = this;
if (decryptionPromise) {
// Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
// only flushing is required for async decryption
return decryptionPromise.then(() => {
return this.flush(chunkMeta);
});
}
const transmuxResults: Array<TransmuxerResult> = [];
const { timeOffset } = currentTransmuxState;
if (decrypter) {
// The decrypter may have data cached, which needs to be demuxed. In this case we'll have two TransmuxResults
// This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads,
// or for progressive downloads with small segments)
const decryptedData = decrypter.flush();
if (decryptedData) {
// Push always returns a TransmuxerResult if decryptdata is null
transmuxResults.push(
this.push(decryptedData, null, chunkMeta) as TransmuxerResult
);
}
}
const bytesSeen = cache.dataLength;
cache.reset();
const { demuxer, remuxer } = this;
if (!demuxer || !remuxer) {
// If probing failed, and each demuxer saw enough bytes to be able to probe, then Hls.js has been given content it's not able to handle
if (bytesSeen >= minProbeByteLength) {
this.observer.emit(Events.ERROR, Events.ERROR, {
type: ErrorTypes.MEDIA_ERROR,
details: ErrorDetails.FRAG_PARSING_ERROR,
fatal: true,
reason: 'no demux matching with content found',
});
}
stats.executeEnd = now();
return [emptyResult(chunkMeta)];
}
const demuxResultOrPromise = demuxer.flush(timeOffset);
if (isPromise(demuxResultOrPromise)) {
// Decrypt final SAMPLE-AES samples
return demuxResultOrPromise.then((demuxResult) => {
this.flushRemux(transmuxResults, demuxResult, chunkMeta);
return transmuxResults;
});
}
this.flushRemux(transmuxResults, demuxResultOrPromise, chunkMeta);
return transmuxResults;
}
private flushRemux(transmuxResults, demuxResult, chunkMeta) {
const { audioTrack, avcTrack, id3Track, textTrack } = demuxResult;
const { accurateTimeOffset, timeOffset } = this.currentTransmuxState;
logger.log(
`[transmuxer.ts]: Flushed fragment ${chunkMeta.sn}${
chunkMeta.part > -1 ? ' p: ' + chunkMeta.part : ''
} of level ${chunkMeta.level}`
);
const remuxResult = this.remuxer!.remux(
audioTrack,
avcTrack,
id3Track,
textTrack,
timeOffset,
accurateTimeOffset,
true,
this.id
);
transmuxResults.push({
remuxResult,
chunkMeta,
});
chunkMeta.transmuxing.executeEnd = now();
}
resetInitialTimestamp(defaultInitPts: number | undefined) {
const { demuxer, remuxer } = this;
if (!demuxer || !remuxer) {
return;
}
demuxer.resetTimeStamp(defaultInitPts);
remuxer.resetTimeStamp(defaultInitPts);
}
resetContiguity() {
const { demuxer, remuxer } = this;
if (!demuxer || !remuxer) {
return;
}
demuxer.resetContiguity();
remuxer.resetNextTimestamp();
}
resetInitSegment(
initSegmentData: Uint8Array | undefined,
audioCodec: string | undefined,
videoCodec: string | undefined,
duration: number
) {
const { demuxer, remuxer } = this;
if (!demuxer || !remuxer) {
return;
}
demuxer.resetInitSegment(audioCodec, videoCodec, duration);
remuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec);
}
destroy(): void {
if (this.demuxer) {
this.demuxer.destroy();
this.demuxer = undefined;
}
if (this.remuxer) {
this.remuxer.destroy();
this.remuxer = undefined;
}
}
private transmux(
data: Uint8Array,
keyData: KeyData | null,
timeOffset: number,
accurateTimeOffset: boolean,
chunkMeta: ChunkMetadata
): TransmuxerResult | Promise<TransmuxerResult> {
let result: TransmuxerResult | Promise<TransmuxerResult>;
if (keyData && keyData.method === 'SAMPLE-AES') {
result = this.transmuxSampleAes(
data,
keyData,
timeOffset,
accurateTimeOffset,
chunkMeta
);
} else {
result = this.transmuxUnencrypted(
data,
timeOffset,
accurateTimeOffset,
chunkMeta
);
}
return result;
}
private transmuxUnencrypted(
data: Uint8Array,
timeOffset: number,
accurateTimeOffset: boolean,
chunkMeta: ChunkMetadata
): TransmuxerResult {
const { audioTrack, avcTrack, id3Track, textTrack } = (
this.demuxer as Demuxer
).demux(data, timeOffset, false, !this.config.progressive);
const remuxResult = this.remuxer!.remux(
audioTrack,
avcTrack,
id3Track,
textTrack,
timeOffset,
accurateTimeOffset,
false,
this.id
);
return {
remuxResult,
chunkMeta,
};
}
private transmuxSampleAes(
data: Uint8Array,
decryptData: KeyData,
timeOffset: number,
accurateTimeOffset: boolean,
chunkMeta: ChunkMetadata
): Promise<TransmuxerResult> {
return (this.demuxer as Demuxer)
.demuxSampleAes(data, decryptData, timeOffset)
.then((demuxResult) => {
const remuxResult = this.remuxer!.remux(
demuxResult.audioTrack,
demuxResult.avcTrack,
demuxResult.id3Track,
demuxResult.textTrack,
timeOffset,
accurateTimeOffset,
false,
this.id
);
return {
remuxResult,
chunkMeta,
};
});
}
private configureTransmuxer(
data: Uint8Array,
transmuxConfig: TransmuxConfig
) {
const { config, observer, typeSupported, vendor } = this;
const {
audioCodec,
defaultInitPts,
duration,
initSegmentData,
videoCodec,
} = transmuxConfig;
// probe for content type
let mux;
for (let i = 0, len = muxConfig.length; i < len; i++) {
if (muxConfig[i].demux.probe(data)) {
mux = muxConfig[i];
break;
}
}
if (!mux) {
// If no demuxer probe matched, fall back to mp4 passthrough
logger.warn(
'Failed to find demuxer by probing frag, treating as mp4 passthrough'
);
mux = { demux: MP4Demuxer, remux: PassThroughRemuxer };
}
// so let's check that current remuxer and demuxer are still valid
const demuxer = this.demuxer;
const remuxer = this.remuxer;
const Remuxer: MuxConfig['remux'] = mux.remux;
const Demuxer: MuxConfig['demux'] = mux.demux;
if (!remuxer || !(remuxer instanceof Remuxer)) {
this.remuxer = new Remuxer(observer, config, typeSupported, vendor);
}
if (!demuxer || !(demuxer instanceof Demuxer)) {
this.demuxer = new Demuxer(observer, config, typeSupported);
this.probe = Demuxer.probe;
}
// Ensure that muxers are always initialized with an initSegment
this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
this.resetInitialTimestamp(defaultInitPts);
}
private needsProbing(
data: Uint8Array,
discontinuity: boolean,
trackSwitch: boolean
): boolean {
// in case of continuity change, or track switch
// we might switch from content type (AAC container to TS container, or TS to fmp4 for example)
return !this.demuxer || !this.remuxer || discontinuity || trackSwitch;
}
private getDecrypter(): Decrypter {
let decrypter = this.decrypter;
if (!decrypter) {
decrypter = this.decrypter = new Decrypter(this.observer, this.config);
}
return decrypter;
}
}
function getEncryptionType(
data: Uint8Array,
decryptData: LevelKey | null
): KeyData | null {
let encryptionType: KeyData | null = null;
if (
data.byteLength > 0 &&
decryptData != null &&
decryptData.key != null &&
decryptData.iv !== null &&
decryptData.method != null
) {
encryptionType = decryptData as KeyData;
}
return encryptionType;
}
const emptyResult = (chunkMeta): TransmuxerResult => ({
remuxResult: {},
chunkMeta,
});
export function isPromise<T>(p: Promise<T> | any): p is Promise<T> {
return 'then' in p && p.then instanceof Function;
}
export class TransmuxConfig {
public audioCodec?: string;
public videoCodec?: string;
public initSegmentData?: Uint8Array;
public duration: number;
public defaultInitPts?: number;
constructor(
audioCodec: string | undefined,
videoCodec: string | undefined,
initSegmentData: Uint8Array | undefined,
duration: number,
defaultInitPts?: number
) {
this.audioCodec = audioCodec;
this.videoCodec = videoCodec;
this.initSegmentData = initSegmentData;
this.duration = duration;
this.defaultInitPts = defaultInitPts;
}
}
export class TransmuxState {
public discontinuity: boolean;
public contiguous: boolean;
public accurateTimeOffset: boolean;
public trackSwitch: boolean;
public timeOffset: number;
public initSegmentChange: boolean;
constructor(
discontinuity: boolean,
contiguous: boolean,
accurateTimeOffset: boolean,
trackSwitch: boolean,
timeOffset: number,
initSegmentChange: boolean
) {
this.discontinuity = discontinuity;
this.contiguous = contiguous;
this.accurateTimeOffset = accurateTimeOffset;
this.trackSwitch = trackSwitch;
this.timeOffset = timeOffset;
this.initSegmentChange = initSegmentChange;
}
}
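A minimal sketch of driving Transmuxer directly for one unencrypted chunk; in hls.js proper this is what TransmuxerInterface and the worker above do, and `observer`, `typeSupported`, `config`, `id`, `chunkMeta`, `chunkBuffer` and the codec/duration variables are assumed to be in scope:

const transmuxer = new Transmuxer(observer, typeSupported, config, navigator.vendor, id);
transmuxer.configure(new TransmuxConfig(audioCodec, videoCodec, initSegmentData, duration));
// First push: discontinuity, not contiguous, accurate offset 0, init segment change.
const state = new TransmuxState(true, false, true, false, 0, true);
const result = transmuxer.push(chunkBuffer, null, chunkMeta, state); // TransmuxerResult or a Promise of one
const flushed = transmuxer.flush(chunkMeta); // one or more TransmuxerResults, possibly as a Promise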

1366
node_modules/hls.js/src/demux/tsdemuxer.ts generated vendored Normal file

File diff suppressed because it is too large

3
node_modules/hls.js/src/empty.js generated vendored Normal file

@ -0,0 +1,3 @@
// This file is inserted as a shim for modules which we do not want to include into the distro.
// This replacement is done in the "resolve" section of the webpack config.
module.exports = undefined;

85
node_modules/hls.js/src/errors.ts generated vendored Normal file

@ -0,0 +1,85 @@
export enum ErrorTypes {
// Identifier for a network error (loading error / timeout ...)
NETWORK_ERROR = 'networkError',
// Identifier for a media Error (video/parsing/mediasource error)
MEDIA_ERROR = 'mediaError',
// EME (encrypted media extensions) errors
KEY_SYSTEM_ERROR = 'keySystemError',
// Identifier for a mux Error (demuxing/remuxing)
MUX_ERROR = 'muxError',
// Identifier for all other errors
OTHER_ERROR = 'otherError',
}
/**
* @enum {ErrorDetails}
* @typedef {string} ErrorDetail
*/
export enum ErrorDetails {
KEY_SYSTEM_NO_KEYS = 'keySystemNoKeys',
KEY_SYSTEM_NO_ACCESS = 'keySystemNoAccess',
KEY_SYSTEM_NO_SESSION = 'keySystemNoSession',
KEY_SYSTEM_LICENSE_REQUEST_FAILED = 'keySystemLicenseRequestFailed',
KEY_SYSTEM_NO_INIT_DATA = 'keySystemNoInitData',
// Identifier for a manifest load error - data: { url : faulty URL, response : { code: error code, text: error text }}
MANIFEST_LOAD_ERROR = 'manifestLoadError',
// Identifier for a manifest load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
MANIFEST_LOAD_TIMEOUT = 'manifestLoadTimeOut',
// Identifier for a manifest parsing error - data: { url : faulty URL, reason : error reason}
MANIFEST_PARSING_ERROR = 'manifestParsingError',
// Identifier for a manifest with only incompatible codecs error - data: { url : faulty URL, reason : error reason}
MANIFEST_INCOMPATIBLE_CODECS_ERROR = 'manifestIncompatibleCodecsError',
// Identifier for a level which contains no fragments - data: { url: faulty URL, reason: "no fragments found in level", level: index of the bad level }
LEVEL_EMPTY_ERROR = 'levelEmptyError',
// Identifier for a level load error - data: { url : faulty URL, response : { code: error code, text: error text }}
LEVEL_LOAD_ERROR = 'levelLoadError',
// Identifier for a level load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
LEVEL_LOAD_TIMEOUT = 'levelLoadTimeOut',
// Identifier for a level switch error - data: { level : faulty level Id, event : error description}
LEVEL_SWITCH_ERROR = 'levelSwitchError',
// Identifier for an audio track load error - data: { url : faulty URL, response : { code: error code, text: error text }}
AUDIO_TRACK_LOAD_ERROR = 'audioTrackLoadError',
// Identifier for an audio track load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
AUDIO_TRACK_LOAD_TIMEOUT = 'audioTrackLoadTimeOut',
// Identifier for a subtitle track load error - data: { url : faulty URL, response : { code: error code, text: error text }}
SUBTITLE_LOAD_ERROR = 'subtitleTrackLoadError',
// Identifier for a subtitle track load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
SUBTITLE_TRACK_LOAD_TIMEOUT = 'subtitleTrackLoadTimeOut',
// Identifier for fragment load error - data: { frag : fragment object, response : { code: error code, text: error text }}
FRAG_LOAD_ERROR = 'fragLoadError',
// Identifier for fragment load timeout error - data: { frag : fragment object}
FRAG_LOAD_TIMEOUT = 'fragLoadTimeOut',
// Identifier for a fragment decryption error event - data: {id : demuxer Id,frag: fragment object, reason : parsing error description }
FRAG_DECRYPT_ERROR = 'fragDecryptError',
// Identifier for a fragment parsing error event - data: { id : demuxer Id, reason : parsing error description }
// will be renamed DEMUX_PARSING_ERROR and switched to MUX_ERROR in the next major release
FRAG_PARSING_ERROR = 'fragParsingError',
// Identifier for a remux alloc error event - data: { id : demuxer Id, frag : fragment object, bytes : nb of bytes on which allocation failed , reason : error text }
REMUX_ALLOC_ERROR = 'remuxAllocError',
// Identifier for decrypt key load error - data: { frag : fragment object, response : { code: error code, text: error text }}
KEY_LOAD_ERROR = 'keyLoadError',
// Identifier for decrypt key load timeout error - data: { frag : fragment object}
KEY_LOAD_TIMEOUT = 'keyLoadTimeOut',
// Triggered when an exception occurs while adding a sourceBuffer to MediaSource - data : { error : exception , mimeType : mimeType }
BUFFER_ADD_CODEC_ERROR = 'bufferAddCodecError',
// Triggered when source buffer(s) could not be created using level (manifest CODECS attribute), parsed media, or best guess codec(s) - data: { reason : error reason }
BUFFER_INCOMPATIBLE_CODECS_ERROR = 'bufferIncompatibleCodecsError',
// Identifier for a buffer append error - data: append error description
BUFFER_APPEND_ERROR = 'bufferAppendError',
// Identifier for a buffer appending error event - data: appending error description
BUFFER_APPENDING_ERROR = 'bufferAppendingError',
// Identifier for a buffer stalled error event
BUFFER_STALLED_ERROR = 'bufferStalledError',
// Identifier for a buffer full event
BUFFER_FULL_ERROR = 'bufferFullError',
// Identifier for a buffer seek over hole event
BUFFER_SEEK_OVER_HOLE = 'bufferSeekOverHole',
// Identifier for a buffer nudge on stall (playback is stuck although currentTime is in a buffered area)
BUFFER_NUDGE_ON_STALL = 'bufferNudgeOnStall',
// Identifier for an internal exception happening inside hls.js while handling an event
INTERNAL_EXCEPTION = 'internalException',
// Identifier for an internal call to abort a loader
INTERNAL_ABORTED = 'aborted',
// Uncategorized error
UNKNOWN = 'unknown',
}
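These enums are what `ERROR` event payloads carry. A typical recovery sketch, following the pattern documented for hls.js (`hls` is an assumed existing instance):

hls.on(Events.ERROR, (event, data) => {
  if (!data.fatal) return;
  switch (data.type) {
    case ErrorTypes.NETWORK_ERROR:
      hls.startLoad(); // e.g. manifestLoadError, fragLoadTimeOut
      break;
    case ErrorTypes.MEDIA_ERROR:
      hls.recoverMediaError(); // e.g. bufferAppendingError, fragParsingError
      break;
    default:
      hls.destroy(); // unrecoverable
  }
});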

393
node_modules/hls.js/src/events.ts generated vendored Normal file

@ -0,0 +1,393 @@
import {
ManifestLoadedData,
ManifestLoadingData,
MediaAttachedData,
MediaAttachingData,
LevelLoadingData,
LevelLoadedData,
ManifestParsedData,
LevelUpdatedData,
LevelsUpdatedData,
FragParsingUserdataData,
FragDecryptedData,
FragLoadedData,
InitPTSFoundData,
CuesParsedData,
SubtitleFragProcessedData,
NonNativeTextTracksData,
FragLoadingData,
AudioTrackLoadedData,
SubtitleTrackLoadedData,
ErrorData,
AudioTrackSwitchingData,
AudioTrackSwitchedData,
KeyLoadedData,
KeyLoadingData,
SubtitleTrackSwitchData,
SubtitleTracksUpdatedData,
LevelSwitchedData,
FragChangedData,
BufferAppendingData,
BufferCodecsData,
FragParsingMetadataData,
FragParsingInitSegmentData,
FragBufferedData,
BufferFlushingData,
BufferEOSData,
LevelSwitchingData,
FPSDropLevelCappingData,
FPSDropData,
BufferCreatedData,
BufferAppendedData,
LevelPTSUpdatedData,
FragParsedData,
AudioTracksUpdatedData,
FragLoadEmergencyAbortedData,
BackBufferData,
LiveBackBufferData,
TrackLoadingData,
BufferFlushedData,
} from './types/events';
/**
* @readonly
* @enum {string}
*/
export enum Events {
// Fired before MediaSource is attaching to media element
MEDIA_ATTACHING = 'hlsMediaAttaching',
// Fired when MediaSource has been successfully attached to media element
MEDIA_ATTACHED = 'hlsMediaAttached',
// Fired before detaching MediaSource from media element
MEDIA_DETACHING = 'hlsMediaDetaching',
// Fired when MediaSource has been detached from media element
MEDIA_DETACHED = 'hlsMediaDetached',
// Fired when the buffer is going to be reset
BUFFER_RESET = 'hlsBufferReset',
// Fired when we know about the codecs that we need buffers for to push into - data: {tracks : { container, codec, levelCodec, initSegment, metadata }}
BUFFER_CODECS = 'hlsBufferCodecs',
// fired when sourcebuffers have been created - data: { tracks : tracks }
BUFFER_CREATED = 'hlsBufferCreated',
// fired when we append a segment to the buffer - data: { segment: segment object }
BUFFER_APPENDING = 'hlsBufferAppending',
// fired when we are done with appending a media segment to the buffer - data : { parent : segment parent that triggered BUFFER_APPENDING, pending : nb of segments waiting for appending for this segment parent}
BUFFER_APPENDED = 'hlsBufferAppended',
// fired when the stream is finished and we want to notify the media buffer that there will be no more data - data: { }
BUFFER_EOS = 'hlsBufferEos',
// fired when the media buffer should be flushed - data { startOffset, endOffset }
BUFFER_FLUSHING = 'hlsBufferFlushing',
// fired when the media buffer has been flushed - data: { }
BUFFER_FLUSHED = 'hlsBufferFlushed',
// fired to signal that a manifest loading starts - data: { url : manifestURL}
MANIFEST_LOADING = 'hlsManifestLoading',
// fired after manifest has been loaded - data: { levels : [available quality levels], audioTracks : [ available audio tracks ], url : manifestURL, stats : LoaderStats }
MANIFEST_LOADED = 'hlsManifestLoaded',
// fired after manifest has been parsed - data: { levels : [available quality levels], firstLevel : index of first quality level appearing in Manifest}
MANIFEST_PARSED = 'hlsManifestParsed',
// fired when a level switch is requested - data: { level : id of new level }
LEVEL_SWITCHING = 'hlsLevelSwitching',
// fired when a level switch is effective - data: { level : id of new level }
LEVEL_SWITCHED = 'hlsLevelSwitched',
// fired when a level playlist loading starts - data: { url : level URL, level : id of level being loaded}
LEVEL_LOADING = 'hlsLevelLoading',
// fired when a level playlist loading finishes - data: { details : levelDetails object, level : id of loaded level, stats : LoaderStats }
LEVEL_LOADED = 'hlsLevelLoaded',
// fired when a level's details have been updated based on previous details, after it has been loaded - data: { details : levelDetails object, level : id of updated level }
LEVEL_UPDATED = 'hlsLevelUpdated',
// fired when a level's PTS information has been updated after parsing a fragment - data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }
LEVEL_PTS_UPDATED = 'hlsLevelPtsUpdated',
// fired to notify that levels have changed after removing a level - data: { levels : [available quality levels] }
LEVELS_UPDATED = 'hlsLevelsUpdated',
// fired to notify that the audio track list has been updated - data: { audioTracks : audioTracks }
AUDIO_TRACKS_UPDATED = 'hlsAudioTracksUpdated',
// fired when an audio track switching is requested - data: { id : audio track id }
AUDIO_TRACK_SWITCHING = 'hlsAudioTrackSwitching',
// fired when an audio track switch actually occurs - data: { id : audio track id }
AUDIO_TRACK_SWITCHED = 'hlsAudioTrackSwitched',
// fired when an audio track loading starts - data: { url : audio track URL, id : audio track id }
AUDIO_TRACK_LOADING = 'hlsAudioTrackLoading',
// fired when an audio track loading finishes - data: { details : levelDetails object, id : audio track id, stats : LoaderStats }
AUDIO_TRACK_LOADED = 'hlsAudioTrackLoaded',
// fired to notify that the subtitle track list has been updated - data: { subtitleTracks : subtitleTracks }
SUBTITLE_TRACKS_UPDATED = 'hlsSubtitleTracksUpdated',
// fired to notify that subtitle tracks were cleared as a result of stopping the media
SUBTITLE_TRACKS_CLEARED = 'hlsSubtitleTracksCleared',
// fired when a subtitle track switch occurs - data: { id : subtitle track id }
SUBTITLE_TRACK_SWITCH = 'hlsSubtitleTrackSwitch',
// fired when a subtitle track loading starts - data: { url : subtitle track URL, id : subtitle track id }
SUBTITLE_TRACK_LOADING = 'hlsSubtitleTrackLoading',
// fired when a subtitle track loading finishes - data: { details : levelDetails object, id : subtitle track id, stats : LoaderStats }
SUBTITLE_TRACK_LOADED = 'hlsSubtitleTrackLoaded',
// fired when a subtitle fragment has been processed - data: { success : boolean, frag : the processed frag }
SUBTITLE_FRAG_PROCESSED = 'hlsSubtitleFragProcessed',
// fired when a set of VTTCues to be managed externally has been parsed - data: { type: string, track: string, cues: [ VTTCue ] }
CUES_PARSED = 'hlsCuesParsed',
// fired when a text track to be managed externally is found - data: { tracks: [ { label: string, kind: string, default: boolean } ] }
NON_NATIVE_TEXT_TRACKS_FOUND = 'hlsNonNativeTextTracksFound',
// fired when the first timestamp is found - data: { id : demuxer id, initPTS: initPTS, timescale: timescale, frag : fragment object }
INIT_PTS_FOUND = 'hlsInitPtsFound',
// fired when a fragment loading starts - data: { frag : fragment object }
FRAG_LOADING = 'hlsFragLoading',
// fired when a fragment loading is progressing - data: { frag : fragment object, { trequest, tfirst, loaded } }
// FRAG_LOAD_PROGRESS = 'hlsFragLoadProgress',
// Identifier for fragment load aborting for emergency switch down - data: { frag : fragment object }
FRAG_LOAD_EMERGENCY_ABORTED = 'hlsFragLoadEmergencyAborted',
// fired when a fragment loading is completed - data: { frag : fragment object, payload : fragment payload, stats : LoaderStats }
FRAG_LOADED = 'hlsFragLoaded',
// fired when a fragment has finished decrypting - data: { id : demuxer id, frag: fragment object, payload : fragment payload, stats : { tstart, tdecrypt } }
FRAG_DECRYPTED = 'hlsFragDecrypted',
// fired when Init Segment has been extracted from fragment - data: { id : demuxer id, frag: fragment object, moov : moov MP4 box, codecs : codecs found while parsing fragment }
FRAG_PARSING_INIT_SEGMENT = 'hlsFragParsingInitSegment',
// fired when parsing sei text is completed - data: { id : demuxer id, frag: fragment object, samples : [ sei samples pes ] }
FRAG_PARSING_USERDATA = 'hlsFragParsingUserdata',
// fired when parsing id3 is completed - data: { id : demuxer id, frag: fragment object, samples : [ id3 samples pes ] }
FRAG_PARSING_METADATA = 'hlsFragParsingMetadata',
// fired when data have been extracted from fragment - data: { id : demuxer id, frag: fragment object, data1 : moof MP4 box or TS fragments, data2 : mdat MP4 box or null}
// FRAG_PARSING_DATA = 'hlsFragParsingData',
// fired when fragment parsing is completed - data: { id : demuxer id, frag: fragment object }
FRAG_PARSED = 'hlsFragParsed',
// fired when fragment remuxed MP4 boxes have all been appended into SourceBuffer - data: { id : demuxer id, frag : fragment object, stats : LoaderStats }
FRAG_BUFFERED = 'hlsFragBuffered',
// fired when fragment matching with current media position is changing - data : { id : demuxer id, frag : fragment object }
FRAG_CHANGED = 'hlsFragChanged',
// Identifier for an FPS drop event - data: { currentDropped, currentDecoded, totalDroppedFrames }
FPS_DROP = 'hlsFpsDrop',
// triggered when FPS drop triggers auto level capping - data: { level, droppedLevel }
FPS_DROP_LEVEL_CAPPING = 'hlsFpsDropLevelCapping',
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover, other error-specific data }
ERROR = 'hlsError',
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example - data: { }
DESTROYING = 'hlsDestroying',
// fired when a decrypt key loading starts - data: { frag : fragment object }
KEY_LOADING = 'hlsKeyLoading',
// fired when a decrypt key loading is completed - data: { frag : fragment object, payload : key payload, stats : LoaderStats }
KEY_LOADED = 'hlsKeyLoaded',
// deprecated; please use BACK_BUFFER_REACHED - data : { bufferEnd: number }
LIVE_BACK_BUFFER_REACHED = 'hlsLiveBackBufferReached',
// fired when the back buffer is reached as defined by the backBufferLength config option - data : { bufferEnd: number }
BACK_BUFFER_REACHED = 'hlsBackBufferReached',
}
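// --- Illustrative usage (not part of the original source; a minimal sketch) ---
// Each enum member maps to the string that is actually dispatched at runtime,
// so the typed constant and the raw event name are interchangeable:
//
//   import Hls from 'hls.js';
//   const hls = new Hls();
//   hls.on(Hls.Events.MANIFEST_PARSED, (event, data) => {
//     // event === 'hlsManifestParsed'
//     console.log(`parsed ${data.levels.length} quality levels`);
//   });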
export interface HlsListeners {
[Events.MEDIA_ATTACHING]: (
event: Events.MEDIA_ATTACHING,
data: MediaAttachingData
) => void;
[Events.MEDIA_ATTACHED]: (
event: Events.MEDIA_ATTACHED,
data: MediaAttachedData
) => void;
[Events.MEDIA_DETACHING]: (event: Events.MEDIA_DETACHING) => void;
[Events.MEDIA_DETACHED]: (event: Events.MEDIA_DETACHED) => void;
[Events.BUFFER_RESET]: (event: Events.BUFFER_RESET) => void;
[Events.BUFFER_CODECS]: (
event: Events.BUFFER_CODECS,
data: BufferCodecsData
) => void;
[Events.BUFFER_CREATED]: (
event: Events.BUFFER_CREATED,
data: BufferCreatedData
) => void;
[Events.BUFFER_APPENDING]: (
event: Events.BUFFER_APPENDING,
data: BufferAppendingData
) => void;
[Events.BUFFER_APPENDED]: (
event: Events.BUFFER_APPENDED,
data: BufferAppendedData
) => void;
[Events.BUFFER_EOS]: (event: Events.BUFFER_EOS, data: BufferEOSData) => void;
[Events.BUFFER_FLUSHING]: (
event: Events.BUFFER_FLUSHING,
data: BufferFlushingData
) => void;
[Events.BUFFER_FLUSHED]: (
event: Events.BUFFER_FLUSHED,
data: BufferFlushedData
) => void;
[Events.MANIFEST_LOADING]: (
event: Events.MANIFEST_LOADING,
data: ManifestLoadingData
) => void;
[Events.MANIFEST_LOADED]: (
event: Events.MANIFEST_LOADED,
data: ManifestLoadedData
) => void;
[Events.MANIFEST_PARSED]: (
event: Events.MANIFEST_PARSED,
data: ManifestParsedData
) => void;
[Events.LEVEL_SWITCHING]: (
event: Events.LEVEL_SWITCHING,
data: LevelSwitchingData
) => void;
[Events.LEVEL_SWITCHED]: (
event: Events.LEVEL_SWITCHED,
data: LevelSwitchedData
) => void;
[Events.LEVEL_LOADING]: (
event: Events.LEVEL_LOADING,
data: LevelLoadingData
) => void;
[Events.LEVEL_LOADED]: (
event: Events.LEVEL_LOADED,
data: LevelLoadedData
) => void;
[Events.LEVEL_UPDATED]: (
event: Events.LEVEL_UPDATED,
data: LevelUpdatedData
) => void;
[Events.LEVEL_PTS_UPDATED]: (
event: Events.LEVEL_PTS_UPDATED,
data: LevelPTSUpdatedData
) => void;
[Events.LEVELS_UPDATED]: (
event: Events.LEVELS_UPDATED,
data: LevelsUpdatedData
) => void;
[Events.AUDIO_TRACKS_UPDATED]: (
event: Events.AUDIO_TRACKS_UPDATED,
data: AudioTracksUpdatedData
) => void;
[Events.AUDIO_TRACK_SWITCHING]: (
event: Events.AUDIO_TRACK_SWITCHING,
data: AudioTrackSwitchingData
) => void;
[Events.AUDIO_TRACK_SWITCHED]: (
event: Events.AUDIO_TRACK_SWITCHED,
data: AudioTrackSwitchedData
) => void;
[Events.AUDIO_TRACK_LOADING]: (
event: Events.AUDIO_TRACK_LOADING,
data: TrackLoadingData
) => void;
[Events.AUDIO_TRACK_LOADED]: (
event: Events.AUDIO_TRACK_LOADED,
data: AudioTrackLoadedData
) => void;
[Events.SUBTITLE_TRACKS_UPDATED]: (
event: Events.SUBTITLE_TRACKS_UPDATED,
data: SubtitleTracksUpdatedData
) => void;
[Events.SUBTITLE_TRACKS_CLEARED]: (
event: Events.SUBTITLE_TRACKS_CLEARED
) => void;
[Events.SUBTITLE_TRACK_SWITCH]: (
event: Events.SUBTITLE_TRACK_SWITCH,
data: SubtitleTrackSwitchData
) => void;
[Events.SUBTITLE_TRACK_LOADING]: (
event: Events.SUBTITLE_TRACK_LOADING,
data: TrackLoadingData
) => void;
[Events.SUBTITLE_TRACK_LOADED]: (
event: Events.SUBTITLE_TRACK_LOADED,
data: SubtitleTrackLoadedData
) => void;
[Events.SUBTITLE_FRAG_PROCESSED]: (
event: Events.SUBTITLE_FRAG_PROCESSED,
data: SubtitleFragProcessedData
) => void;
[Events.CUES_PARSED]: (
event: Events.CUES_PARSED,
data: CuesParsedData
) => void;
[Events.NON_NATIVE_TEXT_TRACKS_FOUND]: (
event: Events.NON_NATIVE_TEXT_TRACKS_FOUND,
data: NonNativeTextTracksData
) => void;
[Events.INIT_PTS_FOUND]: (
event: Events.INIT_PTS_FOUND,
data: InitPTSFoundData
) => void;
[Events.FRAG_LOADING]: (
event: Events.FRAG_LOADING,
data: FragLoadingData
) => void;
// [Events.FRAG_LOAD_PROGRESS]: TodoEventType
[Events.FRAG_LOAD_EMERGENCY_ABORTED]: (
event: Events.FRAG_LOAD_EMERGENCY_ABORTED,
data: FragLoadEmergencyAbortedData
) => void;
[Events.FRAG_LOADED]: (
event: Events.FRAG_LOADED,
data: FragLoadedData
) => void;
[Events.FRAG_DECRYPTED]: (
event: Events.FRAG_DECRYPTED,
data: FragDecryptedData
) => void;
[Events.FRAG_PARSING_INIT_SEGMENT]: (
event: Events.FRAG_PARSING_INIT_SEGMENT,
data: FragParsingInitSegmentData
) => void;
[Events.FRAG_PARSING_USERDATA]: (
event: Events.FRAG_PARSING_USERDATA,
data: FragParsingUserdataData
) => void;
[Events.FRAG_PARSING_METADATA]: (
event: Events.FRAG_PARSING_METADATA,
data: FragParsingMetadataData
) => void;
// [Events.FRAG_PARSING_DATA]: TodoEventType
[Events.FRAG_PARSED]: (
event: Events.FRAG_PARSED,
data: FragParsedData
) => void;
[Events.FRAG_BUFFERED]: (
event: Events.FRAG_BUFFERED,
data: FragBufferedData
) => void;
[Events.FRAG_CHANGED]: (
event: Events.FRAG_CHANGED,
data: FragChangedData
) => void;
[Events.FPS_DROP]: (event: Events.FPS_DROP, data: FPSDropData) => void;
[Events.FPS_DROP_LEVEL_CAPPING]: (
event: Events.FPS_DROP_LEVEL_CAPPING,
data: FPSDropLevelCappingData
) => void;
[Events.ERROR]: (event: Events.ERROR, data: ErrorData) => void;
[Events.DESTROYING]: (event: Events.DESTROYING) => void;
[Events.KEY_LOADING]: (
event: Events.KEY_LOADING,
data: KeyLoadingData
) => void;
[Events.KEY_LOADED]: (event: Events.KEY_LOADED, data: KeyLoadedData) => void;
[Events.LIVE_BACK_BUFFER_REACHED]: (
event: Events.LIVE_BACK_BUFFER_REACHED,
data: LiveBackBufferData
) => void;
[Events.BACK_BUFFER_REACHED]: (
event: Events.BACK_BUFFER_REACHED,
data: BackBufferData
) => void;
}
export interface HlsEventEmitter {
on<E extends keyof HlsListeners, Context = undefined>(
event: E,
listener: HlsListeners[E],
context?: Context
): void;
once<E extends keyof HlsListeners, Context = undefined>(
event: E,
listener: HlsListeners[E],
context?: Context
): void;
removeAllListeners<E extends keyof HlsListeners>(event?: E): void;
off<E extends keyof HlsListeners, Context = undefined>(
event: E,
listener?: HlsListeners[E],
context?: Context,
once?: boolean
): void;
listeners<E extends keyof HlsListeners>(event: E): HlsListeners[E][];
emit<E extends keyof HlsListeners>(
event: E,
name: E,
eventObject: Parameters<HlsListeners[E]>[1]
): boolean;
listenerCount<E extends keyof HlsListeners>(event: E): number;
}
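// --- Illustrative note (not part of the original source) ---
// Because HlsListeners keys each callback type by event name, TypeScript can
// infer the data type from the event constant alone:
//
//   hls.on(Hls.Events.ERROR, (event, data) => {
//     // data is narrowed to ErrorData here
//     if (data.fatal) hls.destroy();
//   });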

957
node_modules/hls.js/src/hls.ts generated vendored Normal file

@@ -0,0 +1,957 @@
import * as URLToolkit from 'url-toolkit';
import PlaylistLoader from './loader/playlist-loader';
import KeyLoader from './loader/key-loader';
import ID3TrackController from './controller/id3-track-controller';
import LatencyController from './controller/latency-controller';
import LevelController from './controller/level-controller';
import { FragmentTracker } from './controller/fragment-tracker';
import StreamController from './controller/stream-controller';
import { isSupported } from './is-supported';
import { logger, enableLogs } from './utils/logger';
import { enableStreamingMode, hlsDefaultConfig, mergeConfig } from './config';
import { EventEmitter } from 'eventemitter3';
import { Events } from './events';
import { ErrorTypes, ErrorDetails } from './errors';
import type { HlsEventEmitter, HlsListeners } from './events';
import type AudioTrackController from './controller/audio-track-controller';
import type AbrController from './controller/abr-controller';
import type BufferController from './controller/buffer-controller';
import type CapLevelController from './controller/cap-level-controller';
import type CMCDController from './controller/cmcd-controller';
import type EMEController from './controller/eme-controller';
import type SubtitleTrackController from './controller/subtitle-track-controller';
import type { ComponentAPI, NetworkComponentAPI } from './types/component-api';
import type { MediaPlaylist } from './types/media-playlist';
import type { HlsConfig } from './config';
import type { Level } from './types/level';
import type { Fragment } from './loader/fragment';
/**
* @module Hls
* @class
* @constructor
*/
export default class Hls implements HlsEventEmitter {
private static defaultConfig?: HlsConfig;
public readonly config: HlsConfig;
public readonly userConfig: Partial<HlsConfig>;
private coreComponents: ComponentAPI[];
private networkControllers: NetworkComponentAPI[];
private _emitter: HlsEventEmitter = new EventEmitter();
private _autoLevelCapping: number;
private abrController: AbrController;
private bufferController: BufferController;
private capLevelController: CapLevelController;
private latencyController: LatencyController;
private levelController: LevelController;
private streamController: StreamController;
private audioTrackController: AudioTrackController;
private subtitleTrackController: SubtitleTrackController;
private emeController: EMEController;
private cmcdController: CMCDController;
private _media: HTMLMediaElement | null = null;
private url: string | null = null;
static get version(): string {
return __VERSION__;
}
static isSupported(): boolean {
return isSupported();
}
static get Events() {
return Events;
}
static get ErrorTypes() {
return ErrorTypes;
}
static get ErrorDetails() {
return ErrorDetails;
}
static get DefaultConfig(): HlsConfig {
if (!Hls.defaultConfig) {
return hlsDefaultConfig;
}
return Hls.defaultConfig;
}
/**
* @type {HlsConfig}
*/
static set DefaultConfig(defaultConfig: HlsConfig) {
Hls.defaultConfig = defaultConfig;
}
/**
* Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`.
*
* @constructs Hls
* @param {HlsConfig} config
*/
constructor(userConfig: Partial<HlsConfig> = {}) {
const config = (this.config = mergeConfig(Hls.DefaultConfig, userConfig));
this.userConfig = userConfig;
enableLogs(config.debug);
this._autoLevelCapping = -1;
if (config.progressive) {
enableStreamingMode(config);
}
// core controllers and network loaders
const {
abrController: ConfigAbrController,
bufferController: ConfigBufferController,
capLevelController: ConfigCapLevelController,
fpsController: ConfigFpsController,
} = config;
const abrController = (this.abrController = new ConfigAbrController(this));
const bufferController = (this.bufferController =
new ConfigBufferController(this));
const capLevelController = (this.capLevelController =
new ConfigCapLevelController(this));
const fpsController = new ConfigFpsController(this);
const playListLoader = new PlaylistLoader(this);
const keyLoader = new KeyLoader(this);
const id3TrackController = new ID3TrackController(this);
// network controllers
const levelController = (this.levelController = new LevelController(this));
// FragmentTracker must be defined before StreamController because the order of event handling is important
const fragmentTracker = new FragmentTracker(this);
const streamController = (this.streamController = new StreamController(
this,
fragmentTracker
));
// Cap level controller uses streamController to flush the buffer
capLevelController.setStreamController(streamController);
// fpsController uses streamController to switch when frames are being dropped
fpsController.setStreamController(streamController);
const networkControllers = [levelController, streamController];
this.networkControllers = networkControllers;
const coreComponents = [
playListLoader,
keyLoader,
abrController,
bufferController,
capLevelController,
fpsController,
id3TrackController,
fragmentTracker,
];
this.audioTrackController = this.createController(
config.audioTrackController,
null,
networkControllers
);
this.createController(
config.audioStreamController,
fragmentTracker,
networkControllers
);
// subtitleTrackController must be defined before the subtitle stream controller because the order of event handling is important
this.subtitleTrackController = this.createController(
config.subtitleTrackController,
null,
networkControllers
);
this.createController(
config.subtitleStreamController,
fragmentTracker,
networkControllers
);
this.createController(config.timelineController, null, coreComponents);
this.emeController = this.createController(
config.emeController,
null,
coreComponents
);
this.cmcdController = this.createController(
config.cmcdController,
null,
coreComponents
);
this.latencyController = this.createController(
LatencyController,
null,
coreComponents
);
this.coreComponents = coreComponents;
}
createController(ControllerClass, fragmentTracker, components) {
if (ControllerClass) {
const controllerInstance = fragmentTracker
? new ControllerClass(this, fragmentTracker)
: new ControllerClass(this);
if (components) {
components.push(controllerInstance);
}
return controllerInstance;
}
return null;
}
// Delegate the EventEmitter through the public API of Hls.js
on<E extends keyof HlsListeners, Context = undefined>(
event: E,
listener: HlsListeners[E],
context: Context = this as any
) {
this._emitter.on(event, listener, context);
}
once<E extends keyof HlsListeners, Context = undefined>(
event: E,
listener: HlsListeners[E],
context: Context = this as any
) {
this._emitter.once(event, listener, context);
}
removeAllListeners<E extends keyof HlsListeners>(event?: E | undefined) {
this._emitter.removeAllListeners(event);
}
off<E extends keyof HlsListeners, Context = undefined>(
event: E,
listener?: HlsListeners[E] | undefined,
context: Context = this as any,
once?: boolean | undefined
) {
this._emitter.off(event, listener, context, once);
}
listeners<E extends keyof HlsListeners>(event: E): HlsListeners[E][] {
return this._emitter.listeners(event);
}
emit<E extends keyof HlsListeners>(
event: E,
name: E,
eventObject: Parameters<HlsListeners[E]>[1]
): boolean {
return this._emitter.emit(event, name, eventObject);
}
trigger<E extends keyof HlsListeners>(
event: E,
eventObject: Parameters<HlsListeners[E]>[1]
): boolean {
if (this.config.debug) {
return this.emit(event, event, eventObject);
} else {
try {
return this.emit(event, event, eventObject);
} catch (e) {
logger.error(
'An internal error happened while handling event ' +
event +
'. Error message: "' +
e.message +
'". Here is a stacktrace:',
e
);
this.trigger(Events.ERROR, {
type: ErrorTypes.OTHER_ERROR,
details: ErrorDetails.INTERNAL_EXCEPTION,
fatal: false,
event: event,
error: e,
});
}
}
return false;
}
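// Illustrative note (not part of the original source): trigger() forwards to
// emit(event, event, data), passing the event name twice so that listeners
// registered through the typed API receive (eventName, data). A hypothetical call:
//
//   hls.trigger(Events.LEVEL_SWITCHED, { level: 2 });
//   // listeners added via hls.on(Events.LEVEL_SWITCHED, ...) receive
//   // ('hlsLevelSwitched', { level: 2 })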
listenerCount<E extends keyof HlsListeners>(event: E): number {
return this._emitter.listenerCount(event);
}
/**
* Dispose of the instance
*/
destroy() {
logger.log('destroy');
this.trigger(Events.DESTROYING, undefined);
this.detachMedia();
this.removeAllListeners();
this._autoLevelCapping = -1;
this.url = null;
this.networkControllers.forEach((component) => component.destroy());
this.networkControllers.length = 0;
this.coreComponents.forEach((component) => component.destroy());
this.coreComponents.length = 0;
}
/**
* Attaches Hls.js to a media element
* @param {HTMLMediaElement} media
*/
attachMedia(media: HTMLMediaElement) {
logger.log('attachMedia');
this._media = media;
this.trigger(Events.MEDIA_ATTACHING, { media: media });
}
/**
* Detach Hls.js from the media
*/
detachMedia() {
logger.log('detachMedia');
this.trigger(Events.MEDIA_DETACHING, undefined);
this._media = null;
}
/**
* Set the source URL. Can be relative or absolute.
* @param {string} url
*/
loadSource(url: string) {
this.stopLoad();
const media = this.media;
const loadedSource = this.url;
const loadingSource = (this.url = URLToolkit.buildAbsoluteURL(
self.location.href,
url,
{
alwaysNormalize: true,
}
));
logger.log(`loadSource:${loadingSource}`);
if (
media &&
loadedSource &&
loadedSource !== loadingSource &&
this.bufferController.hasSourceTypes()
) {
this.detachMedia();
this.attachMedia(media);
}
// when attaching to a source URL, trigger a playlist load
this.trigger(Events.MANIFEST_LOADING, { url: url });
}
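// --- Illustrative usage (not part of the original source; a minimal sketch) ---
// Typical startup order: attach the media element first, then set the source.
// The manifest URL below is hypothetical:
//
//   const video = document.querySelector('video') as HTMLVideoElement;
//   const hls = new Hls();
//   hls.attachMedia(video);
//   hls.loadSource('https://example.com/stream.m3u8');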
/**
* Start loading data from the stream source.
* Depending on the default config, the client starts loading automatically when a source is set.
*
* @param {number} startPosition Set the start position to stream from
* @default -1 None (from earliest point)
*/
startLoad(startPosition: number = -1) {
logger.log(`startLoad(${startPosition})`);
this.networkControllers.forEach((controller) => {
controller.startLoad(startPosition);
});
}
/**
* Stop loading of any stream data.
*/
stopLoad() {
logger.log('stopLoad');
this.networkControllers.forEach((controller) => {
controller.stopLoad();
});
}
/**
* Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
*/
swapAudioCodec() {
logger.log('swapAudioCodec');
this.streamController.swapAudioCodec();
}
/**
* When the media element fails, this allows detaching and then re-attaching it
* in one call (convenience method).
*
* Automatic recovery of media-errors by this process is configurable.
*/
recoverMediaError() {
logger.log('recoverMediaError');
const media = this._media;
this.detachMedia();
if (media) {
this.attachMedia(media);
}
}
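// --- Illustrative usage (not part of the original source; a minimal sketch) ---
// A common pattern is to call recoverMediaError() from a fatal-error handler:
//
//   hls.on(Hls.Events.ERROR, (event, data) => {
//     if (data.fatal && data.type === Hls.ErrorTypes.MEDIA_ERROR) {
//       hls.recoverMediaError(); // detach + re-attach in one call
//     }
//   });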
removeLevel(levelIndex, urlId = 0) {
this.levelController.removeLevel(levelIndex, urlId);
}
/**
* @type {Level[]}
*/
get levels(): Array<Level> {
const levels = this.levelController.levels;
return levels ? levels : [];
}
/**
* Index of quality level currently played
* @type {number}
*/
get currentLevel(): number {
return this.streamController.currentLevel;
}
/**
* Set quality level index immediately.
* This will flush the current buffer to replace the quality asap.
* That means playback will be interrupted at least briefly while it re-buffers and re-syncs.
* @type {number} -1 for automatic level selection
*/
set currentLevel(newLevel: number) {
logger.log(`set currentLevel:${newLevel}`);
this.loadLevel = newLevel;
this.abrController.clearTimer();
this.streamController.immediateLevelSwitch();
}
/**
* Index of next quality level loaded as scheduled by stream controller.
* @type {number}
*/
get nextLevel(): number {
return this.streamController.nextLevel;
}
/**
* Set quality level index for next loaded data.
* This will switch the video quality asap, without interrupting playback.
* May abort current loading of data, and flush parts of buffer (outside currently played fragment region).
* @type {number} -1 for automatic level selection
*/
set nextLevel(newLevel: number) {
logger.log(`set nextLevel:${newLevel}`);
this.levelController.manualLevel = newLevel;
this.streamController.nextLevelSwitch();
}
/**
* Return the quality level of the currently loading segment, or of the last loaded segment if none is loading
* @type {number}
*/
get loadLevel(): number {
return this.levelController.level;
}
/**
* Set quality level index for next loaded data in a conservative way.
* This will switch the quality without flushing, but will interrupt current loading.
* Thus the quality switch only takes effect after the already buffered data has played out.
* @type {number} newLevel -1 for automatic level selection
*/
set loadLevel(newLevel: number) {
logger.log(`set loadLevel:${newLevel}`);
this.levelController.manualLevel = newLevel;
}
/**
* get next quality level loaded
* @type {number}
*/
get nextLoadLevel(): number {
return this.levelController.nextLoadLevel;
}
/**
* Set quality level of next loaded segment in a fully "non-destructive" way.
* Same as `loadLevel` but will wait for next switch (until current loading is done).
* @type {number} level
*/
set nextLoadLevel(level: number) {
this.levelController.nextLoadLevel = level;
}
/**
* Return "first level": like a default level, if not set,
* falls back to index of first level referenced in manifest
* @type {number}
*/
get firstLevel(): number {
return Math.max(this.levelController.firstLevel, this.minAutoLevel);
}
/**
* Sets "first-level", see getter.
* @type {number}
*/
set firstLevel(newLevel: number) {
logger.log(`set firstLevel:${newLevel}`);
this.levelController.firstLevel = newLevel;
}
/**
* Return start level (level of first fragment that will be played back)
* if not overridden by the user, the first level appearing in the manifest will be used as the start level
* if -1 : automatic start level selection, playback will start from level matching download bandwidth
* (determined from download of first segment)
* @type {number}
*/
get startLevel(): number {
return this.levelController.startLevel;
}
/**
* set start level (level of first fragment that will be played back)
* if not overridden by the user, the first level appearing in the manifest will be used as the start level
* if -1 : automatic start level selection, playback will start from level matching download bandwidth
* (determined from download of first segment)
* @type {number} newLevel
*/
set startLevel(newLevel: number) {
logger.log(`set startLevel:${newLevel}`);
// if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
if (newLevel !== -1) {
newLevel = Math.max(newLevel, this.minAutoLevel);
}
this.levelController.startLevel = newLevel;
}
/**
* Get the current setting for capLevelToPlayerSize
*
* @type {boolean}
*/
get capLevelToPlayerSize(): boolean {
return this.config.capLevelToPlayerSize;
}
/**
* Dynamically enable/disable capLevelToPlayerSize (handled by `CapLevelController`)
*
* @type {boolean}
*/
set capLevelToPlayerSize(shouldStartCapping: boolean) {
const newCapLevelToPlayerSize = !!shouldStartCapping;
if (newCapLevelToPlayerSize !== this.config.capLevelToPlayerSize) {
if (newCapLevelToPlayerSize) {
this.capLevelController.startCapping(); // If capping occurs, nextLevelSwitch will happen based on size.
} else {
this.capLevelController.stopCapping();
this.autoLevelCapping = -1;
this.streamController.nextLevelSwitch(); // Now we're uncapped, get the next level asap.
}
this.config.capLevelToPlayerSize = newCapLevelToPlayerSize;
}
}
/**
* Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
* @type {number}
*/
get autoLevelCapping(): number {
return this._autoLevelCapping;
}
/**
* get bandwidth estimate
* @type {number}
*/
get bandwidthEstimate(): number {
const { bwEstimator } = this.abrController;
if (!bwEstimator) {
return NaN;
}
return bwEstimator.getEstimate();
}
/**
* Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
* @type {number}
*/
set autoLevelCapping(newLevel: number) {
if (this._autoLevelCapping !== newLevel) {
logger.log(`set autoLevelCapping:${newLevel}`);
this._autoLevelCapping = newLevel;
}
}
/**
* True when automatic level selection enabled
* @type {boolean}
*/
get autoLevelEnabled(): boolean {
return this.levelController.manualLevel === -1;
}
/**
* Level set manually (if any)
* @type {number}
*/
get manualLevel(): number {
return this.levelController.manualLevel;
}
/**
* min level selectable in auto mode according to config.minAutoBitrate
* @type {number}
*/
get minAutoLevel(): number {
const {
levels,
config: { minAutoBitrate },
} = this;
if (!levels) return 0;
const len = levels.length;
for (let i = 0; i < len; i++) {
if (levels[i].maxBitrate > minAutoBitrate) {
return i;
}
}
return 0;
}
/**
* max level selectable in auto mode according to autoLevelCapping
* @type {number}
*/
get maxAutoLevel(): number {
const { levels, autoLevelCapping } = this;
let maxAutoLevel;
if (autoLevelCapping === -1 && levels && levels.length) {
maxAutoLevel = levels.length - 1;
} else {
maxAutoLevel = autoLevelCapping;
}
return maxAutoLevel;
}
/**
* next automatically selected quality level
* @type {number}
*/
get nextAutoLevel(): number {
// ensure next auto level is between min and max auto level
return Math.min(
Math.max(this.abrController.nextAutoLevel, this.minAutoLevel),
this.maxAutoLevel
);
}
/**
* This setter is used to force the next auto level.
* It is useful to force a switch down in auto mode:
* in case of a load error on level N, hls.js can set nextAutoLevel to N-1, for example.
* The forced value is valid for one fragment; upon successful fragment loading at the forced level,
* this value will be reset to -1 by the ABR controller.
* @type {number}
*/
set nextAutoLevel(nextLevel: number) {
this.abrController.nextAutoLevel = Math.max(this.minAutoLevel, nextLevel);
}
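// Illustrative usage (not part of the original source): a hypothetical
// switch-down after a non-fatal fragment load error on the current level:
//
//   hls.on(Hls.Events.ERROR, (event, data) => {
//     if (data.details === Hls.ErrorDetails.FRAG_LOAD_ERROR && !data.fatal) {
//       hls.nextAutoLevel = Math.max(hls.loadLevel - 1, 0);
//     }
//   });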
/**
* @type {AudioTrack[]}
*/
get audioTracks(): Array<MediaPlaylist> {
const audioTrackController = this.audioTrackController;
return audioTrackController ? audioTrackController.audioTracks : [];
}
/**
* index of the selected audio track (index in audio track lists)
* @type {number}
*/
get audioTrack(): number {
const audioTrackController = this.audioTrackController;
return audioTrackController ? audioTrackController.audioTrack : -1;
}
/**
* selects an audio track, based on its index in audio track lists
* @type {number}
*/
set audioTrack(audioTrackId: number) {
const audioTrackController = this.audioTrackController;
if (audioTrackController) {
audioTrackController.audioTrack = audioTrackId;
}
}
/**
* get alternate subtitle tracks list from playlist
* @type {MediaPlaylist[]}
*/
get subtitleTracks(): Array<MediaPlaylist> {
const subtitleTrackController = this.subtitleTrackController;
return subtitleTrackController
? subtitleTrackController.subtitleTracks
: [];
}
/**
* index of the selected subtitle track (index in subtitle track lists)
* @type {number}
*/
get subtitleTrack(): number {
const subtitleTrackController = this.subtitleTrackController;
return subtitleTrackController ? subtitleTrackController.subtitleTrack : -1;
}
get media() {
return this._media;
}
/**
* select a subtitle track, based on its index in the subtitle track list
* @type {number}
*/
set subtitleTrack(subtitleTrackId: number) {
const subtitleTrackController = this.subtitleTrackController;
if (subtitleTrackController) {
subtitleTrackController.subtitleTrack = subtitleTrackId;
}
}
/**
* @type {boolean}
*/
get subtitleDisplay(): boolean {
const subtitleTrackController = this.subtitleTrackController;
return subtitleTrackController
? subtitleTrackController.subtitleDisplay
: false;
}
/**
* Enable/disable subtitle display rendering
* @type {boolean}
*/
set subtitleDisplay(value: boolean) {
const subtitleTrackController = this.subtitleTrackController;
if (subtitleTrackController) {
subtitleTrackController.subtitleDisplay = value;
}
}
/**
* get mode for Low-Latency HLS loading
* @type {boolean}
*/
get lowLatencyMode() {
return this.config.lowLatencyMode;
}
/**
* Enable/disable Low-Latency HLS part playlist and segment loading, and start live streams at playlist PART-HOLD-BACK rather than HOLD-BACK.
* @type {boolean}
*/
set lowLatencyMode(mode: boolean) {
this.config.lowLatencyMode = mode;
}
/**
* position (in seconds) of the live sync point (i.e. edge of the live position minus the safety delay defined by ```hls.config.liveSyncDuration```)
* @type {number}
*/
get liveSyncPosition(): number | null {
return this.latencyController.liveSyncPosition;
}
/**
* estimated position (in seconds) of the live edge (i.e. edge of the live playlist plus the time elapsed since it last advanced)
* returns 0 before first playlist is loaded
* @type {number}
*/
get latency() {
return this.latencyController.latency;
}
/**
* maximum distance from the edge before the player seeks forward to ```hls.liveSyncPosition```
* configured using ```liveMaxLatencyDurationCount``` (multiple of target duration) or ```liveMaxLatencyDuration```
* returns 0 before first playlist is loaded
* @type {number}
*/
get maxLatency(): number {
return this.latencyController.maxLatency;
}
/**
* target distance from the edge as calculated by the latency controller
* @type {number}
*/
get targetLatency(): number | null {
return this.latencyController.targetLatency;
}
/**
* the rate at which the edge of the current live playlist is advancing or 1 if there is none
* @type {number}
*/
get drift(): number | null {
return this.latencyController.drift;
}
/**
* set to true when startLoad is called before MANIFEST_PARSED event
* @type {boolean}
*/
get forceStartLoad(): boolean {
return this.streamController.forceStartLoad;
}
}
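// --- Illustrative usage (not part of the original source; a minimal sketch) ---
// The manifest URL is hypothetical:
//
//   import Hls from 'hls.js';
//
//   if (Hls.isSupported()) {
//     const hls = new Hls({ lowLatencyMode: false });
//     hls.attachMedia(document.querySelector('video') as HTMLVideoElement);
//     hls.loadSource('https://example.com/master.m3u8');
//     hls.on(Hls.Events.MANIFEST_PARSED, (event, data) =>
//       console.log(`ready with ${data.levels.length} levels`)
//     );
//   }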
export type {
MediaPlaylist,
ErrorDetails,
ErrorTypes,
Events,
Level,
HlsListeners,
HlsEventEmitter,
HlsConfig,
Fragment,
};
export type {
ABRControllerConfig,
BufferControllerConfig,
CapLevelControllerConfig,
CMCDControllerConfig,
EMEControllerConfig,
DRMSystemOptions,
FPSControllerConfig,
FragmentLoaderConfig,
FragmentLoaderConstructor,
LevelControllerConfig,
MP4RemuxerConfig,
PlaylistLoaderConfig,
PlaylistLoaderConstructor,
StreamControllerConfig,
LatencyControllerConfig,
TimelineControllerConfig,
TSDemuxerConfig,
} from './config';
export type { CuesInterface } from './utils/cues';
export type { MediaKeyFunc, KeySystems } from './utils/mediakeys-helper';
export type { LoadStats } from './loader/load-stats';
export type { LevelKey } from './loader/level-key';
export type { LevelDetails } from './loader/level-details';
export type { SourceBufferName } from './types/buffer';
export type { MetadataSample, UserdataSample } from './types/demuxer';
export type {
LevelParsed,
LevelAttributes,
HlsUrlParameters,
HlsSkip,
} from './types/level';
export type {
PlaylistLevelType,
HlsChunkPerformanceTiming,
HlsPerformanceTiming,
PlaylistContextType,
PlaylistLoaderContext,
FragmentLoaderContext,
Loader,
LoaderStats,
LoaderContext,
LoaderResponse,
LoaderConfiguration,
LoaderCallbacks,
LoaderOnProgress,
LoaderOnAbort,
LoaderOnError,
LoaderOnSuccess,
LoaderOnTimeout,
HlsProgressivePerformanceTiming,
} from './types/loader';
export type {
MediaPlaylistType,
MainPlaylistType,
AudioPlaylistType,
SubtitlePlaylistType,
} from './types/media-playlist';
export type { Track, TrackSet } from './types/track';
export type { ChunkMetadata } from './types/transmuxer';
export type {
BaseSegment,
Part,
ElementaryStreams,
ElementaryStreamTypes,
ElementaryStreamInfo,
} from './loader/fragment';
export type {
TrackLoadingData,
TrackLoadedData,
AudioTrackLoadedData,
AudioTracksUpdatedData,
AudioTrackSwitchedData,
AudioTrackSwitchingData,
BackBufferData,
BufferAppendedData,
BufferAppendingData,
BufferCodecsData,
BufferCreatedData,
BufferEOSData,
BufferFlushedData,
BufferFlushingData,
CuesParsedData,
ErrorData,
FPSDropData,
FPSDropLevelCappingData,
FragBufferedData,
FragChangedData,
FragDecryptedData,
FragLoadedData,
FragLoadEmergencyAbortedData,
FragLoadingData,
FragParsedData,
FragParsingInitSegmentData,
FragParsingMetadataData,
FragParsingUserdataData,
InitPTSFoundData,
KeyLoadedData,
KeyLoadingData,
LevelLoadedData,
LevelLoadingData,
LevelPTSUpdatedData,
LevelsUpdatedData,
LevelSwitchedData,
LevelSwitchingData,
LevelUpdatedData,
LiveBackBufferData,
ManifestLoadedData,
ManifestLoadingData,
ManifestParsedData,
MediaAttachedData,
MediaAttachingData,
NonNativeTextTrack,
NonNativeTextTracksData,
SubtitleFragProcessedData,
SubtitleTrackLoadedData,
SubtitleTracksUpdatedData,
SubtitleTrackSwitchData,
} from './types/events';
export type { AttrList } from './utils/attr-list';

35
node_modules/hls.js/src/is-supported.ts generated vendored Normal file

@@ -0,0 +1,35 @@
import { getMediaSource } from './utils/mediasource-helper';
import { ExtendedSourceBuffer } from './types/buffer';
function getSourceBuffer(): typeof self.SourceBuffer {
return self.SourceBuffer || (self as any).WebKitSourceBuffer;
}
export function isSupported(): boolean {
const mediaSource = getMediaSource();
if (!mediaSource) {
return false;
}
const sourceBuffer = getSourceBuffer();
const isTypeSupported =
mediaSource &&
typeof mediaSource.isTypeSupported === 'function' &&
mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"');
// if SourceBuffer is exposed ensure its API is valid
// Safari and older versions of Chrome do not expose SourceBuffer globally, so checking SourceBuffer.prototype is not always possible
const sourceBufferValidAPI =
!sourceBuffer ||
(sourceBuffer.prototype &&
typeof sourceBuffer.prototype.appendBuffer === 'function' &&
typeof sourceBuffer.prototype.remove === 'function');
return !!isTypeSupported && !!sourceBufferValidAPI;
}
export function changeTypeSupported(): boolean {
const sourceBuffer = getSourceBuffer();
return (
typeof (sourceBuffer?.prototype as ExtendedSourceBuffer)?.changeType ===
'function'
);
}
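// --- Illustrative usage (not part of the original source; a minimal sketch) ---
// Feature-detect MSE before constructing a player, falling back to native HLS
// playback (e.g. Safari). `video` and `manifestUrl` are assumed to exist:
//
//   if (isSupported()) {
//     // use hls.js (MSE path)
//   } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
//     video.src = manifestUrl; // native HLS
//   }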

318
node_modules/hls.js/src/loader/fragment-loader.ts generated vendored Normal file

@@ -0,0 +1,318 @@
import { ErrorTypes, ErrorDetails } from '../errors';
import { Fragment } from './fragment';
import {
Loader,
LoaderConfiguration,
FragmentLoaderContext,
} from '../types/loader';
import type { HlsConfig } from '../config';
import type { BaseSegment, Part } from './fragment';
import type { FragLoadedData } from '../types/events';
const MIN_CHUNK_SIZE = Math.pow(2, 17); // 128kb
export default class FragmentLoader {
private readonly config: HlsConfig;
private loader: Loader<FragmentLoaderContext> | null = null;
private partLoadTimeout: number = -1;
constructor(config: HlsConfig) {
this.config = config;
}
destroy() {
if (this.loader) {
this.loader.destroy();
this.loader = null;
}
}
abort() {
if (this.loader) {
// Abort the loader for current fragment. Only one may load at any given time
this.loader.abort();
}
}
load(
frag: Fragment,
onProgress?: FragmentLoadProgressCallback
): Promise<FragLoadedData> {
const url = frag.url;
if (!url) {
return Promise.reject(
new LoadError(
{
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.FRAG_LOAD_ERROR,
fatal: false,
frag,
networkDetails: null,
},
`Fragment does not have a ${url ? 'part list' : 'url'}`
)
);
}
this.abort();
const config = this.config;
const FragmentILoader = config.fLoader;
const DefaultILoader = config.loader;
return new Promise((resolve, reject) => {
if (this.loader) {
this.loader.destroy();
}
const loader =
(this.loader =
frag.loader =
FragmentILoader
? new FragmentILoader(config)
: (new DefaultILoader(config) as Loader<FragmentLoaderContext>));
const loaderContext = createLoaderContext(frag);
const loaderConfig: LoaderConfiguration = {
timeout: config.fragLoadingTimeOut,
maxRetry: 0,
retryDelay: 0,
maxRetryDelay: config.fragLoadingMaxRetryTimeout,
highWaterMark: MIN_CHUNK_SIZE,
};
// Assign frag stats to the loader's stats reference
frag.stats = loader.stats;
loader.load(loaderContext, loaderConfig, {
onSuccess: (response, stats, context, networkDetails) => {
this.resetLoader(frag, loader);
resolve({
frag,
part: null,
payload: response.data as ArrayBuffer,
networkDetails,
});
},
onError: (response, context, networkDetails) => {
this.resetLoader(frag, loader);
reject(
new LoadError({
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.FRAG_LOAD_ERROR,
fatal: false,
frag,
response,
networkDetails,
})
);
},
onAbort: (stats, context, networkDetails) => {
this.resetLoader(frag, loader);
reject(
new LoadError({
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.INTERNAL_ABORTED,
fatal: false,
frag,
networkDetails,
})
);
},
onTimeout: (response, context, networkDetails) => {
this.resetLoader(frag, loader);
reject(
new LoadError({
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.FRAG_LOAD_TIMEOUT,
fatal: false,
frag,
networkDetails,
})
);
},
onProgress: (stats, context, data, networkDetails) => {
if (onProgress) {
onProgress({
frag,
part: null,
payload: data as ArrayBuffer,
networkDetails,
});
}
},
});
});
}
public loadPart(
frag: Fragment,
part: Part,
onProgress: FragmentLoadProgressCallback
): Promise<FragLoadedData> {
this.abort();
const config = this.config;
const FragmentILoader = config.fLoader;
const DefaultILoader = config.loader;
return new Promise((resolve, reject) => {
if (this.loader) {
this.loader.destroy();
}
const loader =
(this.loader =
frag.loader =
FragmentILoader
? new FragmentILoader(config)
: (new DefaultILoader(config) as Loader<FragmentLoaderContext>));
const loaderContext = createLoaderContext(frag, part);
const loaderConfig: LoaderConfiguration = {
timeout: config.fragLoadingTimeOut,
maxRetry: 0,
retryDelay: 0,
maxRetryDelay: config.fragLoadingMaxRetryTimeout,
highWaterMark: MIN_CHUNK_SIZE,
};
// Assign part stats to the loader's stats reference
part.stats = loader.stats;
loader.load(loaderContext, loaderConfig, {
onSuccess: (response, stats, context, networkDetails) => {
this.resetLoader(frag, loader);
this.updateStatsFromPart(frag, part);
const partLoadedData: FragLoadedData = {
frag,
part,
payload: response.data as ArrayBuffer,
networkDetails,
};
onProgress(partLoadedData);
resolve(partLoadedData);
},
onError: (response, context, networkDetails) => {
this.resetLoader(frag, loader);
reject(
new LoadError({
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.FRAG_LOAD_ERROR,
fatal: false,
frag,
part,
response,
networkDetails,
})
);
},
onAbort: (stats, context, networkDetails) => {
frag.stats.aborted = part.stats.aborted;
this.resetLoader(frag, loader);
reject(
new LoadError({
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.INTERNAL_ABORTED,
fatal: false,
frag,
part,
networkDetails,
})
);
},
onTimeout: (response, context, networkDetails) => {
this.resetLoader(frag, loader);
reject(
new LoadError({
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.FRAG_LOAD_TIMEOUT,
fatal: false,
frag,
part,
networkDetails,
})
);
},
});
});
}
private updateStatsFromPart(frag: Fragment, part: Part) {
const fragStats = frag.stats;
const partStats = part.stats;
const partTotal = partStats.total;
fragStats.loaded += partStats.loaded;
if (partTotal) {
const estTotalParts = Math.round(frag.duration / part.duration);
const estLoadedParts = Math.min(
Math.round(fragStats.loaded / partTotal),
estTotalParts
);
const estRemainingParts = estTotalParts - estLoadedParts;
const estRemainingBytes =
estRemainingParts * Math.round(fragStats.loaded / estLoadedParts);
fragStats.total = fragStats.loaded + estRemainingBytes;
} else {
fragStats.total = Math.max(fragStats.loaded, fragStats.total);
}
const fragLoading = fragStats.loading;
const partLoading = partStats.loading;
if (fragLoading.start) {
// add to fragment loader latency
fragLoading.first += partLoading.first - partLoading.start;
} else {
fragLoading.start = partLoading.start;
fragLoading.first = partLoading.first;
}
fragLoading.end = partLoading.end;
}
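// Illustrative worked example (not part of the original source): with
// frag.duration = 4s and part.duration = 1s, estTotalParts = 4. If two parts
// of 250_000 bytes each have loaded (fragStats.loaded = 500_000 and
// partStats.total = 250_000), then estLoadedParts = 2, estRemainingParts = 2,
// estRemainingBytes = 2 * round(500_000 / 2) = 500_000, and fragStats.total
// is estimated at 1_000_000 bytes.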
private resetLoader(frag: Fragment, loader: Loader<FragmentLoaderContext>) {
frag.loader = null;
if (this.loader === loader) {
self.clearTimeout(this.partLoadTimeout);
this.loader = null;
}
loader.destroy();
}
}
function createLoaderContext(
frag: Fragment,
part: Part | null = null
): FragmentLoaderContext {
const segment: BaseSegment = part || frag;
const loaderContext: FragmentLoaderContext = {
frag,
part,
responseType: 'arraybuffer',
url: segment.url,
headers: {},
rangeStart: 0,
rangeEnd: 0,
};
const start = segment.byteRangeStartOffset;
const end = segment.byteRangeEndOffset;
if (Number.isFinite(start) && Number.isFinite(end)) {
loaderContext.rangeStart = start;
loaderContext.rangeEnd = end;
}
return loaderContext;
}
export class LoadError extends Error {
public readonly data: FragLoadFailResult;
constructor(data: FragLoadFailResult, ...params) {
super(...params);
this.data = data;
}
}
export interface FragLoadFailResult {
type: string;
details: string;
fatal: boolean;
frag: Fragment;
part?: Part;
response?: {
// error status code
code: number;
// error description
text: string;
};
networkDetails: any;
}
export type FragmentLoadProgressCallback = (result: FragLoadedData) => void;
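// --- Illustrative usage (not part of the original source; a minimal sketch) ---
// A hypothetical caller resolving a fragment to its payload; `hls`, `frag`
// and transmux() are assumed to exist:
//
//   const fragmentLoader = new FragmentLoader(hls.config);
//   fragmentLoader
//     .load(frag, (progress) => console.log(progress.payload.byteLength))
//     .then(({ payload }) => transmux(payload))
//     .catch((error: LoadError) => console.warn(error.data.details));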

336
node_modules/hls.js/src/loader/fragment.ts generated vendored Normal file

@@ -0,0 +1,336 @@
import { buildAbsoluteURL } from 'url-toolkit';
import { logger } from '../utils/logger';
import { LevelKey } from './level-key';
import { LoadStats } from './load-stats';
import { AttrList } from '../utils/attr-list';
import type {
FragmentLoaderContext,
Loader,
PlaylistLevelType,
} from '../types/loader';
export enum ElementaryStreamTypes {
AUDIO = 'audio',
VIDEO = 'video',
AUDIOVIDEO = 'audiovideo',
}
export interface ElementaryStreamInfo {
startPTS: number;
endPTS: number;
startDTS: number;
endDTS: number;
partial?: boolean;
}
export type ElementaryStreams = Record<
ElementaryStreamTypes,
ElementaryStreamInfo | null
>;
export class BaseSegment {
private _byteRange: number[] | null = null;
private _url: string | null = null;
// baseurl is the URL to the playlist
public readonly baseurl: string;
// relurl is the portion of the URL that comes from inside the playlist.
public relurl?: string;
// Holds the types of data this fragment supports
public elementaryStreams: ElementaryStreams = {
[ElementaryStreamTypes.AUDIO]: null,
[ElementaryStreamTypes.VIDEO]: null,
[ElementaryStreamTypes.AUDIOVIDEO]: null,
};
constructor(baseurl: string) {
this.baseurl = baseurl;
}
// setByteRange converts an EXT-X-BYTERANGE attribute into a two-element array
setByteRange(value: string, previous?: BaseSegment) {
const params = value.split('@', 2);
const byteRange: number[] = [];
if (params.length === 1) {
byteRange[0] = previous ? previous.byteRangeEndOffset : 0;
} else {
byteRange[0] = parseInt(params[1]);
}
byteRange[1] = parseInt(params[0]) + byteRange[0];
this._byteRange = byteRange;
}
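// Illustrative worked example (not part of the original source):
// setByteRange('500@1000') yields _byteRange = [1000, 1500], i.e.
// byteRangeStartOffset = 1000 and byteRangeEndOffset = 1000 + 500 = 1500.
// setByteRange('500', previous) instead starts where `previous` ended.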
get byteRange(): number[] {
if (!this._byteRange) {
return [];
}
return this._byteRange;
}
get byteRangeStartOffset(): number {
return this.byteRange[0];
}
get byteRangeEndOffset(): number {
return this.byteRange[1];
}
get url(): string {
if (!this._url && this.baseurl && this.relurl) {
this._url = buildAbsoluteURL(this.baseurl, this.relurl, {
alwaysNormalize: true,
});
}
return this._url || '';
}
set url(value: string) {
this._url = value;
}
}
export class Fragment extends BaseSegment {
private _decryptdata: LevelKey | null = null;
public rawProgramDateTime: string | null = null;
public programDateTime: number | null = null;
public tagList: Array<string[]> = [];
// EXTINF has to be present for an m3u8 to be considered valid
public duration: number = 0;
// sn denotes the sequence number for a segment; when set to a string it can be 'initSegment'
public sn: number | 'initSegment' = 0;
// levelkey is the EXT-X-KEY that applies to this segment for decryption
// core difference from the private field _decryptdata is the lack of the initialized IV
// _decryptdata will set the IV for this segment based on the segment number in the fragment
public levelkey?: LevelKey;
// A string representing the fragment type
public readonly type: PlaylistLevelType;
// A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading
public loader: Loader<FragmentLoaderContext> | null = null;
// The level/track index to which the fragment belongs
public level: number = -1;
// The continuity counter of the fragment
public cc: number = 0;
// The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
public startPTS?: number;
// The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
public endPTS?: number;
// The latest Presentation Time Stamp (PTS) appended to the buffer.
public appendedPTS?: number;
// The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
public startDTS!: number;
// The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
public endDTS!: number;
// The start time of the fragment, as listed in the manifest. Updated after transmux complete.
public start: number = 0;
// Set by `updateFragPTSDTS` in level-helper
public deltaPTS?: number;
// The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
public maxStartPTS?: number;
// The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
public minEndPTS?: number;
// Load/parse timing information
public stats: LoadStats = new LoadStats();
public urlId: number = 0;
public data?: Uint8Array;
// A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered
public bitrateTest: boolean = false;
// #EXTINF segment title
public title: string | null = null;
// The Media Initialization Section for this segment
public initSegment: Fragment | null = null;
constructor(type: PlaylistLevelType, baseurl: string) {
super(baseurl);
this.type = type;
}
get decryptdata(): LevelKey | null {
if (!this.levelkey && !this._decryptdata) {
return null;
}
if (!this._decryptdata && this.levelkey) {
let sn = this.sn;
if (typeof sn !== 'number') {
// We are fetching decryption data for an initialization segment.
// If the segment was encrypted with AES-128,
// it must have an IV defined: we cannot substitute the segment number in.
if (
this.levelkey &&
this.levelkey.method === 'AES-128' &&
!this.levelkey.iv
) {
logger.warn(
`missing IV for initialization segment with method="${this.levelkey.method}" - compliance issue`
);
}
/*
If 'initSegment' were converted to a Number it would become NaN, and NaN
passed through ToInt32() yields +0.
---
Explicitly set sn to the value those implicit conversions would produce for
'initSegment', so that IV generation for init segments is deterministic.
*/
sn = 0;
}
this._decryptdata = this.setDecryptDataFromLevelKey(this.levelkey, sn);
}
return this._decryptdata;
}
get end(): number {
return this.start + this.duration;
}
get endProgramDateTime() {
if (this.programDateTime === null) {
return null;
}
if (!Number.isFinite(this.programDateTime)) {
return null;
}
const duration = !Number.isFinite(this.duration) ? 0 : this.duration;
return this.programDateTime + duration * 1000;
}
get encrypted() {
// At the m3u8-parser level we need to add support for manifest signalled keyformats
// when we want the fragment to start reporting that it is encrypted.
// Currently, keyFormat will only be set for identity keys
if (this.decryptdata?.keyFormat && this.decryptdata.uri) {
return true;
}
return false;
}
/**
* Utility method for parseLevelPlaylist to create an initialization vector for a given segment
* @param {number} segmentNumber - segment number to generate IV with
* @returns {Uint8Array}
*/
createInitializationVector(segmentNumber: number): Uint8Array {
const uint8View = new Uint8Array(16);
for (let i = 12; i < 16; i++) {
uint8View[i] = (segmentNumber >> (8 * (15 - i))) & 0xff;
}
return uint8View;
}
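// Illustrative worked example (not part of the original source):
// createInitializationVector(5) returns 16 bytes that are all zero except
// the last four, which hold the segment number big-endian:
// [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x00, 0x00, 0x00, 0x05]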
/**
* Utility method for parseLevelPlaylist to get a fragment's decryption data from the currently parsed encryption key data
* @param levelkey - a playlist's encryption info
* @param segmentNumber - the fragment's segment number
* @returns {LevelKey} - an object to be applied as a fragment's decryptdata
*/
setDecryptDataFromLevelKey(
levelkey: LevelKey,
segmentNumber: number
): LevelKey {
let decryptdata = levelkey;
if (levelkey?.method === 'AES-128' && levelkey.uri && !levelkey.iv) {
decryptdata = LevelKey.fromURI(levelkey.uri);
decryptdata.method = levelkey.method;
decryptdata.iv = this.createInitializationVector(segmentNumber);
decryptdata.keyFormat = 'identity';
}
return decryptdata;
}
setElementaryStreamInfo(
type: ElementaryStreamTypes,
startPTS: number,
endPTS: number,
startDTS: number,
endDTS: number,
partial: boolean = false
) {
const { elementaryStreams } = this;
const info = elementaryStreams[type];
if (!info) {
elementaryStreams[type] = {
startPTS,
endPTS,
startDTS,
endDTS,
partial,
};
return;
}
info.startPTS = Math.min(info.startPTS, startPTS);
info.endPTS = Math.max(info.endPTS, endPTS);
info.startDTS = Math.min(info.startDTS, startDTS);
info.endDTS = Math.max(info.endDTS, endDTS);
}
clearElementaryStreamInfo() {
const { elementaryStreams } = this;
elementaryStreams[ElementaryStreamTypes.AUDIO] = null;
elementaryStreams[ElementaryStreamTypes.VIDEO] = null;
elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO] = null;
}
}
export class Part extends BaseSegment {
public readonly fragOffset: number = 0;
public readonly duration: number = 0;
public readonly gap: boolean = false;
public readonly independent: boolean = false;
public readonly relurl: string;
public readonly fragment: Fragment;
public readonly index: number;
public stats: LoadStats = new LoadStats();
constructor(
partAttrs: AttrList,
frag: Fragment,
baseurl: string,
index: number,
previous?: Part
) {
super(baseurl);
this.duration = partAttrs.decimalFloatingPoint('DURATION');
this.gap = partAttrs.bool('GAP');
this.independent = partAttrs.bool('INDEPENDENT');
this.relurl = partAttrs.enumeratedString('URI') as string;
this.fragment = frag;
this.index = index;
const byteRange = partAttrs.enumeratedString('BYTERANGE');
if (byteRange) {
this.setByteRange(byteRange, previous);
}
if (previous) {
this.fragOffset = previous.fragOffset + previous.duration;
}
}
get start(): number {
return this.fragment.start + this.fragOffset;
}
get end(): number {
return this.start + this.duration;
}
get loaded(): boolean {
const { elementaryStreams } = this;
return !!(
elementaryStreams.audio ||
elementaryStreams.video ||
elementaryStreams.audiovideo
);
}
}

168
node_modules/hls.js/src/loader/key-loader.ts generated vendored Normal file

@@ -0,0 +1,168 @@
/*
* Decrypt key Loader
*/
import { Events } from '../events';
import { ErrorTypes, ErrorDetails } from '../errors';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import { Fragment } from './fragment';
import {
LoaderStats,
LoaderResponse,
LoaderContext,
LoaderConfiguration,
LoaderCallbacks,
Loader,
FragmentLoaderContext,
} from '../types/loader';
import type { ComponentAPI } from '../types/component-api';
import type { KeyLoadingData } from '../types/events';
interface KeyLoaderContext extends LoaderContext {
frag: Fragment;
}
export default class KeyLoader implements ComponentAPI {
private hls: Hls;
public loaders = {};
public decryptkey: Uint8Array | null = null;
public decrypturl: string | null = null;
constructor(hls: Hls) {
this.hls = hls;
this._registerListeners();
}
private _registerListeners() {
this.hls.on(Events.KEY_LOADING, this.onKeyLoading, this);
}
private _unregisterListeners() {
this.hls.off(Events.KEY_LOADING, this.onKeyLoading);
}
destroy(): void {
this._unregisterListeners();
for (const loaderName in this.loaders) {
const loader = this.loaders[loaderName];
if (loader) {
loader.destroy();
}
}
this.loaders = {};
}
onKeyLoading(event: Events.KEY_LOADING, data: KeyLoadingData) {
const { frag } = data;
const type = frag.type;
const loader = this.loaders[type];
if (!frag.decryptdata) {
logger.warn('Missing decryption data on fragment in onKeyLoading');
return;
}
// Load the key if the uri is different from the previous one, or if the decrypt key has not yet been retrieved
const uri = frag.decryptdata.uri;
if (uri !== this.decrypturl || this.decryptkey === null) {
const config = this.hls.config;
if (loader) {
logger.warn(`abort previous key loader for type:${type}`);
loader.abort();
}
if (!uri) {
logger.warn('key uri is falsy');
return;
}
const Loader = config.loader;
const fragLoader =
(frag.loader =
this.loaders[type] =
new Loader(config) as Loader<FragmentLoaderContext>);
this.decrypturl = uri;
this.decryptkey = null;
const loaderContext: KeyLoaderContext = {
url: uri,
frag: frag,
responseType: 'arraybuffer',
};
// maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
// key-loader will trigger an error and rely on stream-controller to handle retry logic.
// this will also align retry logic with fragment-loader
const loaderConfig: LoaderConfiguration = {
timeout: config.fragLoadingTimeOut,
maxRetry: 0,
retryDelay: config.fragLoadingRetryDelay,
maxRetryDelay: config.fragLoadingMaxRetryTimeout,
highWaterMark: 0,
};
const loaderCallbacks: LoaderCallbacks<KeyLoaderContext> = {
onSuccess: this.loadsuccess.bind(this),
onError: this.loaderror.bind(this),
onTimeout: this.loadtimeout.bind(this),
};
fragLoader.load(loaderContext, loaderConfig, loaderCallbacks);
} else if (this.decryptkey) {
// Return the key if it's already been loaded
frag.decryptdata.key = this.decryptkey;
this.hls.trigger(Events.KEY_LOADED, { frag: frag });
}
}
loadsuccess(
response: LoaderResponse,
stats: LoaderStats,
context: KeyLoaderContext
) {
const frag = context.frag;
if (!frag.decryptdata) {
logger.error('after key load, decryptdata unset');
return;
}
this.decryptkey = frag.decryptdata.key = new Uint8Array(
response.data as ArrayBuffer
);
// detach fragment loader on load success
frag.loader = null;
delete this.loaders[frag.type];
this.hls.trigger(Events.KEY_LOADED, { frag: frag });
}
loaderror(response: LoaderResponse, context: KeyLoaderContext) {
const frag = context.frag;
const loader = frag.loader;
if (loader) {
loader.abort();
}
delete this.loaders[frag.type];
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.KEY_LOAD_ERROR,
fatal: false,
frag,
response,
});
}
loadtimeout(stats: LoaderStats, context: KeyLoaderContext) {
const frag = context.frag;
const loader = frag.loader;
if (loader) {
loader.abort();
}
delete this.loaders[frag.type];
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.KEY_LOAD_TIMEOUT,
fatal: false,
frag,
});
}
}
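The net effect is that a decryption key is fetched over the network at most once per URI and then reused from memory. A minimal sketch of observing this from the public hls.js API (the manifest URL and the video selector are hypothetical):

import Hls from 'hls.js';

const hls = new Hls();
hls.loadSource('https://example.com/encrypted/playlist.m3u8'); // hypothetical URL
hls.attachMedia(document.querySelector('video') as HTMLVideoElement);

// KEY_LOADED fires once loadsuccess() above has stored the key on frag.decryptdata,
// and again from cache for later fragments sharing the same key URI.
hls.on(Hls.Events.KEY_LOADED, (event, data) => {
  console.log('key loaded for fragment sn:', data.frag.sn);
});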

141
node_modules/hls.js/src/loader/level-details.ts generated vendored Normal file
View file

@ -0,0 +1,141 @@
import { Part } from './fragment';
import type { Fragment } from './fragment';
import type { AttrList } from '../utils/attr-list';
const DEFAULT_TARGET_DURATION = 10;
export class LevelDetails {
public PTSKnown: boolean = false;
public alignedSliding: boolean = false;
public averagetargetduration?: number;
public endCC: number = 0;
public endSN: number = 0;
public fragments: Fragment[];
public fragmentHint?: Fragment;
public partList: Part[] | null = null;
public live: boolean = true;
public ageHeader: number = 0;
public advancedDateTime?: number;
public updated: boolean = true;
public advanced: boolean = true;
public availabilityDelay?: number; // Manifest reload synchronization
public misses: number = 0;
public needSidxRanges: boolean = false;
public startCC: number = 0;
public startSN: number = 0;
public startTimeOffset: number | null = null;
public targetduration: number = 0;
public totalduration: number = 0;
public type: string | null = null;
public url: string;
public m3u8: string = '';
public version: number | null = null;
public canBlockReload: boolean = false;
public canSkipUntil: number = 0;
public canSkipDateRanges: boolean = false;
public skippedSegments: number = 0;
public recentlyRemovedDateranges?: string[];
public partHoldBack: number = 0;
public holdBack: number = 0;
public partTarget: number = 0;
public preloadHint?: AttrList;
public renditionReports?: AttrList[];
public tuneInGoal: number = 0;
public deltaUpdateFailed?: boolean;
public driftStartTime: number = 0;
public driftEndTime: number = 0;
public driftStart: number = 0;
public driftEnd: number = 0;
constructor(baseUrl) {
this.fragments = [];
this.url = baseUrl;
}
reloaded(previous: LevelDetails | undefined) {
if (!previous) {
this.advanced = true;
this.updated = true;
return;
}
const partSnDiff = this.lastPartSn - previous.lastPartSn;
const partIndexDiff = this.lastPartIndex - previous.lastPartIndex;
this.updated =
this.endSN !== previous.endSN || !!partIndexDiff || !!partSnDiff;
this.advanced =
this.endSN > previous.endSN ||
partSnDiff > 0 ||
(partSnDiff === 0 && partIndexDiff > 0);
if (this.updated || this.advanced) {
this.misses = Math.floor(previous.misses * 0.6);
} else {
this.misses = previous.misses + 1;
}
this.availabilityDelay = previous.availabilityDelay;
}
get hasProgramDateTime(): boolean {
if (this.fragments.length) {
return Number.isFinite(
this.fragments[this.fragments.length - 1].programDateTime as number
);
}
return false;
}
get levelTargetDuration(): number {
return (
this.averagetargetduration ||
this.targetduration ||
DEFAULT_TARGET_DURATION
);
}
get drift(): number {
const runTime = this.driftEndTime - this.driftStartTime;
if (runTime > 0) {
const runDuration = this.driftEnd - this.driftStart;
return (runDuration * 1000) / runTime;
}
return 1;
}
get edge(): number {
return this.partEnd || this.fragmentEnd;
}
get partEnd(): number {
if (this.partList?.length) {
return this.partList[this.partList.length - 1].end;
}
return this.fragmentEnd;
}
get fragmentEnd(): number {
if (this.fragments?.length) {
return this.fragments[this.fragments.length - 1].end;
}
return 0;
}
get age(): number {
if (this.advancedDateTime) {
return Math.max(Date.now() - this.advancedDateTime, 0) / 1000;
}
return 0;
}
get lastPartIndex(): number {
if (this.partList?.length) {
return this.partList[this.partList.length - 1].index;
}
return -1;
}
get lastPartSn(): number {
if (this.partList?.length) {
return this.partList[this.partList.length - 1].fragment.sn as number;
}
return this.endSN;
}
}
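The drift getter above expresses how fast the live playlist edge advances relative to wall-clock time: a value above 1 means the playlist gains media time faster than real time elapses. A standalone sketch of the same computation with assumed sample values:

// Assumed sample values; mirrors the drift getter above.
const driftStartTime = 1_000;  // wall-clock ms when drift tracking started
const driftEndTime = 11_000;   // wall-clock ms at the latest playlist reload
const driftStart = 100.0;      // playlist end position (s) when tracking started
const driftEnd = 110.5;        // playlist end position (s) at the latest reload

const runTime = driftEndTime - driftStartTime;            // 10000 ms of wall-clock time
const drift = ((driftEnd - driftStart) * 1000) / runTime; // 1.05: edge advances 5% faster than real time
console.log(drift);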

33
node_modules/hls.js/src/loader/level-key.ts generated vendored Normal file
View file

@ -0,0 +1,33 @@
import { buildAbsoluteURL } from 'url-toolkit';
export class LevelKey {
private _uri: string | null = null;
public method: string | null = null;
public keyFormat: string | null = null;
public keyFormatVersions: string | null = null;
public keyID: string | null = null;
public key: Uint8Array | null = null;
public iv: Uint8Array | null = null;
static fromURL(baseUrl: string, relativeUrl: string): LevelKey {
return new LevelKey(baseUrl, relativeUrl);
}
static fromURI(uri: string): LevelKey {
return new LevelKey(uri);
}
private constructor(absoluteOrBaseURI: string, relativeURL?: string) {
if (relativeURL) {
this._uri = buildAbsoluteURL(absoluteOrBaseURI, relativeURL, {
alwaysNormalize: true,
});
} else {
this._uri = absoluteOrBaseURI;
}
}
get uri() {
return this._uri;
}
}
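LevelKey.fromURL resolves the (usually relative) key URI from an #EXT-X-KEY tag against the playlist URL. A minimal sketch, assuming the module path from this diff and hypothetical URLs:

import { LevelKey } from './loader/level-key'; // path assumed from this diff

const key = LevelKey.fromURL(
  'https://example.com/video/playlist.m3u8', // hypothetical playlist URL
  'keys/key.bin'                             // relative URI from an #EXT-X-KEY tag
);
console.log(key.uri); // https://example.com/video/keys/key.bin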

17
node_modules/hls.js/src/loader/load-stats.ts generated vendored Normal file
View file

@ -0,0 +1,17 @@
import type {
HlsPerformanceTiming,
HlsProgressivePerformanceTiming,
LoaderStats,
} from '../types/loader';
export class LoadStats implements LoaderStats {
aborted: boolean = false;
loaded: number = 0;
retry: number = 0;
total: number = 0;
chunkCount: number = 0;
bwEstimate: number = 0;
loading: HlsProgressivePerformanceTiming = { start: 0, first: 0, end: 0 };
parsing: HlsPerformanceTiming = { start: 0, end: 0 };
buffering: HlsProgressivePerformanceTiming = { start: 0, first: 0, end: 0 };
}
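The timestamps collected here are what downstream components use to derive durations (the performance monitor further below subtracts them in exactly this way). A small sketch, assuming the module path from this diff:

import { LoadStats } from './loader/load-stats'; // path assumed from this diff

const stats = new LoadStats();
stats.loading.start = performance.now(); // request sent
// ... bytes arrive ...
stats.loading.first = performance.now(); // first byte received
stats.loading.end = performance.now();   // last byte received

const ttfbMs = stats.loading.first - stats.loading.start;
const loadDurationMs = stats.loading.end - stats.loading.start;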

624
node_modules/hls.js/src/loader/m3u8-parser.ts generated vendored Normal file
View file

@ -0,0 +1,624 @@
import * as URLToolkit from 'url-toolkit';
import { Fragment, Part } from './fragment';
import { LevelDetails } from './level-details';
import { LevelKey } from './level-key';
import { AttrList } from '../utils/attr-list';
import { logger } from '../utils/logger';
import type { CodecType } from '../utils/codecs';
import { isCodecType } from '../utils/codecs';
import type {
MediaPlaylist,
AudioGroup,
MediaPlaylistType,
} from '../types/media-playlist';
import type { PlaylistLevelType } from '../types/loader';
import type { LevelAttributes, LevelParsed } from '../types/level';
type M3U8ParserFragments = Array<Fragment | null>;
// https://regex101.com is your friend
const MASTER_PLAYLIST_REGEX =
/#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-SESSION-DATA:([^\r\n]*)[\r\n]+/g;
const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
const LEVEL_PLAYLIST_REGEX_FAST = new RegExp(
[
/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source, // duration (#EXTINF:<duration>,<title>), group 1 => duration, group 2 => title
/(?!#) *(\S[\S ]*)/.source, // segment URI, group 3 => the URI (note newline is not eaten)
/#EXT-X-BYTERANGE:*(.+)/.source, // next segment's byterange, group 4 => range spec (x@y)
/#EXT-X-PROGRAM-DATE-TIME:(.+)/.source, // next segment's program date/time group 5 => the datetime spec
/#.*/.source, // All other non-segment oriented tags will match with all groups empty
].join('|'),
'g'
);
const LEVEL_PLAYLIST_REGEX_SLOW = new RegExp(
[
/#(EXTM3U)/.source,
/#EXT-X-(PLAYLIST-TYPE):(.+)/.source,
/#EXT-X-(MEDIA-SEQUENCE): *(\d+)/.source,
/#EXT-X-(SKIP):(.+)/.source,
/#EXT-X-(TARGETDURATION): *(\d+)/.source,
/#EXT-X-(KEY):(.+)/.source,
/#EXT-X-(START):(.+)/.source,
/#EXT-X-(ENDLIST)/.source,
/#EXT-X-(DISCONTINUITY-SEQ)UENCE: *(\d+)/.source,
/#EXT-X-(DIS)CONTINUITY/.source,
/#EXT-X-(VERSION):(\d+)/.source,
/#EXT-X-(MAP):(.+)/.source,
/#EXT-X-(SERVER-CONTROL):(.+)/.source,
/#EXT-X-(PART-INF):(.+)/.source,
/#EXT-X-(GAP)/.source,
/#EXT-X-(BITRATE):\s*(\d+)/.source,
/#EXT-X-(PART):(.+)/.source,
/#EXT-X-(PRELOAD-HINT):(.+)/.source,
/#EXT-X-(RENDITION-REPORT):(.+)/.source,
/(#)([^:]*):(.*)/.source,
/(#)(.*)(?:.*)\r?\n?/.source,
].join('|')
);
const MP4_REGEX_SUFFIX = /\.(mp4|m4s|m4v|m4a)$/i;
function isMP4Url(url: string): boolean {
return MP4_REGEX_SUFFIX.test(URLToolkit.parseURL(url)?.path ?? '');
}
export default class M3U8Parser {
static findGroup(
groups: Array<AudioGroup>,
mediaGroupId: string
): AudioGroup | undefined {
for (let i = 0; i < groups.length; i++) {
const group = groups[i];
if (group.id === mediaGroupId) {
return group;
}
}
}
static convertAVC1ToAVCOTI(codec) {
// Convert avc1 codec string from RFC-4281 to RFC-6381 for MediaSource.isTypeSupported
const avcdata = codec.split('.');
if (avcdata.length > 2) {
let result = avcdata.shift() + '.';
result += parseInt(avcdata.shift()).toString(16);
result += ('000' + parseInt(avcdata.shift()).toString(16)).substr(-4);
return result;
}
return codec;
}
static resolve(url, baseUrl) {
return URLToolkit.buildAbsoluteURL(baseUrl, url, { alwaysNormalize: true });
}
static parseMasterPlaylist(string: string, baseurl: string) {
const levels: Array<LevelParsed> = [];
const sessionData: Record<string, AttrList> = {};
let hasSessionData = false;
MASTER_PLAYLIST_REGEX.lastIndex = 0;
let result: RegExpExecArray | null;
while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) {
if (result[1]) {
// '#EXT-X-STREAM-INF' is found, parse level tag in group 1
const attrs = new AttrList(result[1]);
const level: LevelParsed = {
attrs,
bitrate:
attrs.decimalInteger('AVERAGE-BANDWIDTH') ||
attrs.decimalInteger('BANDWIDTH'),
name: attrs.NAME,
url: M3U8Parser.resolve(result[2], baseurl),
};
const resolution = attrs.decimalResolution('RESOLUTION');
if (resolution) {
level.width = resolution.width;
level.height = resolution.height;
}
setCodecs(
(attrs.CODECS || '').split(/[ ,]+/).filter((c) => c),
level
);
if (level.videoCodec && level.videoCodec.indexOf('avc1') !== -1) {
level.videoCodec = M3U8Parser.convertAVC1ToAVCOTI(level.videoCodec);
}
levels.push(level);
} else if (result[3]) {
// '#EXT-X-SESSION-DATA' is found, parse session data in group 3
const sessionAttrs = new AttrList(result[3]);
if (sessionAttrs['DATA-ID']) {
hasSessionData = true;
sessionData[sessionAttrs['DATA-ID']] = sessionAttrs;
}
}
}
return {
levels,
sessionData: hasSessionData ? sessionData : null,
};
}
static parseMasterPlaylistMedia(
string: string,
baseurl: string,
type: MediaPlaylistType,
groups: Array<AudioGroup> = []
): Array<MediaPlaylist> {
let result: RegExpExecArray | null;
const medias: Array<MediaPlaylist> = [];
let id = 0;
MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0;
while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) {
const attrs = new AttrList(result[1]) as LevelAttributes;
if (attrs.TYPE === type) {
const media: MediaPlaylist = {
attrs,
bitrate: 0,
id: id++,
groupId: attrs['GROUP-ID'],
instreamId: attrs['INSTREAM-ID'],
name: attrs.NAME || attrs.LANGUAGE || '',
type,
default: attrs.bool('DEFAULT'),
autoselect: attrs.bool('AUTOSELECT'),
forced: attrs.bool('FORCED'),
lang: attrs.LANGUAGE,
url: attrs.URI ? M3U8Parser.resolve(attrs.URI, baseurl) : '',
};
if (groups.length) {
// If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track.
// If we don't find the track signalled, let's use the first audio group's codec as a best guess.
const groupCodec =
M3U8Parser.findGroup(groups, media.groupId as string) || groups[0];
assignCodec(media, groupCodec, 'audioCodec');
assignCodec(media, groupCodec, 'textCodec');
}
medias.push(media);
}
}
return medias;
}
static parseLevelPlaylist(
string: string,
baseurl: string,
id: number,
type: PlaylistLevelType,
levelUrlId: number
): LevelDetails {
const level = new LevelDetails(baseurl);
const fragments: M3U8ParserFragments = level.fragments;
// The most recent init segment seen (applies to all subsequent segments)
let currentInitSegment: Fragment | null = null;
let currentSN = 0;
let currentPart = 0;
let totalduration = 0;
let discontinuityCounter = 0;
let prevFrag: Fragment | null = null;
let frag: Fragment = new Fragment(type, baseurl);
let result: RegExpExecArray | RegExpMatchArray | null;
let i: number;
let levelkey: LevelKey | undefined;
let firstPdtIndex = -1;
let createNextFrag = false;
LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0;
level.m3u8 = string;
while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) {
if (createNextFrag) {
createNextFrag = false;
frag = new Fragment(type, baseurl);
// setup the next fragment for part loading
frag.start = totalduration;
frag.sn = currentSN;
frag.cc = discontinuityCounter;
frag.level = id;
if (currentInitSegment) {
frag.initSegment = currentInitSegment;
frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
}
}
const duration = result[1];
if (duration) {
// INF
frag.duration = parseFloat(duration);
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
const title = (' ' + result[2]).slice(1);
frag.title = title || null;
frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
} else if (result[3]) {
// url
if (Number.isFinite(frag.duration)) {
frag.start = totalduration;
if (levelkey) {
frag.levelkey = levelkey;
}
frag.sn = currentSN;
frag.level = id;
frag.cc = discontinuityCounter;
frag.urlId = levelUrlId;
fragments.push(frag);
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
frag.relurl = (' ' + result[3]).slice(1);
assignProgramDateTime(frag, prevFrag);
prevFrag = frag;
totalduration += frag.duration;
currentSN++;
currentPart = 0;
createNextFrag = true;
}
} else if (result[4]) {
// X-BYTERANGE
const data = (' ' + result[4]).slice(1);
if (prevFrag) {
frag.setByteRange(data, prevFrag);
} else {
frag.setByteRange(data);
}
} else if (result[5]) {
// PROGRAM-DATE-TIME
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
frag.rawProgramDateTime = (' ' + result[5]).slice(1);
frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]);
if (firstPdtIndex === -1) {
firstPdtIndex = fragments.length;
}
} else {
result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);
if (!result) {
logger.warn('No matches on slow regex match for level playlist!');
continue;
}
for (i = 1; i < result.length; i++) {
if (typeof result[i] !== 'undefined') {
break;
}
}
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
const tag = (' ' + result[i]).slice(1);
const value1 = (' ' + result[i + 1]).slice(1);
const value2 = result[i + 2] ? (' ' + result[i + 2]).slice(1) : '';
switch (tag) {
case 'PLAYLIST-TYPE':
level.type = value1.toUpperCase();
break;
case 'MEDIA-SEQUENCE':
currentSN = level.startSN = parseInt(value1);
break;
case 'SKIP': {
const skipAttrs = new AttrList(value1);
const skippedSegments =
skipAttrs.decimalInteger('SKIPPED-SEGMENTS');
if (Number.isFinite(skippedSegments)) {
level.skippedSegments = skippedSegments;
// This will result in fragments[] containing undefined values, which we will fill in with `mergeDetails`
for (let i = skippedSegments; i--; ) {
fragments.unshift(null);
}
currentSN += skippedSegments;
}
const recentlyRemovedDateranges = skipAttrs.enumeratedString(
'RECENTLY-REMOVED-DATERANGES'
);
if (recentlyRemovedDateranges) {
level.recentlyRemovedDateranges =
recentlyRemovedDateranges.split('\t');
}
break;
}
case 'TARGETDURATION':
level.targetduration = parseFloat(value1);
break;
case 'VERSION':
level.version = parseInt(value1);
break;
case 'EXTM3U':
break;
case 'ENDLIST':
level.live = false;
break;
case '#':
if (value1 || value2) {
frag.tagList.push(value2 ? [value1, value2] : [value1]);
}
break;
case 'DIS':
discontinuityCounter++;
/* falls through */
case 'GAP':
frag.tagList.push([tag]);
break;
case 'BITRATE':
frag.tagList.push([tag, value1]);
break;
case 'DISCONTINUITY-SEQ':
discontinuityCounter = parseInt(value1);
break;
case 'KEY': {
// https://tools.ietf.org/html/rfc8216#section-4.3.2.4
const keyAttrs = new AttrList(value1);
const decryptmethod = keyAttrs.enumeratedString('METHOD');
const decrypturi = keyAttrs.URI;
const decryptiv = keyAttrs.hexadecimalInteger('IV');
const decryptkeyformatversions =
keyAttrs.enumeratedString('KEYFORMATVERSIONS');
const decryptkeyid = keyAttrs.enumeratedString('KEYID');
// From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity".
const decryptkeyformat =
keyAttrs.enumeratedString('KEYFORMAT') ?? 'identity';
const unsupportedKnownKeyformatsInManifest = [
'com.apple.streamingkeydelivery',
'com.microsoft.playready',
'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed', // widevine (v2)
'com.widevine', // earlier widevine (v1)
];
if (
unsupportedKnownKeyformatsInManifest.indexOf(decryptkeyformat) >
-1
) {
logger.warn(
`Keyformat ${decryptkeyformat} is not supported from the manifest`
);
continue;
} else if (decryptkeyformat !== 'identity') {
// We are supposed to skip keys we don't understand.
// As we currently only officially support identity keys
// from the manifest we shouldn't save any other key.
continue;
}
// TODO: multiple keys can be defined on a fragment, and we need to support this
// for clients that support both playready and widevine
if (decryptmethod) {
// TODO: need to determine if the level key is actually a relative URL
// if it isn't, then we should instead construct the LevelKey using fromURI.
levelkey = LevelKey.fromURL(baseurl, decrypturi);
if (
decrypturi &&
['AES-128', 'SAMPLE-AES', 'SAMPLE-AES-CENC'].indexOf(
decryptmethod
) >= 0
) {
levelkey.method = decryptmethod;
levelkey.keyFormat = decryptkeyformat;
if (decryptkeyid) {
levelkey.keyID = decryptkeyid;
}
if (decryptkeyformatversions) {
levelkey.keyFormatVersions = decryptkeyformatversions;
}
// Initialization Vector (IV)
levelkey.iv = decryptiv;
}
}
break;
}
case 'START': {
const startAttrs = new AttrList(value1);
const startTimeOffset =
startAttrs.decimalFloatingPoint('TIME-OFFSET');
// TIME-OFFSET can be 0
if (Number.isFinite(startTimeOffset)) {
level.startTimeOffset = startTimeOffset;
}
break;
}
case 'MAP': {
const mapAttrs = new AttrList(value1);
frag.relurl = mapAttrs.URI;
if (mapAttrs.BYTERANGE) {
frag.setByteRange(mapAttrs.BYTERANGE);
}
frag.level = id;
frag.sn = 'initSegment';
if (levelkey) {
frag.levelkey = levelkey;
}
frag.initSegment = null;
currentInitSegment = frag;
createNextFrag = true;
break;
}
case 'SERVER-CONTROL': {
const serverControlAttrs = new AttrList(value1);
level.canBlockReload = serverControlAttrs.bool('CAN-BLOCK-RELOAD');
level.canSkipUntil = serverControlAttrs.optionalFloat(
'CAN-SKIP-UNTIL',
0
);
level.canSkipDateRanges =
level.canSkipUntil > 0 &&
serverControlAttrs.bool('CAN-SKIP-DATERANGES');
level.partHoldBack = serverControlAttrs.optionalFloat(
'PART-HOLD-BACK',
0
);
level.holdBack = serverControlAttrs.optionalFloat('HOLD-BACK', 0);
break;
}
case 'PART-INF': {
const partInfAttrs = new AttrList(value1);
level.partTarget = partInfAttrs.decimalFloatingPoint('PART-TARGET');
break;
}
case 'PART': {
let partList = level.partList;
if (!partList) {
partList = level.partList = [];
}
const previousFragmentPart =
currentPart > 0 ? partList[partList.length - 1] : undefined;
const index = currentPart++;
const part = new Part(
new AttrList(value1),
frag,
baseurl,
index,
previousFragmentPart
);
partList.push(part);
frag.duration += part.duration;
break;
}
case 'PRELOAD-HINT': {
const preloadHintAttrs = new AttrList(value1);
level.preloadHint = preloadHintAttrs;
break;
}
case 'RENDITION-REPORT': {
const renditionReportAttrs = new AttrList(value1);
level.renditionReports = level.renditionReports || [];
level.renditionReports.push(renditionReportAttrs);
break;
}
default:
logger.warn(`line parsed but not handled: ${result}`);
break;
}
}
}
if (prevFrag && !prevFrag.relurl) {
fragments.pop();
totalduration -= prevFrag.duration;
if (level.partList) {
level.fragmentHint = prevFrag;
}
} else if (level.partList) {
assignProgramDateTime(frag, prevFrag);
frag.cc = discontinuityCounter;
level.fragmentHint = frag;
}
const fragmentLength = fragments.length;
const firstFragment = fragments[0];
const lastFragment = fragments[fragmentLength - 1];
totalduration += level.skippedSegments * level.targetduration;
if (totalduration > 0 && fragmentLength && lastFragment) {
level.averagetargetduration = totalduration / fragmentLength;
const lastSn = lastFragment.sn;
level.endSN = lastSn !== 'initSegment' ? lastSn : 0;
if (firstFragment) {
level.startCC = firstFragment.cc;
if (!firstFragment.initSegment) {
// this is a bit murky, but HLS really has no other way to tell us
// whether the fragments are TS or MP4, short of downloading them :/
// this is needed to be able to handle SIDX.
if (
level.fragments.every(
(frag) => frag.relurl && isMP4Url(frag.relurl)
)
) {
logger.warn(
'MP4 fragments found but no init segment (probably no MAP, incomplete M3U8), trying to fetch SIDX'
);
frag = new Fragment(type, baseurl);
frag.relurl = lastFragment.relurl;
frag.level = id;
frag.sn = 'initSegment';
firstFragment.initSegment = frag;
level.needSidxRanges = true;
}
}
}
} else {
level.endSN = 0;
level.startCC = 0;
}
if (level.fragmentHint) {
totalduration += level.fragmentHint.duration;
}
level.totalduration = totalduration;
level.endCC = discontinuityCounter;
/**
* Backfill any missing PDT values
* "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
* one or more Media Segment URIs, the client SHOULD extrapolate
* backward from that tag (using EXTINF durations and/or media
* timestamps) to associate dates with those segments."
* We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
* computed.
*/
if (firstPdtIndex > 0) {
backfillProgramDateTimes(fragments, firstPdtIndex);
}
return level;
}
}
function setCodecs(codecs: Array<string>, level: LevelParsed) {
['video', 'audio', 'text'].forEach((type: CodecType) => {
const filtered = codecs.filter((codec) => isCodecType(codec, type));
if (filtered.length) {
const preferred = filtered.filter((codec) => {
return (
codec.lastIndexOf('avc1', 0) === 0 ||
codec.lastIndexOf('mp4a', 0) === 0
);
});
level[`${type}Codec`] = preferred.length > 0 ? preferred[0] : filtered[0];
// remove from list
codecs = codecs.filter((codec) => filtered.indexOf(codec) === -1);
}
});
level.unknownCodecs = codecs;
}
function assignCodec(media, groupItem, codecProperty) {
const codecValue = groupItem[codecProperty];
if (codecValue) {
media[codecProperty] = codecValue;
}
}
function backfillProgramDateTimes(
fragments: M3U8ParserFragments,
firstPdtIndex: number
) {
let fragPrev = fragments[firstPdtIndex] as Fragment;
for (let i = firstPdtIndex; i--; ) {
const frag = fragments[i];
// Exit on delta-playlist skipped segments
if (!frag) {
return;
}
frag.programDateTime =
(fragPrev.programDateTime as number) - frag.duration * 1000;
fragPrev = frag;
}
}
function assignProgramDateTime(frag, prevFrag) {
if (frag.rawProgramDateTime) {
frag.programDateTime = Date.parse(frag.rawProgramDateTime);
} else if (prevFrag?.programDateTime) {
frag.programDateTime = prevFrag.endProgramDateTime;
}
if (!Number.isFinite(frag.programDateTime)) {
frag.programDateTime = null;
frag.rawProgramDateTime = null;
}
}
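For a quick sense of parseMasterPlaylist, the sketch below feeds it a single variant stream. The manifest string and base URL are made up for illustration; the import path assumes the source tree from this diff:

import M3U8Parser from './loader/m3u8-parser'; // path assumed from this diff

const manifest = [
  '#EXTM3U',
  '#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=640x360,CODECS="avc1.42e01e,mp4a.40.2"',
  'low/playlist.m3u8',
].join('\n');

const { levels } = M3U8Parser.parseMasterPlaylist(
  manifest,
  'https://example.com/master.m3u8' // hypothetical base URL
);
console.log(levels[0].bitrate, levels[0].width, levels[0].url);
// 1280000 640 https://example.com/low/playlist.m3u8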

730
node_modules/hls.js/src/loader/playlist-loader.ts generated vendored Normal file
View file

@ -0,0 +1,730 @@
/**
* PlaylistLoader - delegate for media manifest/playlist loading tasks. Takes care of parsing media to internal data-models.
*
* Once loaded, dispatches events with parsed data-models of manifest/levels/audio/subtitle tracks.
*
* Uses loader(s) set in config to do actual internal loading of resource tasks.
*
* @module
*
*/
import { Events } from '../events';
import { ErrorDetails, ErrorTypes } from '../errors';
import { logger } from '../utils/logger';
import { parseSegmentIndex } from '../utils/mp4-tools';
import M3U8Parser from './m3u8-parser';
import type { LevelParsed } from '../types/level';
import type {
Loader,
LoaderConfiguration,
LoaderContext,
LoaderResponse,
LoaderStats,
PlaylistLoaderContext,
} from '../types/loader';
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
import { LevelDetails } from './level-details';
import type Hls from '../hls';
import { AttrList } from '../utils/attr-list';
import type {
ErrorData,
LevelLoadingData,
ManifestLoadingData,
TrackLoadingData,
} from '../types/events';
function mapContextToLevelType(
context: PlaylistLoaderContext
): PlaylistLevelType {
const { type } = context;
switch (type) {
case PlaylistContextType.AUDIO_TRACK:
return PlaylistLevelType.AUDIO;
case PlaylistContextType.SUBTITLE_TRACK:
return PlaylistLevelType.SUBTITLE;
default:
return PlaylistLevelType.MAIN;
}
}
function getResponseUrl(
response: LoaderResponse,
context: PlaylistLoaderContext
): string {
let url = response.url;
// responseURL not supported on some browsers (it is used to detect URL redirection)
// data-uri mode also not supported (but no need to detect redirection)
if (url === undefined || url.indexOf('data:') === 0) {
// fallback to initial URL
url = context.url;
}
return url;
}
class PlaylistLoader {
private readonly hls: Hls;
private readonly loaders: {
[key: string]: Loader<LoaderContext>;
} = Object.create(null);
constructor(hls: Hls) {
this.hls = hls;
this.registerListeners();
}
private registerListeners() {
const { hls } = this;
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.on(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
hls.on(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
}
private unregisterListeners() {
const { hls } = this;
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
hls.off(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
hls.off(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
}
/**
* Returns defaults or configured loader-type overloads (pLoader and loader config params)
*/
private createInternalLoader(
context: PlaylistLoaderContext
): Loader<LoaderContext> {
const config = this.hls.config;
const PLoader = config.pLoader;
const Loader = config.loader;
const InternalLoader = PLoader || Loader;
const loader = new InternalLoader(config) as Loader<PlaylistLoaderContext>;
context.loader = loader;
this.loaders[context.type] = loader;
return loader;
}
private getInternalLoader(
context: PlaylistLoaderContext
): Loader<LoaderContext> {
return this.loaders[context.type];
}
private resetInternalLoader(contextType): void {
if (this.loaders[contextType]) {
delete this.loaders[contextType];
}
}
/**
* Call `destroy` on all internal loader instances mapped (one per context type)
*/
private destroyInternalLoaders(): void {
for (const contextType in this.loaders) {
const loader = this.loaders[contextType];
if (loader) {
loader.destroy();
}
this.resetInternalLoader(contextType);
}
}
public destroy(): void {
this.unregisterListeners();
this.destroyInternalLoaders();
}
private onManifestLoading(
event: Events.MANIFEST_LOADING,
data: ManifestLoadingData
) {
const { url } = data;
this.load({
id: null,
groupId: null,
level: 0,
responseType: 'text',
type: PlaylistContextType.MANIFEST,
url,
deliveryDirectives: null,
});
}
private onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
const { id, level, url, deliveryDirectives } = data;
this.load({
id,
groupId: null,
level,
responseType: 'text',
type: PlaylistContextType.LEVEL,
url,
deliveryDirectives,
});
}
private onAudioTrackLoading(
event: Events.AUDIO_TRACK_LOADING,
data: TrackLoadingData
) {
const { id, groupId, url, deliveryDirectives } = data;
this.load({
id,
groupId,
level: null,
responseType: 'text',
type: PlaylistContextType.AUDIO_TRACK,
url,
deliveryDirectives,
});
}
private onSubtitleTrackLoading(
event: Events.SUBTITLE_TRACK_LOADING,
data: TrackLoadingData
) {
const { id, groupId, url, deliveryDirectives } = data;
this.load({
id,
groupId,
level: null,
responseType: 'text',
type: PlaylistContextType.SUBTITLE_TRACK,
url,
deliveryDirectives,
});
}
private load(context: PlaylistLoaderContext): void {
const config = this.hls.config;
// logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`);
// Check if a loader for this context already exists
let loader = this.getInternalLoader(context);
if (loader) {
const loaderContext = loader.context;
if (loaderContext && loaderContext.url === context.url) {
// same URL can't overlap
logger.trace('[playlist-loader]: playlist request ongoing');
return;
}
logger.log(
`[playlist-loader]: aborting previous loader for type: ${context.type}`
);
loader.abort();
}
let maxRetry;
let timeout;
let retryDelay;
let maxRetryDelay;
// apply different configs for retries depending on
// context (manifest, level, audio/subs playlist)
switch (context.type) {
case PlaylistContextType.MANIFEST:
maxRetry = config.manifestLoadingMaxRetry;
timeout = config.manifestLoadingTimeOut;
retryDelay = config.manifestLoadingRetryDelay;
maxRetryDelay = config.manifestLoadingMaxRetryTimeout;
break;
case PlaylistContextType.LEVEL:
case PlaylistContextType.AUDIO_TRACK:
case PlaylistContextType.SUBTITLE_TRACK:
// Manage retries in Level/Track Controller
maxRetry = 0;
timeout = config.levelLoadingTimeOut;
break;
default:
maxRetry = config.levelLoadingMaxRetry;
timeout = config.levelLoadingTimeOut;
retryDelay = config.levelLoadingRetryDelay;
maxRetryDelay = config.levelLoadingMaxRetryTimeout;
break;
}
loader = this.createInternalLoader(context);
// Override level/track timeout for LL-HLS requests
// (the default of 10000ms is counter productive to blocking playlist reload requests)
if (context.deliveryDirectives?.part) {
let levelDetails: LevelDetails | undefined;
if (
context.type === PlaylistContextType.LEVEL &&
context.level !== null
) {
levelDetails = this.hls.levels[context.level].details;
} else if (
context.type === PlaylistContextType.AUDIO_TRACK &&
context.id !== null
) {
levelDetails = this.hls.audioTracks[context.id].details;
} else if (
context.type === PlaylistContextType.SUBTITLE_TRACK &&
context.id !== null
) {
levelDetails = this.hls.subtitleTracks[context.id].details;
}
if (levelDetails) {
const partTarget = levelDetails.partTarget;
const targetDuration = levelDetails.targetduration;
if (partTarget && targetDuration) {
timeout = Math.min(
Math.max(partTarget * 3, targetDuration * 0.8) * 1000,
timeout
);
}
}
}
const loaderConfig: LoaderConfiguration = {
timeout,
maxRetry,
retryDelay,
maxRetryDelay,
highWaterMark: 0,
};
const loaderCallbacks = {
onSuccess: this.loadsuccess.bind(this),
onError: this.loaderror.bind(this),
onTimeout: this.loadtimeout.bind(this),
};
// logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`);
loader.load(context, loaderConfig, loaderCallbacks);
}
private loadsuccess(
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any = null
): void {
if (context.isSidxRequest) {
this.handleSidxRequest(response, context);
this.handlePlaylistLoaded(response, stats, context, networkDetails);
return;
}
this.resetInternalLoader(context.type);
const string = response.data as string;
// Validate if it is an M3U8 at all
if (string.indexOf('#EXTM3U') !== 0) {
this.handleManifestParsingError(
response,
context,
'no EXTM3U delimiter',
networkDetails
);
return;
}
stats.parsing.start = performance.now();
// Check if chunk-list or master. handle empty chunk list case (first EXTINF not signaled, but TARGETDURATION present)
if (
string.indexOf('#EXTINF:') > 0 ||
string.indexOf('#EXT-X-TARGETDURATION:') > 0
) {
this.handleTrackOrLevelPlaylist(response, stats, context, networkDetails);
} else {
this.handleMasterPlaylist(response, stats, context, networkDetails);
}
}
private loaderror(
response: LoaderResponse,
context: PlaylistLoaderContext,
networkDetails: any = null
): void {
this.handleNetworkError(context, networkDetails, false, response);
}
private loadtimeout(
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any = null
): void {
this.handleNetworkError(context, networkDetails, true);
}
private handleMasterPlaylist(
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any
): void {
const hls = this.hls;
const string = response.data as string;
const url = getResponseUrl(response, context);
const { levels, sessionData } = M3U8Parser.parseMasterPlaylist(string, url);
if (!levels.length) {
this.handleManifestParsingError(
response,
context,
'no level found in manifest',
networkDetails
);
return;
}
// multi level playlist, parse level info
const audioGroups = levels.map((level: LevelParsed) => ({
id: level.attrs.AUDIO,
audioCodec: level.audioCodec,
}));
const subtitleGroups = levels.map((level: LevelParsed) => ({
id: level.attrs.SUBTITLES,
textCodec: level.textCodec,
}));
const audioTracks = M3U8Parser.parseMasterPlaylistMedia(
string,
url,
'AUDIO',
audioGroups
);
const subtitles = M3U8Parser.parseMasterPlaylistMedia(
string,
url,
'SUBTITLES',
subtitleGroups
);
const captions = M3U8Parser.parseMasterPlaylistMedia(
string,
url,
'CLOSED-CAPTIONS'
);
if (audioTracks.length) {
// check if we have found an audio track embedded in main playlist (audio track without URI attribute)
const embeddedAudioFound: boolean = audioTracks.some(
(audioTrack) => !audioTrack.url
);
// if no embedded audio track is defined, but an audio codec is signaled in the quality level,
// we need to signal this main audio track. This can happen with playlists that have an
// alt audio rendition in which the quality levels (main)
// contain both audio+video, but the mixed audio track is not signaled
if (
!embeddedAudioFound &&
levels[0].audioCodec &&
!levels[0].attrs.AUDIO
) {
logger.log(
'[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one'
);
audioTracks.unshift({
type: 'main',
name: 'main',
default: false,
autoselect: false,
forced: false,
id: -1,
attrs: new AttrList({}),
bitrate: 0,
url: '',
});
}
}
hls.trigger(Events.MANIFEST_LOADED, {
levels,
audioTracks,
subtitles,
captions,
url,
stats,
networkDetails,
sessionData,
});
}
private handleTrackOrLevelPlaylist(
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any
): void {
const hls = this.hls;
const { id, level, type } = context;
const url = getResponseUrl(response, context);
const levelUrlId = Number.isFinite(id as number) ? id : 0;
const levelId = Number.isFinite(level as number) ? level : levelUrlId;
const levelType = mapContextToLevelType(context);
const levelDetails: LevelDetails = M3U8Parser.parseLevelPlaylist(
response.data as string,
url,
levelId!,
levelType,
levelUrlId!
);
if (!levelDetails.fragments.length) {
hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.LEVEL_EMPTY_ERROR,
fatal: false,
url: url,
reason: 'no fragments found in level',
level: typeof context.level === 'number' ? context.level : undefined,
});
return;
}
// We have done our first request (Manifest-type) and received
// not a master playlist but a chunk-list (track/level).
// We fire the manifest-loaded event anyway with the parsed level-details,
// by creating a single-level structure for it.
if (type === PlaylistContextType.MANIFEST) {
const singleLevel: LevelParsed = {
attrs: new AttrList({}),
bitrate: 0,
details: levelDetails,
name: '',
url,
};
hls.trigger(Events.MANIFEST_LOADED, {
levels: [singleLevel],
audioTracks: [],
url,
stats,
networkDetails,
sessionData: null,
});
}
// save parsing time
stats.parsing.end = performance.now();
// in case we need SIDX ranges
// return early after calling load for
// the SIDX box.
if (levelDetails.needSidxRanges) {
const sidxUrl = levelDetails.fragments[0].initSegment?.url as string;
this.load({
url: sidxUrl,
isSidxRequest: true,
type,
level,
levelDetails,
id,
groupId: null,
rangeStart: 0,
rangeEnd: 2048,
responseType: 'arraybuffer',
deliveryDirectives: null,
});
return;
}
// extend the context with the new levelDetails property
context.levelDetails = levelDetails;
this.handlePlaylistLoaded(response, stats, context, networkDetails);
}
private handleSidxRequest(
response: LoaderResponse,
context: PlaylistLoaderContext
): void {
const sidxInfo = parseSegmentIndex(
new Uint8Array(response.data as ArrayBuffer)
);
// if provided fragment does not contain sidx, early return
if (!sidxInfo) {
return;
}
const sidxReferences = sidxInfo.references;
const levelDetails = context.levelDetails as LevelDetails;
sidxReferences.forEach((segmentRef, index) => {
const segRefInfo = segmentRef.info;
const frag = levelDetails.fragments[index];
if (frag.byteRange.length === 0) {
frag.setByteRange(
String(1 + segRefInfo.end - segRefInfo.start) +
'@' +
String(segRefInfo.start)
);
}
if (frag.initSegment) {
frag.initSegment.setByteRange(String(sidxInfo.moovEndOffset) + '@0');
}
});
}
private handleManifestParsingError(
response: LoaderResponse,
context: PlaylistLoaderContext,
reason: string,
networkDetails: any
): void {
this.hls.trigger(Events.ERROR, {
type: ErrorTypes.NETWORK_ERROR,
details: ErrorDetails.MANIFEST_PARSING_ERROR,
fatal: context.type === PlaylistContextType.MANIFEST,
url: response.url,
reason,
response,
context,
networkDetails,
});
}
private handleNetworkError(
context: PlaylistLoaderContext,
networkDetails: any,
timeout = false,
response?: LoaderResponse
): void {
logger.warn(
`[playlist-loader]: A network ${
timeout ? 'timeout' : 'error'
} occurred while loading ${context.type} level: ${context.level} id: ${
context.id
} group-id: "${context.groupId}"`
);
let details = ErrorDetails.UNKNOWN;
let fatal = false;
const loader = this.getInternalLoader(context);
switch (context.type) {
case PlaylistContextType.MANIFEST:
details = timeout
? ErrorDetails.MANIFEST_LOAD_TIMEOUT
: ErrorDetails.MANIFEST_LOAD_ERROR;
fatal = true;
break;
case PlaylistContextType.LEVEL:
details = timeout
? ErrorDetails.LEVEL_LOAD_TIMEOUT
: ErrorDetails.LEVEL_LOAD_ERROR;
fatal = false;
break;
case PlaylistContextType.AUDIO_TRACK:
details = timeout
? ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT
: ErrorDetails.AUDIO_TRACK_LOAD_ERROR;
fatal = false;
break;
case PlaylistContextType.SUBTITLE_TRACK:
details = timeout
? ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT
: ErrorDetails.SUBTITLE_LOAD_ERROR;
fatal = false;
break;
}
if (loader) {
this.resetInternalLoader(context.type);
}
const errorData: ErrorData = {
type: ErrorTypes.NETWORK_ERROR,
details,
fatal,
url: context.url,
loader,
context,
networkDetails,
};
if (response) {
errorData.response = response;
}
this.hls.trigger(Events.ERROR, errorData);
}
private handlePlaylistLoaded(
response: LoaderResponse,
stats: LoaderStats,
context: PlaylistLoaderContext,
networkDetails: any
): void {
const {
type,
level,
id,
groupId,
loader,
levelDetails,
deliveryDirectives,
} = context;
if (!levelDetails?.targetduration) {
this.handleManifestParsingError(
response,
context,
'invalid target duration',
networkDetails
);
return;
}
if (!loader) {
return;
}
if (levelDetails.live) {
if (loader.getCacheAge) {
levelDetails.ageHeader = loader.getCacheAge() || 0;
}
if (!loader.getCacheAge || isNaN(levelDetails.ageHeader)) {
levelDetails.ageHeader = 0;
}
}
switch (type) {
case PlaylistContextType.MANIFEST:
case PlaylistContextType.LEVEL:
this.hls.trigger(Events.LEVEL_LOADED, {
details: levelDetails,
level: level || 0,
id: id || 0,
stats,
networkDetails,
deliveryDirectives,
});
break;
case PlaylistContextType.AUDIO_TRACK:
this.hls.trigger(Events.AUDIO_TRACK_LOADED, {
details: levelDetails,
id: id || 0,
groupId: groupId || '',
stats,
networkDetails,
deliveryDirectives,
});
break;
case PlaylistContextType.SUBTITLE_TRACK:
this.hls.trigger(Events.SUBTITLE_TRACK_LOADED, {
details: levelDetails,
id: id || 0,
groupId: groupId || '',
stats,
networkDetails,
deliveryDirectives,
});
break;
}
}
}
export default PlaylistLoader;
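From the application side, the loader's work surfaces as events on the Hls instance: MANIFEST_LOADED for the master playlist, LEVEL_LOADED (and the track variants) for chunk-lists. A minimal sketch with the public API and a hypothetical URL:

import Hls from 'hls.js';

const hls = new Hls();
hls.on(Hls.Events.MANIFEST_LOADED, (event, data) => {
  console.log('levels found:', data.levels.length);
});
hls.on(Hls.Events.LEVEL_LOADED, (event, data) => {
  console.log('level loaded, live:', data.details.live);
});
hls.loadSource('https://example.com/master.m3u8'); // hypothetical URL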

58
node_modules/hls.js/src/performance-monitor.ts generated vendored Normal file
View file

@ -0,0 +1,58 @@
/*
* Push the performance monitor as the last core component in hls.ts
* so that it is the last class to handle events.
*
* coreComponents.push(new PerformanceMonitor(this));
*
* TODO: Add this to the demo page or a performance test page
*/
import { Events } from '../events';
import { logger } from '../utils/logger';
import Hls from '../hls';
import type { FragBufferedData } from '../types/events';
export default class PerformanceMonitor {
private hls: Hls;
constructor(hls: Hls) {
this.hls = hls;
this.hls.on(Events.FRAG_BUFFERED, this.onFragBuffered);
}
destroy() {
this.hls.off(Events.FRAG_BUFFERED);
}
onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
logFragStats(data);
}
}
function logFragStats(data: FragBufferedData) {
const { frag, part } = data;
const stats = part ? part.stats : frag.stats;
const tLoad = stats.loading.end - stats.loading.start;
const tBuffer = stats.buffering.end - stats.buffering.start;
const tParse = stats.parsing.end - stats.parsing.start;
const tTotal = stats.buffering.end - stats.loading.start;
logger.log(`[performance-monitor]: Stats for fragment ${frag.sn} ${
part ? ' part ' + part.index : ''
} of level ${frag.level}:
Size: ${(stats.total / 1024).toFixed(3)} kB
Chunk Count: ${stats.chunkCount}
Request: ${stats.loading.start.toFixed(3)} ms
First Byte: ${stats.loading.first.toFixed(3)} ms
Parse Start ${stats.parsing.start.toFixed(3)} ms
Buffering Start: ${stats.buffering.start.toFixed(3)} ms
First Buffer: ${stats.buffering.first.toFixed(3)} ms
Parse End: ${stats.parsing.end.toFixed(3)} ms
Buffering End: ${stats.buffering.end.toFixed(3)} ms
Load Duration: ${tLoad.toFixed(3)} ms
Parse Duration: ${tParse.toFixed(3)} ms
Buffer Duration: ${tBuffer.toFixed(3)} ms
End-To-End Duration: ${tTotal.toFixed(3)} ms`);
}
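As the TODO notes, this monitor is not wired up by default. An application can obtain the same numbers without it by listening for FRAG_BUFFERED directly; a minimal sketch with the public API:

import Hls from 'hls.js';

const hls = new Hls();
hls.on(Hls.Events.FRAG_BUFFERED, (event, data) => {
  // Part stats take precedence for LL-HLS parts, as in logFragStats above
  const stats = data.part ? data.part.stats : data.frag.stats;
  console.log('end-to-end ms:', stats.buffering.end - stats.loading.start);
});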

7
node_modules/hls.js/src/polyfills/number.ts generated vendored Normal file
View file

@ -0,0 +1,7 @@
export const isFiniteNumber =
Number.isFinite ||
function (value) {
return typeof value === 'number' && isFinite(value);
};
export const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
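Unlike the global isFinite, the polyfill follows Number.isFinite semantics and performs no type coercion. A tiny sketch (import path assumed from this diff):

import { isFiniteNumber } from './polyfills/number'; // path assumed from this diff

isFiniteNumber(42);   // true
isFiniteNumber(NaN);  // false
isFiniteNumber('42'); // false - no string coercion, unlike the global isFinite('42')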

81
node_modules/hls.js/src/remux/aac-helper.ts generated vendored Normal file
View file

@ -0,0 +1,81 @@
/**
* AAC helper
*/
class AAC {
static getSilentFrame(
codec?: string,
channelCount?: number
): Uint8Array | undefined {
switch (codec) {
case 'mp4a.40.2':
if (channelCount === 1) {
return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]);
} else if (channelCount === 2) {
return new Uint8Array([
0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80,
]);
} else if (channelCount === 3) {
return new Uint8Array([
0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
0x00, 0x8e,
]);
} else if (channelCount === 4) {
return new Uint8Array([
0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38,
]);
} else if (channelCount === 5) {
return new Uint8Array([
0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38,
]);
} else if (channelCount === 6) {
return new Uint8Array([
0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64,
0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2,
0x00, 0x20, 0x08, 0xe0,
]);
}
break;
// handle HE-AAC below (mp4a.40.5 / mp4a.40.29)
default:
if (channelCount === 1) {
// ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
return new Uint8Array([
0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0,
0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5e,
]);
} else if (channelCount === 2) {
// ffmpeg -y -f lavfi -i "aevalsrc=0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
return new Uint8Array([
0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0,
0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5e,
]);
} else if (channelCount === 3) {
// ffmpeg -y -f lavfi -i "aevalsrc=0|0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
return new Uint8Array([
0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0,
0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a,
0x5a, 0x5e,
]);
}
break;
}
return undefined;
}
}
export default AAC;
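The silent frames are raw AAC access units used to paper over audio gaps during remuxing. A minimal sketch, assuming the module path from this diff:

import AAC from './remux/aac-helper'; // path assumed from this diff

const silent = AAC.getSilentFrame('mp4a.40.2', 2); // AAC-LC, stereo
if (silent) {
  console.log(silent.byteLength); // 9 bytes for this codec/channel combination
}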

1149
node_modules/hls.js/src/remux/mp4-generator.ts generated vendored Normal file

File diff suppressed because it is too large

1090
node_modules/hls.js/src/remux/mp4-remuxer.ts generated vendored Normal file

File diff suppressed because it is too large

237
node_modules/hls.js/src/remux/passthrough-remuxer.ts generated vendored Normal file
View file

@ -0,0 +1,237 @@
import type { InitData, InitDataTrack } from '../utils/mp4-tools';
import {
getDuration,
getStartDTS,
offsetStartDTS,
parseInitSegment,
} from '../utils/mp4-tools';
import { ElementaryStreamTypes } from '../loader/fragment';
import { logger } from '../utils/logger';
import type { TrackSet } from '../types/track';
import type {
InitSegmentData,
RemuxedTrack,
Remuxer,
RemuxerResult,
} from '../types/remuxer';
import type {
DemuxedAudioTrack,
DemuxedMetadataTrack,
DemuxedUserdataTrack,
PassthroughVideoTrack,
} from '../types/demuxer';
class PassThroughRemuxer implements Remuxer {
private emitInitSegment: boolean = false;
private audioCodec?: string;
private videoCodec?: string;
private initData?: InitData;
private initPTS?: number;
private initTracks?: TrackSet;
private lastEndDTS: number | null = null;
destroy() {}
resetTimeStamp(defaultInitPTS) {
this.initPTS = defaultInitPTS;
this.lastEndDTS = null;
}
resetNextTimestamp() {
this.lastEndDTS = null;
}
resetInitSegment(
initSegment: Uint8Array | undefined,
audioCodec: string | undefined,
videoCodec: string | undefined
) {
this.audioCodec = audioCodec;
this.videoCodec = videoCodec;
this.generateInitSegment(initSegment);
this.emitInitSegment = true;
}
generateInitSegment(initSegment: Uint8Array | undefined): void {
let { audioCodec, videoCodec } = this;
if (!initSegment || !initSegment.byteLength) {
this.initTracks = undefined;
this.initData = undefined;
return;
}
const initData = (this.initData = parseInitSegment(initSegment));
// Get codec from initSegment or fallback to default
if (!audioCodec) {
audioCodec = getParsedTrackCodec(
initData.audio,
ElementaryStreamTypes.AUDIO
);
}
if (!videoCodec) {
videoCodec = getParsedTrackCodec(
initData.video,
ElementaryStreamTypes.VIDEO
);
}
const tracks: TrackSet = {};
if (initData.audio && initData.video) {
tracks.audiovideo = {
container: 'video/mp4',
codec: audioCodec + ',' + videoCodec,
initSegment,
id: 'main',
};
} else if (initData.audio) {
tracks.audio = {
container: 'audio/mp4',
codec: audioCodec,
initSegment,
id: 'audio',
};
} else if (initData.video) {
tracks.video = {
container: 'video/mp4',
codec: videoCodec,
initSegment,
id: 'main',
};
} else {
logger.warn(
'[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.'
);
}
this.initTracks = tracks;
}
remux(
audioTrack: DemuxedAudioTrack,
videoTrack: PassthroughVideoTrack,
id3Track: DemuxedMetadataTrack,
textTrack: DemuxedUserdataTrack,
timeOffset: number
): RemuxerResult {
let { initPTS, lastEndDTS } = this;
const result: RemuxerResult = {
audio: undefined,
video: undefined,
text: textTrack,
id3: id3Track,
initSegment: undefined,
};
// If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the
// lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update
// the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
if (!Number.isFinite(lastEndDTS!)) {
lastEndDTS = this.lastEndDTS = timeOffset || 0;
}
// The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
// audio or video (or both); adding it to video was an arbitrary choice.
const data = videoTrack.samples;
if (!data || !data.length) {
return result;
}
const initSegment: InitSegmentData = {
initPTS: undefined,
timescale: 1,
};
let initData = this.initData;
if (!initData || !initData.length) {
this.generateInitSegment(data);
initData = this.initData;
}
if (!initData || !initData.length) {
// We can't remux if the initSegment could not be generated
logger.warn('[passthrough-remuxer.ts]: Failed to generate initSegment.');
return result;
}
if (this.emitInitSegment) {
initSegment.tracks = this.initTracks as TrackSet;
this.emitInitSegment = false;
}
if (!Number.isFinite(initPTS!)) {
this.initPTS =
initSegment.initPTS =
initPTS =
computeInitPTS(initData, data, lastEndDTS);
}
const duration = getDuration(data, initData);
const startDTS = lastEndDTS as number;
const endDTS = duration + startDTS;
offsetStartDTS(initData, data, initPTS as number);
if (duration > 0) {
this.lastEndDTS = endDTS;
} else {
logger.warn('Duration parsed from mp4 should be greater than zero');
this.resetNextTimestamp();
}
const hasAudio = !!initData.audio;
const hasVideo = !!initData.video;
let type: any = '';
if (hasAudio) {
type += 'audio';
}
if (hasVideo) {
type += 'video';
}
const track: RemuxedTrack = {
data1: data,
startPTS: startDTS,
startDTS,
endPTS: endDTS,
endDTS,
type,
hasAudio,
hasVideo,
nb: 1,
dropped: 0,
};
result.audio = track.type === 'audio' ? track : undefined;
result.video = track.type !== 'audio' ? track : undefined;
result.text = textTrack;
result.id3 = id3Track;
result.initSegment = initSegment;
return result;
}
}
const computeInitPTS = (initData, data, timeOffset) =>
getStartDTS(initData, data) - timeOffset;
function getParsedTrackCodec(
track: InitDataTrack | undefined,
type: ElementaryStreamTypes.AUDIO | ElementaryStreamTypes.VIDEO
): string {
const parsedCodec = track?.codec;
if (parsedCodec && parsedCodec.length > 4) {
return parsedCodec;
}
// Since mp4-tools cannot parse the full codec string (see 'TODO: Parse codec details'... in mp4-tools)
// provide defaults based on codec type.
// This allows playback of some fmp4 playlists without CODECS defined in the manifest
if (parsedCodec === 'hvc1') {
return 'hvc1.1.c.L120.90';
}
if (parsedCodec === 'av01') {
return 'av01.0.04M.08';
}
if (parsedCodec === 'avc1' || type === ElementaryStreamTypes.VIDEO) {
return 'avc1.42e01e';
}
return 'mp4a.40.5';
}
export default PassThroughRemuxer;
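The timeline alignment above boils down to initPTS = getStartDTS(initData, data) - timeOffset, which offsetStartDTS then subtracts from every sample so the segment starts where the playlist says it should. A worked sketch with assumed values:

// Assumed values illustrating the initPTS computation above.
const firstSampleDTS = 120.0; // seconds, as parsed from the fmp4 segment (assumed)
const timeOffset = 0;         // position on the media timeline where the segment belongs
const initPTS = firstSampleDTS - timeOffset; // 120
// offsetStartDTS() subtracts initPTS from the segment's timestamps,
// so playback of this segment begins at timeOffset rather than at 120 s.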

131
node_modules/hls.js/src/task-loop.ts generated vendored Normal file
View file

@ -0,0 +1,131 @@
/**
* Sub-class specialization of EventHandler base class.
*
 * TaskLoop allows a task function to be scheduled (optionally repeatedly) on the main loop,
 * scheduled asynchronously, avoiding recursive calls in the same tick.
 *
 * The task itself is implemented in `doTick`. It can be requested and called for single execution
 * using the `tick` method.
 *
 * It is assured that the task execution method (`tick`) only gets called once per main loop "tick",
 * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
 *
 * If further execution requests have already been scheduled on the next tick, this can be checked with `hasNextTick`,
 * and cancelled with `clearNextTick`.
 *
 * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
 *
 * Sub-classes need to implement the `doTick` method, which effectively contains the task execution routine.
 *
 * Further explanations:
 *
 * The base class has a `tick` method that will schedule the doTick call. It may be called synchronously
 * only for a stack depth of one. On re-entrant calls, subsequent calls are scheduled for the next main loop ticks.
 *
 * When the task execution (`tick` method) is called in a re-entrant way, this is detected and
 * task execution is limited to exactly one per call stack, scheduling/postponing further
 * task processing to the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
*/
export default class TaskLoop {
private readonly _boundTick: () => void;
private _tickTimer: number | null = null;
private _tickInterval: number | null = null;
private _tickCallCount = 0;
constructor() {
this._boundTick = this.tick.bind(this);
}
public destroy() {
this.onHandlerDestroying();
this.onHandlerDestroyed();
}
protected onHandlerDestroying() {
// clear all timers before unregistering from event bus
this.clearNextTick();
this.clearInterval();
}
protected onHandlerDestroyed() {}
/**
* @returns {boolean}
*/
public hasInterval(): boolean {
return !!this._tickInterval;
}
/**
* @returns {boolean}
*/
public hasNextTick(): boolean {
return !!this._tickTimer;
}
/**
* @param {number} millis Interval time (ms)
* @returns {boolean} True when interval has been scheduled, false when already scheduled (no effect)
*/
public setInterval(millis: number): boolean {
if (!this._tickInterval) {
this._tickInterval = self.setInterval(this._boundTick, millis);
return true;
}
return false;
}
/**
* @returns {boolean} True when interval was cleared, false when none was set (no effect)
*/
public clearInterval(): boolean {
if (this._tickInterval) {
self.clearInterval(this._tickInterval);
this._tickInterval = null;
return true;
}
return false;
}
/**
* @returns {boolean} True when timeout was cleared, false when none was set (no effect)
*/
public clearNextTick(): boolean {
if (this._tickTimer) {
self.clearTimeout(this._tickTimer);
this._tickTimer = null;
return true;
}
return false;
}
/**
* Will call the subclass doTick implementation in this main loop tick
* or in the next one (via setTimeout(,0)) in case it has already been called
* in this tick (in case this is a re-entrant call).
*/
public tick(): void {
this._tickCallCount++;
if (this._tickCallCount === 1) {
this.doTick();
// re-entrant call to tick from previous doTick call stack
// -> schedule a call on the next main loop iteration to process this task processing request
if (this._tickCallCount > 1) {
// make sure only one timer exists at any time at max
this.tickImmediate();
}
this._tickCallCount = 0;
}
}
public tickImmediate(): void {
this.clearNextTick();
this._tickTimer = self.setTimeout(this._boundTick, 0);
}
/**
* For subclass to implement task logic
* @abstract
*/
protected doTick(): void {}
}
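A subclass only needs to implement doTick; scheduling, de-duplication and cleanup are inherited. A minimal hypothetical subclass (import path assumed from this diff):

import TaskLoop from './task-loop';

class Poller extends TaskLoop {
  protected doTick(): void {
    console.log('poll at', Date.now());
  }
}

const poller = new Poller();
poller.setInterval(500); // doTick() now runs every 500 ms
poller.tick();           // immediate request; re-entrant calls defer to the next tick
// later: poller.destroy() clears the interval and any pending tick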

37
node_modules/hls.js/src/types/buffer.ts generated vendored Normal file
View file

@ -0,0 +1,37 @@
export type SourceBufferName = 'video' | 'audio' | 'audiovideo';
// eslint-disable-next-line no-restricted-globals
export type ExtendedSourceBuffer = SourceBuffer & {
ended?: boolean;
changeType?: (type: string) => void;
};
export type SourceBuffers = Partial<
Record<SourceBufferName, ExtendedSourceBuffer>
>;
export interface BufferOperationQueues {
video: Array<BufferOperation>;
audio: Array<BufferOperation>;
audiovideo: Array<BufferOperation>;
}
export interface BufferOperation {
execute: Function;
onStart: Function;
onComplete: Function;
onError: Function;
start?: number;
end?: number;
}
export interface SourceBufferListeners {
video: Array<SourceBufferListener>;
audio: Array<SourceBufferListener>;
audiovideo: Array<SourceBufferListener>;
}
export interface SourceBufferListener {
event: string;
listener: EventListener;
}
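Each mutation of a SourceBuffer is queued as one of these BufferOperation entries. A hypothetical, simplified example of building an append operation (the callback bodies are placeholders):

import type { BufferOperation } from './types/buffer'; // path assumed from this diff

function makeAppendOperation(
  sb: SourceBuffer,
  chunk: Uint8Array
): BufferOperation {
  return {
    execute: () => sb.appendBuffer(chunk),
    onStart: () => {},
    onComplete: () => console.log('append flushed'),
    onError: (err: Event) => console.error('append failed', err),
  };
}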

276
node_modules/hls.js/src/types/cmcd.ts generated vendored Normal file
View file

@ -0,0 +1,276 @@
/**
* CMCD spec version
*/
export const CMCDVersion = 1;
/**
* CMCD Object Type
*/
export enum CMCDObjectType {
MANIFEST = 'm',
AUDIO = 'a',
VIDEO = 'v',
MUXED = 'av',
INIT = 'i',
CAPTION = 'c',
TIMED_TEXT = 'tt',
KEY = 'k',
OTHER = 'o',
}
/**
* CMCD Streaming Format
*/
export enum CMCDStreamingFormat {
DASH = 'd',
HLS = 'h',
SMOOTH = 's',
OTHER = 'o',
}
/**
* CMCD Streaming Type
*/
export enum CMCDStreamType {
VOD = 'v',
LIVE = 'l',
}
/**
* CMCD Headers
*/
export interface CMCDHeaders {
'CMCD-Object': string;
'CMCD-Request': string;
'CMCD-Session': string;
'CMCD-Status': string;
}
/**
* CMCD
*/
export interface CMCD {
/////////////////
// CMCD Object //
/////////////////
/**
* Encoded bitrate
*
* The encoded bitrate of the audio or video object being requested. This may not be known precisely by the player; however,
* it MAY be estimated based upon playlist/manifest declarations. If the playlist declares both peak and average bitrate values,
* the peak value should be transmitted.
*
* Integer kbps
*/
br?: number;
/**
* Object duration
*
* The playback duration in milliseconds of the object being requested. If a partial segment is being requested, then this value
* MUST indicate the playback duration of that part and not that of its parent segment. This value can be an approximation of the
* estimated duration if the explicit value is not known.
*
* Integer milliseconds
*/
d?: number;
/**
* Object type
*
* The media type of the current object being requested:
* - `m` = text file, such as a manifest or playlist
* - `a` = audio only
* - `v` = video only
* - `av` = muxed audio and video
* - `i` = init segment
* - `c` = caption or subtitle
* - `tt` = ISOBMFF timed text track
* - `k` = cryptographic key, license or certificate.
* - `o` = other
*
* If the object type being requested is unknown, then this key MUST NOT be used.
*/
ot?: CMCDObjectType;
/**
* Top bitrate
*
* The highest bitrate rendition in the manifest or playlist that the client is allowed to play, given current codec, licensing and
* sizing constraints.
*
* Integer Kbps
*/
tb?: number;
//////////////////
// CMCD Request //
//////////////////
/**
* Buffer length
*
* The buffer length associated with the media object being requested. This value MUST be rounded to the nearest 100 ms. This key SHOULD only be
* sent with an object type of a, v or av.
*
* Integer milliseconds
*/
bl?: number;
/**
* Deadline
*
* Deadline from the request time until the first sample of this Segment/Object needs to be available in order to not create a buffer underrun or
any other playback problems. This value MUST be rounded to the nearest 100ms. For a playback rate of 1, this may be equivalent to the player's
* remaining buffer length.
*
* Integer milliseconds
*/
dl?: number;
/**
* Measured throughput
*
* The throughput between client and server, as measured by the client, and MUST be rounded to the nearest 100 kbps. This value, however derived,
* SHOULD be the value that the client is using to make its next Adaptive Bitrate switching decision. If the client is connected to multiple
* servers concurrently, it must take care to report only the throughput measured against the receiving server. If the client has multiple concurrent
* connections to the server, then the intent is that this value communicates the aggregate throughput the client sees across all those connections.
*
* Integer kbps
*/
mtp?: number;
/**
* Next object request
*
* Relative path of the next object to be requested. This can be used to trigger pre-fetching by the CDN. This MUST be a path relative to the current
* request. This string MUST be URLEncoded. The client SHOULD NOT depend upon any pre-fetch action being taken - it is merely a request for such a
* pre-fetch to take place.
*
* String
*/
nor?: string;
/**
* Next range request
*
* If the next request will be a partial object request, then this string denotes the byte range to be requested. If the nor field is not set, then the
object is assumed to match the object currently being requested. The client SHOULD NOT depend upon any pre-fetch action being taken - it is merely a
* request for such a pre-fetch to take place. Formatting is similar to the HTTP Range header, except that the unit MUST be byte, the Range: prefix is
* NOT required and specifying multiple ranges is NOT allowed. Valid combinations are:
*
* - `"\<range-start\>-"`
* - `"\<range-start\>-\<range-end\>"`
* - `"-\<suffix-length\>"`
*
* String
*/
nrr?: string;
/**
* Startup
*
* Key is included without a value if the object is needed urgently due to startup, seeking or recovery after a buffer-empty event. The media SHOULD NOT be
* rendering when this request is made. This key MUST NOT be sent if it is FALSE.
*
* Boolean
*/
su?: boolean;
//////////////////
// CMCD Session //
//////////////////
/**
* Content ID
*
* A unique string identifying the current content. Maximum length is 64 characters. This value is consistent across multiple different
* sessions and devices and is defined and updated at the discretion of the service provider.
*
* String
*/
cid?: string;
/**
* Playback rate
*
* `1` if real-time, `2` if double speed, `0` if not playing. SHOULD only be sent if not equal to `1`.
*
* Decimal
*/
pr?: number;
/**
* Streaming format
*
* The streaming format that defines the current request.
*
* - `d` = MPEG DASH
* - `h` = HTTP Live Streaming (HLS)
* - `s` = Smooth Streaming
* - `o` = other
*
* If the streaming format being requested is unknown, then this key MUST NOT be used.
*/
sf?: CMCDStreamingFormat;
/**
* Session ID
*
* A GUID identifying the current playback session. A playback session typically ties together segments belonging to a single media asset.
* Maximum length is 64 characters. It is RECOMMENDED to conform to the UUID specification.
*
* String
*/
sid?: string;
/**
* Stream type
* - `v` = all segments are available e.g., VOD
* - `l` = segments become available over time e.g., LIVE
*/
st?: CMCDStreamType;
/**
* CMCD version
*
* The version of this specification used for interpreting the defined key names and values. If this key is omitted, the client and server MUST
* interpret the values as being defined by version 1. Client SHOULD omit this field if the version is 1.
*
* Integer
*/
v?: number;
/////////////////
// CMCD Status //
/////////////////
/**
* Buffer starvation
*
* Key is included without a value if the buffer was starved at some point between the prior request and this object request,
* resulting in the player being in a rebuffering state and the video or audio playback being stalled. This key MUST NOT be
* sent if the buffer was not starved since the prior request.
*
* If the object type `ot` key is sent along with this key, then the `bs` key refers to the buffer associated with the particular
* object type. If no object type is communicated, then the buffer state applies to the current session.
*
* Boolean
*/
bs?: boolean;
/**
* Requested maximum throughput
*
* The requested maximum throughput that the client considers sufficient for delivery of the asset. Values MUST be rounded to the
* nearest 100kbps. For example, a client would indicate that the current segment, encoded at 2Mbps, is to be delivered at no more
* than 10Mbps, by using rtp=10000.
*
* Note: This can benefit clients by preventing buffer saturation through over-delivery and can also deliver a community benefit
* through fair-share delivery. The concept is that each client receives the throughput necessary for great performance, but no more.
* The CDN may not support the rtp feature.
*
* Integer kbps
*/
rtp?: number;
}
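A minimal sketch (not part of hls.js) of how a CMCD object built from the keys above might be serialized into a query parameter. Per the spec, keys are sent in alphabetical order, `true` booleans appear as bare keys, and `false` booleans are omitted; for brevity this sketch quotes every remaining string, although the spec transmits some values (such as ot and sf) as unquoted tokens:

function toCMCDQuery(cmcd: CMCD): string {
  const pairs: string[] = [];
  for (const key of Object.keys(cmcd).sort()) {
    const value = (cmcd as Record<string, unknown>)[key];
    if (value === undefined || value === false) {
      continue; // e.g. bs/su MUST NOT be sent when false
    }
    if (value === true) {
      pairs.push(key); // valueless keys, e.g. bs, su
    } else if (typeof value === 'number') {
      pairs.push(`${key}=${value}`);
    } else {
      pairs.push(`${key}="${value}"`); // simplification: quote everything else
    }
  }
  return 'CMCD=' + encodeURIComponent(pairs.join(','));
}

// toCMCDQuery({ br: 3200, bs: true, ot: CMCDObjectType.VIDEO })
// -> 'CMCD=' + encodeURIComponent('br=3200,bs,ot="v"')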

8
node_modules/hls.js/src/types/component-api.ts generated vendored Normal file

@ -0,0 +1,8 @@
export interface ComponentAPI {
destroy(): void;
}
export interface NetworkComponentAPI extends ComponentAPI {
startLoad(startPosition: number): void;
stopLoad(): void;
}

134
node_modules/hls.js/src/types/demuxer.ts generated vendored Normal file

@ -0,0 +1,134 @@
export interface Demuxer {
demux(
data: Uint8Array,
timeOffset: number,
isSampleAes?: boolean,
flush?: boolean
): DemuxerResult;
demuxSampleAes(
data: Uint8Array,
keyData: KeyData,
timeOffset: number
): Promise<DemuxerResult>;
flush(timeOffset?: number): DemuxerResult | Promise<DemuxerResult>;
destroy(): void;
resetInitSegment(
audioCodec: string | undefined,
videoCodec: string | undefined,
duration: number
);
resetTimeStamp(defaultInitPTS?: number | null): void;
resetContiguity(): void;
}
export interface DemuxerResult {
audioTrack: DemuxedAudioTrack;
avcTrack: DemuxedVideoTrack;
id3Track: DemuxedMetadataTrack;
textTrack: DemuxedUserdataTrack;
}
export interface DemuxedTrack {
type: string;
id: number;
pid: number;
inputTimeScale: number;
sequenceNumber: number;
samples:
| AudioSample[]
| AvcSample[]
| MetadataSample[]
| UserdataSample[]
| Uint8Array;
timescale?: number;
container?: string;
dropped: number;
duration?: number;
pesData?: ElementaryStreamData | null;
codec?: string;
}
export interface DemuxedAudioTrack extends DemuxedTrack {
config?: number[];
samplerate?: number;
isAAC?: boolean;
channelCount?: number;
manifestCodec?: string;
samples: AudioSample[];
}
export interface DemuxedVideoTrack extends DemuxedTrack {
width?: number;
height?: number;
pixelRatio?: [number, number];
audFound?: boolean;
pps?: number[];
sps?: number[];
naluState?: number;
samples: AvcSample[] | Uint8Array;
}
export interface DemuxedAvcTrack extends DemuxedVideoTrack {
samples: AvcSample[];
}
export interface PassthroughVideoTrack extends DemuxedVideoTrack {
samples: Uint8Array;
}
export interface DemuxedMetadataTrack extends DemuxedTrack {
samples: MetadataSample[];
}
export interface DemuxedUserdataTrack extends DemuxedTrack {
samples: UserdataSample[];
}
export interface MetadataSample {
pts: number;
dts: number;
len?: number;
data: Uint8Array;
}
export interface UserdataSample {
pts: number;
bytes: Uint8Array;
}
export interface AvcSample {
dts: number;
pts: number;
key: boolean;
frame: boolean;
units: AvcSampleUnit[];
debug: string;
length: number;
}
export interface AvcSampleUnit {
data: Uint8Array;
type: number;
}
export type AudioSample = {
unit: Uint8Array;
pts: number;
};
export type AudioFrame = {
sample: AudioSample;
length: number;
missing: number;
};
export interface ElementaryStreamData {
data: Uint8Array[];
size: number;
}
export interface KeyData {
method: string;
key: Uint8Array;
iv: Uint8Array;
}

348
node_modules/hls.js/src/types/events.ts generated vendored Normal file

@ -0,0 +1,348 @@
// eslint-disable-next-line import/no-duplicates
import type { Fragment } from '../loader/fragment';
// eslint-disable-next-line import/no-duplicates
import type { Part } from '../loader/fragment';
import type { LevelDetails } from '../loader/level-details';
import type { HlsUrlParameters, Level, LevelParsed } from './level';
import type { MediaPlaylist, MediaPlaylistType } from './media-playlist';
import type {
Loader,
LoaderContext,
LoaderResponse,
LoaderStats,
PlaylistLevelType,
PlaylistLoaderContext,
} from './loader';
import type { Track, TrackSet } from './track';
import type { SourceBufferName } from './buffer';
import type { ChunkMetadata } from './transmuxer';
import type { LoadStats } from '../loader/load-stats';
import type { ErrorDetails, ErrorTypes } from '../errors';
import type { MetadataSample, UserdataSample } from './demuxer';
import type { AttrList } from '../utils/attr-list';
import type { HlsListeners } from '../events';
export interface MediaAttachingData {
media: HTMLMediaElement;
}
export interface MediaAttachedData {
media: HTMLMediaElement;
}
export interface BufferCodecsData {
video?: Track;
audio?: Track;
}
export interface BufferCreatedData {
tracks: TrackSet;
}
export interface BufferAppendingData {
type: SourceBufferName;
frag: Fragment;
part: Part | null;
chunkMeta: ChunkMetadata;
parent: PlaylistLevelType;
data: Uint8Array;
}
export interface BufferAppendedData {
type: SourceBufferName;
frag: Fragment;
part: Part | null;
chunkMeta: ChunkMetadata;
parent: PlaylistLevelType;
timeRanges: Partial<Record<SourceBufferName, TimeRanges>>;
}
export interface BufferEOSData {
type?: SourceBufferName;
}
export interface BufferFlushingData {
startOffset: number;
endOffset: number;
endOffsetSubtitles?: number;
type: SourceBufferName | null;
}
export interface BufferFlushedData {
type: SourceBufferName;
}
export interface ManifestLoadingData {
url: string;
}
export interface ManifestLoadedData {
audioTracks: MediaPlaylist[];
captions?: MediaPlaylist[];
levels: LevelParsed[];
networkDetails: any;
sessionData: Record<string, AttrList> | null;
stats: LoaderStats;
subtitles?: MediaPlaylist[];
url: string;
}
export interface ManifestParsedData {
levels: Level[];
audioTracks: MediaPlaylist[];
subtitleTracks: MediaPlaylist[];
firstLevel: number;
stats: LoaderStats;
audio: boolean;
video: boolean;
altAudio: boolean;
}
export interface LevelSwitchingData extends Omit<Level, '_urlId'> {
level: number;
}
export interface LevelSwitchedData {
level: number;
}
export interface TrackLoadingData {
id: number;
groupId: string;
url: string;
deliveryDirectives: HlsUrlParameters | null;
}
export interface LevelLoadingData {
id: number;
level: number;
url: string;
deliveryDirectives: HlsUrlParameters | null;
}
export interface TrackLoadedData {
details: LevelDetails;
id: number;
groupId: string;
networkDetails: any;
stats: LoaderStats;
deliveryDirectives: HlsUrlParameters | null;
}
export interface LevelLoadedData {
details: LevelDetails;
id: number;
level: number;
networkDetails: any;
stats: LoaderStats;
deliveryDirectives: HlsUrlParameters | null;
}
export interface LevelUpdatedData {
details: LevelDetails;
level: number;
}
export interface LevelPTSUpdatedData {
details: LevelDetails;
level: Level;
drift: number;
type: string;
frag: Fragment;
start: number;
end: number;
}
export interface AudioTrackSwitchingData {
id: number;
name: string;
groupId: string;
type: MediaPlaylistType | 'main';
url: string;
}
export interface AudioTrackSwitchedData {
id: number;
}
export interface AudioTrackLoadedData extends TrackLoadedData {}
export interface AudioTracksUpdatedData {
audioTracks: MediaPlaylist[];
}
export interface SubtitleTracksUpdatedData {
subtitleTracks: MediaPlaylist[];
}
export interface SubtitleTrackSwitchData {
id: number;
name?: string;
groupId?: string;
type?: MediaPlaylistType | 'main';
url?: string;
}
export interface SubtitleTrackLoadedData extends TrackLoadedData {}
export interface TrackSwitchedData {
id: number;
}
export interface SubtitleFragProcessed {
success: boolean;
frag: Fragment;
}
export interface FragChangedData {
frag: Fragment;
}
export interface FPSDropData {
currentDropped: number;
currentDecoded: number;
totalDroppedFrames: number;
}
export interface FPSDropLevelCappingData {
droppedLevel: number;
level: number;
}
export interface ErrorData {
type: ErrorTypes;
details: ErrorDetails;
fatal: boolean;
buffer?: number;
bytes?: number;
context?: PlaylistLoaderContext;
error?: Error;
event?: keyof HlsListeners | 'demuxerWorker';
frag?: Fragment;
level?: number | undefined;
levelRetry?: boolean;
loader?: Loader<LoaderContext>;
networkDetails?: any;
mimeType?: string;
reason?: string;
response?: LoaderResponse;
url?: string;
parent?: PlaylistLevelType;
err?: {
// comes from transmuxer interface
message: string;
};
}
export interface SubtitleFragProcessedData {
success: boolean;
frag: Fragment;
error?: Error;
}
export interface CuesParsedData {
type: 'captions' | 'subtitles';
cues: any;
track: string;
}
export interface NonNativeTextTrack {
_id?: string;
label: any;
kind: string;
default: boolean;
closedCaptions?: MediaPlaylist;
subtitleTrack?: MediaPlaylist;
}
export interface NonNativeTextTracksData {
tracks: Array<NonNativeTextTrack>;
}
export interface InitPTSFoundData {
id: string;
frag: Fragment;
initPTS: number;
timescale: number;
}
export interface FragLoadingData {
frag: Fragment;
part?: Part;
targetBufferTime: number | null;
}
export interface FragLoadEmergencyAbortedData {
frag: Fragment;
part: Part | null;
stats: LoaderStats;
}
export interface FragLoadedData {
frag: Fragment;
part: Part | null;
payload: ArrayBuffer;
networkDetails: unknown;
}
export interface PartsLoadedData {
frag: Fragment;
part: Part | null;
partsLoaded?: FragLoadedData[];
}
export interface FragDecryptedData {
frag: Fragment;
payload: ArrayBuffer;
stats: {
tstart: number;
tdecrypt: number;
};
}
export interface FragParsingInitSegmentData {}
export interface FragParsingUserdataData {
id: string;
frag: Fragment;
samples: UserdataSample[];
}
export interface FragParsingMetadataData {
id: string;
frag: Fragment;
samples: MetadataSample[];
}
export interface FragParsedData {
frag: Fragment;
part: Part | null;
}
export interface FragBufferedData {
stats: LoadStats;
frag: Fragment;
part: Part | null;
id: string;
}
export interface LevelsUpdatedData {
levels: Array<Level>;
}
export interface KeyLoadingData {
frag: Fragment;
}
export interface KeyLoadedData {
frag: Fragment;
}
export interface BackBufferData {
bufferEnd: number;
}
/**
* Deprecated; please use BackBufferData
*/
export interface LiveBackBufferData extends BackBufferData {}
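These interfaces type the payloads that hls.js delivers with each event. A brief usage sketch against the public API (the deep type-import path is illustrative):

import Hls from 'hls.js';
// illustrative path; in practice these types are re-exported by the package
import type { ManifestParsedData, ErrorData } from 'hls.js/src/types/events';

const hls = new Hls();
hls.on(Hls.Events.MANIFEST_PARSED, (_event, data: ManifestParsedData) => {
  console.log(`${data.levels.length} quality levels, starting at ${data.firstLevel}`);
});
hls.on(Hls.Events.ERROR, (_event, data: ErrorData) => {
  if (data.fatal) {
    console.error(`fatal ${data.type}: ${data.details}`);
  }
});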

21
node_modules/hls.js/src/types/fragment-tracker.ts generated vendored Normal file

@ -0,0 +1,21 @@
import type { Fragment } from '../loader/fragment';
import type { SourceBufferName } from './buffer';
import type { FragLoadedData } from './events';
export interface FragmentEntity {
body: Fragment;
loaded: FragLoadedData | null;
backtrack: FragLoadedData | null;
buffered: boolean;
range: { [key in SourceBufferName]: FragmentBufferedRange };
}
export interface FragmentTimeRange {
startPTS: number;
endPTS: number;
}
export interface FragmentBufferedRange {
time: Array<FragmentTimeRange>;
partial: boolean;
}

6
node_modules/hls.js/src/types/general.ts generated vendored Normal file

@ -0,0 +1,6 @@
/**
* Make specific properties in T required
*/
export type RequiredProperties<T, K extends keyof T> = T & {
[P in K]-?: T[P];
};
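For example (names illustrative):

type Opts = { retries?: number; label?: string };
type WithRetries = RequiredProperties<Opts, 'retries'>;
// -> { retries: number; label?: string }: retries is no longer optional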

144
node_modules/hls.js/src/types/level.ts generated vendored Normal file

@ -0,0 +1,144 @@
import { LevelDetails } from '../loader/level-details';
import { AttrList } from '../utils/attr-list';
export interface LevelParsed {
attrs: LevelAttributes;
audioCodec?: string;
bitrate: number;
details?: LevelDetails;
height?: number;
id?: number;
level?: number;
name: string;
textCodec?: string;
unknownCodecs?: string[];
url: string;
videoCodec?: string;
width?: number;
}
export interface LevelAttributes extends AttrList {
AUDIO?: string;
AUTOSELECT?: string;
'AVERAGE-BANDWIDTH'?: string;
BANDWIDTH?: string;
BYTERANGE?: string;
'CLOSED-CAPTIONS'?: string;
CODECS?: string;
DEFAULT?: string;
FORCED?: string;
'FRAME-RATE'?: string;
LANGUAGE?: string;
NAME?: string;
'PROGRAM-ID'?: string;
RESOLUTION?: string;
SUBTITLES?: string;
TYPE?: string;
URI?: string;
}
export enum HlsSkip {
No = '',
Yes = 'YES',
v2 = 'v2',
}
export function getSkipValue(details: LevelDetails, msn?: number): HlsSkip {
const { canSkipUntil, canSkipDateRanges, endSN } = details;
const snChangeGoal = msn !== undefined ? msn - endSN : 0;
if (canSkipUntil && snChangeGoal < canSkipUntil) {
if (canSkipDateRanges) {
return HlsSkip.v2;
}
return HlsSkip.Yes;
}
return HlsSkip.No;
}
export class HlsUrlParameters {
msn?: number;
part?: number;
skip?: HlsSkip;
constructor(msn?: number, part?: number, skip?: HlsSkip) {
this.msn = msn;
this.part = part;
this.skip = skip;
}
addDirectives(uri: string): string | never {
const url: URL = new self.URL(uri);
if (this.msn !== undefined) {
url.searchParams.set('_HLS_msn', this.msn.toString());
}
if (this.part !== undefined) {
url.searchParams.set('_HLS_part', this.part.toString());
}
if (this.skip) {
url.searchParams.set('_HLS_skip', this.skip);
}
return url.toString();
}
}
export class Level {
public readonly attrs: LevelAttributes;
public readonly audioCodec: string | undefined;
public readonly bitrate: number;
public readonly codecSet: string;
public readonly height: number;
public readonly id: number;
public readonly name: string | undefined;
public readonly videoCodec: string | undefined;
public readonly width: number;
public readonly unknownCodecs: string[] | undefined;
public audioGroupIds?: string[];
public details?: LevelDetails;
public fragmentError: number = 0;
public loadError: number = 0;
public loaded?: { bytes: number; duration: number };
public realBitrate: number = 0;
public textGroupIds?: string[];
public url: string[];
private _urlId: number = 0;
constructor(data: LevelParsed) {
this.url = [data.url];
this.attrs = data.attrs;
this.bitrate = data.bitrate;
if (data.details) {
this.details = data.details;
}
this.id = data.id || 0;
this.name = data.name;
this.width = data.width || 0;
this.height = data.height || 0;
this.audioCodec = data.audioCodec;
this.videoCodec = data.videoCodec;
this.unknownCodecs = data.unknownCodecs;
this.codecSet = [data.videoCodec, data.audioCodec]
.filter((c) => c)
.join(',')
.replace(/\.[^.,]+/g, '');
}
get maxBitrate(): number {
return Math.max(this.realBitrate, this.bitrate);
}
get uri(): string {
return this.url[this._urlId] || '';
}
get urlId(): number {
return this._urlId;
}
set urlId(value: number) {
const newValue = value % this.url.length;
if (this._urlId !== newValue) {
this.details = undefined;
this._urlId = newValue;
}
}
}
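A brief sketch of the LL-HLS delivery-directive flow above, in a browser or worker context where self.URL is available (values illustrative):

const directives = new HlsUrlParameters(1000, 2, HlsSkip.Yes);
directives.addDirectives('https://example.com/live/playlist.m3u8');
// -> 'https://example.com/live/playlist.m3u8?_HLS_msn=1000&_HLS_part=2&_HLS_skip=YES'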

168
node_modules/hls.js/src/types/loader.ts generated vendored Normal file

@ -0,0 +1,168 @@
import type { Fragment } from '../loader/fragment';
import type { Part } from '../loader/fragment';
import type { LevelDetails } from '../loader/level-details';
import type { HlsUrlParameters } from './level';
export interface LoaderContext {
// target URL
url: string;
// loader response type (arraybuffer or default response type for playlist)
responseType: string;
// headers
headers?: Record<string, string>;
// start byte range offset
rangeStart?: number;
// end byte range offset
rangeEnd?: number;
// true if onProgress should report partial chunk of loaded content
progressData?: boolean;
}
export interface FragmentLoaderContext extends LoaderContext {
frag: Fragment;
part: Part | null;
}
export interface LoaderConfiguration {
// Max number of load retries
maxRetry: number;
// Timeout after which `onTimeOut` callback will be triggered
// (if loading is still not finished after that delay)
timeout: number;
// Delay between an I/O error and following connection retry (ms).
// This to avoid spamming the server
retryDelay: number;
// max connection retry delay (ms)
maxRetryDelay: number;
// When streaming progressively, this is the minimum chunk size required to emit a PROGRESS event
highWaterMark: number;
}
export interface LoaderResponse {
url: string;
data: string | ArrayBuffer;
}
export interface LoaderStats {
aborted: boolean;
loaded: number;
retry: number;
total: number;
chunkCount: number;
bwEstimate: number;
loading: HlsProgressivePerformanceTiming;
parsing: HlsPerformanceTiming;
buffering: HlsProgressivePerformanceTiming;
}
export interface HlsPerformanceTiming {
start: number;
end: number;
}
export interface HlsChunkPerformanceTiming extends HlsPerformanceTiming {
executeStart: number;
executeEnd: number;
}
export interface HlsProgressivePerformanceTiming extends HlsPerformanceTiming {
first: number;
}
export type LoaderOnSuccess<T extends LoaderContext> = (
response: LoaderResponse,
stats: LoaderStats,
context: T,
networkDetails: any
) => void;
export type LoaderOnProgress<T extends LoaderContext> = (
stats: LoaderStats,
context: T,
data: string | ArrayBuffer,
networkDetails: any
) => void;
export type LoaderOnError<T extends LoaderContext> = (
error: {
// error status code
code: number;
// error description
text: string;
},
context: T,
networkDetails: any
) => void;
export type LoaderOnTimeout<T extends LoaderContext> = (
stats: LoaderStats,
context: T,
networkDetails: any
) => void;
export type LoaderOnAbort<T extends LoaderContext> = (
stats: LoaderStats,
context: T,
networkDetails: any
) => void;
export interface LoaderCallbacks<T extends LoaderContext> {
onSuccess: LoaderOnSuccess<T>;
onError: LoaderOnError<T>;
onTimeout: LoaderOnTimeout<T>;
onAbort?: LoaderOnAbort<T>;
onProgress?: LoaderOnProgress<T>;
}
export interface Loader<T extends LoaderContext> {
destroy(): void;
abort(): void;
load(
context: LoaderContext,
config: LoaderConfiguration,
callbacks: LoaderCallbacks<T>
): void;
/**
* `getCacheAge()` is called by hls.js to get the duration that a given object
* has been sitting in a cache proxy when playing live. If implemented,
* this should return a value in seconds.
*
* For HTTP based loaders, this should return the contents of the "age" header.
*
* @returns the age of the object being loaded, in seconds
*/
getCacheAge?: () => number | null;
context: T;
stats: LoaderStats;
}
export enum PlaylistContextType {
MANIFEST = 'manifest',
LEVEL = 'level',
AUDIO_TRACK = 'audioTrack',
SUBTITLE_TRACK = 'subtitleTrack',
}
export enum PlaylistLevelType {
MAIN = 'main',
AUDIO = 'audio',
SUBTITLE = 'subtitle',
}
export interface PlaylistLoaderContext extends LoaderContext {
loader?: Loader<PlaylistLoaderContext>;
type: PlaylistContextType;
// the level index to load
level: number | null;
// level or track id from LevelLoadingData / TrackLoadingData
id: number | null;
// track group id
groupId: string | null;
// defines if the loader is handling a sidx request for the playlist
isSidxRequest?: boolean;
// internal representation of a parsed m3u8 level playlist
levelDetails?: LevelDetails;
// Blocking playlist request delivery directives (or null if none were added to the playlist url)
deliveryDirectives: HlsUrlParameters | null;
}
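A minimal sketch of a custom loader satisfying the Loader<LoaderContext> contract above. It only demonstrates the callback flow; a real loader must also honor the retry and timeout fields of LoaderConfiguration (the canned response is illustrative):

class StubLoader implements Loader<LoaderContext> {
  context!: LoaderContext;
  stats: LoaderStats = {
    aborted: false,
    loaded: 0,
    retry: 0,
    total: 0,
    chunkCount: 0,
    bwEstimate: 0,
    loading: { start: 0, first: 0, end: 0 },
    parsing: { start: 0, end: 0 },
    buffering: { start: 0, first: 0, end: 0 },
  };
  destroy(): void {}
  abort(): void {
    this.stats.aborted = true;
  }
  load(
    context: LoaderContext,
    _config: LoaderConfiguration, // retries/timeouts ignored in this stub
    callbacks: LoaderCallbacks<LoaderContext>
  ): void {
    this.context = context;
    const data = '#EXTM3U\n#EXT-X-VERSION:3'; // canned playlist body
    this.stats.loaded = this.stats.total = data.length;
    callbacks.onSuccess({ url: context.url, data }, this.stats, context, null);
  }
}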

28
node_modules/hls.js/src/types/media-playlist.ts generated vendored Normal file

@ -0,0 +1,28 @@
import type { LevelParsed } from './level';
export interface AudioGroup {
id?: string;
codec?: string;
}
export type AudioPlaylistType = 'AUDIO';
export type MainPlaylistType = AudioPlaylistType | 'VIDEO';
export type SubtitlePlaylistType = 'SUBTITLES' | 'CLOSED-CAPTIONS';
export type MediaPlaylistType = MainPlaylistType | SubtitlePlaylistType;
// audioTracks, captions and subtitles returned by `M3U8Parser.parseMasterPlaylistMedia`
export interface MediaPlaylist extends LevelParsed {
autoselect: boolean; // implicit false if not present
default: boolean; // implicit false if not present
forced: boolean; // implicit false if not present
groupId?: string; // not optional in HLS playlists, but it isn't always specified.
id: number; // incrementing number to track media playlists
instreamId?: string;
lang?: string;
name: string;
// 'main' is a custom type added to signal an audioCodec in the main track; see playlist-loader~L310
type: MediaPlaylistType | 'main';
}

73
node_modules/hls.js/src/types/remuxer.ts generated vendored Normal file

@ -0,0 +1,73 @@
import type { TrackSet } from './track';
import {
DemuxedAudioTrack,
DemuxedMetadataTrack,
DemuxedUserdataTrack,
DemuxedVideoTrack,
MetadataSample,
UserdataSample,
} from './demuxer';
import type { SourceBufferName } from './buffer';
import type { PlaylistLevelType } from './loader';
export interface Remuxer {
remux(
audioTrack: DemuxedAudioTrack,
videoTrack: DemuxedVideoTrack,
id3Track: DemuxedMetadataTrack,
textTrack: DemuxedUserdataTrack,
timeOffset: number,
accurateTimeOffset: boolean,
flush: boolean,
playlistType: PlaylistLevelType
): RemuxerResult;
resetInitSegment(
initSegment: Uint8Array | undefined,
audioCodec: string | undefined,
videoCodec: string | undefined
): void;
resetTimeStamp(defaultInitPTS): void;
resetNextTimestamp(): void;
destroy(): void;
}
export interface RemuxedTrack {
data1: Uint8Array;
data2?: Uint8Array;
startPTS: number;
endPTS: number;
startDTS: number;
endDTS: number;
type: SourceBufferName;
hasAudio: boolean;
hasVideo: boolean;
independent?: boolean;
firstKeyFrame?: number;
nb: number;
transferredData1?: ArrayBuffer;
transferredData2?: ArrayBuffer;
dropped?: number;
}
export interface RemuxedMetadata {
samples: MetadataSample[];
}
export interface RemuxedUserdata {
samples: UserdataSample[];
}
export interface RemuxerResult {
audio?: RemuxedTrack;
video?: RemuxedTrack;
text?: RemuxedUserdata;
id3?: RemuxedMetadata;
initSegment?: InitSegmentData;
independent?: boolean;
}
export interface InitSegmentData {
tracks?: TrackSet;
initPTS: number | undefined;
timescale: number | undefined;
}

15
node_modules/hls.js/src/types/track.ts generated vendored Normal file

@ -0,0 +1,15 @@
export interface TrackSet {
audio?: Track;
video?: Track;
audiovideo?: Track;
}
export interface Track {
id: 'audio' | 'main';
buffer?: SourceBuffer; // eslint-disable-line no-restricted-globals
container: string;
codec?: string;
initSegment?: Uint8Array;
levelCodec?: string;
metadata?: any;
}

46
node_modules/hls.js/src/types/transmuxer.ts generated vendored Normal file

@ -0,0 +1,46 @@
import type { RemuxerResult } from './remuxer';
import type { HlsChunkPerformanceTiming } from './loader';
import type { SourceBufferName } from './buffer';
export interface TransmuxerResult {
remuxResult: RemuxerResult;
chunkMeta: ChunkMetadata;
}
export class ChunkMetadata {
public readonly level: number;
public readonly sn: number;
public readonly part: number;
public readonly id: number;
public readonly size: number;
public readonly partial: boolean;
public readonly transmuxing: HlsChunkPerformanceTiming =
getNewPerformanceTiming();
public readonly buffering: {
[key in SourceBufferName]: HlsChunkPerformanceTiming;
} = {
audio: getNewPerformanceTiming(),
video: getNewPerformanceTiming(),
audiovideo: getNewPerformanceTiming(),
};
constructor(
level: number,
sn: number,
id: number,
size = 0,
part = -1,
partial = false
) {
this.level = level;
this.sn = sn;
this.id = id;
this.size = size;
this.part = part;
this.partial = partial;
}
}
function getNewPerformanceTiming(): HlsChunkPerformanceTiming {
return { start: 0, executeStart: 0, executeEnd: 0, end: 0 };
}
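For example, metadata for the first 512 KiB chunk of segment 12 on level 2 (values illustrative):

const meta = new ChunkMetadata(2, 12, 1, 524288);
// meta.transmuxing and meta.buffering accumulate per-stage performance
// timings as the chunk moves through the transmuxer and each SourceBuffer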

6
node_modules/hls.js/src/types/tuples.ts generated vendored Normal file

@ -0,0 +1,6 @@
export type Tail<T extends any[]> = ((...t: T) => any) extends (
_: any,
...tail: infer U
) => any
? U
: [];
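For example:

type Args = [url: string, retries: number, verbose: boolean];
type Rest = Tail<Args>; // [retries: number, verbose: boolean]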

9
node_modules/hls.js/src/types/vtt.ts generated vendored Normal file

@ -0,0 +1,9 @@
export type VTTCCs = {
ccOffset: number;
presentationOffset: number;
[key: number]: {
start: number;
prevCC: number;
new: boolean;
};
};

107
node_modules/hls.js/src/utils/attr-list.ts generated vendored Normal file

@ -0,0 +1,107 @@
const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/; // eslint-disable-line no-useless-escape
const ATTR_LIST_REGEX = /\s*(.+?)\s*=((?:\".*?\")|.*?)(?:,|$)/g; // eslint-disable-line no-useless-escape
// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
export class AttrList {
[key: string]: any;
constructor(attrs: string | Record<string, any>) {
if (typeof attrs === 'string') {
attrs = AttrList.parseAttrList(attrs);
}
for (const attr in attrs) {
if (attrs.hasOwnProperty(attr)) {
this[attr] = attrs[attr];
}
}
}
decimalInteger(attrName: string): number {
const intValue = parseInt(this[attrName], 10);
if (intValue > Number.MAX_SAFE_INTEGER) {
return Infinity;
}
return intValue;
}
hexadecimalInteger(attrName: string) {
if (this[attrName]) {
let stringValue = (this[attrName] || '0x').slice(2);
stringValue = (stringValue.length & 1 ? '0' : '') + stringValue;
const value = new Uint8Array(stringValue.length / 2);
for (let i = 0; i < stringValue.length / 2; i++) {
value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16);
}
return value;
} else {
return null;
}
}
hexadecimalIntegerAsNumber(attrName: string): number {
const intValue = parseInt(this[attrName], 16);
if (intValue > Number.MAX_SAFE_INTEGER) {
return Infinity;
}
return intValue;
}
decimalFloatingPoint(attrName: string): number {
return parseFloat(this[attrName]);
}
optionalFloat(attrName: string, defaultValue: number): number {
const value = this[attrName];
return value ? parseFloat(value) : defaultValue;
}
enumeratedString(attrName: string): string | undefined {
return this[attrName];
}
bool(attrName: string): boolean {
return this[attrName] === 'YES';
}
decimalResolution(attrName: string):
| {
width: number;
height: number;
}
| undefined {
const res = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);
if (res === null) {
return undefined;
}
return {
width: parseInt(res[1], 10),
height: parseInt(res[2], 10),
};
}
static parseAttrList(input: string): Record<string, any> {
let match;
const attrs = {};
const quote = '"';
ATTR_LIST_REGEX.lastIndex = 0;
while ((match = ATTR_LIST_REGEX.exec(input)) !== null) {
let value = match[2];
if (
value.indexOf(quote) === 0 &&
value.lastIndexOf(quote) === value.length - 1
) {
value = value.slice(1, -1);
}
attrs[match[1]] = value;
}
return attrs;
}
}
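A brief usage sketch with a typical EXT-X-STREAM-INF attribute string (values illustrative):

const attrs = new AttrList(
  'BANDWIDTH=2962588,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2"'
);
attrs.decimalInteger('BANDWIDTH'); // 2962588
attrs.decimalResolution('RESOLUTION'); // { width: 1280, height: 720 }
attrs.CODECS; // 'avc1.64001f,mp4a.40.2' (surrounding quotes stripped)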

46
node_modules/hls.js/src/utils/binary-search.ts generated vendored Normal file

@ -0,0 +1,46 @@
type BinarySearchComparison<T> = (candidate: T) => -1 | 0 | 1;
const BinarySearch = {
/**
* Searches for an item in an array which matches a certain condition.
* This requires the condition to only match one item in the array,
* and for the array to be ordered.
*
* @param {Array<T>} list The array to search.
* @param {BinarySearchComparison<T>} comparisonFn
* Called and provided a candidate item as the first argument.
* Should return:
* > -1 if the item should be located at a lower index than the provided item.
* > 1 if the item should be located at a higher index than the provided item.
* > 0 if the item is the item you're looking for.
*
* @return {T | null} The object if it is found or null otherwise.
*/
search: function <T>(
list: T[],
comparisonFn: BinarySearchComparison<T>
): T | null {
let minIndex: number = 0;
let maxIndex: number = list.length - 1;
let currentIndex: number | null = null;
let currentElement: T | null = null;
while (minIndex <= maxIndex) {
currentIndex = ((minIndex + maxIndex) / 2) | 0;
currentElement = list[currentIndex];
const comparisonResult = comparisonFn(currentElement);
if (comparisonResult > 0) {
minIndex = currentIndex + 1;
} else if (comparisonResult < 0) {
maxIndex = currentIndex - 1;
} else {
return currentElement;
}
}
return null;
},
};
export default BinarySearch;
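A brief usage sketch: locating the fragment whose [start, start + duration) interval contains a playback position in a sorted list (the Frag shape is illustrative):

type Frag = { start: number; duration: number };
const frags: Frag[] = [
  { start: 0, duration: 4 },
  { start: 4, duration: 4 },
  { start: 8, duration: 4 },
];
const pos = 5;
const match = BinarySearch.search(frags, (frag) =>
  pos < frag.start ? -1 : pos >= frag.start + frag.duration ? 1 : 0
);
// match -> { start: 4, duration: 4 }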

178
node_modules/hls.js/src/utils/buffer-helper.ts generated vendored Normal file

@ -0,0 +1,178 @@
/**
* @module BufferHelper
*
* Providing methods dealing with buffer length retrieval for example.
*
* In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
*
* Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
*/
import { logger } from './logger';
type BufferTimeRange = {
start: number;
end: number;
};
export type Bufferable = {
buffered: TimeRanges;
};
export type BufferInfo = {
len: number;
start: number;
end: number;
nextStart?: number;
};
const noopBuffered: TimeRanges = {
length: 0,
start: () => 0,
end: () => 0,
};
export class BufferHelper {
/**
* Return true if `media`'s buffered include `position`
* @param {Bufferable} media
* @param {number} position
* @returns {boolean}
*/
static isBuffered(media: Bufferable, position: number): boolean {
try {
if (media) {
const buffered = BufferHelper.getBuffered(media);
for (let i = 0; i < buffered.length; i++) {
if (position >= buffered.start(i) && position <= buffered.end(i)) {
return true;
}
}
}
} catch (error) {
// this is to catch
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
// This SourceBuffer has been removed from the parent media source
}
return false;
}
static bufferInfo(
media: Bufferable | null,
pos: number,
maxHoleDuration: number
): BufferInfo {
try {
if (media) {
const vbuffered = BufferHelper.getBuffered(media);
const buffered: BufferTimeRange[] = [];
let i: number;
for (i = 0; i < vbuffered.length; i++) {
buffered.push({ start: vbuffered.start(i), end: vbuffered.end(i) });
}
return this.bufferedInfo(buffered, pos, maxHoleDuration);
}
} catch (error) {
// this is to catch
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
// This SourceBuffer has been removed from the parent media source
}
return { len: 0, start: pos, end: pos, nextStart: undefined };
}
static bufferedInfo(
buffered: BufferTimeRange[],
pos: number,
maxHoleDuration: number
): {
len: number;
start: number;
end: number;
nextStart?: number;
} {
pos = Math.max(0, pos);
// sort on buffer.start/smaller end (IE does not always return sorted buffered range)
buffered.sort(function (a, b) {
const diff = a.start - b.start;
if (diff) {
return diff;
} else {
return b.end - a.end;
}
});
let buffered2: BufferTimeRange[] = [];
if (maxHoleDuration) {
// there might be some small holes between buffer time range
// consider that holes smaller than maxHoleDuration are irrelevant and build another
// buffer time range representations that discards those holes
for (let i = 0; i < buffered.length; i++) {
const buf2len = buffered2.length;
if (buf2len) {
const buf2end = buffered2[buf2len - 1].end;
// if small hole (value between 0 and maxHoleDuration) or overlapping (negative)
if (buffered[i].start - buf2end < maxHoleDuration) {
// merge overlapping time ranges
// update lastRange.end only if smaller than item.end
// e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
// whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
if (buffered[i].end > buf2end) {
buffered2[buf2len - 1].end = buffered[i].end;
}
} else {
// big hole
buffered2.push(buffered[i]);
}
} else {
// first value
buffered2.push(buffered[i]);
}
}
} else {
buffered2 = buffered;
}
let bufferLen = 0;
// bufferStartNext can possibly be undefined based on the conditional logic below
let bufferStartNext: number | undefined;
// bufferStart and bufferEnd are buffer boundaries around current video position
let bufferStart: number = pos;
let bufferEnd: number = pos;
for (let i = 0; i < buffered2.length; i++) {
const start = buffered2[i].start;
const end = buffered2[i].end;
// logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
if (pos + maxHoleDuration >= start && pos < end) {
// play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
bufferStart = start;
bufferEnd = end;
bufferLen = bufferEnd - pos;
} else if (pos + maxHoleDuration < start) {
bufferStartNext = start;
break;
}
}
return {
len: bufferLen,
start: bufferStart || 0,
end: bufferEnd || 0,
nextStart: bufferStartNext,
};
}
/**
* Safe method to get buffered property.
* SourceBuffer.buffered may throw if SourceBuffer is removed from its MediaSource
*/
static getBuffered(media: Bufferable): TimeRanges {
try {
return media.buffered;
} catch (e) {
logger.log('failed to get media.buffered', e);
return noopBuffered;
}
}
}
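A brief usage sketch against a media element; holes shorter than maxHoleDuration (here 0.5 s) are bridged when computing the forward buffer:

const video = document.querySelector('video') as HTMLVideoElement;
const info = BufferHelper.bufferInfo(video, video.currentTime, 0.5);
// info.len       -> seconds buffered ahead of currentTime
// info.end       -> end of the contiguous range around currentTime
// info.nextStart -> start of the next discontiguous range, if any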

1463
node_modules/hls.js/src/utils/cea-608-parser.ts generated vendored Normal file

File diff suppressed because it is too large

42
node_modules/hls.js/src/utils/chunker.ts generated vendored Normal file

@ -0,0 +1,42 @@
import { appendUint8Array } from './mp4-tools';
import { sliceUint8 } from './typed-array';
export default class Chunker {
private chunkSize: number;
public cache: Uint8Array | null = null;
constructor(chunkSize = Math.pow(2, 19)) {
this.chunkSize = chunkSize;
}
public push(data: Uint8Array): Array<Uint8Array> {
const { cache, chunkSize } = this;
const result: Array<Uint8Array> = [];
let temp: Uint8Array | null = null;
if (cache?.length) {
temp = appendUint8Array(cache, data);
this.cache = null;
} else {
temp = data;
}
if (temp.length < chunkSize) {
this.cache = temp;
return result;
}
if (temp.length > chunkSize) {
let offset = 0;
const len = temp.length;
while (offset < len - chunkSize) {
result.push(sliceUint8(temp, offset, offset + chunkSize));
offset += chunkSize;
}
this.cache = sliceUint8(temp, offset);
} else {
result.push(temp);
}
return result;
}
}
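A brief usage sketch with a deliberately tiny chunk size of 4 bytes (the default is 2^19 = 512 KiB); partial data is cached until a full chunk can be emitted:

const chunker = new Chunker(4);
chunker.push(new Uint8Array([1, 2]));
// -> [] (2 bytes cached, still below the chunk size)
chunker.push(new Uint8Array([3, 4, 5, 6]));
// -> [Uint8Array [1, 2, 3, 4]]; the remainder [5, 6] stays in chunker.cache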

82
node_modules/hls.js/src/utils/codecs.ts generated vendored Normal file

@ -0,0 +1,82 @@
// from http://mp4ra.org/codecs.html
const sampleEntryCodesISO = {
audio: {
a3ds: true,
'ac-3': true,
'ac-4': true,
alac: true,
alaw: true,
dra1: true,
'dts+': true,
'dts-': true,
dtsc: true,
dtse: true,
dtsh: true,
'ec-3': true,
enca: true,
g719: true,
g726: true,
m4ae: true,
mha1: true,
mha2: true,
mhm1: true,
mhm2: true,
mlpa: true,
mp4a: true,
'raw ': true,
Opus: true,
samr: true,
sawb: true,
sawp: true,
sevc: true,
sqcp: true,
ssmv: true,
twos: true,
ulaw: true,
},
video: {
avc1: true,
avc2: true,
avc3: true,
avc4: true,
avcp: true,
av01: true,
drac: true,
dvav: true,
dvhe: true,
encv: true,
hev1: true,
hvc1: true,
mjp2: true,
mp4v: true,
mvc1: true,
mvc2: true,
mvc3: true,
mvc4: true,
resv: true,
rv60: true,
s263: true,
svc1: true,
svc2: true,
'vc-1': true,
vp08: true,
vp09: true,
},
text: {
stpp: true,
wvtt: true,
},
};
export type CodecType = 'audio' | 'video';
export function isCodecType(codec: string, type: CodecType): boolean {
const typeCodes = sampleEntryCodesISO[type];
return !!typeCodes && typeCodes[codec.slice(0, 4)] === true;
}
export function isCodecSupportedInMp4(codec: string, type: CodecType): boolean {
return MediaSource.isTypeSupported(
`${type || 'video'}/mp4;codecs="${codec}"`
);
}
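For example:

isCodecType('mp4a.40.2', 'audio'); // true: 'mp4a' is a known audio sample entry
isCodecType('avc1.64001f', 'audio'); // false: 'avc1' is a video sample entry
isCodecSupportedInMp4('avc1.64001f', 'video'); // defers to MediaSource.isTypeSupported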

96
node_modules/hls.js/src/utils/cues.ts generated vendored Normal file

@ -0,0 +1,96 @@
import { fixLineBreaks } from './vttparser';
import type { CaptionScreen, Row } from './cea-608-parser';
import { generateCueId } from './webvtt-parser';
import { addCueToTrack } from './texttrack-utils';
const WHITESPACE_CHAR = /\s/;
export interface CuesInterface {
newCue(
track: TextTrack | null,
startTime: number,
endTime: number,
captionScreen: CaptionScreen
): VTTCue[];
}
const Cues: CuesInterface = {
newCue(
track: TextTrack | null,
startTime: number,
endTime: number,
captionScreen: CaptionScreen
): VTTCue[] {
const result: VTTCue[] = [];
let row: Row;
// the type data states this is VTTCue, but it can potentially be a TextTrackCue on old browsers
let cue: VTTCue;
let indenting: boolean;
let indent: number;
let text: string;
const Cue = (self.VTTCue || self.TextTrackCue) as any;
for (let r = 0; r < captionScreen.rows.length; r++) {
row = captionScreen.rows[r];
indenting = true;
indent = 0;
text = '';
if (!row.isEmpty()) {
for (let c = 0; c < row.chars.length; c++) {
if (WHITESPACE_CHAR.test(row.chars[c].uchar) && indenting) {
indent++;
} else {
text += row.chars[c].uchar;
indenting = false;
}
}
// To be used for cleaning-up orphaned roll-up captions
row.cueStartTime = startTime;
// Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
if (startTime === endTime) {
endTime += 0.0001;
}
if (indent >= 16) {
indent--;
} else {
indent++;
}
const cueText = fixLineBreaks(text.trim());
const id = generateCueId(startTime, endTime, cueText);
// If this cue already exists in the track do not push it
if (!track || !track.cues || !track.cues.getCueById(id)) {
cue = new Cue(startTime, endTime, cueText);
cue.id = id;
cue.line = r + 1;
cue.align = 'left';
// Clamp the position between 10 and 80 percent (CEA-608 PAC indent code)
// https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
// Firefox throws an exception and captions break with out of bounds 0-100 values
cue.position = 10 + Math.min(80, Math.floor((indent * 8) / 32) * 10);
result.push(cue);
}
}
}
if (track && result.length) {
// Sort bottom cues in reverse order so that they render in line order when overlapping in Chrome
result.sort((cueA, cueB) => {
if (cueA.line === 'auto' || cueB.line === 'auto') {
return 0;
}
if (cueA.line > 8 && cueB.line > 8) {
return cueB.line - cueA.line;
}
return cueA.line - cueB.line;
});
result.forEach((cue) => addCueToTrack(track, cue));
}
return result;
},
};
export default Cues;

228
node_modules/hls.js/src/utils/discontinuities.ts generated vendored Normal file

@ -0,0 +1,228 @@
import { logger } from './logger';
import type { Fragment } from '../loader/fragment';
import type { LevelDetails } from '../loader/level-details';
import type { Level } from '../types/level';
import type { RequiredProperties } from '../types/general';
import { adjustSliding } from '../controller/level-helper';
export function findFirstFragWithCC(fragments: Fragment[], cc: number) {
let firstFrag: Fragment | null = null;
for (let i = 0, len = fragments.length; i < len; i++) {
const currentFrag = fragments[i];
if (currentFrag && currentFrag.cc === cc) {
firstFrag = currentFrag;
break;
}
}
return firstFrag;
}
export function shouldAlignOnDiscontinuities(
lastFrag: Fragment | null,
lastLevel: Level,
details: LevelDetails
): lastLevel is RequiredProperties<Level, 'details'> {
if (lastLevel.details) {
if (
details.endCC > details.startCC ||
(lastFrag && lastFrag.cc < details.startCC)
) {
return true;
}
}
return false;
}
// Find the first frag in the previous level which matches the CC of the first frag of the new level
export function findDiscontinuousReferenceFrag(
prevDetails: LevelDetails,
curDetails: LevelDetails
) {
const prevFrags = prevDetails.fragments;
const curFrags = curDetails.fragments;
if (!curFrags.length || !prevFrags.length) {
logger.log('No fragments to align');
return;
}
const prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc);
if (!prevStartFrag || (prevStartFrag && !prevStartFrag.startPTS)) {
logger.log('No frag in previous level to align on');
return;
}
return prevStartFrag;
}
function adjustFragmentStart(frag: Fragment, sliding: number) {
if (frag) {
const start = frag.start + sliding;
frag.start = frag.startPTS = start;
frag.endPTS = start + frag.duration;
}
}
export function adjustSlidingStart(sliding: number, details: LevelDetails) {
// Update segments
const fragments = details.fragments;
for (let i = 0, len = fragments.length; i < len; i++) {
adjustFragmentStart(fragments[i], sliding);
}
// Update LL-HLS parts at the end of the playlist
if (details.fragmentHint) {
adjustFragmentStart(details.fragmentHint, sliding);
}
details.alignedSliding = true;
}
/**
* Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a
* contiguous stream with the last fragments.
* The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
* download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
* and an extra download.
* @param lastFrag
* @param lastLevel
* @param details
*/
export function alignStream(
lastFrag: Fragment | null,
lastLevel: Level | null,
details: LevelDetails
) {
if (!lastLevel) {
return;
}
alignDiscontinuities(lastFrag, details, lastLevel);
if (!details.alignedSliding && lastLevel.details) {
// If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
// Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
// discontinuity sequence.
alignPDT(details, lastLevel.details);
}
if (
!details.alignedSliding &&
lastLevel.details &&
!details.skippedSegments
) {
// Try to align on sn so that we pick a better start fragment.
// Do not perform this on playlists with delta updates as this is only to align levels on switch
// and adjustSliding only adjusts fragments after skippedSegments.
adjustSliding(lastLevel.details, details);
}
}
/**
* Computes the PTS of a new level's fragments using the PTS of a fragment in the last level which shares the same
* discontinuity sequence.
* @param lastFrag - The last Fragment which shares the same discontinuity sequence
* @param lastLevel - The details of the last loaded level
* @param details - The details of the new level
*/
function alignDiscontinuities(
lastFrag: Fragment | null,
details: LevelDetails,
lastLevel: Level
) {
if (shouldAlignOnDiscontinuities(lastFrag, lastLevel, details)) {
const referenceFrag = findDiscontinuousReferenceFrag(
lastLevel.details,
details
);
if (referenceFrag && Number.isFinite(referenceFrag.start)) {
logger.log(
`Adjusting PTS using last level due to CC increase within current level ${details.url}`
);
adjustSlidingStart(referenceFrag.start, details);
}
}
}
/**
* Computes the PTS of a new level's fragments using the difference in Program Date Time from the last level.
* @param details - The details of the new level
* @param lastDetails - The details of the last loaded level
*/
export function alignPDT(details: LevelDetails, lastDetails: LevelDetails) {
// This check protects the unsafe "!" usage below for null program date time access.
if (
!lastDetails.fragments.length ||
!details.hasProgramDateTime ||
!lastDetails.hasProgramDateTime
) {
return;
}
// if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM
// and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM
// then we can deduce that playlist B sliding is 1000+8 = 1008s
const lastPDT = lastDetails.fragments[0].programDateTime!; // hasProgramDateTime check above makes this safe.
const newPDT = details.fragments[0].programDateTime!;
// date diff is in ms. frag.start is in seconds
const sliding = (newPDT - lastPDT) / 1000 + lastDetails.fragments[0].start;
if (sliding && Number.isFinite(sliding)) {
logger.log(
`Adjusting PTS using programDateTime delta ${
newPDT - lastPDT
}ms, sliding:${sliding.toFixed(3)} ${details.url} `
);
adjustSlidingStart(sliding, details);
}
}
export function alignFragmentByPDTDelta(frag: Fragment, delta: number) {
const { programDateTime } = frag;
if (!programDateTime) return;
const start = (programDateTime - delta) / 1000;
frag.start = frag.startPTS = start;
frag.endPTS = start + frag.duration;
}
/**
* Ensures appropriate time-alignment between renditions based on PDT. Unlike `alignPDT`, which adjusts
* the timeline based on the delta between PDTs of the 0th fragment of two playlists/`LevelDetails`,
* this function assumes the timelines represented in `refDetails` are accurate, including the PDTs,
* and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation
* times/timelines of `details` accordingly.
* Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks,
* the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks
* are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should
* be consistent across playlists, per the HLS spec.
* @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition).
* @param refDetails - The details of the reference rendition with start and PDT times for alignment.
*/
export function alignMediaPlaylistByPDT(
details: LevelDetails,
refDetails: LevelDetails
) {
// This check protects the unsafe "!" usage below for null program date time access.
if (
!refDetails.fragments.length ||
!details.hasProgramDateTime ||
!refDetails.hasProgramDateTime
) {
return;
}
const refPDT = refDetails.fragments[0].programDateTime!; // hasProgramDateTime check above makes this safe.
const refStart = refDetails.fragments[0].start;
// Use the delta between the reference details' presentation timeline's start time and its PDT
// to align the other rendition's timeline.
const delta = refPDT - refStart * 1000;
// Per spec: "If any Media Playlist in a Master Playlist contains an EXT-X-PROGRAM-DATE-TIME tag, then all
// Media Playlists in that Master Playlist MUST contain EXT-X-PROGRAM-DATE-TIME tags with consistent mappings
// of date and time to media timestamps."
// So we should be able to use each rendition's PDT as a reference time and use the delta to compute our relevant
// start and end times.
// NOTE: This code assumes each level/details timelines have already been made "internally consistent"
details.fragments.forEach((frag) => {
alignFragmentByPDTDelta(frag, delta);
});
if (details.fragmentHint) {
alignFragmentByPDTDelta(details.fragmentHint, delta);
}
details.alignedSliding = true;
}

66
node_modules/hls.js/src/utils/ewma-bandwidth-estimator.ts generated vendored Normal file

@ -0,0 +1,66 @@
/*
* EWMA Bandwidth Estimator
* - heavily inspired from shaka-player
* Tracks bandwidth samples and estimates available bandwidth.
* Based on the minimum of two exponentially-weighted moving averages with
* different half-lives.
*/
import EWMA from '../utils/ewma';
class EwmaBandWidthEstimator {
private defaultEstimate_: number;
private minWeight_: number;
private minDelayMs_: number;
private slow_: EWMA;
private fast_: EWMA;
constructor(slow: number, fast: number, defaultEstimate: number) {
this.defaultEstimate_ = defaultEstimate;
this.minWeight_ = 0.001;
this.minDelayMs_ = 50;
this.slow_ = new EWMA(slow);
this.fast_ = new EWMA(fast);
}
update(slow: number, fast: number) {
const { slow_, fast_ } = this;
if (this.slow_.halfLife !== slow) {
this.slow_ = new EWMA(slow, slow_.getEstimate(), slow_.getTotalWeight());
}
if (this.fast_.halfLife !== fast) {
this.fast_ = new EWMA(fast, fast_.getEstimate(), fast_.getTotalWeight());
}
}
sample(durationMs: number, numBytes: number) {
durationMs = Math.max(durationMs, this.minDelayMs_);
const numBits = 8 * numBytes;
// weight is duration in seconds
const durationS = durationMs / 1000;
// value is bandwidth in bits/s
const bandwidthInBps = numBits / durationS;
this.fast_.sample(durationS, bandwidthInBps);
this.slow_.sample(durationS, bandwidthInBps);
}
canEstimate(): boolean {
const fast = this.fast_;
return fast && fast.getTotalWeight() >= this.minWeight_;
}
getEstimate(): number {
if (this.canEstimate()) {
// console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
// console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
// Take the minimum of these two estimates. This should have the effect of
// adapting down quickly, but up more slowly.
return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
} else {
return this.defaultEstimate_;
}
}
destroy() {}
}
export default EwmaBandWidthEstimator;
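A brief usage sketch (the half-lives in seconds and default estimate in bits/s are illustrative; hls.js wires these from the abrEwma* config values):

const estimator = new EwmaBandWidthEstimator(9, 3, 5e5);
estimator.sample(800, 1_000_000); // 1 MB in 800 ms: a 10 Mbit/s sample
if (estimator.canEstimate()) {
  estimator.getEstimate(); // min(fast, slow): adapts down quickly, up slowly
}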

43
node_modules/hls.js/src/utils/ewma.ts generated vendored Normal file

@ -0,0 +1,43 @@
/*
* compute an Exponential Weighted moving average
* - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
* - heavily inspired from shaka-player
*/
class EWMA {
public readonly halfLife: number;
private alpha_: number;
private estimate_: number;
private totalWeight_: number;
// About half of the estimated value will be from the last |halfLife| samples by weight.
constructor(halfLife: number, estimate: number = 0, weight: number = 0) {
this.halfLife = halfLife;
// Larger values of alpha expire historical data more slowly.
this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
this.estimate_ = estimate;
this.totalWeight_ = weight;
}
sample(weight: number, value: number) {
const adjAlpha = Math.pow(this.alpha_, weight);
this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
this.totalWeight_ += weight;
}
getTotalWeight(): number {
return this.totalWeight_;
}
getEstimate(): number {
if (this.alpha_) {
const zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
if (zeroFactor) {
return this.estimate_ / zeroFactor;
}
}
return this.estimate_;
}
}
export default EWMA;
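A small numeric sketch: with a half-life of 3, older samples lose half their influence for every 3 units of accumulated weight:

const ewma = new EWMA(3);
ewma.sample(1, 10);
ewma.sample(1, 20);
ewma.getEstimate(); // ~15.6, weighted toward the more recent sample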

255
node_modules/hls.js/src/utils/fetch-loader.ts generated vendored Normal file

@ -0,0 +1,255 @@
import {
LoaderCallbacks,
LoaderContext,
Loader,
LoaderStats,
LoaderConfiguration,
LoaderOnProgress,
} from '../types/loader';
import { LoadStats } from '../loader/load-stats';
import ChunkCache from '../demux/chunk-cache';
export function fetchSupported() {
if (
// @ts-ignore
self.fetch &&
self.AbortController &&
self.ReadableStream &&
self.Request
) {
try {
new self.ReadableStream({}); // eslint-disable-line no-new
return true;
} catch (e) {
/* noop */
}
}
return false;
}
class FetchLoader implements Loader<LoaderContext> {
private fetchSetup: Function;
private requestTimeout?: number;
private request!: Request;
private response!: Response;
private controller: AbortController;
public context!: LoaderContext;
private config: LoaderConfiguration | null = null;
private callbacks: LoaderCallbacks<LoaderContext> | null = null;
public stats: LoaderStats;
private loader: Response | null = null;
constructor(config /* HlsConfig */) {
this.fetchSetup = config.fetchSetup || getRequest;
this.controller = new self.AbortController();
this.stats = new LoadStats();
}
destroy(): void {
this.loader = this.callbacks = null;
this.abortInternal();
}
abortInternal(): void {
const response = this.response;
if (!response || !response.ok) {
this.stats.aborted = true;
this.controller.abort();
}
}
abort(): void {
this.abortInternal();
if (this.callbacks?.onAbort) {
this.callbacks.onAbort(this.stats, this.context, this.response);
}
}
load(
context: LoaderContext,
config: LoaderConfiguration,
callbacks: LoaderCallbacks<LoaderContext>
): void {
const stats = this.stats;
if (stats.loading.start) {
throw new Error('Loader can only be used once.');
}
stats.loading.start = self.performance.now();
const initParams = getRequestParameters(context, this.controller.signal);
const onProgress: LoaderOnProgress<LoaderContext> | undefined =
callbacks.onProgress;
const isArrayBuffer = context.responseType === 'arraybuffer';
const LENGTH = isArrayBuffer ? 'byteLength' : 'length';
this.context = context;
this.config = config;
this.callbacks = callbacks;
this.request = this.fetchSetup(context, initParams);
self.clearTimeout(this.requestTimeout);
this.requestTimeout = self.setTimeout(() => {
this.abortInternal();
callbacks.onTimeout(stats, context, this.response);
}, config.timeout);
self
.fetch(this.request)
.then((response: Response): Promise<string | ArrayBuffer> => {
this.response = this.loader = response;
if (!response.ok) {
const { status, statusText } = response;
throw new FetchError(
statusText || 'fetch, bad network response',
status,
response
);
}
stats.loading.first = Math.max(
self.performance.now(),
stats.loading.start
);
stats.total = parseInt(response.headers.get('Content-Length') || '0');
if (onProgress && Number.isFinite(config.highWaterMark)) {
return this.loadProgressively(
response,
stats,
context,
config.highWaterMark,
onProgress
);
}
if (isArrayBuffer) {
return response.arrayBuffer();
}
return response.text();
})
.then((responseData: string | ArrayBuffer) => {
const { response } = this;
self.clearTimeout(this.requestTimeout);
stats.loading.end = Math.max(
self.performance.now(),
stats.loading.first
);
stats.loaded = stats.total = responseData[LENGTH];
const loaderResponse = {
url: response.url,
data: responseData,
};
if (onProgress && !Number.isFinite(config.highWaterMark)) {
onProgress(stats, context, responseData, response);
}
callbacks.onSuccess(loaderResponse, stats, context, response);
})
.catch((error) => {
self.clearTimeout(this.requestTimeout);
if (stats.aborted) {
return;
}
// CORS errors result in an undefined code. Set it to 0 here to align with XHR's behavior
const code = error.code || 0;
callbacks.onError(
{ code, text: error.message },
context,
error.details
);
});
}
getCacheAge(): number | null {
let result: number | null = null;
if (this.response) {
const ageHeader = this.response.headers.get('age');
result = ageHeader ? parseFloat(ageHeader) : null;
}
return result;
}
private loadProgressively(
response: Response,
stats: LoaderStats,
context: LoaderContext,
highWaterMark: number = 0,
onProgress: LoaderOnProgress<LoaderContext>
): Promise<ArrayBuffer> {
const chunkCache = new ChunkCache();
const reader = (response.body as ReadableStream).getReader();
const pump = (): Promise<ArrayBuffer> => {
return reader
.read()
.then((data) => {
if (data.done) {
if (chunkCache.dataLength) {
onProgress(stats, context, chunkCache.flush(), response);
}
return Promise.resolve(new ArrayBuffer(0));
}
const chunk: Uint8Array = data.value;
const len = chunk.length;
stats.loaded += len;
if (len < highWaterMark || chunkCache.dataLength) {
// The current chunk is too small to be emitted, or the cache already has data
// Push it to the cache
chunkCache.push(chunk);
if (chunkCache.dataLength >= highWaterMark) {
// flush in order to join the typed arrays
onProgress(stats, context, chunkCache.flush(), response);
}
} else {
// If there's nothing cached already, and the chunk is large enough,
// just emit the progress event
onProgress(stats, context, chunk, response);
}
return pump();
})
.catch(() => {
/* aborted */
return Promise.reject();
});
};
return pump();
}
}
function getRequestParameters(context: LoaderContext, signal): any {
const initParams: any = {
method: 'GET',
mode: 'cors',
credentials: 'same-origin',
signal,
headers: new self.Headers(Object.assign({}, context.headers)),
};
if (context.rangeEnd) {
initParams.headers.set(
'Range',
'bytes=' + context.rangeStart + '-' + String(context.rangeEnd - 1)
);
}
return initParams;
}
function getRequest(context: LoaderContext, initParams: any): Request {
return new self.Request(context.url, initParams);
}
class FetchError extends Error {
public code: number;
public details: any;
constructor(message: string, code: number, details: any) {
super(message);
this.code = code;
this.details = details;
}
}
export default FetchLoader;
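The constructor above falls back to getRequest unless a config.fetchSetup hook is supplied. A minimal sketch of such a hook; the header name is purely illustrative:
const hlsConfig = {
  fetchSetup(context: { url: string }, initParams: RequestInit): Request {
    // initParams.headers is the Headers instance built by getRequestParameters
    (initParams.headers as Headers).set('X-Request-Source', 'player'); // illustrative
    return new Request(context.url, initParams);
  },
};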

20
node_modules/hls.js/src/utils/hex.ts generated vendored Normal file

@@ -0,0 +1,20 @@
/**
* hex dump helper class
*/
const Hex = {
hexDump: function (array) {
let str = '';
for (let i = 0; i < array.length; i++) {
let h = array[i].toString(16);
if (h.length < 2) {
h = '0' + h;
}
str += h;
}
return str;
},
};
export default Hex;
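A quick check of the padding logic above:
Hex.hexDump(new Uint8Array([0x0f, 0xa0, 0x01])); // '0fa001', each byte left-padded to two hex digits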

248
node_modules/hls.js/src/utils/imsc1-ttml-parser.ts generated vendored Normal file

@@ -0,0 +1,248 @@
import { findBox } from './mp4-tools';
import { parseTimeStamp } from './vttparser';
import VTTCue from './vttcue';
import { utf8ArrayToStr } from '../demux/id3';
import { toTimescaleFromScale } from './timescale-conversion';
import { generateCueId } from './webvtt-parser';
export const IMSC1_CODEC = 'stpp.ttml.im1t';
// Time format: h:m:s:frames(.subframes)
const HMSF_REGEX = /^(\d{2,}):(\d{2}):(\d{2}):(\d{2})\.?(\d+)?$/;
// Time format: hours, minutes, seconds, milliseconds, frames, ticks
const TIME_UNIT_REGEX = /^(\d*(?:\.\d*)?)(h|m|s|ms|f|t)$/;
const textAlignToLineAlign: Partial<Record<string, LineAlignSetting>> = {
left: 'start',
center: 'center',
right: 'end',
start: 'start',
end: 'end',
};
export function parseIMSC1(
payload: ArrayBuffer,
initPTS: number,
timescale: number,
callBack: (cues: Array<VTTCue>) => any,
errorCallBack: (error: Error) => any
) {
const results = findBox(new Uint8Array(payload), ['mdat']);
if (results.length === 0) {
errorCallBack(new Error('Could not parse IMSC1 mdat'));
return;
}
const mdat = results[0];
const ttml = utf8ArrayToStr(
new Uint8Array(payload, mdat.start, mdat.end - mdat.start)
);
const syncTime = toTimescaleFromScale(initPTS, 1, timescale);
try {
callBack(parseTTML(ttml, syncTime));
} catch (error) {
errorCallBack(error);
}
}
function parseTTML(ttml: string, syncTime: number): Array<VTTCue> {
const parser = new DOMParser();
const xmlDoc = parser.parseFromString(ttml, 'text/xml');
const tt = xmlDoc.getElementsByTagName('tt')[0];
if (!tt) {
throw new Error('Invalid ttml');
}
const defaultRateInfo = {
frameRate: 30,
subFrameRate: 1,
frameRateMultiplier: 0,
tickRate: 0,
};
const rateInfo: Object = Object.keys(defaultRateInfo).reduce(
(result, key) => {
result[key] = tt.getAttribute(`ttp:${key}`) || defaultRateInfo[key];
return result;
},
{}
);
const trim = tt.getAttribute('xml:space') !== 'preserve';
const styleElements = collectionToDictionary(
getElementCollection(tt, 'styling', 'style')
);
const regionElements = collectionToDictionary(
getElementCollection(tt, 'layout', 'region')
);
const cueElements = getElementCollection(tt, 'body', '[begin]');
return [].map
.call(cueElements, (cueElement) => {
const cueText = getTextContent(cueElement, trim);
if (!cueText || !cueElement.hasAttribute('begin')) {
return null;
}
const startTime = parseTtmlTime(
cueElement.getAttribute('begin'),
rateInfo
);
const duration = parseTtmlTime(cueElement.getAttribute('dur'), rateInfo);
let endTime = parseTtmlTime(cueElement.getAttribute('end'), rateInfo);
if (startTime === null) {
throw timestampParsingError(cueElement);
}
if (endTime === null) {
if (duration === null) {
throw timestampParsingError(cueElement);
}
endTime = startTime + duration;
}
const cue = new VTTCue(startTime - syncTime, endTime - syncTime, cueText);
cue.id = generateCueId(cue.startTime, cue.endTime, cue.text);
const region = regionElements[cueElement.getAttribute('region')];
const style = styleElements[cueElement.getAttribute('style')];
// TODO: Add regions to track and cue (origin and extend)
// These values are hard-coded (for now) to simulate region settings in the demo
cue.position = 10;
cue.size = 80;
// Apply styles to cue
const styles = getTtmlStyles(region, style);
const { textAlign } = styles;
if (textAlign) {
// cue.positionAlign not settable in FF~2016
const lineAlign = textAlignToLineAlign[textAlign];
if (lineAlign) {
cue.lineAlign = lineAlign;
}
cue.align = textAlign as AlignSetting;
}
Object.assign(cue, styles);
return cue;
})
.filter((cue) => cue !== null);
}
function getElementCollection(
fromElement,
parentName,
childName
): Array<HTMLElement> {
const parent = fromElement.getElementsByTagName(parentName)[0];
if (parent) {
return [].slice.call(parent.querySelectorAll(childName));
}
return [];
}
function collectionToDictionary(elementsWithId: Array<HTMLElement>): {
[id: string]: HTMLElement;
} {
return elementsWithId.reduce((dict, element: HTMLElement) => {
const id = element.getAttribute('xml:id');
if (id) {
dict[id] = element;
}
return dict;
}, {});
}
function getTextContent(element, trim): string {
return [].slice.call(element.childNodes).reduce((str, node, i) => {
if (node.nodeName === 'br' && i) {
return str + '\n';
}
if (node.childNodes?.length) {
return str + getTextContent(node, trim);
} else if (trim) {
return str + node.textContent.trim().replace(/\s+/g, ' ');
}
return str + node.textContent;
}, '');
}
function getTtmlStyles(region, style): { [style: string]: string } {
const ttsNs = 'http://www.w3.org/ns/ttml#styling';
const styleAttributes = [
'displayAlign',
'textAlign',
'color',
'backgroundColor',
'fontSize',
'fontFamily',
// 'fontWeight',
// 'lineHeight',
// 'wrapOption',
// 'fontStyle',
// 'direction',
// 'writingMode'
];
return styleAttributes.reduce((styles, name) => {
const value =
getAttributeNS(style, ttsNs, name) || getAttributeNS(region, ttsNs, name);
if (value) {
styles[name] = value;
}
return styles;
}, {});
}
function getAttributeNS(element, ns, name): string | null {
return element.hasAttributeNS(ns, name)
? element.getAttributeNS(ns, name)
: null;
}
function timestampParsingError(node) {
return new Error(`Could not parse ttml timestamp ${node}`);
}
function parseTtmlTime(timeAttributeValue, rateInfo): number | null {
if (!timeAttributeValue) {
return null;
}
let seconds: number | null = parseTimeStamp(timeAttributeValue);
if (seconds === null) {
if (HMSF_REGEX.test(timeAttributeValue)) {
seconds = parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo);
} else if (TIME_UNIT_REGEX.test(timeAttributeValue)) {
seconds = parseTimeUnits(timeAttributeValue, rateInfo);
}
}
return seconds;
}
function parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo): number {
const m = HMSF_REGEX.exec(timeAttributeValue) as Array<any>;
const frames = (m[4] | 0) + (m[5] | 0) / rateInfo.subFrameRate;
return (
(m[1] | 0) * 3600 +
(m[2] | 0) * 60 +
(m[3] | 0) +
frames / rateInfo.frameRate
);
}
function parseTimeUnits(timeAttributeValue, rateInfo): number {
const m = TIME_UNIT_REGEX.exec(timeAttributeValue) as Array<any>;
const value = Number(m[1]);
const unit = m[2];
switch (unit) {
case 'h':
return value * 3600;
case 'm':
return value * 60;
case 'ms':
return value / 1000; // milliseconds to seconds
case 'f':
return value / rateInfo.frameRate;
case 't':
return value / rateInfo.tickRate;
}
return value;
}
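Worked examples of the two fallback time formats handled above, assuming the default rate info (frameRate 30, subFrameRate 1):
// '00:00:01:15' matches HMSF_REGEX: 0*3600 + 0*60 + 1 + 15/30 = 1.5 s
// '90s'  matches TIME_UNIT_REGEX: 90 seconds
// '3f'   matches TIME_UNIT_REGEX: 3 / 30 fps = 0.1 s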

83
node_modules/hls.js/src/utils/logger.ts generated vendored Normal file

@@ -0,0 +1,83 @@
interface ILogFunction {
(message?: any, ...optionalParams: any[]): void;
}
export interface ILogger {
trace: ILogFunction;
debug: ILogFunction;
log: ILogFunction;
warn: ILogFunction;
info: ILogFunction;
error: ILogFunction;
}
const noop: ILogFunction = function () {};
const fakeLogger: ILogger = {
trace: noop,
debug: noop,
log: noop,
warn: noop,
info: noop,
error: noop,
};
let exportedLogger: ILogger = fakeLogger;
// let lastCallTime;
// function formatMsgWithTimeInfo(type, msg) {
// const now = Date.now();
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
// lastCallTime = now;
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
// return msg;
// }
function consolePrintFn(type: string): ILogFunction {
const func: ILogFunction = self.console[type];
if (func) {
return func.bind(self.console, `[${type}] >`);
}
return noop;
}
function exportLoggerFunctions(
debugConfig: boolean | ILogger,
...functions: string[]
): void {
functions.forEach(function (type) {
exportedLogger[type] = debugConfig[type]
? debugConfig[type].bind(debugConfig)
: consolePrintFn(type);
});
}
export function enableLogs(debugConfig: boolean | ILogger): void {
// check that console is available
if (
(self.console && debugConfig === true) ||
typeof debugConfig === 'object'
) {
exportLoggerFunctions(
debugConfig,
// Remove entries from this list to hard-disable a log level
// 'trace',
'debug',
'log',
'info',
'warn',
'error'
);
// Some browsers don't allow binding methods of the console object;
// fall back to the no-op logger if needed
try {
exportedLogger.log();
} catch (e) {
exportedLogger = fakeLogger;
}
} else {
exportedLogger = fakeLogger;
}
}
export const logger: ILogger = exportedLogger;
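A minimal sketch of turning logging on; the message is illustrative:
enableLogs(true);              // binds console methods with a '[level] >' prefix
logger.warn('buffer stalled'); // -> "[warn] > buffer stalled"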

25
node_modules/hls.js/src/utils/mediakeys-helper.ts generated vendored Normal file

@@ -0,0 +1,25 @@
/**
* @see https://developer.mozilla.org/en-US/docs/Web/API/Navigator/requestMediaKeySystemAccess
*/
export enum KeySystems {
WIDEVINE = 'com.widevine.alpha',
PLAYREADY = 'com.microsoft.playready',
}
export type MediaKeyFunc = (
keySystem: KeySystems,
supportedConfigurations: MediaKeySystemConfiguration[]
) => Promise<MediaKeySystemAccess>;
const requestMediaKeySystemAccess = (function (): MediaKeyFunc | null {
if (
typeof self !== 'undefined' &&
self.navigator &&
self.navigator.requestMediaKeySystemAccess
) {
return self.navigator.requestMediaKeySystemAccess.bind(self.navigator);
} else {
return null;
}
})();
export { requestMediaKeySystemAccess };
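A hedged sketch of probing for Widevine with the helper above; the capability values are illustrative:
requestMediaKeySystemAccess?.(KeySystems.WIDEVINE, [
  {
    initDataTypes: ['cenc'],
    videoCapabilities: [{ contentType: 'video/mp4; codecs="avc1.42E01E"' }],
  },
])?.then((access) => console.log('key system granted:', access.keySystem));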

7
node_modules/hls.js/src/utils/mediasource-helper.ts generated vendored Normal file

@@ -0,0 +1,7 @@
/**
* MediaSource helper
*/
export function getMediaSource(): typeof MediaSource | undefined {
return self.MediaSource || ((self as any).WebKitMediaSource as typeof MediaSource);
}
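A typical feature-detection sketch built on the helper above; the MIME string is illustrative:
const MS = getMediaSource();
const mseSupported =
  !!MS && MS.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"');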

586
node_modules/hls.js/src/utils/mp4-tools.ts generated vendored Normal file

@@ -0,0 +1,586 @@
import { sliceUint8 } from './typed-array';
import { ElementaryStreamTypes } from '../loader/fragment';
type Mp4BoxData = {
data: Uint8Array;
start: number;
end: number;
};
const UINT32_MAX = Math.pow(2, 32) - 1;
const push = [].push;
export function bin2str(data: Uint8Array): string {
return String.fromCharCode.apply(null, data);
}
export function readUint16(
buffer: Uint8Array | Mp4BoxData,
offset: number
): number {
if ('data' in buffer) {
offset += buffer.start;
buffer = buffer.data;
}
const val = (buffer[offset] << 8) | buffer[offset + 1];
return val < 0 ? 65536 + val : val;
}
export function readUint32(
buffer: Uint8Array | Mp4BoxData,
offset: number
): number {
if ('data' in buffer) {
offset += buffer.start;
buffer = buffer.data;
}
const val =
(buffer[offset] << 24) |
(buffer[offset + 1] << 16) |
(buffer[offset + 2] << 8) |
buffer[offset + 3];
return val < 0 ? 4294967296 + val : val;
}
export function writeUint32(
buffer: Uint8Array | Mp4BoxData,
offset: number,
value: number
) {
if ('data' in buffer) {
offset += buffer.start;
buffer = buffer.data;
}
buffer[offset] = value >> 24;
buffer[offset + 1] = (value >> 16) & 0xff;
buffer[offset + 2] = (value >> 8) & 0xff;
buffer[offset + 3] = value & 0xff;
}
// Find the data for a box specified by its path
export function findBox(
input: Uint8Array | Mp4BoxData,
path: Array<string>
): Array<Mp4BoxData> {
const results = [] as Array<Mp4BoxData>;
if (!path.length) {
// short-circuit the search for empty paths
return results;
}
let data: Uint8Array;
let start;
let end;
if ('data' in input) {
data = input.data;
start = input.start;
end = input.end;
} else {
data = input;
start = 0;
end = data.byteLength;
}
for (let i = start; i < end; ) {
const size = readUint32(data, i);
const type = bin2str(data.subarray(i + 4, i + 8));
const endbox = size > 1 ? i + size : end;
if (type === path[0]) {
if (path.length === 1) {
// this is the end of the path and we've found the box we were
// looking for
results.push({ data: data, start: i + 8, end: endbox });
} else {
// recursively search for the next box along the path
const subresults = findBox(
{ data: data, start: i + 8, end: endbox },
path.slice(1)
);
if (subresults.length) {
push.apply(results, subresults);
}
}
}
i = endbox;
}
// we've finished searching all of data
return results;
}
type SidxInfo = {
earliestPresentationTime: number;
timescale: number;
version: number;
referencesCount: number;
references: any[];
moovEndOffset: number | null;
};
export function parseSegmentIndex(initSegment: Uint8Array): SidxInfo | null {
const moovBox = findBox(initSegment, ['moov']);
const moov = moovBox[0];
const moovEndOffset = moov ? moov.end : null; // we need this in case we have to chop off garbage at the end of the current data
const sidxBox = findBox(initSegment, ['sidx']);
if (!sidxBox || !sidxBox[0]) {
return null;
}
const references: any[] = [];
const sidx = sidxBox[0];
const version = sidx.data[0];
// set initial offset, we skip the reference ID (not needed)
let index = version === 0 ? 8 : 16;
const timescale = readUint32(sidx, index);
index += 4;
// TODO: parse earliestPresentationTime and firstOffset
// usually zero in our case
const earliestPresentationTime = 0;
const firstOffset = 0;
if (version === 0) {
index += 8;
} else {
index += 16;
}
// skip reserved
index += 2;
let startByte = sidx.end + firstOffset;
const referencesCount = readUint16(sidx, index);
index += 2;
for (let i = 0; i < referencesCount; i++) {
let referenceIndex = index;
const referenceInfo = readUint32(sidx, referenceIndex);
referenceIndex += 4;
const referenceSize = referenceInfo & 0x7fffffff;
const referenceType = (referenceInfo & 0x80000000) >>> 31;
if (referenceType === 1) {
// eslint-disable-next-line no-console
console.warn('SIDX has hierarchical references (not supported)');
return null;
}
const subsegmentDuration = readUint32(sidx, referenceIndex);
referenceIndex += 4;
references.push({
referenceSize,
subsegmentDuration, // unscaled
info: {
duration: subsegmentDuration / timescale,
start: startByte,
end: startByte + referenceSize - 1,
},
});
startByte += referenceSize;
// Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits
// for |sapDelta|.
referenceIndex += 4;
// skip to next ref
index = referenceIndex;
}
return {
earliestPresentationTime,
timescale,
version,
referencesCount,
references,
moovEndOffset,
};
}
/**
* Parses an MP4 initialization segment and extracts stream type and
* timescale values for any declared tracks. Timescale values indicate the
* number of clock ticks per second to assume for time-based values
* elsewhere in the MP4.
*
* To determine the start time of an MP4, you need two pieces of
* information: the timescale unit and the earliest base media decode
* time. Multiple timescales can be specified within an MP4 but the
* base media decode time is always expressed in the timescale from
* the media header box for the track:
* ```
* moov > trak > mdia > mdhd.timescale
* moov > trak > mdia > hdlr
* ```
* @param initSegment {Uint8Array} the bytes of the init segment
* @return {InitData} a hash of track type to timescale values or null if
* the init segment is malformed.
*/
export interface InitDataTrack {
timescale: number;
id: number;
codec: string;
}
type HdlrType = ElementaryStreamTypes.AUDIO | ElementaryStreamTypes.VIDEO;
export interface InitData extends Array<any> {
[index: number]:
| {
timescale: number;
type: HdlrType;
default?: {
duration: number;
flags: number;
};
}
| undefined;
audio?: InitDataTrack;
video?: InitDataTrack;
}
export function parseInitSegment(initSegment: Uint8Array): InitData {
const result: InitData = [];
const traks = findBox(initSegment, ['moov', 'trak']);
for (let i = 0; i < traks.length; i++) {
const trak = traks[i];
const tkhd = findBox(trak, ['tkhd'])[0];
if (tkhd) {
let version = tkhd.data[tkhd.start];
let index = version === 0 ? 12 : 20;
const trackId = readUint32(tkhd, index);
const mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
if (mdhd) {
version = mdhd.data[mdhd.start];
index = version === 0 ? 12 : 20;
const timescale = readUint32(mdhd, index);
const hdlr = findBox(trak, ['mdia', 'hdlr'])[0];
if (hdlr) {
const hdlrType = bin2str(
hdlr.data.subarray(hdlr.start + 8, hdlr.start + 12)
);
const type: HdlrType | undefined = {
soun: ElementaryStreamTypes.AUDIO as const,
vide: ElementaryStreamTypes.VIDEO as const,
}[hdlrType];
if (type) {
// Parse codec details
const stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
let codec;
if (stsd) {
codec = bin2str(
stsd.data.subarray(stsd.start + 12, stsd.start + 16)
);
// TODO: Parse codec details to be able to build MIME type.
// stsd.start += 8;
// const codecBox = findBox(stsd, [codec])[0];
// if (codecBox) {
// TODO: Codec parsing support for avc1, mp4a, hevc, av01...
// }
}
result[trackId] = { timescale, type };
result[type] = { timescale, id: trackId, codec };
}
}
}
}
}
const trex = findBox(initSegment, ['moov', 'mvex', 'trex']);
trex.forEach((trex) => {
const trackId = readUint32(trex, 4);
const track = result[trackId];
if (track) {
track.default = {
duration: readUint32(trex, 12),
flags: readUint32(trex, 20),
};
}
});
return result;
}
/**
* Determine the base media decode start time, in seconds, for an MP4
* fragment. If multiple fragments are specified, the earliest time is
* returned.
*
* The base media decode time can be parsed from track fragment
* metadata:
* ```
* moof > traf > tfdt.baseMediaDecodeTime
* ```
* It requires the timescale value from the mdhd to interpret.
*
* @param initData {InitData} a hash of track type to timescale values
* @param fmp4 {Uint8Array} the bytes of the mp4 fragment
* @return {number} the earliest base media decode start time for the
* fragment, in seconds
*/
export function getStartDTS(initData: InitData, fmp4: Uint8Array): number {
// we need info from two children of each track fragment box
return (
findBox(fmp4, ['moof', 'traf']).reduce((result: number | null, traf) => {
const tfdt = findBox(traf, ['tfdt'])[0];
const version = tfdt.data[tfdt.start];
const start = findBox(traf, ['tfhd']).reduce(
(result: number | null, tfhd) => {
// get the track id from the tfhd
const id = readUint32(tfhd, 4);
const track = initData[id];
if (track) {
let baseTime = readUint32(tfdt, 4);
if (version === 1) {
baseTime *= Math.pow(2, 32);
baseTime += readUint32(tfdt, 8);
}
// assume a 90kHz clock if no timescale was specified
const scale = track.timescale || 90e3;
// convert base time to seconds
const startTime = baseTime / scale;
if (
isFinite(startTime) &&
(result === null || startTime < result)
) {
return startTime;
}
}
return result;
},
null
);
if (
start !== null &&
isFinite(start) &&
(result === null || start < result)
) {
return start;
}
return result;
}, null) || 0
);
}
/*
For Reference:
aligned(8) class TrackFragmentHeaderBox
extends FullBox(tfhd, 0, tf_flags){
unsigned int(32) track_ID;
// all the following are optional fields
unsigned int(64) base_data_offset;
unsigned int(32) sample_description_index;
unsigned int(32) default_sample_duration;
unsigned int(32) default_sample_size;
unsigned int(32) default_sample_flags
}
*/
export function getDuration(data: Uint8Array, initData: InitData) {
let rawDuration = 0;
let videoDuration = 0;
let audioDuration = 0;
const trafs = findBox(data, ['moof', 'traf']);
for (let i = 0; i < trafs.length; i++) {
const traf = trafs[i];
// There is only one tfhd & trun per traf
// This is true for CMAF style content, and we should perhaps check the ftyp
// and only look for a single trun then, but for ISOBMFF we should check
// for multiple track runs.
const tfhd = findBox(traf, ['tfhd'])[0];
// get the track id from the tfhd
const id = readUint32(tfhd, 4);
const track = initData[id];
if (!track) {
continue;
}
const trackDefault = track.default;
const tfhdFlags = readUint32(tfhd, 0) | trackDefault?.flags!;
let sampleDuration: number | undefined = trackDefault?.duration;
if (tfhdFlags & 0x000008) {
// 0x000008 indicates the presence of the default_sample_duration field
if (tfhdFlags & 0x000002) {
// 0x000002 indicates the presence of the sample_description_index field, which precedes default_sample_duration
// If present, the default_sample_duration exists at byte offset 12
sampleDuration = readUint32(tfhd, 12);
} else {
// Otherwise, the duration is at byte offset 8
sampleDuration = readUint32(tfhd, 8);
}
}
// assume a 90kHz clock if no timescale was specified
const timescale = track.timescale || 90e3;
const truns = findBox(traf, ['trun']);
for (let j = 0; j < truns.length; j++) {
rawDuration = computeRawDurationFromSamples(truns[j]);
if (!rawDuration && sampleDuration) {
const sampleCount = readUint32(truns[j], 4);
rawDuration = sampleDuration * sampleCount;
}
if (track.type === ElementaryStreamTypes.VIDEO) {
videoDuration += rawDuration / timescale;
} else if (track.type === ElementaryStreamTypes.AUDIO) {
audioDuration += rawDuration / timescale;
}
}
}
if (videoDuration === 0 && audioDuration === 0) {
// If duration samples are not available in the traf use sidx subsegment_duration
const sidx = parseSegmentIndex(data);
if (sidx?.references) {
return sidx.references.reduce(
(dur, ref) => dur + ref.info.duration || 0,
0
);
}
}
if (videoDuration) {
return videoDuration;
}
return audioDuration;
}
/*
For Reference:
aligned(8) class TrackRunBox
extends FullBox(trun, version, tr_flags) {
unsigned int(32) sample_count;
// the following are optional fields
signed int(32) data_offset;
unsigned int(32) first_sample_flags;
// all fields in the following array are optional
{
unsigned int(32) sample_duration;
unsigned int(32) sample_size;
unsigned int(32) sample_flags
if (version == 0)
{ unsigned int(32)
else
{ signed int(32)
}[ sample_count ]
}
*/
export function computeRawDurationFromSamples(trun): number {
const flags = readUint32(trun, 0);
// Flags are at offset 0, non-optional sample_count is at offset 4. Therefore we start 8 bytes in.
// Each field is an int32, which is 4 bytes
let offset = 8;
// data-offset-present flag
if (flags & 0x000001) {
offset += 4;
}
// first-sample-flags-present flag
if (flags & 0x000004) {
offset += 4;
}
let duration = 0;
const sampleCount = readUint32(trun, 4);
for (let i = 0; i < sampleCount; i++) {
// sample-duration-present flag
if (flags & 0x000100) {
const sampleDuration = readUint32(trun, offset);
duration += sampleDuration;
offset += 4;
}
// sample-size-present flag
if (flags & 0x000200) {
offset += 4;
}
// sample-flags-present flag
if (flags & 0x000400) {
offset += 4;
}
// sample-composition-time-offsets-present flag
if (flags & 0x000800) {
offset += 4;
}
}
return duration;
}
export function offsetStartDTS(
initData: InitData,
fmp4: Uint8Array,
timeOffset: number
) {
findBox(fmp4, ['moof', 'traf']).forEach(function (traf) {
findBox(traf, ['tfhd']).forEach(function (tfhd) {
// get the track id from the tfhd
const id = readUint32(tfhd, 4);
const track = initData[id];
if (!track) {
return;
}
// assume a 90kHz clock if no timescale was specified
const timescale = track.timescale || 90e3;
// get the base media decode time from the tfdt
findBox(traf, ['tfdt']).forEach(function (tfdt) {
const version = tfdt.data[tfdt.start];
let baseMediaDecodeTime = readUint32(tfdt, 4);
if (version === 0) {
writeUint32(tfdt, 4, baseMediaDecodeTime - timeOffset * timescale);
} else {
baseMediaDecodeTime *= Math.pow(2, 32);
baseMediaDecodeTime += readUint32(tfdt, 8);
baseMediaDecodeTime -= timeOffset * timescale;
baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0);
const upper = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
const lower = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
writeUint32(tfdt, 4, upper);
writeUint32(tfdt, 8, lower);
}
});
});
});
}
// TODO: Check if the last moof+mdat pair is part of the valid range
export function segmentValidRange(data: Uint8Array): SegmentedRange {
const segmentedRange: SegmentedRange = {
valid: null,
remainder: null,
};
const moofs = findBox(data, ['moof']);
if (!moofs) {
return segmentedRange;
} else if (moofs.length < 2) {
segmentedRange.remainder = data;
return segmentedRange;
}
const last = moofs[moofs.length - 1];
// Offset by 8 bytes; findBox offsets the start by as much
segmentedRange.valid = sliceUint8(data, 0, last.start - 8);
segmentedRange.remainder = sliceUint8(data, last.start - 8);
return segmentedRange;
}
export interface SegmentedRange {
valid: Uint8Array | null;
remainder: Uint8Array | null;
}
export function appendUint8Array(
data1: Uint8Array,
data2: Uint8Array
): Uint8Array {
const temp = new Uint8Array(data1.length + data2.length);
temp.set(data1);
temp.set(data2, data1.length);
return temp;
}
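A short sketch of the path-based lookup that most of the parsers above build on; fmp4 is assumed to hold a fragment's bytes:
const traf = findBox(fmp4, ['moof', 'traf'])[0]; // first track fragment, as { data, start, end }
const tfdt = findBox(traf, ['tfdt'])[0];         // nested search reuses the returned range
const version = tfdt.data[tfdt.start];           // first byte past the 8-byte box header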

46
node_modules/hls.js/src/utils/output-filter.ts generated vendored Normal file

@@ -0,0 +1,46 @@
import type { TimelineController } from '../controller/timeline-controller';
import type { CaptionScreen } from './cea-608-parser';
export default class OutputFilter {
private timelineController: TimelineController;
private cueRanges: Array<[number, number]> = [];
private trackName: string;
private startTime: number | null = null;
private endTime: number | null = null;
private screen: CaptionScreen | null = null;
constructor(timelineController: TimelineController, trackName: string) {
this.timelineController = timelineController;
this.trackName = trackName;
}
dispatchCue() {
if (this.startTime === null) {
return;
}
this.timelineController.addCues(
this.trackName,
this.startTime,
this.endTime as number,
this.screen as CaptionScreen,
this.cueRanges
);
this.startTime = null;
}
newCue(startTime: number, endTime: number, screen: CaptionScreen) {
if (this.startTime === null || this.startTime > startTime) {
this.startTime = startTime;
}
this.endTime = endTime;
this.screen = screen;
this.timelineController.createCaptionsTrack(this.trackName);
}
reset() {
this.cueRanges = [];
this.startTime = null;
}
}
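The intended call pattern, sketched; in practice the CEA-608 parser is the caller, and both timelineController and screen are assumed to exist:
const filter = new OutputFilter(timelineController, 'textTrack1');
filter.newCue(10.0, 12.5, screen); // opens (or extends) the pending cue window
filter.dispatchCue();              // flushes [10.0, 12.5] with the captured screen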

141
node_modules/hls.js/src/utils/texttrack-utils.ts generated vendored Normal file

@@ -0,0 +1,141 @@
import { logger } from './logger';
export function sendAddTrackEvent(track: TextTrack, videoEl: HTMLMediaElement) {
let event: Event;
try {
event = new Event('addtrack');
} catch (err) {
// for IE11
event = document.createEvent('Event');
event.initEvent('addtrack', false, false);
}
(event as any).track = track;
videoEl.dispatchEvent(event);
}
export function addCueToTrack(track: TextTrack, cue: VTTCue) {
// Sometimes there are cue overlaps on segmented vtts so the same
// cue can appear more than once in different vtt files.
// This avoids showing duplicate cues with the same timecode and text.
const mode = track.mode;
if (mode === 'disabled') {
track.mode = 'hidden';
}
if (track.cues && !track.cues.getCueById(cue.id)) {
try {
track.addCue(cue);
if (!track.cues.getCueById(cue.id)) {
throw new Error(`addCue failed for: ${cue}`);
}
} catch (err) {
logger.debug(`[texttrack-utils]: ${err}`);
const textTrackCue = new (self.TextTrackCue as any)(
cue.startTime,
cue.endTime,
cue.text
);
textTrackCue.id = cue.id;
track.addCue(textTrackCue);
}
}
if (mode === 'disabled') {
track.mode = mode;
}
}
export function clearCurrentCues(track: TextTrack) {
// When track.mode is disabled, track.cues will be null.
// To guarantee the removal of cues, we need to temporarily
// change the mode to hidden
const mode = track.mode;
if (mode === 'disabled') {
track.mode = 'hidden';
}
if (track.cues) {
for (let i = track.cues.length; i--; ) {
track.removeCue(track.cues[i]);
}
}
if (mode === 'disabled') {
track.mode = mode;
}
}
export function removeCuesInRange(
track: TextTrack,
start: number,
end: number
) {
const mode = track.mode;
if (mode === 'disabled') {
track.mode = 'hidden';
}
if (track.cues && track.cues.length > 0) {
const cues = getCuesInRange(track.cues, start, end);
for (let i = 0; i < cues.length; i++) {
track.removeCue(cues[i]);
}
}
if (mode === 'disabled') {
track.mode = mode;
}
}
// Find first cue starting after given time.
// Modified version of binary search O(log(n)).
function getFirstCueIndexAfterTime(
cues: TextTrackCueList | TextTrackCue[],
time: number
): number {
// If first cue starts after time, start there
if (time < cues[0].startTime) {
return 0;
}
// If the last cue ends before time there is no overlap
const len = cues.length - 1;
if (time > cues[len].endTime) {
return -1;
}
let left = 0;
let right = len;
while (left <= right) {
const mid = Math.floor((right + left) / 2);
if (time < cues[mid].startTime) {
right = mid - 1;
} else if (time > cues[mid].startTime && left < len) {
left = mid + 1;
} else {
// If it's not lower or higher, it must be equal.
return mid;
}
}
// At this point, left and right have swapped.
// No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
return cues[left].startTime - time < time - cues[right].startTime
? left
: right;
}
export function getCuesInRange(
cues: TextTrackCueList | TextTrackCue[],
start: number,
end: number
): TextTrackCue[] {
const cuesFound: TextTrackCue[] = [];
const firstCueInRange = getFirstCueIndexAfterTime(cues, start);
if (firstCueInRange > -1) {
for (let i = firstCueInRange, len = cues.length; i < len; i++) {
const cue = cues[i];
if (cue.startTime >= start && cue.endTime <= end) {
cuesFound.push(cue);
} else if (cue.startTime > end) {
return cuesFound;
}
}
}
return cuesFound;
}
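Usage sketch for the range helpers above; track is assumed to be a TextTrack whose cues are populated:
removeCuesInRange(track, 0, 10);                  // drops cues fully contained in [0, 10]
const cues = getCuesInRange(track.cues!, 10, 20); // cues with start >= 10 and end <= 20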

17
node_modules/hls.js/src/utils/time-ranges.ts generated vendored Normal file

@@ -0,0 +1,17 @@
/**
* TimeRanges to string helper
*/
const TimeRanges = {
toString: function (r: TimeRanges) {
let log = '';
const len = r.length;
for (let i = 0; i < len; i++) {
log += '[' + r.start(i).toFixed(3) + ',' + r.end(i).toFixed(3) + ']';
}
return log;
},
};
export default TimeRanges;
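Example output of the helper above, assuming video is an HTMLMediaElement:
TimeRanges.toString(video.buffered); // e.g. '[0.000,9.984][10.112,30.016]'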

34
node_modules/hls.js/src/utils/timescale-conversion.ts generated vendored Normal file

@@ -0,0 +1,34 @@
const MPEG_TS_CLOCK_FREQ_HZ = 90000;
export function toTimescaleFromBase(
value,
destScale: number,
srcBase: number = 1,
round: boolean = false
): number {
const result = value * destScale * srcBase; // equivalent to `(value * scale) / (1 / base)`
return round ? Math.round(result) : result;
}
export function toTimescaleFromScale(
value,
destScale: number,
srcScale: number = 1,
round: boolean = false
): number {
return toTimescaleFromBase(value, destScale, 1 / srcScale, round);
}
export function toMsFromMpegTsClock(
value: number,
round: boolean = false
): number {
return toTimescaleFromBase(value, 1000, 1 / MPEG_TS_CLOCK_FREQ_HZ, round);
}
export function toMpegTsClockFromTimescale(
value: number,
srcScale: number = 1
): number {
return toTimescaleFromBase(value, MPEG_TS_CLOCK_FREQ_HZ, 1 / srcScale);
}
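Worked examples of the conversions above (90 kHz is the MPEG-TS clock):
toTimescaleFromScale(90000, 1, 90000); // 1: 90000 ticks at 90 kHz is one second
toMsFromMpegTsClock(45000, true);      // 500: half a second, rounded to whole ms
toMpegTsClockFromTimescale(2, 1);      // 180000: 2 s back to 90 kHz ticks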

11
node_modules/hls.js/src/utils/typed-array.ts generated vendored Normal file

@@ -0,0 +1,11 @@
export function sliceUint8(
array: Uint8Array,
start?: number,
end?: number
): Uint8Array {
// @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
// It always exists in the TypeScript definition so fails, but it fails at runtime on IE11.
return Uint8Array.prototype.slice
? array.slice(start, end)
: new Uint8Array(Array.prototype.slice.call(array, start, end));
}
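A quick check of the IE11-safe slice above:
sliceUint8(new Uint8Array([1, 2, 3, 4]), 1, 3); // Uint8Array [2, 3] on both code paths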

Some files were not shown because too many files have changed in this diff.