1
0
Fork 0
mirror of https://github.com/DanielnetoDotCom/YouPHPTube synced 2025-10-05 02:39:46 +02:00

Node modules update

This commit is contained in:
DanieL 2022-08-31 09:55:36 -03:00
parent f394ef7856
commit 12225f5c27
123 changed files with 6221 additions and 3459 deletions

View file

@ -11,7 +11,6 @@ import { ChunkMetadata } from '../types/transmuxer';
import { fragmentWithinToleranceTest } from './fragment-finders';
import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
import { ErrorDetails } from '../errors';
import { logger } from '../utils/logger';
import type { NetworkComponentAPI } from '../types/component-api';
import type { FragmentTracker } from './fragment-tracker';
import type { TransmuxerResult } from '../types/transmuxer';
@ -136,6 +135,7 @@ class AudioStreamController
3
)}`
);
startPosition = lastCurrentTime;
this.state = State.IDLE;
} else {
this.loadedmetadata = false;
@ -197,7 +197,7 @@ class AudioStreamController
}
} else if (this.videoTrackCC !== this.waitingVideoCC) {
// Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
logger.log(
this.log(
`Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`
);
this.clearWaitingFragment();
@ -215,7 +215,7 @@ class AudioStreamController
frag
);
if (waitingFragmentAtPosition < 0) {
logger.log(
this.log(
`Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`
);
this.clearWaitingFragment();
@ -240,6 +240,11 @@ class AudioStreamController
}
}
// Audio-specific reset: first drop any fragment parked while waiting for the
// video initPTS (see the waiting-fragment handling above — TODO confirm the
// exact waiting condition against clearWaitingFragment), then run the shared
// base-class loading-state reset.
protected resetLoadingState() {
this.clearWaitingFragment();
super.resetLoadingState();
}
protected onTickEnd() {
const { media } = this;
if (!media || !media.readyState) {
@ -550,7 +555,7 @@ class AudioStreamController
initPTS
);
} else {
logger.log(
this.log(
`Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`
);
const { cache } = (this.waitingData = this.waitingData || {

View file

@ -854,9 +854,13 @@ export default class BaseStreamController
frag = this.getFragmentAtPosition(pos, end, levelDetails);
}
return this.mapToInitFragWhenRequired(frag);
}
mapToInitFragWhenRequired(frag: Fragment | null): typeof frag {
// If an initSegment is present, it must be buffered first
if (frag?.initSegment && !frag?.initSegment.data && !this.bitrateTest) {
frag = frag.initSegment;
return frag.initSegment;
}
return frag;

View file

@ -13,7 +13,6 @@ import type { TransmuxerResult } from '../types/transmuxer';
import { ChunkMetadata } from '../types/transmuxer';
import GapController from './gap-controller';
import { ErrorDetails } from '../errors';
import { logger } from '../utils/logger';
import type Hls from '../hls';
import type { LevelDetails } from '../loader/level-details';
import type { TrackSet } from '../types/track';
@ -52,7 +51,6 @@ export default class StreamController
private onvplaying: EventListener | null = null;
private onvseeked: EventListener | null = null;
private fragLastKbps: number = 0;
private stalled: boolean = false;
private couldBacktrack: boolean = false;
private backtrackFragment: Fragment | null = null;
private audioCodecSwitch: boolean = false;
@ -546,7 +544,7 @@ export default class StreamController
this.log('Trigger BUFFER_RESET');
this.hls.trigger(Events.BUFFER_RESET, undefined);
this.fragmentTracker.removeAllFragments();
this.couldBacktrack = this.stalled = false;
this.couldBacktrack = false;
this.startPosition = this.lastCurrentTime = 0;
this.fragPlaying = null;
this.backtrackFragment = null;
@ -982,7 +980,7 @@ export default class StreamController
// at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
if (startPosition >= 0 && currentTime < startPosition) {
if (media.seeking) {
logger.log(
this.log(
`could not seek to ${startPosition}, already seeking at ${currentTime}`
);
return;
@ -995,9 +993,7 @@ export default class StreamController
(delta < this.config.maxBufferHole ||
delta < this.config.maxFragLookUpTolerance)
) {
logger.log(
`adjusting start position by ${delta} to match buffer start`
);
this.log(`adjusting start position by ${delta} to match buffer start`);
startPosition += delta;
this.startPosition = startPosition;
}

View file

@ -371,7 +371,7 @@ export class SubtitleStreamController
const fragLen = fragments.length;
const end = trackDetails.edge;
let foundFrag;
let foundFrag: Fragment | null;
const fragPrevious = this.fragPrevious;
if (targetBufferTime < end) {
const { maxFragLookUpTolerance } = config;
@ -392,10 +392,14 @@ export class SubtitleStreamController
foundFrag = fragments[fragLen - 1];
}
if (foundFrag?.encrypted) {
foundFrag = this.mapToInitFragWhenRequired(foundFrag);
if (!foundFrag) {
return;
}
if (foundFrag.encrypted) {
this.loadKey(foundFrag, trackDetails);
} else if (
foundFrag &&
this.fragmentTracker.getState(foundFrag) === FragmentState.NOT_LOADED
) {
// only load if fragment is not loaded
@ -410,7 +414,11 @@ export class SubtitleStreamController
targetBufferTime: number
) {
this.fragCurrent = frag;
super.loadFragment(frag, levelDetails, targetBufferTime);
if (frag.sn === 'initSegment') {
this._loadInitSegment(frag);
} else {
super.loadFragment(frag, levelDetails, targetBufferTime);
}
}
get mediaBufferTimeRanges(): TimeRange[] {

View file

@ -9,6 +9,7 @@ import {
removeCuesInRange,
} from '../utils/texttrack-utils';
import { parseIMSC1, IMSC1_CODEC } from '../utils/imsc1-ttml-parser';
import { appendUint8Array } from '../utils/mp4-tools';
import { PlaylistLevelType } from '../types/loader';
import { Fragment } from '../loader/fragment';
import {
@ -361,8 +362,10 @@ export class TimelineController implements ComponentAPI {
if (textTrack) {
clearCurrentCues(textTrack);
} else {
const textTrackKind =
this._captionsOrSubtitlesFromCharacteristics(track);
textTrack = this.createTextTrack(
'subtitles',
textTrackKind,
track.name,
track.lang
);
@ -392,6 +395,25 @@ export class TimelineController implements ComponentAPI {
}
}
/**
 * Choose the TextTrack kind for a subtitle rendition from its HLS
 * CHARACTERISTICS attribute: a rendition marked as both transcribing spoken
 * dialog and describing music/sound is surfaced as 'captions', otherwise
 * it stays 'subtitles'.
 */
private _captionsOrSubtitlesFromCharacteristics(
  track: MediaPlaylist
): TextTrackKind {
  const characteristics = track.attrs?.CHARACTERISTICS;
  if (characteristics) {
    const transcribesDialog =
      /transcribes-spoken-dialog/gi.test(characteristics);
    const describesSound =
      /describes-music-and-sound/gi.test(characteristics);
    if (transcribesDialog && describesSound) {
      return 'captions';
    }
  }
  return 'subtitles';
}
private onManifestLoaded(
event: Events.MANIFEST_LOADED,
data: ManifestLoadedData
@ -533,8 +555,11 @@ export class TimelineController implements ComponentAPI {
private _parseVTTs(frag: Fragment, payload: ArrayBuffer, vttCCs: any) {
const hls = this.hls;
// Parse the WebVTT file contents.
const payloadWebVTT = frag.initSegment?.data
? appendUint8Array(frag.initSegment.data, new Uint8Array(payload))
: payload;
parseWebVTT(
payload,
payloadWebVTT,
this.initPTS[frag.cc],
this.timescale[frag.cc],
vttCCs,

View file

@ -79,7 +79,7 @@ class AACDemuxer extends BaseAudioDemuxer {
track,
data,
offset,
this.initPTS as number,
this.basePTS as number,
this.frameIndex
);
if (frame && frame.missing === 0) {

View file

@ -19,6 +19,7 @@ class BaseAudioDemuxer implements Demuxer {
protected _id3Track!: DemuxedMetadataTrack;
protected frameIndex: number = 0;
protected cachedData: Uint8Array | null = null;
protected basePTS: number | null = null;
protected initPTS: number | null = null;
resetInitSegment(
@ -38,9 +39,15 @@ class BaseAudioDemuxer implements Demuxer {
};
}
/**
 * Reset timing for a new timeline position.
 *
 * The stripped diff left both the old empty stubs and the new bodies fused
 * together here; this restores the post-change implementations. The original
 * parameter typo `deaultTimestamp` is also corrected (TS callers are
 * positional, so the rename is backward-compatible).
 *
 * @param defaultTimestamp - seed value stored as initPTS (presumably 90kHz
 *   ticks, matching initPTSFn — TODO confirm against callers), may be
 *   null/undefined when unknown.
 */
resetTimeStamp(defaultTimestamp) {
  this.initPTS = defaultTimestamp;
  this.resetContiguity();
}

// Drop per-run frame state so the next demux pass recomputes its base PTS
// from scratch (basePTS is re-derived in demux when null).
resetContiguity(): void {
  this.basePTS = null;
  this.frameIndex = 0;
}
canParse(data: Uint8Array, offset: number): boolean {
return false;
@ -68,21 +75,24 @@ class BaseAudioDemuxer implements Demuxer {
const timestamp = id3Data ? ID3.getTimeStamp(id3Data) : undefined;
const length = data.length;
if (this.frameIndex === 0 || this.initPTS === null) {
this.initPTS = initPTSFn(timestamp, timeOffset);
if (
this.basePTS === null ||
(this.frameIndex === 0 && Number.isFinite(timestamp))
) {
this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
}
// more expressive than alternative: id3Data?.length
if (id3Data && id3Data.length > 0) {
id3Track.samples.push({
pts: this.initPTS,
dts: this.initPTS,
pts: this.basePTS,
dts: this.basePTS,
data: id3Data,
type: MetadataSchema.audioId3,
});
}
pts = this.initPTS;
pts = this.basePTS;
while (offset < length) {
if (this.canParse(data, offset)) {
@ -145,8 +155,6 @@ class BaseAudioDemuxer implements Demuxer {
this.demux(cachedData, 0);
}
this.frameIndex = 0;
return {
audioTrack: this._audioTrack,
videoTrack: dummyTrack() as DemuxedVideoTrack,
@ -166,10 +174,12 @@ class BaseAudioDemuxer implements Demuxer {
*/
export const initPTSFn = (
timestamp: number | undefined,
timeOffset: number
timeOffset: number,
initPTS: number | null
): number => {
return Number.isFinite(timestamp as number)
? timestamp! * 90
: timeOffset * 90000;
if (Number.isFinite(timestamp as number)) {
return timestamp! * 90;
}
return timeOffset * 90000 + (initPTS || 0);
};
export default BaseAudioDemuxer;

View file

@ -57,14 +57,14 @@ class MP3Demuxer extends BaseAudioDemuxer {
}
/**
 * Append one MPEG audio frame found at `offset`, anchored to the demuxer's
 * base PTS.
 *
 * The stripped diff left both the old `this.initPTS` lines and the new
 * `this.basePTS` lines in place; this restores the post-change body, which
 * uses basePTS (re-derived per contiguity run) rather than initPTS.
 *
 * @returns the parsed frame from MpegAudio.appendFrame, or undefined when no
 *   base PTS has been established yet.
 */
appendFrame(track, data, offset) {
  if (this.basePTS === null) {
    // No timing anchor yet (cleared by resetContiguity); cannot place frames.
    return;
  }
  return MpegAudio.appendFrame(
    track,
    data,
    offset,
    this.basePTS,
    this.frameIndex
  );
}

View file

@ -166,8 +166,14 @@ export default class TransmuxerInterface {
const trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level);
const snDiff = lastFrag ? chunkMeta.sn - (lastFrag.sn as number) : -1;
const partDiff = this.part ? chunkMeta.part - this.part.index : -1;
const progressive =
snDiff === 0 &&
chunkMeta.id > 1 &&
chunkMeta.id === lastFrag?.stats.chunkCount;
const contiguous =
!trackSwitch && (snDiff === 1 || (snDiff === 0 && partDiff === 1));
!trackSwitch &&
(snDiff === 1 ||
(snDiff === 0 && (partDiff === 1 || (progressive && partDiff <= 0))));
const now = self.performance.now();
if (trackSwitch || snDiff || frag.stats.parsing.start === 0) {
@ -291,6 +297,13 @@ export default class TransmuxerInterface {
break;
}
// pass logs from the worker thread to the main logger
case 'workerLog':
if (logger[data.data.logType]) {
logger[data.data.logType](data.data.message);
}
break;
/* falls through */
default: {
data.data = data.data || {};

View file

@ -1,6 +1,6 @@
import Transmuxer, { isPromise } from '../demux/transmuxer';
import { Events } from '../events';
import { enableLogs } from '../utils/logger';
import { ILogFunction, enableLogs, logger } from '../utils/logger';
import { EventEmitter } from 'eventemitter3';
import type { RemuxedTrack, RemuxerResult } from '../types/remuxer';
import type { TransmuxerResult, ChunkMetadata } from '../types/transmuxer';
@ -15,6 +15,20 @@ export default function TransmuxerWorker(self) {
observer.on(Events.FRAG_DECRYPTED, forwardMessage);
observer.on(Events.ERROR, forwardMessage);
// forward logger events to main thread
// Patch every method on the worker-side logger so calls are relayed to the
// main thread as 'workerLog' messages instead of being emitted locally.
const forwardWorkerLogs = () => {
  for (const logType in logger) {
    const relay: ILogFunction = (message?) => {
      forwardMessage('workerLog', {
        logType,
        message,
      });
    };
    logger[logType] = relay;
  }
};
self.addEventListener('message', (ev) => {
const data = ev.data;
switch (data.cmd) {
@ -28,6 +42,7 @@ export default function TransmuxerWorker(self) {
data.id
);
enableLogs(config.debug);
forwardWorkerLogs();
forwardMessage('init', null);
break;
}

View file

@ -12,7 +12,7 @@
import { Events } from '../events';
import { ErrorDetails, ErrorTypes } from '../errors';
import { logger } from '../utils/logger';
import { parseSegmentIndex } from '../utils/mp4-tools';
import { parseSegmentIndex, findBox } from '../utils/mp4-tools';
import M3U8Parser from './m3u8-parser';
import type { LevelParsed } from '../types/level';
import type {
@ -543,10 +543,13 @@ class PlaylistLoader {
response: LoaderResponse,
context: PlaylistLoaderContext
): void {
const sidxInfo = parseSegmentIndex(
new Uint8Array(response.data as ArrayBuffer)
);
const data = new Uint8Array(response.data as ArrayBuffer);
const sidxBox = findBox(data, ['sidx'])[0];
// if provided fragment does not contain sidx, early return
if (!sidxBox) {
return;
}
const sidxInfo = parseSegmentIndex(sidxBox);
if (!sidxInfo) {
return;
}
@ -564,7 +567,9 @@ class PlaylistLoader {
);
}
if (frag.initSegment) {
frag.initSegment.setByteRange(String(sidxInfo.moovEndOffset) + '@0');
const moovBox = findBox(data, ['moov'])[0];
const moovEndOffset = moovBox ? moovBox.length : null;
frag.initSegment.setByteRange(String(moovEndOffset) + '@0');
}
});
}

View file

@ -24,6 +24,7 @@ export interface LevelAttributes extends AttrList {
BANDWIDTH?: string;
BYTERANGE?: string;
'CLOSED-CAPTIONS'?: string;
CHARACTERISTICS?: string;
CODECS?: string;
DEFAULT?: string;
FORCED?: string;

View file

@ -1,4 +1,4 @@
interface ILogFunction {
export interface ILogFunction {
(message?: any, ...optionalParams: any[]): void;
}

View file

@ -91,22 +91,10 @@ type SidxInfo = {
version: number;
referencesCount: number;
references: any[];
moovEndOffset: number | null;
};
export function parseSegmentIndex(initSegment: Uint8Array): SidxInfo | null {
const moovBox = findBox(initSegment, ['moov']);
const moov = moovBox[0];
const moovEndOffset = moov ? moov.length : null; // we need this in case we need to chop of garbage of the end of current data
const sidxBox = findBox(initSegment, ['sidx']);
if (!sidxBox || !sidxBox[0]) {
return null;
}
export function parseSegmentIndex(sidx: Uint8Array): SidxInfo | null {
const references: any[] = [];
const sidx = sidxBox[0];
const version = sidx[0];
@ -179,7 +167,6 @@ export function parseSegmentIndex(initSegment: Uint8Array): SidxInfo | null {
version,
referencesCount,
references,
moovEndOffset,
};
}
@ -409,13 +396,19 @@ export function getDuration(data: Uint8Array, initData: InitData) {
}
if (videoDuration === 0 && audioDuration === 0) {
// If duration samples are not available in the traf use sidx subsegment_duration
const sidx = parseSegmentIndex(data);
if (sidx?.references) {
return sidx.references.reduce(
(dur, ref) => dur + ref.info.duration || 0,
0
);
let sidxDuration = 0;
const sidxs = findBox(data, ['sidx']);
for (let i = 0; i < sidxs.length; i++) {
const sidx = parseSegmentIndex(sidxs[i]);
if (sidx?.references) {
sidxDuration += sidx.references.reduce(
(dur, ref) => dur + ref.info.duration || 0,
0
);
}
}
return sidxDuration;
}
if (videoDuration) {
return videoDuration;