1 star, 0 forks — mirror of https://github.com/Chocobozzz/PeerTube.git, synced 2025-10-03 09:49:20 +02:00

Fix crash on download stream error

This commit is contained in:
Chocobozzz 2025-05-13 13:54:19 +02:00
parent 1efa315d55
commit a9069d0d0b
No known key found for this signature in database
GPG key ID: 583A612D890159BE
2 changed files with 123 additions and 67 deletions

View file

@ -1,7 +1,7 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
import { getHLS } from '@peertube/peertube-core-utils'
import { VideoDetails, VideoFile, VideoResolution } from '@peertube/peertube-models'
import { HttpStatusCode, VideoDetails, VideoFile, VideoResolution } from '@peertube/peertube-models'
import { buildSUUID } from '@peertube/peertube-node-utils'
import {
ObjectStorageCommand,
@ -17,8 +17,11 @@ import { checkTmpIsEmpty } from '@tests/shared/directories.js'
import { probeResBody } from '@tests/shared/videos.js'
import { expect } from 'chai'
import { FfprobeData } from 'fluent-ffmpeg'
import { remove } from 'fs-extra'
import { basename } from 'path'
describe('Test generate download', function () {
const resolutions = [ VideoResolution.H_NOVIDEO, VideoResolution.H_144P ]
let servers: PeerTubeServer[]
before(async function () {
@ -48,8 +51,6 @@ describe('Test generate download', function () {
await server.run(objectStorage.getDefaultMockConfig())
}
const resolutions = [ VideoResolution.H_NOVIDEO, VideoResolution.H_144P ]
{
await server.config.enableTranscoding({ hls: true, webVideo: true, splitAudioAndVideo: false, resolutions })
await server.videos.quickUpload({ name: 'common-' + seed })
@ -132,13 +133,37 @@ describe('Test generate download', function () {
})
}
// Regression suite for the "Fix crash on download stream error" change:
// requesting a generated download whose underlying HLS fragment file is
// missing on disk must produce an HTTP error, not crash the server process.
describe('Download crash', function () {
it('Should not crash the server on non existing file', async function () {
this.timeout(120000)
// splitAudioAndVideo: true so the HLS playlist has separate audio/video files,
// forcing generateDownload to mux multiple inputs (the code path under test)
await servers[0].config.enableTranscoding({ webVideo: false, hls: true, splitAudioAndVideo: true, resolutions })
const { uuid } = await servers[0].videos.quickUpload({ name: 'crash' })
// Wait for transcoding/federation jobs so every server knows the video
await waitJobs(servers)
for (const server of servers) {
const video = await server.videos.get({ id: uuid })
const file = getHLS(video).files.find(f => f.hasVideo)
// Delete the video fragment from the origin server's storage; note this
// always targets servers[0] even when querying the remote server below
await remove(servers[0].getDirectoryPath('streaming-playlists/hls/' + uuid + '/' + basename(file.fileUrl)))
// Origin is expected to answer 200, the remote 500 — presumably because the
// remote proxies/fetches the missing file mid-stream. NOTE(review): confirm
// this asymmetry is intentional; either way the server must stay up.
await server.videos.generateDownload({
videoId: uuid,
videoFileIds: [ file.id ],
expectedStatus: server === servers[0]
? HttpStatusCode.OK_200
: HttpStatusCode.INTERNAL_SERVER_ERROR_500
})
}
})
})
for (const objectStorage of [ undefined, new ObjectStorageCommand() ]) {
const testName = objectStorage
? 'On Object Storage'
: 'On filesystem'
describe(testName, function () {
describe('Videos on local server', function () {
runSuite(() => servers[0], objectStorage)
})

View file

@ -272,6 +272,25 @@ export async function muxToMergeVideoFiles (options: {
const inputs: (string | Readable)[] = []
const tmpDestinations: string[] = []
let ffmpegContainer: FFmpegContainer
return new Promise<void>(async (res, rej) => {
const cleanup = async () => {
for (const destination of tmpDestinations) {
await remove(destination)
}
for (const input of inputs) {
if (input instanceof Readable) {
if (!input.destroyed) input.destroy()
}
}
if (ffmpegContainer) {
ffmpegContainer.forceKill()
ffmpegContainer = undefined
}
}
try {
let maxResolution = 0
@ -281,7 +300,18 @@ export async function muxToMergeVideoFiles (options: {
maxResolution = Math.max(maxResolution, videoFile.resolution)
const { input, isTmpDestination } = await buildMuxInput(video, videoFile)
const { input, isTmpDestination } = await buildMuxInput(
video,
videoFile,
err => {
logger.warn(`Cannot build mux input of video ${video.url}`, { err, inputs: inputsToLog, ...lTags(video.uuid) })
cleanup()
.catch(cleanupErr => logger.error('Cannot cleanup after mux error', { err: cleanupErr, ...lTags(video.uuid) }))
rej(buildRequestError(err as any))
}
)
inputs.push(input)
@ -303,7 +333,7 @@ export async function muxToMergeVideoFiles (options: {
logger.info(`Muxing files for video ${video.url}`, { inputs: inputsToLog, ...lTags(video.uuid) })
const ffmpegContainer = new FFmpegContainer(getFFmpegCommandWrapperOptions('vod'))
ffmpegContainer = new FFmpegContainer(getFFmpegCommandWrapperOptions('vod'))
try {
await ffmpegContainer.mergeInputs({
@ -316,6 +346,8 @@ export async function muxToMergeVideoFiles (options: {
})
logger.info(`Mux ended for video ${video.url}`, { inputs: inputsToLog, ...lTags(video.uuid) })
res()
} catch (err) {
const message = err?.message || ''
@ -334,22 +366,18 @@ export async function muxToMergeVideoFiles (options: {
} finally {
ffmpegContainer.forceKill()
}
} catch (err) {
rej(err)
} finally {
for (const destination of tmpDestinations) {
await remove(destination)
}
for (const input of inputs) {
if (input instanceof Readable) {
if (!input.destroyed) input.destroy()
}
}
await cleanup()
}
})
}
async function buildMuxInput (
video: MVideo,
videoFile: MVideoFile
videoFile: MVideoFile,
onStreamError: (err: Error) => void
): Promise<{ input: Readable, isTmpDestination: false } | { input: string, isTmpDestination: boolean }> {
// ---------------------------------------------------------------------------
// Remote
@ -375,7 +403,10 @@ async function buildMuxInput (
return { input: destination, isTmpDestination: true }
}
return { input: generateRequestStream(videoFile.fileUrl, { timeout, bodyKBLimit }), isTmpDestination: false }
return {
input: generateRequestStream(videoFile.fileUrl, { timeout, bodyKBLimit }).on('error', onStreamError),
isTmpDestination: false
}
}
// ---------------------------------------------------------------------------