Mirror of https://github.com/Chocobozzz/PeerTube.git (synced 2025-10-05 19:42:24 +02:00)
Create another test suite for transcoding jobs
Commit 95faf1eaff (parent cba7977552)
11 changed files with 14 additions and 6 deletions
server/tests/api/transcoding/audio-only.ts (new file, 102 lines)
@@ -0,0 +1,102 @@

/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { getAudioStream, getVideoStreamDimensionsInfo } from '@server/helpers/ffmpeg'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/server-commands'

const expect = chai.expect

describe('Test audio only video transcoding', function () {
  let servers: PeerTubeServer[] = []
  let videoUUID: string
  let webtorrentAudioFileUrl: string
  let fragmentedAudioFileUrl: string

  before(async function () {
    this.timeout(120000)

    const configOverride = {
      transcoding: {
        enabled: true,
        resolutions: {
          '0p': true,
          '144p': false,
          '240p': true,
          '360p': false,
          '480p': false,
          '720p': false,
          '1080p': false,
          '1440p': false,
          '2160p': false
        },
        hls: {
          enabled: true
        },
        webtorrent: {
          enabled: true
        }
      }
    }
    servers = await createMultipleServers(2, configOverride)

    // Get the access tokens
    await setAccessTokensToServers(servers)

    // Server 1 and server 2 follow each other
    await doubleFollow(servers[0], servers[1])
  })

  it('Should upload a video and transcode it', async function () {
    this.timeout(120000)

    const { uuid } = await servers[0].videos.upload({ attributes: { name: 'audio only' } })
    videoUUID = uuid

    await waitJobs(servers)

    for (const server of servers) {
      const video = await server.videos.get({ id: videoUUID })
      expect(video.streamingPlaylists).to.have.lengthOf(1)

      for (const files of [ video.files, video.streamingPlaylists[0].files ]) {
        expect(files).to.have.lengthOf(3)
        expect(files[0].resolution.id).to.equal(720)
        expect(files[1].resolution.id).to.equal(240)
        expect(files[2].resolution.id).to.equal(0)
      }

      if (server.serverNumber === 1) {
        webtorrentAudioFileUrl = video.files[2].fileUrl
        fragmentedAudioFileUrl = video.streamingPlaylists[0].files[2].fileUrl
      }
    }
  })

  it('0p transcoded video should not have video', async function () {
    const paths = [
      servers[0].servers.buildWebTorrentFilePath(webtorrentAudioFileUrl),
      servers[0].servers.buildFragmentedFilePath(videoUUID, fragmentedAudioFileUrl)
    ]

    for (const path of paths) {
      const { audioStream } = await getAudioStream(path)
      expect(audioStream['codec_name']).to.be.equal('aac')
      expect(audioStream['bit_rate']).to.be.at.most(384 * 8000)

      const size = await getVideoStreamDimensionsInfo(path)
      expect(size).to.not.exist
    }
  })

  after(async function () {
    await cleanupTests(servers)
  })
})
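The two ffmpeg probes used above (an audio stream must exist, video dimensions must not) are the whole of the audio-only check. A minimal sketch of a reusable assertion built from the same helpers, should more '0p' cases be added later; the helper name is ours, not part of this commit:

import { getAudioStream, getVideoStreamDimensionsInfo } from '@server/helpers/ffmpeg'
import { expect } from 'chai'

// Hypothetical helper (not in this diff): asserts that a transcoded file is audio-only.
async function expectAudioOnlyFile (path: string) {
  const { audioStream } = await getAudioStream(path)
  expect(audioStream, `${path} should contain an audio stream`).to.exist

  // getVideoStreamDimensionsInfo() yields nothing when the file has no video stream
  const size = await getVideoStreamDimensionsInfo(path)
  expect(size, `${path} should not contain a video stream`).to.not.exist
}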
server/tests/api/transcoding/create-transcoding.ts (new file, 252 lines)
@@ -0,0 +1,252 @@

/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { checkResolutionsInMasterPlaylist, expectStartWith } from '@server/tests/shared'
import { areObjectStorageTestsDisabled } from '@shared/core-utils'
import { HttpStatusCode, VideoDetails } from '@shared/models'
import {
  cleanupTests,
  ConfigCommand,
  createMultipleServers,
  doubleFollow,
  expectNoFailedTranscodingJob,
  makeRawRequest,
  ObjectStorageCommand,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/server-commands'

const expect = chai.expect

async function checkFilesInObjectStorage (video: VideoDetails) {
  for (const file of video.files) {
    expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl())
    await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
  }

  if (video.streamingPlaylists.length === 0) return

  const hlsPlaylist = video.streamingPlaylists[0]
  for (const file of hlsPlaylist.files) {
    expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl())
    await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
  }

  expectStartWith(hlsPlaylist.playlistUrl, ObjectStorageCommand.getPlaylistBaseUrl())
  await makeRawRequest(hlsPlaylist.playlistUrl, HttpStatusCode.OK_200)

  expectStartWith(hlsPlaylist.segmentsSha256Url, ObjectStorageCommand.getPlaylistBaseUrl())
  await makeRawRequest(hlsPlaylist.segmentsSha256Url, HttpStatusCode.OK_200)
}

function runTests (objectStorage: boolean) {
  let servers: PeerTubeServer[] = []
  let videoUUID: string
  let publishedAt: string

  before(async function () {
    this.timeout(120000)

    const config = objectStorage
      ? ObjectStorageCommand.getDefaultConfig()
      : {}

    // Run server 2 to have transcoding enabled
    servers = await createMultipleServers(2, config)
    await setAccessTokensToServers(servers)

    await servers[0].config.disableTranscoding()

    await doubleFollow(servers[0], servers[1])

    if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets()

    const { shortUUID } = await servers[0].videos.quickUpload({ name: 'video' })
    videoUUID = shortUUID

    await waitJobs(servers)

    const video = await servers[0].videos.get({ id: videoUUID })
    publishedAt = video.publishedAt as string

    await servers[0].config.enableTranscoding()
  })

  it('Should generate HLS', async function () {
    this.timeout(60000)

    await servers[0].videos.runTranscoding({
      videoId: videoUUID,
      transcodingType: 'hls'
    })

    await waitJobs(servers)
    await expectNoFailedTranscodingJob(servers[0])

    for (const server of servers) {
      const videoDetails = await server.videos.get({ id: videoUUID })

      expect(videoDetails.files).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(5)

      if (objectStorage) await checkFilesInObjectStorage(videoDetails)
    }
  })

  it('Should generate WebTorrent', async function () {
    this.timeout(60000)

    await servers[0].videos.runTranscoding({
      videoId: videoUUID,
      transcodingType: 'webtorrent'
    })

    await waitJobs(servers)

    for (const server of servers) {
      const videoDetails = await server.videos.get({ id: videoUUID })

      expect(videoDetails.files).to.have.lengthOf(5)
      expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(5)

      if (objectStorage) await checkFilesInObjectStorage(videoDetails)
    }
  })

  it('Should generate WebTorrent from HLS only video', async function () {
    this.timeout(60000)

    await servers[0].videos.removeWebTorrentFiles({ videoId: videoUUID })
    await waitJobs(servers)

    await servers[0].videos.runTranscoding({ videoId: videoUUID, transcodingType: 'webtorrent' })
    await waitJobs(servers)

    for (const server of servers) {
      const videoDetails = await server.videos.get({ id: videoUUID })

      expect(videoDetails.files).to.have.lengthOf(5)
      expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(5)

      if (objectStorage) await checkFilesInObjectStorage(videoDetails)
    }
  })

  it('Should only generate WebTorrent', async function () {
    this.timeout(60000)

    await servers[0].videos.removeHLSFiles({ videoId: videoUUID })
    await waitJobs(servers)

    await servers[0].videos.runTranscoding({ videoId: videoUUID, transcodingType: 'webtorrent' })
    await waitJobs(servers)

    for (const server of servers) {
      const videoDetails = await server.videos.get({ id: videoUUID })

      expect(videoDetails.files).to.have.lengthOf(5)
      expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)

      if (objectStorage) await checkFilesInObjectStorage(videoDetails)
    }
  })

  it('Should correctly update HLS playlist on resolution change', async function () {
    this.timeout(120000)

    await servers[0].config.updateExistingSubConfig({
      newConfig: {
        transcoding: {
          enabled: true,
          resolutions: ConfigCommand.getCustomConfigResolutions(false),

          webtorrent: {
            enabled: true
          },
          hls: {
            enabled: true
          }
        }
      }
    })

    const { uuid } = await servers[0].videos.quickUpload({ name: 'quick' })

    await waitJobs(servers)

    for (const server of servers) {
      const videoDetails = await server.videos.get({ id: uuid })

      expect(videoDetails.files).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(1)

      if (objectStorage) await checkFilesInObjectStorage(videoDetails)
    }

    await servers[0].config.updateExistingSubConfig({
      newConfig: {
        transcoding: {
          enabled: true,
          resolutions: ConfigCommand.getCustomConfigResolutions(true),

          webtorrent: {
            enabled: true
          },
          hls: {
            enabled: true
          }
        }
      }
    })

    await servers[0].videos.runTranscoding({ videoId: uuid, transcodingType: 'hls' })
    await waitJobs(servers)

    for (const server of servers) {
      const videoDetails = await server.videos.get({ id: uuid })

      expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
      expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(5)

      if (objectStorage) {
        await checkFilesInObjectStorage(videoDetails)

        const hlsPlaylist = videoDetails.streamingPlaylists[0]
        const resolutions = hlsPlaylist.files.map(f => f.resolution.id)
        await checkResolutionsInMasterPlaylist({ server: servers[0], playlistUrl: hlsPlaylist.playlistUrl, resolutions })

        const shaBody = await servers[0].streamingPlaylists.getSegmentSha256({ url: hlsPlaylist.segmentsSha256Url })
        expect(Object.keys(shaBody)).to.have.lengthOf(5)
      }
    }
  })

  it('Should not have updated published at attributes', async function () {
    const video = await servers[0].videos.get({ id: videoUUID })

    expect(video.publishedAt).to.equal(publishedAt)
  })

  after(async function () {
    await cleanupTests(servers)
  })
}

describe('Test create transcoding jobs from API', function () {

  describe('On filesystem', function () {
    runTests(false)
  })

  describe('On object storage', function () {
    if (areObjectStorageTestsDisabled()) return

    runTests(true)
  })
})
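runTranscoding() is the server-command wrapper around the "create transcoding job" API this suite exercises. As a rough sketch only, the underlying HTTP call presumably looks like the following; the route, payload and auth header shapes are assumptions on our part, not something this diff shows:

// Hedged sketch: approximate HTTP call behind servers[0].videos.runTranscoding().
// Endpoint and body shape are assumptions, not confirmed by this commit.
async function runTranscodingRaw (baseUrl: string, accessToken: string, videoId: string, transcodingType: 'hls' | 'webtorrent') {
  const res = await fetch(`${baseUrl}/api/v1/videos/${videoId}/transcoding`, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${accessToken}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ transcodingType })
  })

  if (!res.ok) throw new Error(`Transcoding job creation failed with HTTP ${res.status}`)
}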
server/tests/api/transcoding/hls.ts (new file, 289 lines)
@@ -0,0 +1,289 @@

/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { basename, join } from 'path'
import {
  checkDirectoryIsEmpty,
  checkResolutionsInMasterPlaylist,
  checkSegmentHash,
  checkTmpIsEmpty,
  expectStartWith,
  hlsInfohashExist
} from '@server/tests/shared'
import { areObjectStorageTestsDisabled, removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils'
import { HttpStatusCode, VideoStreamingPlaylistType } from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  makeRawRequest,
  ObjectStorageCommand,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs,
  webtorrentAdd
} from '@shared/server-commands'
import { DEFAULT_AUDIO_RESOLUTION } from '../../../initializers/constants'

const expect = chai.expect

async function checkHlsPlaylist (options: {
  servers: PeerTubeServer[]
  videoUUID: string
  hlsOnly: boolean

  resolutions?: number[]
  objectStorageBaseUrl: string
}) {
  const { videoUUID, hlsOnly, objectStorageBaseUrl } = options

  const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ]

  for (const server of options.servers) {
    const videoDetails = await server.videos.get({ id: videoUUID })
    const baseUrl = `http://${videoDetails.account.host}`

    expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)

    const hlsPlaylist = videoDetails.streamingPlaylists.find(p => p.type === VideoStreamingPlaylistType.HLS)
    expect(hlsPlaylist).to.not.be.undefined

    const hlsFiles = hlsPlaylist.files
    expect(hlsFiles).to.have.lengthOf(resolutions.length)

    if (hlsOnly) expect(videoDetails.files).to.have.lengthOf(0)
    else expect(videoDetails.files).to.have.lengthOf(resolutions.length)

    // Check JSON files
    for (const resolution of resolutions) {
      const file = hlsFiles.find(f => f.resolution.id === resolution)
      expect(file).to.not.be.undefined

      expect(file.magnetUri).to.have.lengthOf.above(2)
      expect(file.torrentUrl).to.match(
        new RegExp(`http://${server.host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`)
      )

      if (objectStorageBaseUrl) {
        expectStartWith(file.fileUrl, objectStorageBaseUrl)
      } else {
        expect(file.fileUrl).to.match(
          new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`)
        )
      }

      expect(file.resolution.label).to.equal(resolution + 'p')

      await makeRawRequest(file.torrentUrl, HttpStatusCode.OK_200)
      await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)

      const torrent = await webtorrentAdd(file.magnetUri, true)
      expect(torrent.files).to.be.an('array')
      expect(torrent.files.length).to.equal(1)
      expect(torrent.files[0].path).to.exist.and.to.not.equal('')
    }

    // Check master playlist
    {
      await checkResolutionsInMasterPlaylist({ server, playlistUrl: hlsPlaylist.playlistUrl, resolutions })

      const masterPlaylist = await server.streamingPlaylists.get({ url: hlsPlaylist.playlistUrl })

      let i = 0
      for (const resolution of resolutions) {
        expect(masterPlaylist).to.contain(`${resolution}.m3u8`)

        const url = 'http://' + videoDetails.account.host
        await hlsInfohashExist(url, hlsPlaylist.playlistUrl, i)

        i++
      }
    }

    // Check resolution playlists
    {
      for (const resolution of resolutions) {
        const file = hlsFiles.find(f => f.resolution.id === resolution)
        const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8'

        const url = objectStorageBaseUrl
          ? `${objectStorageBaseUrl}hls/${videoUUID}/${playlistName}`
          : `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}`

        const subPlaylist = await server.streamingPlaylists.get({ url })

        expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`))
        expect(subPlaylist).to.contain(basename(file.fileUrl))
      }
    }

    {
      const baseUrlAndPath = objectStorageBaseUrl
        ? objectStorageBaseUrl + 'hls/' + videoUUID
        : baseUrl + '/static/streaming-playlists/hls/' + videoUUID

      for (const resolution of resolutions) {
        await checkSegmentHash({
          server,
          baseUrlPlaylist: baseUrlAndPath,
          baseUrlSegment: baseUrlAndPath,
          resolution,
          hlsPlaylist
        })
      }
    }
  }
}

describe('Test HLS videos', function () {
  let servers: PeerTubeServer[] = []
  let videoUUID = ''
  let videoAudioUUID = ''

  function runTestSuite (hlsOnly: boolean, objectStorageBaseUrl?: string) {

    it('Should upload a video and transcode it to HLS', async function () {
      this.timeout(120000)

      const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video 1', fixture: 'video_short.webm' } })
      videoUUID = uuid

      await waitJobs(servers)

      await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl })
    })

    it('Should upload an audio file and transcode it to HLS', async function () {
      this.timeout(120000)

      const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video audio', fixture: 'sample.ogg' } })
      videoAudioUUID = uuid

      await waitJobs(servers)

      await checkHlsPlaylist({
        servers,
        videoUUID: videoAudioUUID,
        hlsOnly,
        resolutions: [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ],
        objectStorageBaseUrl
      })
    })

    it('Should update the video', async function () {
      this.timeout(30000)

      await servers[0].videos.update({ id: videoUUID, attributes: { name: 'video 1 updated' } })

      await waitJobs(servers)

      await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl })
    })

    it('Should delete videos', async function () {
      this.timeout(10000)

      await servers[0].videos.remove({ id: videoUUID })
      await servers[0].videos.remove({ id: videoAudioUUID })

      await waitJobs(servers)

      for (const server of servers) {
        await server.videos.get({ id: videoUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
        await server.videos.get({ id: videoAudioUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
      }
    })

    it('Should have the playlists/segment deleted from the disk', async function () {
      for (const server of servers) {
        await checkDirectoryIsEmpty(server, 'videos')
        await checkDirectoryIsEmpty(server, join('streaming-playlists', 'hls'))
      }
    })

    it('Should have an empty tmp directory', async function () {
      for (const server of servers) {
        await checkTmpIsEmpty(server)
      }
    })
  }

  before(async function () {
    this.timeout(120000)

    const configOverride = {
      transcoding: {
        enabled: true,
        allow_audio_files: true,
        hls: {
          enabled: true
        }
      }
    }
    servers = await createMultipleServers(2, configOverride)

    // Get the access tokens
    await setAccessTokensToServers(servers)

    // Server 1 and server 2 follow each other
    await doubleFollow(servers[0], servers[1])
  })

  describe('With WebTorrent & HLS enabled', function () {
    runTestSuite(false)
  })

  describe('With only HLS enabled', function () {

    before(async function () {
      await servers[0].config.updateCustomSubConfig({
        newConfig: {
          transcoding: {
            enabled: true,
            allowAudioFiles: true,
            resolutions: {
              '144p': false,
              '240p': true,
              '360p': true,
              '480p': true,
              '720p': true,
              '1080p': true,
              '1440p': true,
              '2160p': true
            },
            hls: {
              enabled: true
            },
            webtorrent: {
              enabled: false
            }
          }
        }
      })
    })

    runTestSuite(true)
  })

  describe('With object storage enabled', function () {
    if (areObjectStorageTestsDisabled()) return

    before(async function () {
      this.timeout(120000)

      const configOverride = ObjectStorageCommand.getDefaultConfig()
      await ObjectStorageCommand.prepareDefaultBuckets()

      await servers[0].kill()
      await servers[0].run(configOverride)
    })

    runTestSuite(true, ObjectStorageCommand.getPlaylistBaseUrl())
  })

  after(async function () {
    await cleanupTests(servers)
  })
})
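The resolution-playlist check above derives each per-resolution playlist name from the fragmented MP4 filename. A small worked example of that naming convention, using a simplified stand-in for removeFragmentedMP4Ext (the real helper lives in @shared/core-utils and may differ in detail):

import { basename } from 'path'

// Simplified stand-in for removeFragmentedMP4Ext(), shown only to illustrate
// the naming convention exercised by the checks above.
function removeFragmentedMP4ExtSketch (name: string) {
  return name.replace(/-fragmented\.mp4$/, '')
}

const fileUrl = 'http://example.com/static/streaming-playlists/hls/abc/217788b0-9b4b-42a3-ae75-3be7f81b3be4-480-fragmented.mp4'
const playlistName = removeFragmentedMP4ExtSketch(basename(fileUrl)) + '.m3u8'
// playlistName === '217788b0-9b4b-42a3-ae75-3be7f81b3be4-480.m3u8', which also satisfies
// the master playlist assertion that it contains '480.m3u8'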
server/tests/api/transcoding/index.ts (new file, 5 lines)
@@ -0,0 +1,5 @@

export * from './audio-only'
export * from './create-transcoding'
export * from './hls'
export * from './transcoder'
export * from './video-editor'
server/tests/api/transcoding/transcoder.ts (new file, 733 lines)
@@ -0,0 +1,733 @@

/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { omit } from 'lodash'
import { canDoQuickTranscode } from '@server/helpers/ffmpeg'
import { generateHighBitrateVideo, generateVideoWithFramerate, getAllFiles } from '@server/tests/shared'
import { buildAbsoluteFixturePath, getMaxBitrate, getMinLimitBitrate } from '@shared/core-utils'
import {
  getAudioStream,
  buildFileMetadata,
  getVideoStreamBitrate,
  getVideoStreamFPS,
  getVideoStreamDimensionsInfo,
  hasAudioStream
} from '@shared/extra-utils'
import { HttpStatusCode, VideoState } from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  makeGetRequest,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs,
  webtorrentAdd
} from '@shared/server-commands'

const expect = chai.expect

function updateConfigForTranscoding (server: PeerTubeServer) {
  return server.config.updateCustomSubConfig({
    newConfig: {
      transcoding: {
        enabled: true,
        allowAdditionalExtensions: true,
        allowAudioFiles: true,
        hls: { enabled: true },
        webtorrent: { enabled: true },
        resolutions: {
          '0p': false,
          '144p': true,
          '240p': true,
          '360p': true,
          '480p': true,
          '720p': true,
          '1080p': true,
          '1440p': true,
          '2160p': true
        }
      }
    }
  })
}

describe('Test video transcoding', function () {
  let servers: PeerTubeServer[] = []
  let video4k: string

  before(async function () {
    this.timeout(30_000)

    // Run servers
    servers = await createMultipleServers(2)

    await setAccessTokensToServers(servers)

    await doubleFollow(servers[0], servers[1])

    await updateConfigForTranscoding(servers[1])
  })

  describe('Basic transcoding (or not)', function () {

    it('Should not transcode video on server 1', async function () {
      this.timeout(60_000)

      const attributes = {
        name: 'my super name for server 1',
        description: 'my super description for server 1',
        fixture: 'video_short.webm'
      }
      await servers[0].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()
        const video = data[0]

        const videoDetails = await server.videos.get({ id: video.id })
        expect(videoDetails.files).to.have.lengthOf(1)

        const magnetUri = videoDetails.files[0].magnetUri
        expect(magnetUri).to.match(/\.webm/)

        const torrent = await webtorrentAdd(magnetUri, true)
        expect(torrent.files).to.be.an('array')
        expect(torrent.files.length).to.equal(1)
        expect(torrent.files[0].path).match(/\.webm$/)
      }
    })

    it('Should transcode video on server 2', async function () {
      this.timeout(120_000)

      const attributes = {
        name: 'my super name for server 2',
        description: 'my super description for server 2',
        fixture: 'video_short.webm'
      }
      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const video = data.find(v => v.name === attributes.name)
        const videoDetails = await server.videos.get({ id: video.id })

        expect(videoDetails.files).to.have.lengthOf(5)

        const magnetUri = videoDetails.files[0].magnetUri
        expect(magnetUri).to.match(/\.mp4/)

        const torrent = await webtorrentAdd(magnetUri, true)
        expect(torrent.files).to.be.an('array')
        expect(torrent.files.length).to.equal(1)
        expect(torrent.files[0].path).match(/\.mp4$/)
      }
    })

    it('Should wait for transcoding before publishing the video', async function () {
      this.timeout(160_000)

      {
        // Upload the video, but wait transcoding
        const attributes = {
          name: 'waiting video',
          fixture: 'video_short1.webm',
          waitTranscoding: true
        }
        const { uuid } = await servers[1].videos.upload({ attributes })
        const videoId = uuid

        // Should be in transcode state
        const body = await servers[1].videos.get({ id: videoId })
        expect(body.name).to.equal('waiting video')
        expect(body.state.id).to.equal(VideoState.TO_TRANSCODE)
        expect(body.state.label).to.equal('To transcode')
        expect(body.waitTranscoding).to.be.true

        {
          // Should have my video
          const { data } = await servers[1].videos.listMyVideos()
          const videoToFindInMine = data.find(v => v.name === attributes.name)
          expect(videoToFindInMine).not.to.be.undefined
          expect(videoToFindInMine.state.id).to.equal(VideoState.TO_TRANSCODE)
          expect(videoToFindInMine.state.label).to.equal('To transcode')
          expect(videoToFindInMine.waitTranscoding).to.be.true
        }

        {
          // Should not list this video
          const { data } = await servers[1].videos.list()
          const videoToFindInList = data.find(v => v.name === attributes.name)
          expect(videoToFindInList).to.be.undefined
        }

        // Server 1 should not have the video yet
        await servers[0].videos.get({ id: videoId, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
      }

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()
        const videoToFind = data.find(v => v.name === 'waiting video')
        expect(videoToFind).not.to.be.undefined

        const videoDetails = await server.videos.get({ id: videoToFind.id })

        expect(videoDetails.state.id).to.equal(VideoState.PUBLISHED)
        expect(videoDetails.state.label).to.equal('Published')
        expect(videoDetails.waitTranscoding).to.be.true
      }
    })

    it('Should accept and transcode additional extensions', async function () {
      this.timeout(300_000)

      for (const fixture of [ 'video_short.mkv', 'video_short.avi' ]) {
        const attributes = {
          name: fixture,
          fixture
        }

        await servers[1].videos.upload({ attributes })

        await waitJobs(servers)

        for (const server of servers) {
          const { data } = await server.videos.list()

          const video = data.find(v => v.name === attributes.name)
          const videoDetails = await server.videos.get({ id: video.id })
          expect(videoDetails.files).to.have.lengthOf(5)

          const magnetUri = videoDetails.files[0].magnetUri
          expect(magnetUri).to.contain('.mp4')
        }
      }
    })

    it('Should transcode a 4k video', async function () {
      this.timeout(200_000)

      const attributes = {
        name: '4k video',
        fixture: 'video_short_4k.mp4'
      }

      const { uuid } = await servers[1].videos.upload({ attributes })
      video4k = uuid

      await waitJobs(servers)

      const resolutions = [ 144, 240, 360, 480, 720, 1080, 1440, 2160 ]

      for (const server of servers) {
        const videoDetails = await server.videos.get({ id: video4k })
        expect(videoDetails.files).to.have.lengthOf(resolutions.length)

        for (const r of resolutions) {
          expect(videoDetails.files.find(f => f.resolution.id === r)).to.not.be.undefined
          expect(videoDetails.streamingPlaylists[0].files.find(f => f.resolution.id === r)).to.not.be.undefined
        }
      }
    })
  })

  describe('Audio transcoding', function () {

    it('Should transcode high bit rate mp3 to proper bit rate', async function () {
      this.timeout(60_000)

      const attributes = {
        name: 'mp3_256k',
        fixture: 'video_short_mp3_256k.mp4'
      }
      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const video = data.find(v => v.name === attributes.name)
        const videoDetails = await server.videos.get({ id: video.id })

        expect(videoDetails.files).to.have.lengthOf(5)

        const file = videoDetails.files.find(f => f.resolution.id === 240)
        const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
        const probe = await getAudioStream(path)

        if (probe.audioStream) {
          expect(probe.audioStream['codec_name']).to.be.equal('aac')
          expect(probe.audioStream['bit_rate']).to.be.at.most(384 * 8000)
        } else {
          this.fail('Could not retrieve the audio stream on ' + probe.absolutePath)
        }
      }
    })

    it('Should transcode video with no audio and have no audio itself', async function () {
      this.timeout(60_000)

      const attributes = {
        name: 'no_audio',
        fixture: 'video_short_no_audio.mp4'
      }
      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const video = data.find(v => v.name === attributes.name)
        const videoDetails = await server.videos.get({ id: video.id })

        const file = videoDetails.files.find(f => f.resolution.id === 240)
        const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)

        expect(await hasAudioStream(path)).to.be.false
      }
    })

    it('Should leave the audio untouched, but properly transcode the video', async function () {
      this.timeout(60_000)

      const attributes = {
        name: 'untouched_audio',
        fixture: 'video_short.mp4'
      }
      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const video = data.find(v => v.name === attributes.name)
        const videoDetails = await server.videos.get({ id: video.id })

        expect(videoDetails.files).to.have.lengthOf(5)

        const fixturePath = buildAbsoluteFixturePath(attributes.fixture)
        const fixtureVideoProbe = await getAudioStream(fixturePath)

        const file = videoDetails.files.find(f => f.resolution.id === 240)
        const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)

        const videoProbe = await getAudioStream(path)

        if (videoProbe.audioStream && fixtureVideoProbe.audioStream) {
          const toOmit = [ 'max_bit_rate', 'duration', 'duration_ts', 'nb_frames', 'start_time', 'start_pts' ]
          expect(omit(videoProbe.audioStream, toOmit)).to.be.deep.equal(omit(fixtureVideoProbe.audioStream, toOmit))
        } else {
          this.fail('Could not retrieve the audio stream on ' + videoProbe.absolutePath)
        }
      }
    })
  })

  describe('Audio upload', function () {

    function runSuite (mode: 'legacy' | 'resumable') {

      before(async function () {
        await servers[1].config.updateCustomSubConfig({
          newConfig: {
            transcoding: {
              hls: { enabled: true },
              webtorrent: { enabled: true },
              resolutions: {
                '0p': false,
                '144p': false,
                '240p': false,
                '360p': false,
                '480p': false,
                '720p': false,
                '1080p': false,
                '1440p': false,
                '2160p': false
              }
            }
          }
        })
      })

      it('Should merge an audio file with the preview file', async function () {
        this.timeout(60_000)

        const attributes = { name: 'audio_with_preview', previewfile: 'preview.jpg', fixture: 'sample.ogg' }
        await servers[1].videos.upload({ attributes, mode })

        await waitJobs(servers)

        for (const server of servers) {
          const { data } = await server.videos.list()

          const video = data.find(v => v.name === 'audio_with_preview')
          const videoDetails = await server.videos.get({ id: video.id })

          expect(videoDetails.files).to.have.lengthOf(1)

          await makeGetRequest({ url: server.url, path: videoDetails.thumbnailPath, expectedStatus: HttpStatusCode.OK_200 })
          await makeGetRequest({ url: server.url, path: videoDetails.previewPath, expectedStatus: HttpStatusCode.OK_200 })

          const magnetUri = videoDetails.files[0].magnetUri
          expect(magnetUri).to.contain('.mp4')
        }
      })

      it('Should upload an audio file and choose a default background image', async function () {
        this.timeout(60_000)

        const attributes = { name: 'audio_without_preview', fixture: 'sample.ogg' }
        await servers[1].videos.upload({ attributes, mode })

        await waitJobs(servers)

        for (const server of servers) {
          const { data } = await server.videos.list()

          const video = data.find(v => v.name === 'audio_without_preview')
          const videoDetails = await server.videos.get({ id: video.id })

          expect(videoDetails.files).to.have.lengthOf(1)

          await makeGetRequest({ url: server.url, path: videoDetails.thumbnailPath, expectedStatus: HttpStatusCode.OK_200 })
          await makeGetRequest({ url: server.url, path: videoDetails.previewPath, expectedStatus: HttpStatusCode.OK_200 })

          const magnetUri = videoDetails.files[0].magnetUri
          expect(magnetUri).to.contain('.mp4')
        }
      })

      it('Should upload an audio file and create an audio version only', async function () {
        this.timeout(60_000)

        await servers[1].config.updateCustomSubConfig({
          newConfig: {
            transcoding: {
              hls: { enabled: true },
              webtorrent: { enabled: true },
              resolutions: {
                '0p': true,
                '144p': false,
                '240p': false,
                '360p': false
              }
            }
          }
        })

        const attributes = { name: 'audio_with_preview', previewfile: 'preview.jpg', fixture: 'sample.ogg' }
        const { id } = await servers[1].videos.upload({ attributes, mode })

        await waitJobs(servers)

        for (const server of servers) {
          const videoDetails = await server.videos.get({ id })

          for (const files of [ videoDetails.files, videoDetails.streamingPlaylists[0].files ]) {
            expect(files).to.have.lengthOf(2)
            expect(files.find(f => f.resolution.id === 0)).to.not.be.undefined
          }
        }

        await updateConfigForTranscoding(servers[1])
      })
    }

    describe('Legacy upload', function () {
      runSuite('legacy')
    })

    describe('Resumable upload', function () {
      runSuite('resumable')
    })
  })

  describe('Framerate', function () {

    it('Should transcode a 60 FPS video', async function () {
      this.timeout(60_000)

      const attributes = {
        name: 'my super 30fps name for server 2',
        description: 'my super 30fps description for server 2',
        fixture: '60fps_720p_small.mp4'
      }
      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const video = data.find(v => v.name === attributes.name)
        const videoDetails = await server.videos.get({ id: video.id })

        expect(videoDetails.files).to.have.lengthOf(5)
        expect(videoDetails.files[0].fps).to.be.above(58).and.below(62)
        expect(videoDetails.files[1].fps).to.be.below(31)
        expect(videoDetails.files[2].fps).to.be.below(31)
        expect(videoDetails.files[3].fps).to.be.below(31)
        expect(videoDetails.files[4].fps).to.be.below(31)

        for (const resolution of [ 144, 240, 360, 480 ]) {
          const file = videoDetails.files.find(f => f.resolution.id === resolution)
          const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
          const fps = await getVideoStreamFPS(path)

          expect(fps).to.be.below(31)
        }

        const file = videoDetails.files.find(f => f.resolution.id === 720)
        const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
        const fps = await getVideoStreamFPS(path)

        expect(fps).to.be.above(58).and.below(62)
      }
    })

    it('Should downscale to the closest divisor standard framerate', async function () {
      this.timeout(200_000)

      let tempFixturePath: string

      {
        tempFixturePath = await generateVideoWithFramerate(59)

        const fps = await getVideoStreamFPS(tempFixturePath)
        expect(fps).to.be.equal(59)
      }

      const attributes = {
        name: '59fps video',
        description: '59fps video',
        fixture: tempFixturePath
      }

      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const { id } = data.find(v => v.name === attributes.name)
        const video = await server.videos.get({ id })

        {
          const file = video.files.find(f => f.resolution.id === 240)
          const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
          const fps = await getVideoStreamFPS(path)
          expect(fps).to.be.equal(25)
        }

        {
          const file = video.files.find(f => f.resolution.id === 720)
          const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
          const fps = await getVideoStreamFPS(path)
          expect(fps).to.be.equal(59)
        }
      }
    })
  })

  describe('Bitrate control', function () {

    it('Should respect maximum bitrate values', async function () {
      this.timeout(160_000)

      const tempFixturePath = await generateHighBitrateVideo()

      const attributes = {
        name: 'high bitrate video',
        description: 'high bitrate video',
        fixture: tempFixturePath
      }

      await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      for (const server of servers) {
        const { data } = await server.videos.list()

        const { id } = data.find(v => v.name === attributes.name)
        const video = await server.videos.get({ id })

        for (const resolution of [ 240, 360, 480, 720, 1080 ]) {
          const file = video.files.find(f => f.resolution.id === resolution)
          const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)

          const bitrate = await getVideoStreamBitrate(path)
          const fps = await getVideoStreamFPS(path)
          const dataResolution = await getVideoStreamDimensionsInfo(path)

          expect(resolution).to.equal(resolution)

          const maxBitrate = getMaxBitrate({ ...dataResolution, fps })
          expect(bitrate).to.be.below(maxBitrate)
        }
      }
    })

    it('Should not transcode to an higher bitrate than the original file but above our low limit', async function () {
      this.timeout(160_000)

      const newConfig = {
        transcoding: {
          enabled: true,
          resolutions: {
            '144p': true,
            '240p': true,
            '360p': true,
            '480p': true,
            '720p': true,
            '1080p': true,
            '1440p': true,
            '2160p': true
          },
          webtorrent: { enabled: true },
          hls: { enabled: true }
        }
      }
      await servers[1].config.updateCustomSubConfig({ newConfig })

      const attributes = {
        name: 'low bitrate',
        fixture: 'low-bitrate.mp4'
      }

      const { id } = await servers[1].videos.upload({ attributes })

      await waitJobs(servers)

      const video = await servers[1].videos.get({ id })

      const resolutions = [ 240, 360, 480, 720, 1080 ]
      for (const r of resolutions) {
        const file = video.files.find(f => f.resolution.id === r)

        const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
        const bitrate = await getVideoStreamBitrate(path)

        const inputBitrate = 60_000
        const limit = getMinLimitBitrate({ fps: 10, ratio: 1, resolution: r })
        let belowValue = Math.max(inputBitrate, limit)
        belowValue += belowValue * 0.20 // Apply 20% margin because bitrate control is not very precise

        expect(bitrate, `${path} not below ${limit}`).to.be.below(belowValue)
      }
    })
  })

  describe('FFprobe', function () {

    it('Should provide valid ffprobe data', async function () {
      this.timeout(160_000)

      const videoUUID = (await servers[1].videos.quickUpload({ name: 'ffprobe data' })).uuid
      await waitJobs(servers)

      {
        const video = await servers[1].videos.get({ id: videoUUID })
        const file = video.files.find(f => f.resolution.id === 240)
        const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
        const metadata = await buildFileMetadata(path)

        // expected format properties
        for (const p of [
          'tags.encoder',
          'format_long_name',
          'size',
          'bit_rate'
        ]) {
          expect(metadata.format).to.have.nested.property(p)
        }

        // expected stream properties
        for (const p of [
          'codec_long_name',
          'profile',
          'width',
          'height',
          'display_aspect_ratio',
          'avg_frame_rate',
          'pix_fmt'
        ]) {
          expect(metadata.streams[0]).to.have.nested.property(p)
        }

        expect(metadata).to.not.have.nested.property('format.filename')
      }

      for (const server of servers) {
        const videoDetails = await server.videos.get({ id: videoUUID })

        const videoFiles = getAllFiles(videoDetails)
        expect(videoFiles).to.have.lengthOf(10)

        for (const file of videoFiles) {
          expect(file.metadata).to.be.undefined
          expect(file.metadataUrl).to.exist
          expect(file.metadataUrl).to.contain(servers[1].url)
          expect(file.metadataUrl).to.contain(videoUUID)

          const metadata = await server.videos.getFileMetadata({ url: file.metadataUrl })
          expect(metadata).to.have.nested.property('format.size')
        }
      }
    })

    it('Should correctly detect if quick transcode is possible', async function () {
      this.timeout(10_000)

      expect(await canDoQuickTranscode(buildAbsoluteFixturePath('video_short.mp4'))).to.be.true
      expect(await canDoQuickTranscode(buildAbsoluteFixturePath('video_short.webm'))).to.be.false
    })
  })

  describe('Transcoding job queue', function () {

    it('Should have the appropriate priorities for transcoding jobs', async function () {
      const body = await servers[1].jobs.list({
        start: 0,
        count: 100,
        sort: 'createdAt',
        jobType: 'video-transcoding'
      })

      const jobs = body.data
      const transcodingJobs = jobs.filter(j => j.data.videoUUID === video4k)

      expect(transcodingJobs).to.have.lengthOf(16)

      const hlsJobs = transcodingJobs.filter(j => j.data.type === 'new-resolution-to-hls')
      const webtorrentJobs = transcodingJobs.filter(j => j.data.type === 'new-resolution-to-webtorrent')
      const optimizeJobs = transcodingJobs.filter(j => j.data.type === 'optimize-to-webtorrent')

      expect(hlsJobs).to.have.lengthOf(8)
      expect(webtorrentJobs).to.have.lengthOf(7)
      expect(optimizeJobs).to.have.lengthOf(1)

      for (const j of optimizeJobs.concat(hlsJobs.concat(webtorrentJobs))) {
        expect(j.priority).to.be.greaterThan(100)
        expect(j.priority).to.be.lessThan(150)
      }
    })
  })

  after(async function () {
    await cleanupTests(servers)
  })
})
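The "low bitrate" assertion above tolerates a 20% overshoot because encoder rate control is not exact. A worked numeric example of that margin, with a hypothetical value standing in for getMinLimitBitrate():

// Worked example of the tolerance applied in the low-bitrate test above.
// `limit` is a hypothetical figure; the real one comes from getMinLimitBitrate().
const inputBitrate = 60_000
const limit = 20_000                            // hypothetical minimum-bitrate floor for this resolution
let belowValue = Math.max(inputBitrate, limit)  // 60_000: never expect less than the source bitrate
belowValue += belowValue * 0.20                 // 72_000: allow 20% slack for imprecise rate control
// The measured bitrate of the transcoded file must stay below 72_000 for the assertion to pass.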
server/tests/api/transcoding/video-editor.ts (new file, 368 lines)
@@ -0,0 +1,368 @@

import { expect } from 'chai'
import { expectStartWith, getAllFiles } from '@server/tests/shared'
import { areObjectStorageTestsDisabled } from '@shared/core-utils'
import { VideoEditorTask } from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  ObjectStorageCommand,
  PeerTubeServer,
  setAccessTokensToServers,
  setDefaultVideoChannel,
  VideoEditorCommand,
  waitJobs
} from '@shared/server-commands'

describe('Test video editor', function () {
  let servers: PeerTubeServer[] = []
  let videoUUID: string

  async function checkDuration (server: PeerTubeServer, duration: number) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.duration).to.be.approximately(duration, 1)

    for (const file of video.files) {
      const metadata = await server.videos.getFileMetadata({ url: file.metadataUrl })

      for (const stream of metadata.streams) {
        expect(Math.round(stream.duration)).to.be.approximately(duration, 1)
      }
    }
  }

  async function renewVideo (fixture = 'video_short.webm') {
    const video = await servers[0].videos.quickUpload({ name: 'video', fixture })
    videoUUID = video.uuid

    await waitJobs(servers)
  }

  async function createTasks (tasks: VideoEditorTask[]) {
    await servers[0].videoEditor.createEditionTasks({ videoId: videoUUID, tasks })
    await waitJobs(servers)
  }

  before(async function () {
    this.timeout(120_000)

    servers = await createMultipleServers(2)

    await setAccessTokensToServers(servers)
    await setDefaultVideoChannel(servers)

    await doubleFollow(servers[0], servers[1])

    await servers[0].config.enableMinimumTranscoding()

    await servers[0].config.updateExistingSubConfig({
      newConfig: {
        videoEditor: {
          enabled: true
        }
      }
    })
  })

  describe('Cutting', function () {

    it('Should cut the beginning of the video', async function () {
      this.timeout(120_000)

      await renewVideo()
      await waitJobs(servers)

      const beforeTasks = new Date()

      await createTasks([
        {
          name: 'cut',
          options: {
            start: 2
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 3)

        const video = await server.videos.get({ id: videoUUID })
        expect(new Date(video.publishedAt)).to.be.below(beforeTasks)
      }
    })

    it('Should cut the end of the video', async function () {
      this.timeout(120_000)
      await renewVideo()

      await createTasks([
        {
          name: 'cut',
          options: {
            end: 2
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 2)
      }
    })

    it('Should cut start/end of the video', async function () {
      this.timeout(120_000)
      await renewVideo('video_short1.webm') // 10 seconds video duration

      await createTasks([
        {
          name: 'cut',
          options: {
            start: 2,
            end: 6
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 4)
      }
    })
  })

  describe('Intro/Outro', function () {

    it('Should add an intro', async function () {
      this.timeout(120_000)
      await renewVideo()

      await createTasks([
        {
          name: 'add-intro',
          options: {
            file: 'video_short.webm'
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 10)
      }
    })

    it('Should add an outro', async function () {
      this.timeout(120_000)
      await renewVideo()

      await createTasks([
        {
          name: 'add-outro',
          options: {
            file: 'video_very_short_240p.mp4'
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 7)
      }
    })

    it('Should add an intro/outro', async function () {
      this.timeout(120_000)
      await renewVideo()

      await createTasks([
        {
          name: 'add-intro',
          options: {
            file: 'video_very_short_240p.mp4'
          }
        },
        {
          name: 'add-outro',
          options: {
            // Different frame rate
            file: 'video_short2.webm'
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 12)
      }
    })

    it('Should add an intro to a video without audio', async function () {
      this.timeout(120_000)
      await renewVideo('video_short_no_audio.mp4')

      await createTasks([
        {
          name: 'add-intro',
          options: {
            file: 'video_very_short_240p.mp4'
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 7)
      }
    })

    it('Should add an outro without audio to a video with audio', async function () {
      this.timeout(120_000)
      await renewVideo()

      await createTasks([
        {
          name: 'add-outro',
          options: {
            file: 'video_short_no_audio.mp4'
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 10)
      }
    })

    it('Should add an outro without audio to a video without audio', async function () {
      this.timeout(120_000)
      await renewVideo('video_short_no_audio.mp4')

      await createTasks([
        {
          name: 'add-outro',
          options: {
            file: 'video_short_no_audio.mp4'
          }
        }
      ])

      for (const server of servers) {
        await checkDuration(server, 10)
      }
    })
  })

  describe('Watermark', function () {

    it('Should add a watermark to the video', async function () {
      this.timeout(120_000)
      await renewVideo()

      const video = await servers[0].videos.get({ id: videoUUID })
      const oldFileUrls = getAllFiles(video).map(f => f.fileUrl)

      await createTasks([
        {
          name: 'add-watermark',
          options: {
            file: 'thumbnail.png'
          }
        }
      ])

      for (const server of servers) {
        const video = await server.videos.get({ id: videoUUID })
        const fileUrls = getAllFiles(video).map(f => f.fileUrl)

        for (const oldUrl of oldFileUrls) {
          expect(fileUrls).to.not.include(oldUrl)
        }
      }
    })
  })

  describe('Complex tasks', function () {
    it('Should run a complex task', async function () {
      this.timeout(240_000)
      await renewVideo()

      await createTasks(VideoEditorCommand.getComplexTask())

      for (const server of servers) {
        await checkDuration(server, 9)
      }
    })
  })

  describe('HLS only video edition', function () {

    before(async function () {
      // Disable webtorrent
      await servers[0].config.updateExistingSubConfig({
        newConfig: {
          transcoding: {
            webtorrent: {
              enabled: false
            }
          }
        }
      })
    })

    it('Should run a complex task on HLS only video', async function () {
      this.timeout(240_000)
      await renewVideo()

      await createTasks(VideoEditorCommand.getComplexTask())

      for (const server of servers) {
        const video = await server.videos.get({ id: videoUUID })
        expect(video.files).to.have.lengthOf(0)

        await checkDuration(server, 9)
      }
    })
  })

  describe('Object storage video edition', function () {
    if (areObjectStorageTestsDisabled()) return

    before(async function () {
      await ObjectStorageCommand.prepareDefaultBuckets()

      await servers[0].kill()
      await servers[0].run(ObjectStorageCommand.getDefaultConfig())

      await servers[0].config.enableMinimumTranscoding()
    })

    it('Should run a complex task on a video in object storage', async function () {
      this.timeout(240_000)
      await renewVideo()

      const video = await servers[0].videos.get({ id: videoUUID })
      const oldFileUrls = getAllFiles(video).map(f => f.fileUrl)

      await createTasks(VideoEditorCommand.getComplexTask())

      for (const server of servers) {
        const video = await server.videos.get({ id: videoUUID })
        const files = getAllFiles(video)

        for (const f of files) {
          expect(oldFileUrls).to.not.include(f.fileUrl)
        }

        for (const webtorrentFile of video.files) {
          expectStartWith(webtorrentFile.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl())
        }

        for (const hlsFile of video.streamingPlaylists[0].files) {
          expectStartWith(hlsFile.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl())
        }

        await checkDuration(server, 9)
      }
    })
  })

  after(async function () {
    await cleanupTests(servers)
  })
})
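VideoEditorCommand.getComplexTask() is used above as a pre-built task list. For readers adding cases, here is an illustrative sketch of composing a custom list from the task names this suite exercises (cut, add-intro, add-outro, add-watermark); the helper name and option values are ours, only the task shapes and the createEditionTasks() call come from the tests above:

import { VideoEditorTask } from '@shared/models'
import { PeerTubeServer, waitJobs } from '@shared/server-commands'

// Illustrative only: builds a custom edition and submits it the same way createTasks() does above.
async function runCustomEdition (servers: PeerTubeServer[], videoUUID: string) {
  const tasks: VideoEditorTask[] = [
    { name: 'cut', options: { start: 1, end: 5 } },
    { name: 'add-intro', options: { file: 'video_very_short_240p.mp4' } },
    { name: 'add-outro', options: { file: 'video_short2.webm' } },
    { name: 'add-watermark', options: { file: 'thumbnail.png' } }
  ]

  await servers[0].videoEditor.createEditionTasks({ videoId: videoUUID, tasks })
  await waitJobs(servers)
}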