1
0
Fork 0
mirror of https://github.com/Chocobozzz/PeerTube.git synced 2025-10-05 02:39:33 +02:00

Migrate server to ESM

Sorry for the very big commit that may lead to git log issues and merge
conflicts, but it's a major step forward:

 * Server can be faster at startup because imports() are async and we can
   easily lazy import big modules
 * Angular doesn't seem to support ES import (with .js extension), so we
   had to correctly organize peertube into a monorepo:
    * Use yarn workspace feature
    * Use typescript reference projects for dependencies
    * Shared projects have been moved into "packages", each one is now a
      node module (with a dedicated package.json/tsconfig.json)
    * server/tools have been moved into apps/ and is now a dedicated app
      bundled and published on NPM so users don't have to build peertube
      cli tools manually
    * server/tests have been moved into packages/ so we don't compile
      them every time we want to run the server
 * Use the isolatedModules option:
   * Had to move from const enum to const
     (https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
   * Had to explicitly specify "type" imports when used in decorators
 * Prefer tsx (that uses esbuild under the hood) instead of ts-node to
   load typescript files (tests with mocha or scripts):
     * To reduce test complexity as esbuild doesn't support decorator
       metadata, we only test server files that do not import server
       models
     * We still build tests files into js files for a faster CI
 * Remove unmaintained peertube CLI import script
 * Remove some barrels to speed up execution (less imports)
This commit is contained in:
Chocobozzz 2023-07-31 14:34:36 +02:00
parent 04d1da5621
commit 3a4992633e
No known key found for this signature in database
GPG key ID: 583A612D890159BE
2196 changed files with 12690 additions and 11574 deletions

View file

@ -0,0 +1,110 @@
import Bluebird from 'bluebird'
import { move } from 'fs-extra/esm'
import { readFile, writeFile } from 'fs/promises'
import { join } from 'path'
import { initDatabaseModels } from '@server/initializers/database.js'
import { federateVideoIfNeeded } from '@server/lib/activitypub/videos/index.js'
import { JobQueue } from '@server/lib/job-queue/index.js'
import {
generateHLSMasterPlaylistFilename,
generateHlsSha256SegmentsFilename,
getHlsResolutionPlaylistFilename
} from '@server/lib/paths.js'
import { VideoPathManager } from '@server/lib/video-path-manager.js'
import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist.js'
import { VideoModel } from '@server/models/video/video.js'
// Script entry point: exit 0 on success, log the error and exit -1 on failure
run().then(
  () => process.exit(0),
  err => {
    console.error(err)
    process.exit(-1)
  }
)
/**
 * Migration entry point: renames legacy-format HLS files for every local video.
 *
 * Videos are processed with a concurrency of 5; a failure on one video is
 * logged and does not abort the migration of the remaining ones.
 */
async function run () {
  console.log('Migrate old HLS paths to new format.')

  await initDatabaseModels(true)

  // Needed because processVideo() ends up federating videos through the job queue
  JobQueue.Instance.init()

  const ids = await VideoModel.listLocalIds()

  await Bluebird.map(ids, async id => {
    try {
      await processVideo(id)
    } catch (err) {
      // Bug fix: the %s placeholder was previously left unfilled (the video id
      // was missing from the arguments), so the log never said which video failed
      console.error('Cannot process video %s.', id, err)
    }
  }, { concurrency: 5 })

  console.log('Migration finished!')
}
/**
 * Renames the HLS files of one video from the legacy naming scheme
 * (master.m3u8, <resolution>.m3u8, segments-sha256.json) to the new
 * unique-filename scheme, rewrites the master playlist content accordingly,
 * persists the playlist model and re-federates the video.
 *
 * Lives, videos without HLS, already-migrated playlists and playlists without
 * files are skipped.
 */
async function processVideo (videoId: number) {
  const video = await VideoModel.loadWithFiles(videoId)

  const hls = video.getHLSPlaylist()
  if (video.isLive || !hls || hls.playlistFilename !== 'master.m3u8' || hls.VideoFiles.length === 0) {
    return
  }

  console.log(`Renaming HLS playlist files of video ${video.name}.`)

  const playlist = await VideoStreamingPlaylistModel.loadHLSPlaylistByVideo(video.id)
  const hlsDirPath = VideoPathManager.Instance.getFSHLSOutputPath(video)

  const masterPlaylistPath = join(hlsDirPath, playlist.playlistFilename)
  let masterPlaylistContent = await readFile(masterPlaylistPath, 'utf8')

  for (const videoFile of hls.VideoFiles) {
    const srcName = `${videoFile.resolution}.m3u8`
    const dstName = getHlsResolutionPlaylistFilename(videoFile.filename)

    const src = join(hlsDirPath, srcName)
    const dst = join(hlsDirPath, dstName)

    try {
      await move(src, dst)

      // Bug fix: escape regex metacharacters ("." in particular) so that a
      // filename like "720.m3u8" cannot accidentally match "720xm3u8" inside
      // the master playlist content
      const srcPattern = srcName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
      masterPlaylistContent = masterPlaylistContent.replace(new RegExp('^' + srcPattern + '$', 'm'), dstName)
    } catch (err) {
      console.error('Cannot move video file %s to %s.', src, dst, err)
    }
  }

  await writeFile(masterPlaylistPath, masterPlaylistContent)

  if (playlist.segmentsSha256Filename === 'segments-sha256.json') {
    try {
      const newName = generateHlsSha256SegmentsFilename(video.isLive)

      const dst = join(hlsDirPath, newName)
      await move(join(hlsDirPath, playlist.segmentsSha256Filename), dst)
      playlist.segmentsSha256Filename = newName
    } catch (err) {
      console.error(`Cannot rename ${video.name} segments-sha256.json file to a new name`, err)
    }
  }

  if (playlist.playlistFilename === 'master.m3u8') {
    try {
      const newName = generateHLSMasterPlaylistFilename(video.isLive)

      const dst = join(hlsDirPath, newName)
      await move(join(hlsDirPath, playlist.playlistFilename), dst)
      playlist.playlistFilename = newName
    } catch (err) {
      console.error(`Cannot rename ${video.name} master.m3u8 file to a new name`, err)
    }
  }

  // Everything worked, we can save the playlist now
  await playlist.save()

  const allVideo = await VideoModel.loadFull(video.id)

  // Propagate the renamed file URLs to followers
  await federateVideoIfNeeded(allVideo, false)

  console.log(`Successfully moved HLS files of ${video.name}.`)
}

View file

@ -0,0 +1,123 @@
import { ActorImageType } from '@peertube/peertube-models'
import { buildUUID, getLowercaseExtension } from '@peertube/peertube-node-utils'
import { getImageSize, processImage } from '@server/helpers/image-utils.js'
import { CONFIG } from '@server/initializers/config.js'
import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants.js'
import { initDatabaseModels } from '@server/initializers/database.js'
import { updateActorImages } from '@server/lib/activitypub/actors/index.js'
import { sendUpdateActor } from '@server/lib/activitypub/send/index.js'
import { getBiggestActorImage } from '@server/lib/actor-image.js'
import { JobQueue } from '@server/lib/job-queue/index.js'
import { AccountModel } from '@server/models/account/account.js'
import { ActorModel } from '@server/models/actor/actor.js'
import { VideoChannelModel } from '@server/models/video/video-channel.js'
import { MAccountDefault, MActorDefault, MChannelDefault } from '@server/types/models/index.js'
import minBy from 'lodash-es/minBy.js'
import { join } from 'path'
// Script entry point: exit 0 on success, log the error and exit -1 on failure
run().then(
  () => process.exit(0),
  err => {
    console.error(err)
    process.exit(-1)
  }
)
/**
 * Migration entry point: walks every local account and its channels,
 * back-fills missing avatar dimensions and generates the smaller avatar
 * variant where needed. A failure on one account/channel is logged and does
 * not stop the migration of the others.
 */
async function run () {
  console.log('Generate avatar miniatures from existing avatars.')

  await initDatabaseModels(true)
  JobQueue.Instance.init()

  // Restrict to local actors (serverId: null) and eagerly load their channels
  const queryOptions = {
    include: [
      {
        model: ActorModel,
        required: true,
        where: { serverId: null }
      },
      {
        model: VideoChannelModel,
        include: [ { model: AccountModel } ]
      }
    ]
  }

  const accounts: AccountModel[] = await AccountModel.findAll(queryOptions)

  for (const account of accounts) {
    try {
      await fillAvatarSizeIfNeeded(account)
      await generateSmallerAvatarIfNeeded(account)
    } catch (err) {
      console.error(`Cannot process account avatar ${account.name}`, err)
    }

    for (const channel of account.VideoChannels) {
      try {
        await fillAvatarSizeIfNeeded(channel)
        await generateSmallerAvatarIfNeeded(channel)
      } catch (err) {
        console.error(`Cannot process channel avatar ${channel.name}`, err)
      }
    }
  }

  console.log('Generation finished!')
}
/**
 * Persists the width/height of any avatar row that is missing its dimensions,
 * measuring the actual image file on disk.
 */
async function fillAvatarSizeIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) {
  for (const avatar of accountOrChannel.Actor.Avatars) {
    // Dimensions already known: nothing to do for this avatar
    if (avatar.width && avatar.height) continue

    console.log('Filling size of avatars of %s.', accountOrChannel.name)

    const imagePath = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, avatar.filename)
    const size = await getImageSize(imagePath)

    avatar.width = size.width
    avatar.height = size.height

    await avatar.save()
  }
}
/**
 * When the actor only has its original avatar (exactly one image), generates
 * the smaller variant and federates the updated actor.
 */
async function generateSmallerAvatarIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) {
  if (accountOrChannel.Actor.Avatars.length !== 1) return

  console.log(`Processing ${accountOrChannel.name}.`)

  await generateSmallerAvatar(accountOrChannel.Actor)

  // The Server relation is not needed for federation; blank it out
  accountOrChannel.Actor = Object.assign(accountOrChannel.Actor, { Server: null })

  return sendUpdateActor(accountOrChannel, undefined)
}
/**
 * Creates the smallest configured avatar variant from the actor's biggest
 * existing avatar and registers it as an additional actor image on disk.
 */
async function generateSmallerAvatar (actor: MActorDefault) {
  // Smallest configured avatar size (by width)
  const imageSize = minBy(ACTOR_IMAGES_SIZE[ActorImageType.AVATAR], 'width')

  const sourceFilename = getBiggestActorImage(actor.Avatars).filename

  // Keep the original extension but give the miniature a fresh unique name
  const newImageName = buildUUID() + getLowercaseExtension(sourceFilename)

  const source = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, sourceFilename)
  const destination = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, newImageName)

  await processImage({ path: source, destination, newSize: imageSize, keepOriginal: true })

  await updateActorImages(
    actor,
    ActorImageType.AVATAR,
    [
      {
        name: newImageName,
        fileUrl: null,
        height: imageSize.height,
        width: imageSize.width,
        onDisk: true
      }
    ],
    undefined
  )
}

View file

@ -0,0 +1,71 @@
import { ensureDir } from 'fs-extra/esm'
import { Op } from 'sequelize'
import { updateTorrentMetadata } from '@server/helpers/webtorrent.js'
import { DIRECTORIES } from '@server/initializers/constants.js'
import { moveFilesIfPrivacyChanged } from '@server/lib/video-privacy.js'
import { VideoModel } from '@server/models/video/video.js'
import { MVideoFullLight } from '@server/types/models/index.js'
import { VideoPrivacy } from '@peertube/peertube-models'
import { initDatabaseModels } from '@server/initializers/database.js'
// Script entry point: exit 0 on success, log the error and exit -1 on failure
run().then(
  () => process.exit(0),
  err => {
    console.error(err)
    process.exit(-1)
  }
)
/**
 * Migration entry point: ensures the private storage directories exist, then
 * moves the files of every private/internal local video into them and
 * regenerates their torrents. Failures are logged per video so one broken
 * video does not abort the whole migration.
 */
async function run () {
  console.log('Moving private video files in dedicated folders.')

  await ensureDir(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE)
  await ensureDir(DIRECTORIES.VIDEOS.PRIVATE)

  await initDatabaseModels(true)

  // Only private & internal videos live in the dedicated private folders
  const videos = await VideoModel.unscoped().findAll({
    attributes: [ 'uuid' ],
    where: {
      privacy: { [Op.in]: [ VideoPrivacy.PRIVATE, VideoPrivacy.INTERNAL ] }
    }
  })

  // Per-video work; file-move and torrent failures are logged independently
  const processOne = async (uuid: string) => {
    console.log('Moving files of video %s.', uuid)

    const video = await VideoModel.loadFull(uuid)

    try {
      await moveFilesIfPrivacyChanged(video, VideoPrivacy.PUBLIC)
    } catch (err) {
      console.error('Cannot move files of video %s.', uuid, err)
    }

    try {
      await updateTorrents(video)
    } catch (err) {
      console.error('Cannot regenerate torrents of video %s.', uuid, err)
    }
  }

  for (const { uuid } of videos) {
    try {
      await processOne(uuid)
    } catch (err) {
      console.error('Cannot process video %s.', uuid, err)
    }
  }
}
/**
 * Regenerates the torrent metadata of every web video file and every HLS file
 * of the given video, persisting each file row after its update.
 */
async function updateTorrents (video: MVideoFullLight) {
  for (const webFile of video.VideoFiles) {
    await updateTorrentMetadata(video, webFile)
    await webFile.save()
  }

  const hlsPlaylist = video.getHLSPlaylist()
  const hlsFiles = hlsPlaylist?.VideoFiles ?? []

  for (const hlsFile of hlsFiles) {
    await updateTorrentMetadata(hlsPlaylist, hlsFile)
    await hlsFile.save()
  }
}