From 4145ba0d08c6b0611e5d749dd09be735344f1868 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 30 Jul 2021 13:28:45 +0200 Subject: [PATCH 01/23] Add support for saving video files to object storage --- config/default.yaml | 8 + package.json | 1 + server/controllers/api/videos/upload.ts | 7 +- server/initializers/config.ts | 12 + server/initializers/constants.ts | 11 +- .../migrations/0660-object-storage.ts | 41 + .../handlers/move-to-object-storage.ts | 125 +++ .../job-queue/handlers/video-transcoding.ts | 23 +- server/lib/job-queue/job-queue.ts | 18 +- server/lib/object-storage.ts | 65 ++ server/lib/video.ts | 10 + .../models/video/sql/shared/video-tables.ts | 9 +- server/models/video/video-file.ts | 13 +- .../models/video/video-streaming-playlist.ts | 28 +- server/models/video/video.ts | 16 +- server/types/models/video/video.ts | 5 + shared/models/server/job.model.ts | 5 + yarn.lock | 775 +++++++++++++++++- 18 files changed, 1145 insertions(+), 27 deletions(-) create mode 100644 server/initializers/migrations/0660-object-storage.ts create mode 100644 server/lib/job-queue/handlers/move-to-object-storage.ts create mode 100644 server/lib/object-storage.ts diff --git a/config/default.yaml b/config/default.yaml index 3d0ae6e8749..7f76db714cd 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -95,6 +95,14 @@ storage: # If not, peertube will fallback to the default fil client_overrides: 'storage/client-overrides/' +s3: + enabled: false + endpoint: 's3.amazonaws.com' # Will always use https + videos_bucket: 'videos' + videos_prefix: '' # Allows setting all buckets to the same value but with a different prefix + streaming_playlists_bucket: 'streaming-playlists' + streaming_playlists_prefix: '' + log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' rotation: diff --git a/package.json b/package.json index 9ce1e1b0ea9..f1b7e00c8c3 100644 --- a/package.json +++ b/package.json @@ -73,6 +73,7 @@ "swagger-cli": "swagger-cli" }, "dependencies": { + "@aws-sdk/client-s3": "^3.23.0", "@uploadx/core": "^4.4.0", "async": "^3.0.1", "async-lru": "^1.1.1", diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index 89f50714d95..ad744615003 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -5,7 +5,12 @@ import { deleteResumableUploadMetaFile, getResumableUploadPath } from '@server/h import { uuidToShort } from '@server/helpers/uuid' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url' -import { addOptimizeOrMergeAudioJob, buildLocalVideoFromReq, buildVideoThumbnailsFromReq, setVideoTags } from '@server/lib/video' +import { + addOptimizeOrMergeAudioJob, + buildLocalVideoFromReq, + buildVideoThumbnailsFromReq, + setVideoTags +} from '@server/lib/video' import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths' import { openapiOperationDoc } from '@server/middlewares/doc' import { MVideo, MVideoFile, MVideoFullLight } from '@server/types/models' diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 30a9823b954..ea43ea141f3 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -73,6 +73,18 @@ const CONFIG = { PLUGINS_DIR: buildPath(config.get('storage.plugins')), CLIENT_OVERRIDES_DIR: buildPath(config.get('storage.client_overrides')) }, + S3: { + ENABLED: config.get('s3.enabled'), + ENDPOINT: config.get('s3.endpoint'), + 
VIDEOS_BUCKETINFO: { + bucket: config.get('s3.videos_bucket'), + prefix: config.get('s3.videos_prefix') + }, + STREAMING_PLAYLISTS_BUCKETINFO: { + bucket: config.get('s3.streaming_playlists_bucket'), + prefix: config.get('s3.streaming_playlists_prefix') + } + }, WEBSERVER: { SCHEME: config.get('webserver.https') === true ? 'https' : 'http', WS: config.get('webserver.https') === true ? 'wss' : 'ws', diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts index 5f121d9a4db..b926a24aa36 100644 --- a/server/initializers/constants.ts +++ b/server/initializers/constants.ts @@ -24,7 +24,7 @@ import { CONFIG, registerConfigChangedHandler } from './config' // --------------------------------------------------------------------------- -const LAST_MIGRATION_VERSION = 655 +const LAST_MIGRATION_VERSION = 660 // --------------------------------------------------------------------------- @@ -147,7 +147,8 @@ const JOB_ATTEMPTS: { [id in JobType]: number } = { 'videos-views': 1, 'activitypub-refresher': 1, 'video-redundancy': 1, - 'video-live-ending': 1 + 'video-live-ending': 1, + 'move-to-object-storage': 3 } // Excluded keys are jobs that can be configured by admins const JOB_CONCURRENCY: { [id in Exclude]: number } = { @@ -162,7 +163,8 @@ const JOB_CONCURRENCY: { [id in Exclude { + { + await utils.queryInterface.addColumn('video', 'transcodeJobsRunning', { type: Sequelize.INTEGER, allowNull: false, defaultValue: 0 }) + } + + { + await utils.queryInterface.addColumn('videoFile', 'storage', { type: Sequelize.INTEGER, allowNull: false }) + } + + { + await utils.sequelize.query( + `UPDATE "videoFile" SET "storage" = 'local'` + ) + } + + { + await utils.queryInterface.addColumn('videoStreamingPlaylist', 'storage', { type: Sequelize.INTEGER, allowNull: false }) + } + + { + await utils.sequelize.query( + `UPDATE "videoStreamingPlaylist" SET "storage" = 'local'` + ) + } +} + +function down (options) { + throw new Error('Not implemented.') +} + +export { + up, + down +} diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts new file mode 100644 index 00000000000..274ca7aa02c --- /dev/null +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -0,0 +1,125 @@ +import * as Bull from 'bull' +import { logger } from '@server/helpers/logger' +import { + MoveObjectStoragePayload, VideoState +} from '../../../../shared' +import { VideoModel } from '@server/models/video/video' +import { storeObject } from '@server/lib/object-storage' +import { CONFIG } from '@server/initializers/config' +import { join } from 'path' +import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' +import { getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' +import { MVideoWithAllFiles, VideoStorageType } from '@server/types/models' + +export async function processMoveToObjectStorage (job: Bull.Job) { + const payload = job.data as MoveObjectStoragePayload + logger.info('Moving video %s in job %d.', payload.videoUUID, job.id) + + const video = await VideoModel.loadWithFiles(payload.videoUUID) + // No video, maybe deleted? 
+ if (!video) { + logger.info('Can\'t process job %d, video does not exist.', job.id) + return undefined + } + + if (video.state === VideoState.TO_TRANSCODE) { + logger.info('Video needs to be transcoded still, exiting move job %d', job.id) + return undefined + } + + if (video.transcodeJobsRunning > 0) { + logger.info('A transcode job for this video is running, exiting move job %d', job.id) + return undefined + } + + if (video.VideoFiles) { + await moveWebTorrentFiles(video) + } + + if (video.VideoStreamingPlaylists) { + await moveHLSFiles(video) + } + + return payload.videoUUID +} + +async function moveWebTorrentFiles (video: MVideoWithAllFiles) { + for (const file of video.VideoFiles) { + if (file.storage !== VideoStorageType.LOCAL) continue + + const filename = file.filename + await storeObject( + { filename, path: join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) }, + CONFIG.S3.VIDEOS_BUCKETINFO + ) + + file.storage = VideoStorageType.OBJECT_STORAGE + file.fileUrl = `https://${CONFIG.S3.VIDEOS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.VIDEOS_BUCKETINFO.prefix}${filename}` + await file.save() + } +} + +async function moveHLSFiles (video: MVideoWithAllFiles) { + for (const playlist of video.VideoStreamingPlaylists) { + const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) + + // Master playlist + const masterPlaylistFilename = join(playlist.getStringType(), video.uuid, playlist.playlistFilename) + await storeObject( + { + filename: masterPlaylistFilename, + path: join(baseHlsDirectory, playlist.playlistFilename) + }, + CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + ) + + // Sha256 segments file + const segmentsFileName = join(playlist.getStringType(), video.uuid, playlist.segmentsSha256Filename) + await storeObject( + { + filename: segmentsFileName, + path: join(baseHlsDirectory, playlist.segmentsSha256Filename) + }, + CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + ) + + // eslint-disable-next-line max-len + playlist.playlistUrl = `https://${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix}${masterPlaylistFilename}` + // eslint-disable-next-line max-len + playlist.segmentsSha256Url = `https://${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix}${segmentsFileName}` + + for (const videoFile of playlist.VideoFiles) { + const file = await videoFile.reload() + if (file.storage !== VideoStorageType.LOCAL) continue + + // Resolution playlist + const playlistFileName = getHlsResolutionPlaylistFilename(file.filename) + await storeObject( + { + filename: join(playlist.getStringType(), video.uuid, playlistFileName), + path: join(baseHlsDirectory, playlistFileName) + }, + CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + ) + + // Resolution fragmented file + const filename = join(playlist.getStringType(), video.uuid, file.filename) + await storeObject( + { + filename, + path: join(baseHlsDirectory, file.filename) + }, + CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + ) + + // Signals that the video file + playlist file were uploaded + file.storage = VideoStorageType.OBJECT_STORAGE + // eslint-disable-next-line max-len + file.fileUrl = `https://${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix}${filename}` + await file.save() + } + + playlist.storage = VideoStorageType.OBJECT_STORAGE + await playlist.save() + } +} diff --git a/server/lib/job-queue/handlers/video-transcoding.ts 
b/server/lib/job-queue/handlers/video-transcoding.ts index 876d1460cbd..1ba2a5fa4fd 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -1,6 +1,6 @@ import * as Bull from 'bull' import { TranscodeOptionsType } from '@server/helpers/ffmpeg-utils' -import { getTranscodingJobPriority, publishAndFederateIfNeeded } from '@server/lib/video' +import { addMoveToObjectStorageJob, getTranscodingJobPriority, publishAndFederateIfNeeded } from '@server/lib/video' import { getVideoFilePath } from '@server/lib/video-paths' import { UserModel } from '@server/models/user/user' import { MUser, MUserId, MVideoFullLight, MVideoUUID, MVideoWithFile } from '@server/types/models' @@ -46,17 +46,24 @@ async function processVideoTranscoding (job: Bull.Job) { return undefined } - const user = await UserModel.loadByChannelActorId(video.VideoChannel.actorId) + try { - const handler = handlers[payload.type] + const user = await UserModel.loadByChannelActorId(video.VideoChannel.actorId) - if (!handler) { - throw new Error('Cannot find transcoding handler for ' + payload.type) - } + const handler = handlers[payload.type] + + if (!handler) { + throw new Error('Cannot find transcoding handler for ' + payload.type) + } - await handler(job, payload, video, user) + await handler(job, payload, video, user) - return video + return video + } finally { + await video.decrement('transcodeJobsRunning') + // Create job to move the new files to object storage if enabled + await addMoveToObjectStorageJob(video) + } } // --------------------------------------------------------------------------- diff --git a/server/lib/job-queue/job-queue.ts b/server/lib/job-queue/job-queue.ts index 42e8347b1f6..5f2f1e54fd9 100644 --- a/server/lib/job-queue/job-queue.ts +++ b/server/lib/job-queue/job-queue.ts @@ -11,6 +11,7 @@ import { EmailPayload, JobState, JobType, + MoveObjectStoragePayload, RefreshPayload, VideoFileImportPayload, VideoImportPayload, @@ -34,6 +35,8 @@ import { processVideoImport } from './handlers/video-import' import { processVideoLiveEnding } from './handlers/video-live-ending' import { processVideoTranscoding } from './handlers/video-transcoding' import { processVideosViews } from './handlers/video-views' +import { processMoveToObjectStorage } from './handlers/move-to-object-storage' +import { VideoModel } from '@server/models/video/video' type CreateJobArgument = { type: 'activitypub-http-broadcast', payload: ActivitypubHttpBroadcastPayload } | @@ -49,7 +52,8 @@ type CreateJobArgument = { type: 'videos-views', payload: {} } | { type: 'video-live-ending', payload: VideoLiveEndingPayload } | { type: 'actor-keys', payload: ActorKeysPayload } | - { type: 'video-redundancy', payload: VideoRedundancyPayload } + { type: 'video-redundancy', payload: VideoRedundancyPayload } | + { type: 'move-to-object-storage', payload: MoveObjectStoragePayload } type CreateJobOptions = { delay?: number @@ -70,7 +74,8 @@ const handlers: { [id in JobType]: (job: Bull.Job) => Promise } = { 'activitypub-refresher': refreshAPObject, 'video-live-ending': processVideoLiveEnding, 'actor-keys': processActorKeys, - 'video-redundancy': processVideoRedundancy + 'video-redundancy': processVideoRedundancy, + 'move-to-object-storage': processMoveToObjectStorage } const jobTypes: JobType[] = [ @@ -87,7 +92,8 @@ const jobTypes: JobType[] = [ 'activitypub-refresher', 'video-redundancy', 'actor-keys', - 'video-live-ending' + 'video-live-ending', + 'move-to-object-storage' ] class JobQueue { @@ -154,6 
+160,12 @@ class JobQueue { logger.error('Unknown queue %s: cannot create job.', obj.type) return } + if (obj.type === 'video-transcoding') { + // This value is decreased when the transcoding job is finished in ./handlers/video-transcoding.ts + // It's used by the move-to-object-storage job to detect when the last transcoding job is finished + VideoModel.increment('transcodeJobsRunning', { where: { uuid: obj.payload.videoUUID } }) + .catch(err => logger.error('Cannot increase transcodeJobsRunning.', { err })) + } const jobArgs: Bull.JobOptions = { backoff: { delay: 60 * 1000, type: 'exponential' }, diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts new file mode 100644 index 00000000000..58c7accbe41 --- /dev/null +++ b/server/lib/object-storage.ts @@ -0,0 +1,65 @@ +import * as fs from 'fs' +import { DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command, PutObjectCommand, S3Client } from "@aws-sdk/client-s3" +import { CONFIG } from "@server/initializers/config" +import { logger } from '@server/helpers/logger' + +type BucketInfo = {bucket: string, prefix?: string} + +function getS3Client () { + return new S3Client({ endpoint: `https://${CONFIG.S3.ENDPOINT}` }) +} + +async function s3Put (options: {filename: string, content: string | fs.ReadStream, bucketInfo: BucketInfo}) { + const { filename, content, bucketInfo } = options + const key = bucketInfo.prefix + filename + const s3Client = getS3Client() + const command = new PutObjectCommand({ + Bucket: bucketInfo.bucket, + Key: key, + Body: content + }) + return await s3Client.send(command) +} + +export async function storeObject (file: {path: string, filename: string}, bucketInfo: BucketInfo) { + logger.debug('Uploading file to %s/%s%s', bucketInfo.bucket, bucketInfo.prefix, file.filename) + const fileStream = fs.createReadStream(file.path) + return await s3Put({ filename: file.filename, content: fileStream, bucketInfo }) +} + +export async function writeObjectContents (file: {filename: string, content: string}, bucketInfo: BucketInfo) { + logger.debug('Writing object to %s/%s%s', bucketInfo.bucket, bucketInfo.prefix, file.filename) + return await s3Put({ filename: file.filename, content: file.content, bucketInfo }) +} + +export async function removeObject (filename: string, bucketInfo: BucketInfo) { + const key = bucketInfo.prefix + filename + const s3Client = getS3Client() + const command = new DeleteObjectCommand({ + Bucket: bucketInfo.bucket, + Key: key + }) + return await s3Client.send(command) +} + +export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { + const s3Client = getS3Client() + const listCommand = new ListObjectsV2Command({ + Bucket: bucketInfo.bucket, + Prefix: bucketInfo.prefix + prefix + }) + + const listedObjects = await s3Client.send(listCommand) + const deleteParams = { + Bucket: bucketInfo.bucket, + Delete: { Objects: [] } + } + for (const object of listedObjects.Contents) { + deleteParams.Delete.Objects.push({ Key: object.Key }) + } + const deleteCommand = new DeleteObjectsCommand(deleteParams) + await s3Client.send(deleteCommand) + + // Repeat if not all objects could be listed at once (limit of 1000?) 
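+ // ListObjectsV2 returns at most 1000 keys per call, so list and delete again until IsTruncated is false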
+ if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo) +} diff --git a/server/lib/video.ts b/server/lib/video.ts index 61fee494929..0e36f6bbbbd 100644 --- a/server/lib/video.ts +++ b/server/lib/video.ts @@ -11,6 +11,7 @@ import { federateVideoIfNeeded } from './activitypub/videos' import { JobQueue } from './job-queue/job-queue' import { Notifier } from './notifier' import { updateVideoMiniatureFromExisting } from './thumbnail' +import { CONFIG } from '@server/initializers/config' function buildLocalVideoFromReq (videoInfo: VideoCreate, channelId: number): FilteredModelAttributes { return { @@ -130,6 +131,15 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: dataInput }, jobOptions) } +export function addMoveToObjectStorageJob (video: MVideoUUID) { + if (CONFIG.S3.ENABLED) { + const dataInput = { + videoUUID: video.uuid + } + return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload: dataInput }) + } +} + async function getTranscodingJobPriority (user: MUserId) { const now = new Date() const lastWeek = new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7) diff --git a/server/models/video/sql/shared/video-tables.ts b/server/models/video/sql/shared/video-tables.ts index 742d19099e8..94069c1f2c1 100644 --- a/server/models/video/sql/shared/video-tables.ts +++ b/server/models/video/sql/shared/video-tables.ts @@ -87,7 +87,8 @@ export class VideoTables { 'fps', 'metadataUrl', 'videoStreamingPlaylistId', - 'videoId' + 'videoId', + 'storage' ] } @@ -102,7 +103,8 @@ export class VideoTables { 'segmentsSha256Url', 'videoId', 'createdAt', - 'updatedAt' + 'updatedAt', + 'storage' ]) } @@ -258,7 +260,8 @@ export class VideoTables { 'originallyPublishedAt', 'channelId', 'createdAt', - 'updatedAt' + 'updatedAt', + 'transcodeJobsRunning' ] } } diff --git a/server/models/video/video-file.ts b/server/models/video/video-file.ts index 09fc5288bff..babf393f8d8 100644 --- a/server/models/video/video-file.ts +++ b/server/models/video/video-file.ts @@ -24,7 +24,7 @@ import { buildRemoteVideoBaseUrl } from '@server/helpers/activitypub' import { logger } from '@server/helpers/logger' import { extractVideo } from '@server/helpers/video' import { getTorrentFilePath } from '@server/lib/video-paths' -import { MStreamingPlaylistVideo, MVideo, MVideoWithHost } from '@server/types/models' +import { MStreamingPlaylistVideo, MVideo, MVideoWithHost, VideoStorageType } from '@server/types/models' import { AttributesOnly } from '@shared/core-utils' import { isVideoFileExtnameValid, @@ -214,6 +214,11 @@ export class VideoFileModel extends Model @Column videoId: number + @AllowNull(false) + @Default(VideoStorageType.LOCAL) + @Column + storage: VideoStorageType + @BelongsTo(() => VideoModel, { foreignKey: { allowNull: true @@ -451,6 +456,9 @@ export class VideoFileModel extends Model } getFileUrl (video: MVideo) { + if (this.storage === VideoStorageType.OBJECT_STORAGE) { + return this.fileUrl + } if (!this.Video) this.Video = video as VideoModel if (video.isOwned()) return WEBSERVER.URL + this.getFileStaticPath(video) @@ -465,6 +473,9 @@ export class VideoFileModel extends Model } getFileDownloadUrl (video: MVideoWithHost) { + if (this.storage === VideoStorageType.OBJECT_STORAGE) { + return this.fileUrl + } const path = this.isHLS() ? 
join(STATIC_DOWNLOAD_PATHS.HLS_VIDEOS, `${video.uuid}-${this.resolution}-fragmented${this.extname}`) : join(STATIC_DOWNLOAD_PATHS.VIDEOS, `${video.uuid}-${this.resolution}${this.extname}`) diff --git a/server/models/video/video-streaming-playlist.ts b/server/models/video/video-streaming-playlist.ts index d591a3134f0..163b4573fd0 100644 --- a/server/models/video/video-streaming-playlist.ts +++ b/server/models/video/video-streaming-playlist.ts @@ -1,9 +1,22 @@ import * as memoizee from 'memoizee' import { join } from 'path' import { Op } from 'sequelize' -import { AllowNull, BelongsTo, Column, CreatedAt, DataType, ForeignKey, HasMany, Is, Model, Table, UpdatedAt } from 'sequelize-typescript' +import { + AllowNull, + BelongsTo, + Column, + CreatedAt, + DataType, + Default, + ForeignKey, + HasMany, + Is, + Model, + Table, + UpdatedAt +} from 'sequelize-typescript' import { VideoFileModel } from '@server/models/video/video-file' -import { MStreamingPlaylist, MVideo } from '@server/types/models' +import { MStreamingPlaylist, MVideo, VideoStorageType } from '@server/types/models' import { AttributesOnly } from '@shared/core-utils' import { VideoStreamingPlaylistType } from '../../../shared/models/videos/video-streaming-playlist.type' import { sha1 } from '../../helpers/core-utils' @@ -81,6 +94,11 @@ export class VideoStreamingPlaylistModel extends Model VideoModel, { foreignKey: { allowNull: false @@ -185,12 +203,18 @@ export class VideoStreamingPlaylistModel extends Model>> { @Column originallyPublishedAt: Date + @AllowNull(false) + @Default(0) + @IsInt + @Column + transcodeJobsRunning: number + @ForeignKey(() => VideoChannelModel) @Column channelId: number @@ -1677,6 +1685,9 @@ export class VideoModel extends Model>> { const promises: Promise[] = [ remove(filePath) ] if (!isRedundancy) promises.push(videoFile.removeTorrent()) + if (videoFile.storage === VideoStorageType.OBJECT_STORAGE) { + promises.push(removeObject(videoFile.filename, CONFIG.S3.VIDEOS_BUCKETINFO)) + } return Promise.all(promises) } @@ -1685,6 +1696,9 @@ export class VideoModel extends Model>> { const directoryPath = getHLSDirectory(this, isRedundancy) await remove(directoryPath) + if (streamingPlaylist.storage === VideoStorageType.OBJECT_STORAGE) { + await removePrefix(join(streamingPlaylist.getStringType(), this.uuid), CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) + } if (isRedundancy !== true) { const streamingPlaylistWithFiles = streamingPlaylist as MStreamingPlaylistFilesVideo diff --git a/server/types/models/video/video.ts b/server/types/models/video/video.ts index 16ddaf740e8..3ca3db45a70 100644 --- a/server/types/models/video/video.ts +++ b/server/types/models/video/video.ts @@ -219,3 +219,8 @@ export type MVideoFormattableDetails = Use<'VideoStreamingPlaylists', MStreamingPlaylistRedundanciesOpt[]> & Use<'VideoFiles', MVideoFileRedundanciesOpt[]> & PickWithOpt + +export enum VideoStorageType { + LOCAL, + OBJECT_STORAGE, +} diff --git a/shared/models/server/job.model.ts b/shared/models/server/job.model.ts index 4ab249e0b86..973cacef3b8 100644 --- a/shared/models/server/job.model.ts +++ b/shared/models/server/job.model.ts @@ -19,6 +19,7 @@ export type JobType = | 'video-redundancy' | 'video-live-ending' | 'actor-keys' + | 'move-to-object-storage' export interface Job { id: number @@ -136,3 +137,7 @@ export interface VideoLiveEndingPayload { export interface ActorKeysPayload { actorId: number } + +export interface MoveObjectStoragePayload { + videoUUID: string +} diff --git a/yarn.lock b/yarn.lock index 
68f17d4143a..07b73251e36 100644 --- a/yarn.lock +++ b/yarn.lock @@ -49,6 +49,768 @@ resolved "https://registry.yarnpkg.com/@assemblyscript/loader/-/loader-0.10.1.tgz#70e45678f06c72fa2e350e8553ec4a4d72b92e06" integrity sha512-H71nDOOL8Y7kWRLqf6Sums+01Q5msqBW2KhDUTemh1tvY04eSkSXrK0uj/4mmY0Xr16/3zyZmsrxN7CKuRbNRg== +"@aws-crypto/crc32@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-1.0.0.tgz#6a0164fd92bb365860ba6afb5dfef449701eb8ca" + integrity sha512-wr4EyCv3ZfLH3Sg7FErV6e/cLhpk9rUP/l5322y8PRgpQsItdieaLbtE4aDOR+dxl8U7BG9FIwWXH4TleTDZ9A== + dependencies: + tslib "^1.11.1" + +"@aws-crypto/ie11-detection@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-1.0.0.tgz#d3a6af29ba7f15458f79c41d1cd8cac3925e726a" + integrity sha512-kCKVhCF1oDxFYgQrxXmIrS5oaWulkvRcPz+QBDMsUr2crbF4VGgGT6+uQhSwJFdUAQ2A//Vq+uT83eJrkzFgXA== + dependencies: + tslib "^1.11.1" + +"@aws-crypto/sha256-browser@^1.0.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-1.1.1.tgz#85dddf13e8f8d74c0d6592d993e4bf401da9f420" + integrity sha512-nS4vdan97It6HcweV58WXtjPbPSc0JXd3sAwlw3Ou5Mc3WllSycAS32Tv2LRn8butNQoU9AE3jEQAOgiMdNC1Q== + dependencies: + "@aws-crypto/ie11-detection" "^1.0.0" + "@aws-crypto/sha256-js" "^1.1.0" + "@aws-crypto/supports-web-crypto" "^1.0.0" + "@aws-sdk/types" "^3.1.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@aws-sdk/util-utf8-browser" "^3.0.0" + tslib "^1.11.1" + +"@aws-crypto/sha256-js@^1.0.0", "@aws-crypto/sha256-js@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-1.1.0.tgz#a58386ad18186e392e0f1d98d18831261d27b071" + integrity sha512-VIhuqbPgXDVr8sZe2yhgQcDRRmzf4CI8fmC1A3bHiRfE6wlz1d8KpeemqbuoEHotz/Dch9yOxlshyQDNjNFeHA== + dependencies: + "@aws-sdk/types" "^3.1.0" + "@aws-sdk/util-utf8-browser" "^3.0.0" + tslib "^1.11.1" + +"@aws-crypto/supports-web-crypto@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-1.0.0.tgz#c40901bc17ac1e875e248df16a2b47ad8bfd9a93" + integrity sha512-IHLfv+WmVH89EW4n6a5eE8/hUlz6qkWGMn/v4r5ZgzcXdTC5nolii2z3k46y01hWRiC2PPhOdeSLzMUCUMco7g== + dependencies: + tslib "^1.11.1" + +"@aws-sdk/abort-controller@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/abort-controller/-/abort-controller-3.23.0.tgz#9a8d62f0a4bc789af759aa51d4dbad92a6a7b2d2" + integrity sha512-M69Sdoi6TH2UrnXKKNJNDaW6iCqpras7w274CZq4NjFOGwrb23KO2Aexgxr3g3hsUidfjuA38oFbHgC8odFrIQ== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/chunked-blob-reader-native@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/chunked-blob-reader-native/-/chunked-blob-reader-native-3.23.0.tgz#72d711e3cc904bb380e99cdd60c59deacd1596ac" + integrity sha512-Ya5f8Ntv0EyZw+AHkpV6n6qqHzpCDNlkX50uj/dwFCMmPiHFWsWMvd0Qu04Y7miycJINEatRrJ5V8r/uVvZIDg== + dependencies: + "@aws-sdk/util-base64-browser" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/chunked-blob-reader@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/chunked-blob-reader/-/chunked-blob-reader-3.23.0.tgz#83eb6a437172b671e699850378bcb558e15374ec" + integrity sha512-gmJhCuXrKOOumppviE4K30NvsIQIqqxbGDNptrJrMYBO0qXCbK8/BypZ/hS/oT3loDzlSIxG2z5GDL/va9lbFw== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/client-s3@^3.23.0": + version "3.23.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.23.0.tgz#e74d53c9d065b3aecb11d4d9ebf0a98178614788" + integrity sha512-F6nP81HQ4pfZUCmeIHOoiIMm1i9L1lvbSioFZDgN6qFPzSUnbNRMxcgX+Dwwt/WHcf+lx1bVaA6h3U1CTCS93Q== + dependencies: + "@aws-crypto/sha256-browser" "^1.0.0" + "@aws-crypto/sha256-js" "^1.0.0" + "@aws-sdk/client-sts" "3.23.0" + "@aws-sdk/config-resolver" "3.23.0" + "@aws-sdk/credential-provider-node" "3.23.0" + "@aws-sdk/eventstream-serde-browser" "3.23.0" + "@aws-sdk/eventstream-serde-config-resolver" "3.23.0" + "@aws-sdk/eventstream-serde-node" "3.23.0" + "@aws-sdk/fetch-http-handler" "3.23.0" + "@aws-sdk/hash-blob-browser" "3.23.0" + "@aws-sdk/hash-node" "3.23.0" + "@aws-sdk/hash-stream-node" "3.23.0" + "@aws-sdk/invalid-dependency" "3.23.0" + "@aws-sdk/md5-js" "3.23.0" + "@aws-sdk/middleware-apply-body-checksum" "3.23.0" + "@aws-sdk/middleware-bucket-endpoint" "3.23.0" + "@aws-sdk/middleware-content-length" "3.23.0" + "@aws-sdk/middleware-expect-continue" "3.23.0" + "@aws-sdk/middleware-host-header" "3.23.0" + "@aws-sdk/middleware-location-constraint" "3.23.0" + "@aws-sdk/middleware-logger" "3.23.0" + "@aws-sdk/middleware-retry" "3.23.0" + "@aws-sdk/middleware-sdk-s3" "3.23.0" + "@aws-sdk/middleware-serde" "3.23.0" + "@aws-sdk/middleware-signing" "3.23.0" + "@aws-sdk/middleware-ssec" "3.23.0" + "@aws-sdk/middleware-stack" "3.23.0" + "@aws-sdk/middleware-user-agent" "3.23.0" + "@aws-sdk/node-config-provider" "3.23.0" + "@aws-sdk/node-http-handler" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/smithy-client" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/url-parser" "3.23.0" + "@aws-sdk/util-base64-browser" "3.23.0" + "@aws-sdk/util-base64-node" "3.23.0" + "@aws-sdk/util-body-length-browser" "3.23.0" + "@aws-sdk/util-body-length-node" "3.23.0" + "@aws-sdk/util-user-agent-browser" "3.23.0" + "@aws-sdk/util-user-agent-node" "3.23.0" + "@aws-sdk/util-utf8-browser" "3.23.0" + "@aws-sdk/util-utf8-node" "3.23.0" + "@aws-sdk/util-waiter" "3.23.0" + "@aws-sdk/xml-builder" "3.23.0" + entities "2.2.0" + fast-xml-parser "3.19.0" + tslib "^2.3.0" + +"@aws-sdk/client-sso@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.23.0.tgz#4982bc9b159a7d88440fa08407a4f05b0176606f" + integrity sha512-Q4ZC7bKAQrcyyRUGPyzyZoygjocP2MR88TYRyKC/WrAGra8owNts7Dn2jBDHfb/mb/Xb8yRNv87lx51Htl15SQ== + dependencies: + "@aws-crypto/sha256-browser" "^1.0.0" + "@aws-crypto/sha256-js" "^1.0.0" + "@aws-sdk/config-resolver" "3.23.0" + "@aws-sdk/fetch-http-handler" "3.23.0" + "@aws-sdk/hash-node" "3.23.0" + "@aws-sdk/invalid-dependency" "3.23.0" + "@aws-sdk/middleware-content-length" "3.23.0" + "@aws-sdk/middleware-host-header" "3.23.0" + "@aws-sdk/middleware-logger" "3.23.0" + "@aws-sdk/middleware-retry" "3.23.0" + "@aws-sdk/middleware-serde" "3.23.0" + "@aws-sdk/middleware-stack" "3.23.0" + "@aws-sdk/middleware-user-agent" "3.23.0" + "@aws-sdk/node-config-provider" "3.23.0" + "@aws-sdk/node-http-handler" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/smithy-client" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/url-parser" "3.23.0" + "@aws-sdk/util-base64-browser" "3.23.0" + "@aws-sdk/util-base64-node" "3.23.0" + "@aws-sdk/util-body-length-browser" "3.23.0" + "@aws-sdk/util-body-length-node" "3.23.0" + "@aws-sdk/util-user-agent-browser" "3.23.0" + "@aws-sdk/util-user-agent-node" "3.23.0" + "@aws-sdk/util-utf8-browser" "3.23.0" + "@aws-sdk/util-utf8-node" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/client-sts@3.23.0": + version "3.23.0" + 
resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.23.0.tgz#486df855471660965fc14d872c73343fc491a7be" + integrity sha512-rF5YpfLw0DhP2ev4xKPoEvvZ3/KoahvlqrTst8VSbz08mI7qCOb2akGgN+6BUgaK4x9S/Suwq30HrFtbp1qjKA== + dependencies: + "@aws-crypto/sha256-browser" "^1.0.0" + "@aws-crypto/sha256-js" "^1.0.0" + "@aws-sdk/config-resolver" "3.23.0" + "@aws-sdk/credential-provider-node" "3.23.0" + "@aws-sdk/fetch-http-handler" "3.23.0" + "@aws-sdk/hash-node" "3.23.0" + "@aws-sdk/invalid-dependency" "3.23.0" + "@aws-sdk/middleware-content-length" "3.23.0" + "@aws-sdk/middleware-host-header" "3.23.0" + "@aws-sdk/middleware-logger" "3.23.0" + "@aws-sdk/middleware-retry" "3.23.0" + "@aws-sdk/middleware-sdk-sts" "3.23.0" + "@aws-sdk/middleware-serde" "3.23.0" + "@aws-sdk/middleware-signing" "3.23.0" + "@aws-sdk/middleware-stack" "3.23.0" + "@aws-sdk/middleware-user-agent" "3.23.0" + "@aws-sdk/node-config-provider" "3.23.0" + "@aws-sdk/node-http-handler" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/smithy-client" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/url-parser" "3.23.0" + "@aws-sdk/util-base64-browser" "3.23.0" + "@aws-sdk/util-base64-node" "3.23.0" + "@aws-sdk/util-body-length-browser" "3.23.0" + "@aws-sdk/util-body-length-node" "3.23.0" + "@aws-sdk/util-user-agent-browser" "3.23.0" + "@aws-sdk/util-user-agent-node" "3.23.0" + "@aws-sdk/util-utf8-browser" "3.23.0" + "@aws-sdk/util-utf8-node" "3.23.0" + entities "2.2.0" + fast-xml-parser "3.19.0" + tslib "^2.3.0" + +"@aws-sdk/config-resolver@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/config-resolver/-/config-resolver-3.23.0.tgz#45f64e4fe6bd64b9eecb0400e49188133fdfee59" + integrity sha512-acCxrAymwx81XELBO/d1VBWaHOldxqbmxDAMfvOfUYN+CYXWIFYpY1VCWuAeWig7Dy18QEJQ2pHwQlFxmilA7w== + dependencies: + "@aws-sdk/signature-v4" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-env@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.23.0.tgz#42f5457672d7fa913d8150a5a91fc229f234fcd2" + integrity sha512-ljYkVATha4BdecVvYeW1WuzoAAwfM/i7p9Wmx1RY3Rb0AGwIFX2GjtoBPhS3EbCRTzQIhUr4zfIelVVVxIS6bA== + dependencies: + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-imds@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.23.0.tgz#772b0980f8c87f7935d1da2a4b88d14a7b5d4723" + integrity sha512-jD1EkoVDApKZJwOLACTrnxhDmQiVF1qMM+GMnoY4bMk1p1sfZYNKs6VkaY2LGUWXxkesj1aiMFxbwyWmu8SQbQ== + dependencies: + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-ini@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.23.0.tgz#b7bfbce374ad139ac0b98f0d2d729a4dc30c725a" + integrity sha512-n1h2YcuZ0ghpoNAYWc8VkVxYamQEBeHUbdfEVVJHsfSon+FBk5gI8V9IZm3xrLHQYwWBDMS/VxeIyKpWnduG9Q== + dependencies: + "@aws-sdk/credential-provider-env" "3.23.0" + "@aws-sdk/credential-provider-imds" "3.23.0" + "@aws-sdk/credential-provider-sso" "3.23.0" + "@aws-sdk/credential-provider-web-identity" "3.23.0" + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/shared-ini-file-loader" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-credentials" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-node@3.23.0": + version "3.23.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.23.0.tgz#23789219a967192951c20a406e4af913fad08033" + integrity sha512-wbYNUNMNZ+nCZamAc77yLkiO0Lq2isldkkLuQsjerhB9gN0/LBeWVPf6381d5wQN5Q/o7/XEdew0QDB5i0KmIw== + dependencies: + "@aws-sdk/credential-provider-env" "3.23.0" + "@aws-sdk/credential-provider-imds" "3.23.0" + "@aws-sdk/credential-provider-ini" "3.23.0" + "@aws-sdk/credential-provider-process" "3.23.0" + "@aws-sdk/credential-provider-sso" "3.23.0" + "@aws-sdk/credential-provider-web-identity" "3.23.0" + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/shared-ini-file-loader" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-process@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.23.0.tgz#86cf5ba064cac6fcd08a6019a119716e20941aad" + integrity sha512-xba0u86nS5MtH3FQKSbTOEaoHjqpoj6NyonZEy0O5i9KO0NHf+bZwlmI/pe54SOE9uSrDKHfXB6dsftVIqXtFQ== + dependencies: + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/shared-ini-file-loader" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-credentials" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-sso@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.23.0.tgz#ff8cf1bb91df94cd057b4031a5dddb1ca1778efc" + integrity sha512-5EXX505eSBN0GQcyO3lFDSU5btJc5aHgp9HJ7jFUqPn0OeNCtLepckuijzKC8cM11C3f2i1CCP/nn6II/fajew== + dependencies: + "@aws-sdk/client-sso" "3.23.0" + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/shared-ini-file-loader" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-credentials" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/credential-provider-web-identity@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.23.0.tgz#06f08b45ff5b23abae06b6689f7e2d28064d47d6" + integrity sha512-GbDw2izWfb4KG62V6MBTOKmDAhbexbemxJsR0rMlZxW/dEYQh/r8Nk+m7evAUakNMJGm4fcAZGxey+orReq1VQ== + dependencies: + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/eventstream-marshaller@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-marshaller/-/eventstream-marshaller-3.23.0.tgz#829774b865c9c6dd651b7f02a54372a32b501678" + integrity sha512-gtxdB8/68ZePM1+nZnjZw2OBXB845SPQWnkNRyw2J6Vhlo4uFRYM9W+E/UEuTmGJ/EScnZAJOvegoFYz46CUDQ== + dependencies: + "@aws-crypto/crc32" "^1.0.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-hex-encoding" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/eventstream-serde-browser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-browser/-/eventstream-serde-browser-3.23.0.tgz#60f9b80757dfc0da878a728e4b2007cdb3e1b010" + integrity sha512-DdHNmW+LU7oIsoKkiWlVQ3nNgw6g4QbZRoY1XExb/R1FgGWZ2JXKWEgblZwbaFxnEdD4wD7Tb1dmiwfSIpz2Zg== + dependencies: + "@aws-sdk/eventstream-marshaller" "3.23.0" + "@aws-sdk/eventstream-serde-universal" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/eventstream-serde-config-resolver@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.23.0.tgz#689df5da60f584c63668c2f115c462dc7fb2af1c" + integrity sha512-jedPrTVr73Uu869D/Bs9fL/dM//ScEXEKkjJwv/FJtxTmO6ytpdy6pbwwuvqrcLWjYWJoj9+ABVTsTZxr+TqHw== + dependencies: + "@aws-sdk/types" "3.22.0" + 
tslib "^2.3.0" + +"@aws-sdk/eventstream-serde-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-node/-/eventstream-serde-node-3.23.0.tgz#675d79c324a855c78ff62ea96b3c2b3ac77a82e2" + integrity sha512-NR1HjoQiWM4FaLTtv47TdXoLQZ+f5m0SU8LNi2dheZlaLHtvssoOOmBbWg+SXOqmW2v4wjRGUFX6SG1qYOai+A== + dependencies: + "@aws-sdk/eventstream-marshaller" "3.23.0" + "@aws-sdk/eventstream-serde-universal" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/eventstream-serde-universal@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-universal/-/eventstream-serde-universal-3.23.0.tgz#b30619e8c6e756d463ae08f1c6e2ea21fc15bf07" + integrity sha512-vn0qdlmh2qk8QxTRj9CVUQN+lrxz7zufCvVpsK/TEhkpV2+t1PwaGYIPnUeD9OcjrnMR9twiiiWp5fyBRmJbHg== + dependencies: + "@aws-sdk/eventstream-marshaller" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/fetch-http-handler@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.23.0.tgz#97c1f47f7c0a5c33c0748d2d024b40cfc96b6e1c" + integrity sha512-gjToPkLlVOO8bHKhyw+d4mIX4OJEabqIFYbRFRDSm11LVLAAEc4pIFPYpMNWzrmDEnCxoGAcqfzP0m+0jChVCw== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/querystring-builder" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-base64-browser" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/hash-blob-browser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/hash-blob-browser/-/hash-blob-browser-3.23.0.tgz#9d7b1e16fa5503a1575a9f6fcc9fc31f1c6d69f7" + integrity sha512-2oY8mSnr3cxmMA/ZTI/1ABbJ0t+BAGUkxys+9nrE6XTlzrpJqsCL8b2dmARD2iSMZaUGP24QB3cIvcwC6IwNWg== + dependencies: + "@aws-sdk/chunked-blob-reader" "3.23.0" + "@aws-sdk/chunked-blob-reader-native" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/hash-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/hash-node/-/hash-node-3.23.0.tgz#136348928c17419eacbbbf9a9278e36da16e1b49" + integrity sha512-yah+vNhKv6jpJR5qHYGc/AIAWwR9Ah9NplAq8cltMsPuI38u/aSlbcEIDwsRz3V1MDA89f/+qY3OHBfQw5kLVw== + dependencies: + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-buffer-from" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/hash-stream-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/hash-stream-node/-/hash-stream-node-3.23.0.tgz#0d37a34579f5909429415d881f26a091941f42b0" + integrity sha512-n2WiYIkioYpgleJck23b3zB38wptc5xzKvC/by52tpMrYvkwM6RkwodVQ1aXFfCrTS78ZDnVNBUK2fxDkphfWw== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/invalid-dependency@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/invalid-dependency/-/invalid-dependency-3.23.0.tgz#1b6dd596356849f2e8af99c0d790f340f01524ef" + integrity sha512-5VqL7crIEtXj+lBwh3kKdMMlejjumjJQ5uLYNSCE/jNS5YjnbhAfO+fyzMO50IhcSuG4Ev6i1DEezN9BmYdeXA== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/is-array-buffer@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/is-array-buffer/-/is-array-buffer-3.23.0.tgz#3a5d601b0102ea3a4d832bde647509c8405b2ec9" + integrity sha512-XN20/scFthok0lCbjtinW77CoIBoar8cbOzmu+HkYTnBBpJrF6Ai5g9sgglO8r+X+OLn4PrDrTP+BxdpNuIh9g== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/md5-js@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/md5-js/-/md5-js-3.23.0.tgz#17d49bf5902bd08dd0df7fa9e15885780c2fa9c3" + integrity 
sha512-XXVuVMJlrWfI+NZ0k1g2gctPHm62BcPD+y0Lr61uC5tG99tMTN2vPC4+65I6AAktXX4Z7i6M89M2A8XkEwri4Q== + dependencies: + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-utf8-browser" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-apply-body-checksum@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-apply-body-checksum/-/middleware-apply-body-checksum-3.23.0.tgz#ce0a3de18980eaa56896b84546f37a8a0ffcee98" + integrity sha512-tu+VsZemq5O9aLH9jDYHq1NFYsR3rCMvSVL9osXrvLwGgnqtE0vwR3myfhw8BF5NTDBckuClES4gPZfzV25F/w== + dependencies: + "@aws-sdk/is-array-buffer" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-bucket-endpoint@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.23.0.tgz#5bca138ec7662960550800a6a3993bfb91d8fb32" + integrity sha512-5o6fvarLiNCnWFTs5a0VDfRhGuTm/5exXvZtWU9YIhCNYZEbBCWVFXg6XZ784EH2q1Hj/Tiu8jPa+OM4EQLlhQ== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-arn-parser" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-content-length@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-content-length/-/middleware-content-length-3.23.0.tgz#cfc6c83d7b42b2f21a66bd6a8164fac28718f59e" + integrity sha512-ooyNeXZUtI16Qh/HfcwLWn7NB2HvM/XEajaQmVIJXbVy/D2+N82+0Jo2hY3DouuIJjoEv/KZ5Uia/cgCdfHrHQ== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-expect-continue@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.23.0.tgz#566013f55eb506a953b730b391503f1e04cffc66" + integrity sha512-52LNqIfUHVNqaW+WDqDwgt9AD++T9oDO/P5MVLb5MsMVbMYEvTRLu7LZ7iVR4aeWv60CGG87UWGDmX/WhxRmrA== + dependencies: + "@aws-sdk/middleware-header-default" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-header-default@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-header-default/-/middleware-header-default-3.23.0.tgz#6d1176fb3f56c7cfff361125f6b15b552fc02f54" + integrity sha512-BW69RRNqWo5sFfGXAojFUMyqvGvG2bbyQk+ZxkWsoXng7LC979YdBJUaVE2C2G6d4ivPpN/SO77ZOZnUaOiHwQ== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-host-header@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.23.0.tgz#b20b8684274db44525edacf3ee50b7254d5b1349" + integrity sha512-bHqQbwY3guUr+AWcrerHIh1ONgqhV8W85+H7MYlt0V5/Kom0+ectR7yZZRt90PDMZ8OsW4+f5jTIURFMLtPbDA== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-location-constraint@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.23.0.tgz#ec6c7efbe30595fdc9955377ce6f1b02cd3d6fef" + integrity sha512-szytgGt1P4WtoGCiyGw6IRTBN7IFapECzUFLQ3/bz6HgivjnBupDkr1QXjoBy//uMcB+BcFq7DezsbvpfRSuTw== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-logger@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.23.0.tgz#427babe45094dde77c0dcff914f8801b0400d8cf" + integrity 
sha512-0z0ULcxllHO6xz1VeX/ekmg/LpNFL8nFbRH067s2KaimBeCUZ0CA2RwTpi9IY74tikmZAjerASb8eMgI+L/d7A== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-retry@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-retry/-/middleware-retry-3.23.0.tgz#10f87dd0ddd0b8766e48c0ab45009a045c696e49" + integrity sha512-NimiKrP90+aW62QmkOrhQAZjrwjOQuWye2POzdetSrBHpnwj2KQWNBjcRwjkGt53krPcDyCySjIw+ivTRYdxWw== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/service-error-classification" "3.22.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + uuid "^8.3.2" + +"@aws-sdk/middleware-sdk-s3@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.23.0.tgz#65d89e280892ff394850dc4c0efc8d5aeb7cf8a7" + integrity sha512-9RwW+Z18wygMYu7HhQGDpU5arqmpu992Q/O8Is6h460vp7JvJ1jZcY2vbJFDz6GdkuDSorRB/XBCxKrcmho3sA== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-arn-parser" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-sdk-sts@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.23.0.tgz#d29a1d0bd8c5bc39ec5d70430588c106b2645819" + integrity sha512-Rufzuqp4neVsyll9Ya9j+zpoK1fXrujBX6XRR5fRU3SsoAh5YWiUMrkxYxzTN+TLeXmyhCzmH/RuX2hgjMK0VQ== + dependencies: + "@aws-sdk/middleware-signing" "3.23.0" + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/signature-v4" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-serde@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-serde/-/middleware-serde-3.23.0.tgz#928d87bbce6e002165835348600cf19ca9d5737c" + integrity sha512-gNNMOo6Phm/BAnLsXvFfu4PHxKzN1saT3lNkODY2qKB1b4IoFNdMfHMo3jH4sbx7QYoM81qMXKr7aLp1BzTHtw== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-signing@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.23.0.tgz#7e492bed740c06f0310a40a52e9915639c452cdc" + integrity sha512-cTozWnc8HLxLjHYU10+uqE4RqXYmmCJqoEKiSzJH7f8n20Pr9ly3rv3/9AfbqPth1PXsg0xHYq/ovCvq6RiaYA== + dependencies: + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/signature-v4" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-ssec@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.23.0.tgz#cc7667e607be1754fe2218056a675769c98bd984" + integrity sha512-SK+HvXoCri0dllcIbWtsbVMf64li8IiT7KnvDbkKRxiO9WFjO70zJ0Ea1REVAXZ1tqAVARMxX/eFvnXOs7Xhhw== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/middleware-stack@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-stack/-/middleware-stack-3.23.0.tgz#b41a78eaf0c8d04b7cc1af0a81e8a803f51eee6e" + integrity sha512-lk4u8wDajJ+VBXVWpzqaRUUJibt1YxsIciwLeZymilAZW5L9VtchUW9fmRpaZX8QHFGGkGuwZjtxlX6MeGXK4w== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/middleware-user-agent@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.23.0.tgz#ecd35ec3b3e360469a14dba1a39fa0cf060c06bc" + integrity sha512-cwOypi0no2Nsrw1N3VGe/0XgbNl487Wn4jgKZvj+nxdSWh4HQMWpoTLB3YZtzro+J7uVK6X7W+QxBU20+Ypg1g== + dependencies: + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib 
"^2.3.0" + +"@aws-sdk/node-config-provider@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/node-config-provider/-/node-config-provider-3.23.0.tgz#2fd99f134b3152290c285caa2fe076b2cf75d16f" + integrity sha512-OyhyqTXUy5HxPu2c1aCYFHKQGjf4uzjby9AteMhRJfa6cehuVODi3KEv7PyZmJQcYI0Pw9ZnoHqVrTNsUEC2YQ== + dependencies: + "@aws-sdk/property-provider" "3.23.0" + "@aws-sdk/shared-ini-file-loader" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/node-http-handler@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/node-http-handler/-/node-http-handler-3.23.0.tgz#9c19775a343a439366f88ec42085fbd9372a21ba" + integrity sha512-amvf0lwldUrr+CFtIeMZoNVmv34Fx3zwqobT5WuxtfRWbvSRALMw0LW/oXwoT+4WayM6sIwcIwSG1ZVGCjD0fA== + dependencies: + "@aws-sdk/abort-controller" "3.23.0" + "@aws-sdk/protocol-http" "3.23.0" + "@aws-sdk/querystring-builder" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/property-provider@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/property-provider/-/property-provider-3.23.0.tgz#d14ff1ce66a43c7af9cade2b0c2e84c2ccd0b457" + integrity sha512-GjFtmFHVzO4BeLRselGirt32cyorP1aRbD+ID4Zhz4RLxa9Nun766s8lqp7EcR/v9pSGdP1Xec3no8ALV3lXmw== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/protocol-http@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/protocol-http/-/protocol-http-3.23.0.tgz#cc7a905a7f7b045d2714d094a5946ef932fa94c7" + integrity sha512-JTsq/UU/wTyeCMPVar2xSsMVFf72IK0L7dXbbS7ZHcBV6JAfM/wVTym8/s3mQGM6Kx/c6Wtn+J/5syDx56CV2g== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/querystring-builder@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-builder/-/querystring-builder-3.23.0.tgz#fd3690d4b3199308151a61f3bd9cb022576e83e1" + integrity sha512-MfQknhgMT9tul0VrxmLBDKlV7Ls2/kEJyprWXUWzCUBMUZ6M+FtOMJhjP90qTbsNvlsEVQgTlS/cDsNVrAUR3A== + dependencies: + "@aws-sdk/types" "3.22.0" + "@aws-sdk/util-uri-escape" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/querystring-parser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-parser/-/querystring-parser-3.23.0.tgz#7a3a969c10a935b64e66e1708d9dce7cf9fb3375" + integrity sha512-pMEN+rE08QhixRfWEBuQwnOGuGiRjH5++mmyQTUIvEgKk/rnyAkUlrySv775jvrEQlCXH8yqMuHdutF8rHkHGA== + dependencies: + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/service-error-classification@3.22.0": + version "3.22.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/service-error-classification/-/service-error-classification-3.22.0.tgz#dce2271f7415d0be31d4b6021589870b9b914d40" + integrity sha512-6ytFFoU8guAljwpmQTvZNf//cTurdumeLlAmQ8RJsbX3y5DGlpG2dfq7mpYJudtJtCQTwPYtaG5Xva460T2CqA== + +"@aws-sdk/shared-ini-file-loader@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.23.0.tgz#574901a31e65e425632a9cae6a64f6382a2b76e8" + integrity sha512-YUp46l6E3dLKHp1cKMkZI4slTjsVc/Lm7nPCTVc3oQvZ1MvC99N/jMCmZ7X5YYofuAUSdc9eJ8sYiF2BnUww9g== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/signature-v4@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4/-/signature-v4-3.23.0.tgz#6ff22e675285b62c4caaa3d6a7833135ada2583a" + integrity sha512-3smgG/6LcK8SjVqWzroAgSFOF8HKp4/LtOQQBtPkI04nTMVP4zmE5hsVQEZv33h5UKWkUpwQRBTCtfFZTq/Jvw== + dependencies: + "@aws-sdk/is-array-buffer" "3.23.0" + "@aws-sdk/types" "3.22.0" + 
"@aws-sdk/util-hex-encoding" "3.23.0" + "@aws-sdk/util-uri-escape" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/smithy-client@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/smithy-client/-/smithy-client-3.23.0.tgz#50e55a1c4909204239ec48734f57c84404d7a476" + integrity sha512-wdGuSKVBLaIrC0V9eScWIqYDd50Z/pfAek3OG9lmP4IJLYd4HQJFXTuOXUg9eaJW+qZp3oNXD3clLOyV7WMPQA== + dependencies: + "@aws-sdk/middleware-stack" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/types@3.22.0", "@aws-sdk/types@^3.1.0": + version "3.22.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.22.0.tgz#323afc96eb27a69a97da0803254a62969d3d3539" + integrity sha512-dGJBPbWm+YT+D5YIiqK3Z1xWzWShWgSxL1gPS9+vKNY2ld2TvtoiRhFy8NQG2jnC+eG/+WNeZS6ZxzLvEbQyTQ== + +"@aws-sdk/url-parser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/url-parser/-/url-parser-3.23.0.tgz#0f5c252e48d208773d60861ce39d2b5ef20880be" + integrity sha512-uU4BDX0eilGlMuz8qDlNzcH3k4WTZWgMnBuJ9+TdxTXNiLvC+X9HBjVmB2Nr+3mEJhhrRc/8mTrleJvcl60Pyg== + dependencies: + "@aws-sdk/querystring-parser" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/util-arn-parser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.23.0.tgz#7372460ba98a6826f97d9622759764bcf09add79" + integrity sha512-J3+/wnC21kbb3UAHo7x31aCZxzIa7GBijt6Q7nad/j2aF38EZtE3SI0aZpD8250Vi+9zsZ4672QDUeSZ5BR5kg== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-base64-browser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-browser/-/util-base64-browser-3.23.0.tgz#61594ac9529756361c81ece287548ab5b8c5a768" + integrity sha512-xlI/qw+uhLJWa3k0mRtRHQ42v5QzsMFEUXScredQMfJ/34qzXyocsG6OHPOTV1I8WSANrxnHR5m1Ae3iU6JuVw== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-base64-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-node/-/util-base64-node-3.23.0.tgz#d0da9ed6b8aaa7513ba4b36a20b4794c72c074ce" + integrity sha512-Kf8JIAUtjrPcD5CJzrig2B5CtegWswUNpW4zBarww/UJhHlp8WzKlCxxA+yNS1ghT0ZMjrRvxPabKDGpkyUfmQ== + dependencies: + "@aws-sdk/util-buffer-from" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/util-body-length-browser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.23.0.tgz#1a5c5e7ea5e15d93bd178021c54d2ea41faeb1cd" + integrity sha512-Bi6u/5omQbOBSB5BxqVvaPgVplLRjhhSuqK3XAukbeBPh7lcibIBdy7YvbhQyl4i8Hb2QjFnqqfzA0lNBe5eiw== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-body-length-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-node/-/util-body-length-node-3.23.0.tgz#2a7890b4fa6de78a042db9537a67f90ccb2a3034" + integrity sha512-8kSczloA78mikPaJ742SU9Wpwfcz3HOruoXiP/pOy69UZEsMe4P7zTZI1bo8BAp7j6IFUPCXth9E3UAtkbz+CQ== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-buffer-from@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-buffer-from/-/util-buffer-from-3.23.0.tgz#3bc02f50c6e8a5c2b9db61faeb3bebc9de701c3b" + integrity sha512-axXy1FvEOM1uECgMPmyHF1S3Hd7JI+BerhhcAlGig0bbqUsZVQUNL9yhOsWreA+nf1v08Ucj8P2SHPCT9Hvpgg== + dependencies: + "@aws-sdk/is-array-buffer" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/util-credentials@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-credentials/-/util-credentials-3.23.0.tgz#6b3138c3853c72adc93c3f57e8fb28f58ffdc364" + integrity 
sha512-6TDGZnFa0kZr+vSsWXXMfWt347jbMGKtzGnBxbrmiQgZMijz9s/wLYxsjglZ+CyqI/QrSMOTtqy6mEgJxdnGWQ== + dependencies: + "@aws-sdk/shared-ini-file-loader" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/util-hex-encoding@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.23.0.tgz#a8de34faf9e51dd4be379be0e9d3bdc093ae6bf4" + integrity sha512-RFDCwNrJMmmPSMVRadxRNePqTXGwtL9s4844x44D0bbGg1TdC42rrg0PRKYkxFL7wd1FbibVQOzciZAvzF+Z+w== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-locate-window@^3.0.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.23.0.tgz#e9bf2a023dce2ea1d13ec2e8c7c92abb333a1442" + integrity sha512-mM8kWW7SWIxCshkNllpYqCQi5SzwJ+sv5nURhtquOB5/H3qGqZm0V5lUE3qpE1AYmqKwk6qbGUy1woFn1T5nrw== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-uri-escape@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-uri-escape/-/util-uri-escape-3.23.0.tgz#52539674966eb456d65408d9028ed114e94dfd49" + integrity sha512-SvQx2E/FDlI5vLT67wwn/k1j2R/G58tYj4Te6GNgEwPGL43X2+7c0+d/WTgndMaRvxSBHZMUTxBYh1HOeU7loA== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-user-agent-browser@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.23.0.tgz#b56520071e2c0fea259e4c797ed69bc7ebcbd474" + integrity sha512-FIjcCdvnUuOBMQgvPZ04Hk28Qy+xJDrtXeWm/7xKJ1K7NRucJWjmC+0OU0uw9A7VOCHf08nk9xniZhAGXs1wJg== + dependencies: + "@aws-sdk/types" "3.22.0" + bowser "^2.11.0" + tslib "^2.3.0" + +"@aws-sdk/util-user-agent-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.23.0.tgz#e0493c275bd310c6283cf52d57a7f5ba6ff995e0" + integrity sha512-6okok4u13uYRIYdgFZ4dCsowf5vKh+ZxkfVSwvnZO3XAaGEhmIkM3+JKIQjcxLJ+Mt0ssMSJwNMz5oOBSlXPeQ== + dependencies: + "@aws-sdk/node-config-provider" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/util-utf8-browser@3.23.0", "@aws-sdk/util-utf8-browser@^3.0.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.23.0.tgz#dff7e891c67936de677b7d7a6c796e5c2e1b1510" + integrity sha512-fSB95AKnvCnAbCd7o0xLbErfAgD9wnLCaEu23AgfGAiaG3nFF8Z2+wtjebU/9Z4RI9d/x83Ho/yguRnJdkMsPA== + dependencies: + tslib "^2.3.0" + +"@aws-sdk/util-utf8-node@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-node/-/util-utf8-node-3.23.0.tgz#9f9fe76745c79c8a148f15d78e9a5c03d2bf0441" + integrity sha512-yao8+8okyfCxRvxZe3GBdO7lJnQEBf3P6rDgleOQD/0DZmMjOQGXCvDd42oagE2TegXhkUnJfVOZU2GqdoR0hg== + dependencies: + "@aws-sdk/util-buffer-from" "3.23.0" + tslib "^2.3.0" + +"@aws-sdk/util-waiter@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-waiter/-/util-waiter-3.23.0.tgz#f34f64835ded74b884d8b1c2dbcd2e281ec21a23" + integrity sha512-TtCw6OoSrgXLbi1mBn/eicaa3RcJLVm4RdiV1lBQxSX22wyriFP+b1BXRkS9G49rBMciwWu/Xpg8E0Pi79pOnQ== + dependencies: + "@aws-sdk/abort-controller" "3.23.0" + "@aws-sdk/types" "3.22.0" + tslib "^2.3.0" + +"@aws-sdk/xml-builder@3.23.0": + version "3.23.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.23.0.tgz#e318f539b68fa9c0a36da49e85a96cdca13a8113" + integrity sha512-5LEGdhQIJtGTwg4dIYyNtpz5QvPcQoxsqJygmj+VB8KLd+mWorH1IOpiL74z0infeK9N+ZFUUPKIzPJa9xLPqw== + dependencies: + tslib "^2.3.0" + "@babel/code-frame@7.12.11": version "7.12.11" resolved 
"https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" @@ -1722,6 +2484,11 @@ boolean@3.0.4: resolved "https://registry.yarnpkg.com/boolean/-/boolean-3.0.4.tgz#aa1df8749af41d7211b66b4eee584722ff428c27" integrity sha512-5pyOr+w2LNN72F2mAq6J0ckHUfJYSgRKma7e/wlcMMhgOLV9OI0ERhERYXxUqo+dPyVxcbXKy9n+wg13+LpNnA== +bowser@^2.11.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== + boxen@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64" @@ -3037,7 +3804,7 @@ enquirer@^2.3.5: dependencies: ansi-colors "^4.1.1" -entities@^2.0.0: +entities@2.2.0, entities@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== @@ -3539,7 +4306,7 @@ fast-safe-stringify@^2.0.4, fast-safe-stringify@^2.0.7: resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.0.8.tgz#dc2af48c46cf712b683e849b2bbd446b32de936f" integrity sha512-lXatBjf3WPjmWD6DpIZxkeSsCOwqI0maYMpgDlx8g4U2qi4lbjA9oH/HD2a87G+KfsUmo5WbJFmqBZlPxtptag== -fast-xml-parser@^3.19.0: +fast-xml-parser@3.19.0, fast-xml-parser@^3.19.0: version "3.19.0" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-3.19.0.tgz#cb637ec3f3999f51406dd8ff0e6fc4d83e520d01" integrity sha512-4pXwmBplsCPv8FOY1WRakF970TjNGnGnfbOnLqjlYvMiF1SR3yOHyxMR/YCXpPTOspNF5gwudqktIP4VsWkvBg== @@ -8084,12 +8851,12 @@ tslib@2.2.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c" integrity sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w== -tslib@^1.8.1, tslib@^1.9.0: +tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.0: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.0.0, tslib@^2.2.0: +tslib@^2.0.0, tslib@^2.2.0, tslib@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e" integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg== From 4f5d19bd3a460ea3a15e6f880a2f29534db25f14 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Sun, 1 Aug 2021 10:22:29 +0200 Subject: [PATCH 02/23] Add support for custom url generation on s3 stored files Uses two config keys to support url generation that doesn't directly go to (compatible s3). Can be used to generate urls to any cache server or CDN. 
--- config/default.yaml | 9 +++++++++ server/initializers/config.ts | 6 ++++-- .../job-queue/handlers/move-to-object-storage.ts | 13 +++++-------- server/lib/object-storage.ts | 10 +++++++++- 4 files changed, 27 insertions(+), 11 deletions(-) diff --git a/config/default.yaml b/config/default.yaml index 7f76db714cd..fd67a757616 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -100,8 +100,17 @@ s3: endpoint: 's3.amazonaws.com' # Will always use https videos_bucket: 'videos' videos_prefix: '' # Allows setting all buckets to the same value but with a different prefix + # Optional, use to overide the default url generation that just gives the public url to s3 + # Use the string %path% in place where the (full) path to the file should be. The path + # does not include the videos_prefix, so you should add it to your template if needed. + # For example: + # https://my-videos-cache.example.com/cdn-cache/%path%?thing=true + # Would turn into: + # https://my-videos-cache.example.com/cdn-cache/hls/9ffceb57-cbe3-41c5-80e4-dbb7e97c3958/b27d1892-9c5c-4bef-8bce-f68e54fb1208-240-fragmented.mp4?thing=true + videos_template: '' streaming_playlists_bucket: 'streaming-playlists' streaming_playlists_prefix: '' + streaming_playlists_template: '' log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' diff --git a/server/initializers/config.ts b/server/initializers/config.ts index ea43ea141f3..8e07a444dc8 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -78,11 +78,13 @@ const CONFIG = { ENDPOINT: config.get('s3.endpoint'), VIDEOS_BUCKETINFO: { bucket: config.get('s3.videos_bucket'), - prefix: config.get('s3.videos_prefix') + prefix: config.get('s3.videos_prefix'), + url_template: config.get('s3.videos_url_template') }, STREAMING_PLAYLISTS_BUCKETINFO: { bucket: config.get('s3.streaming_playlists_bucket'), - prefix: config.get('s3.streaming_playlists_prefix') + prefix: config.get('s3.streaming_playlists_prefix'), + url_template: config.get('s3.streaming_playlists_template') } }, WEBSERVER: { diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index 274ca7aa02c..1982a97e5ef 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -4,7 +4,7 @@ import { MoveObjectStoragePayload, VideoState } from '../../../../shared' import { VideoModel } from '@server/models/video/video' -import { storeObject } from '@server/lib/object-storage' +import { generateUrl, storeObject } from '@server/lib/object-storage' import { CONFIG } from '@server/initializers/config' import { join } from 'path' import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' @@ -54,7 +54,7 @@ async function moveWebTorrentFiles (video: MVideoWithAllFiles) { ) file.storage = VideoStorageType.OBJECT_STORAGE - file.fileUrl = `https://${CONFIG.S3.VIDEOS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.VIDEOS_BUCKETINFO.prefix}${filename}` + file.fileUrl = generateUrl(filename, CONFIG.S3.VIDEOS_BUCKETINFO) await file.save() } } @@ -83,10 +83,8 @@ async function moveHLSFiles (video: MVideoWithAllFiles) { CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO ) - // eslint-disable-next-line max-len - playlist.playlistUrl = `https://${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix}${masterPlaylistFilename}` - // eslint-disable-next-line max-len - playlist.segmentsSha256Url = 
`https://${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix}${segmentsFileName}` + playlist.playlistUrl = generateUrl(masterPlaylistFilename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) + playlist.segmentsSha256Url = generateUrl(segmentsFileName, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) for (const videoFile of playlist.VideoFiles) { const file = await videoFile.reload() @@ -114,8 +112,7 @@ async function moveHLSFiles (video: MVideoWithAllFiles) { // Signals that the video file + playlist file were uploaded file.storage = VideoStorageType.OBJECT_STORAGE - // eslint-disable-next-line max-len - file.fileUrl = `https://${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket}.${CONFIG.S3.ENDPOINT}/${CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix}${filename}` + file.fileUrl = generateUrl(filename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) await file.save() } diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index 58c7accbe41..a0cf11a82dc 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -3,7 +3,7 @@ import { DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command, PutObj import { CONFIG } from "@server/initializers/config" import { logger } from '@server/helpers/logger' -type BucketInfo = {bucket: string, prefix?: string} +type BucketInfo = {bucket: string, prefix?: string, url_template?: string} function getS3Client () { return new S3Client({ endpoint: `https://${CONFIG.S3.ENDPOINT}` }) @@ -63,3 +63,11 @@ export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { // Repeat if not all objects could be listed at once (limit of 1000?) if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo) } + +export function generateUrl (filename: string, bucketInfo: BucketInfo) { + if (!bucketInfo.url_template) { + return `https://${bucketInfo.bucket}.${CONFIG.S3.ENDPOINT}/${bucketInfo.prefix}${filename}` + } + const key = filename + return bucketInfo.url_template.replace('%path%', key) +} From 67f7339ebba3f000d40a0495b72e54843a7a8606 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Sun, 1 Aug 2021 12:28:08 +0200 Subject: [PATCH 03/23] Upload files to s3 concurrently and delete originals afterwards --- config/default.yaml | 19 ++-- server/initializers/checker-after-init.ts | 25 +++++ server/initializers/config.ts | 2 +- server/initializers/constants.ts | 2 +- .../migrations/0660-object-storage.ts | 2 +- .../handlers/move-to-object-storage.ts | 96 +++++++++++-------- .../job-queue/handlers/video-live-ending.ts | 2 +- .../job-queue/handlers/video-transcoding.ts | 39 ++++---- server/lib/job-queue/job-queue.ts | 9 +- server/lib/object-storage.ts | 3 +- server/lib/transcoding/video-transcoding.ts | 10 +- server/lib/video.ts | 5 +- .../models/video/sql/shared/video-tables.ts | 2 +- server/models/video/video.ts | 3 +- shared/models/server/job.model.ts | 1 + 15 files changed, 134 insertions(+), 86 deletions(-) diff --git a/config/default.yaml b/config/default.yaml index fd67a757616..d09e465a661 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -97,20 +97,23 @@ storage: s3: enabled: false - endpoint: 's3.amazonaws.com' # Will always use https - videos_bucket: 'videos' - videos_prefix: '' # Allows setting all buckets to the same value but with a different prefix + # Will always use https with default URL generation (see below) + endpoint: 's3.amazonaws.com' + streaming_playlists_bucket: '' + # Allows setting all buckets to the same value but with a 
different prefix + streaming_playlists_prefix: '' # Optional, use to overide the default url generation that just gives the public url to s3 # Use the string %path% in place where the (full) path to the file should be. The path - # does not include the videos_prefix, so you should add it to your template if needed. + # does not include the streaming_playlists_prefix, so you should add it to your template if needed. # For example: # https://my-videos-cache.example.com/cdn-cache/%path%?thing=true # Would turn into: # https://my-videos-cache.example.com/cdn-cache/hls/9ffceb57-cbe3-41c5-80e4-dbb7e97c3958/b27d1892-9c5c-4bef-8bce-f68e54fb1208-240-fragmented.mp4?thing=true - videos_template: '' - streaming_playlists_bucket: 'streaming-playlists' - streaming_playlists_prefix: '' - streaming_playlists_template: '' + streaming_playlists_url_template: '' + # Same settings but for webtorrent videos + videos_bucket: '' + videos_prefix: '' + videos_url_template: '' log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' diff --git a/server/initializers/checker-after-init.ts b/server/initializers/checker-after-init.ts index 911734fa006..384884ef756 100644 --- a/server/initializers/checker-after-init.ts +++ b/server/initializers/checker-after-init.ts @@ -153,6 +153,31 @@ function checkConfig () { } } + // Object storage + if (CONFIG.S3.ENABLED === true) { + if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED && !CONFIG.S3.VIDEOS_BUCKETINFO.bucket) { + return 'videos_bucket should be set when object storage support is enabled.' + } + if (CONFIG.TRANSCODING.HLS.ENABLED && !CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket) { + return 'streaming_playlists_bucket should be set when object storage support is enabled.' + } + if (CONFIG.S3.VIDEOS_BUCKETINFO.bucket === CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket && + CONFIG.S3.VIDEOS_BUCKETINFO.prefix === CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix) { + if (CONFIG.S3.VIDEOS_BUCKETINFO.prefix === '') { + return 'Object storage bucket prefixes should be set when the same bucket is used for both types of video.' + } else { + return 'Object storage bucket prefixes should be set to different values when the same bucket is used for both types of video.' + } + } + if ( + (CONFIG.S3.VIDEOS_BUCKETINFO.url_template !== '' && + !CONFIG.S3.VIDEOS_BUCKETINFO.url_template.includes('%path%')) || + (CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.url_template !== '' && + !CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.url_template.includes('%path%'))) { + return 'Object storage url templates should include `%path%\' in place where the file path needs to be inserted.' 
+ } + } + return null } diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 8e07a444dc8..36fdf07ead6 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -84,7 +84,7 @@ const CONFIG = { STREAMING_PLAYLISTS_BUCKETINFO: { bucket: config.get('s3.streaming_playlists_bucket'), prefix: config.get('s3.streaming_playlists_prefix'), - url_template: config.get('s3.streaming_playlists_template') + url_template: config.get('s3.streaming_playlists_url_template') } }, WEBSERVER: { diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts index b926a24aa36..b3dad828b5f 100644 --- a/server/initializers/constants.ts +++ b/server/initializers/constants.ts @@ -164,7 +164,7 @@ const JOB_CONCURRENCY: { [id in Exclude { { - await utils.queryInterface.addColumn('video', 'transcodeJobsRunning', { type: Sequelize.INTEGER, allowNull: false, defaultValue: 0 }) + await utils.queryInterface.addColumn('video', 'moveJobsRunning', { type: Sequelize.INTEGER, allowNull: false, defaultValue: 0 }) } { diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index 1982a97e5ef..c78403e2ce6 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -1,8 +1,6 @@ import * as Bull from 'bull' import { logger } from '@server/helpers/logger' -import { - MoveObjectStoragePayload, VideoState -} from '../../../../shared' +import { MoveObjectStoragePayload } from '../../../../shared' import { VideoModel } from '@server/models/video/video' import { generateUrl, storeObject } from '@server/lib/object-storage' import { CONFIG } from '@server/initializers/config' @@ -10,6 +8,7 @@ import { join } from 'path' import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' import { getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' import { MVideoWithAllFiles, VideoStorageType } from '@server/types/models' +import { remove } from 'fs-extra' export async function processMoveToObjectStorage (job: Bull.Job) { const payload = job.data as MoveObjectStoragePayload @@ -22,30 +21,26 @@ export async function processMoveToObjectStorage (job: Bull.Job) { return undefined } - if (video.state === VideoState.TO_TRANSCODE) { - logger.info('Video needs to be transcoded still, exiting move job %d', job.id) - return undefined - } - - if (video.transcodeJobsRunning > 0) { - logger.info('A transcode job for this video is running, exiting move job %d', job.id) - return undefined + if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED && video.VideoFiles) { + await moveWebTorrentFiles(video, payload.videoFileId) } - if (video.VideoFiles) { - await moveWebTorrentFiles(video) + if (CONFIG.TRANSCODING.HLS.ENABLED && video.VideoStreamingPlaylists) { + await moveHLSFiles(video, payload.videoFileId) } - if (video.VideoStreamingPlaylists) { - await moveHLSFiles(video) + await video.decrement('moveJobsRunning') + if (video.moveJobsRunning === 0) { + await doAfterLastJob(video) } return payload.videoUUID } -async function moveWebTorrentFiles (video: MVideoWithAllFiles) { +async function moveWebTorrentFiles (video: MVideoWithAllFiles, videoFileId?: number) { for (const file of video.VideoFiles) { if (file.storage !== VideoStorageType.LOCAL) continue + if (videoFileId !== null && file.id !== videoFileId) continue const filename = file.filename await storeObject( @@ -59,36 +54,13 @@ async function moveWebTorrentFiles (video: 
MVideoWithAllFiles) { } } -async function moveHLSFiles (video: MVideoWithAllFiles) { +async function moveHLSFiles (video: MVideoWithAllFiles, videoFileId: number) { for (const playlist of video.VideoStreamingPlaylists) { const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) - // Master playlist - const masterPlaylistFilename = join(playlist.getStringType(), video.uuid, playlist.playlistFilename) - await storeObject( - { - filename: masterPlaylistFilename, - path: join(baseHlsDirectory, playlist.playlistFilename) - }, - CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO - ) - - // Sha256 segments file - const segmentsFileName = join(playlist.getStringType(), video.uuid, playlist.segmentsSha256Filename) - await storeObject( - { - filename: segmentsFileName, - path: join(baseHlsDirectory, playlist.segmentsSha256Filename) - }, - CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO - ) - - playlist.playlistUrl = generateUrl(masterPlaylistFilename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) - playlist.segmentsSha256Url = generateUrl(segmentsFileName, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) - - for (const videoFile of playlist.VideoFiles) { - const file = await videoFile.reload() + for (const file of playlist.VideoFiles) { if (file.storage !== VideoStorageType.LOCAL) continue + if (videoFileId !== null && file.id !== videoFileId) continue // Resolution playlist const playlistFileName = getHlsResolutionPlaylistFilename(file.filename) @@ -115,8 +87,48 @@ async function moveHLSFiles (video: MVideoWithAllFiles) { file.fileUrl = generateUrl(filename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) await file.save() } + } +} + +async function doAfterLastJob (video: MVideoWithAllFiles) { + for (const playlist of video.VideoStreamingPlaylists) { + const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) + // Master playlist + const masterPlaylistFilename = join(playlist.getStringType(), video.uuid, playlist.playlistFilename) + await storeObject( + { + filename: masterPlaylistFilename, + path: join(baseHlsDirectory, playlist.playlistFilename) + }, + CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + ) + // Sha256 segments file + const segmentsFileName = join(playlist.getStringType(), video.uuid, playlist.segmentsSha256Filename) + await storeObject( + { + filename: segmentsFileName, + path: join(baseHlsDirectory, playlist.segmentsSha256Filename) + }, + CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + ) + + playlist.playlistUrl = generateUrl(masterPlaylistFilename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) + playlist.segmentsSha256Url = generateUrl(segmentsFileName, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) playlist.storage = VideoStorageType.OBJECT_STORAGE await playlist.save() } + + // Remove files that were "moved" + const tasks: Promise[] = [] + + for (const file of video.VideoFiles) { + tasks.push(remove(join(CONFIG.STORAGE.VIDEOS_DIR, file.filename))) + } + + if (video.VideoStreamingPlaylists) { + tasks.push(remove(join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid))) + } + + await Promise.all(tasks) } diff --git a/server/lib/job-queue/handlers/video-live-ending.ts b/server/lib/job-queue/handlers/video-live-ending.ts index aa5bd573aa2..ed8d9b44114 100644 --- a/server/lib/job-queue/handlers/video-live-ending.ts +++ b/server/lib/job-queue/handlers/video-live-ending.ts @@ -98,7 +98,7 @@ async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MSt const { resolution, isPortraitMode } = await getVideoFileResolution(concatenatedTsFilePath, probe) - const outputPath = await 
generateHlsPlaylistResolutionFromTS({ + const { resolutionPlaylistPath: outputPath } = await generateHlsPlaylistResolutionFromTS({ video: videoWithFiles, concatenatedTsFilePath, resolution, diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index 1ba2a5fa4fd..554c96ba77a 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -46,24 +46,20 @@ async function processVideoTranscoding (job: Bull.Job) { return undefined } - try { + const user = await UserModel.loadByChannelActorId(video.VideoChannel.actorId) - const user = await UserModel.loadByChannelActorId(video.VideoChannel.actorId) + const handler = handlers[payload.type] - const handler = handlers[payload.type] + if (!handler) { + throw new Error('Cannot find transcoding handler for ' + payload.type) + } - if (!handler) { - throw new Error('Cannot find transcoding handler for ' + payload.type) - } + const { videoFile } = await handler(job, payload, video, user) - await handler(job, payload, video, user) + // Create job to move the new files to object storage if enabled + await addMoveToObjectStorageJob(video, videoFile) - return video - } finally { - await video.decrement('transcodeJobsRunning') - // Create job to move the new files to object storage if enabled - await addMoveToObjectStorageJob(video) - } + return video } // --------------------------------------------------------------------------- @@ -78,7 +74,7 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide const videoOrStreamingPlaylist = videoFileInput.getVideoOrStreamingPlaylist() const videoInputPath = getVideoFilePath(videoOrStreamingPlaylist, videoFileInput) - await generateHlsPlaylistResolution({ + const { videoFile } = await generateHlsPlaylistResolution({ video, videoInputPath, resolution: payload.resolution, @@ -88,6 +84,8 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide }) await retryTransactionWrapper(onHlsPlaylistGeneration, video, user, payload) + + return { videoFile } } async function handleNewWebTorrentResolutionJob ( @@ -96,21 +94,28 @@ async function handleNewWebTorrentResolutionJob ( video: MVideoFullLight, user: MUserId ) { - await transcodeNewWebTorrentResolution(video, payload.resolution, payload.isPortraitMode || false, job) + const { videoFile } = await transcodeNewWebTorrentResolution(video, payload.resolution, payload.isPortraitMode || false, job) + + // Create job to move the new files to object storage if enabled + await addMoveToObjectStorageJob(video, videoFile) await retryTransactionWrapper(onNewWebTorrentFileResolution, video, user, payload) } async function handleWebTorrentMergeAudioJob (job: Bull.Job, payload: MergeAudioTranscodingPayload, video: MVideoFullLight, user: MUserId) { - await mergeAudioVideofile(video, payload.resolution, job) + const { videoFile } = await mergeAudioVideofile(video, payload.resolution, job) await retryTransactionWrapper(onVideoFileOptimizer, video, payload, 'video', user) + + return { videoFile } } async function handleWebTorrentOptimizeJob (job: Bull.Job, payload: OptimizeTranscodingPayload, video: MVideoFullLight, user: MUserId) { - const transcodeType = await optimizeOriginalVideofile(video, video.getMaxQualityFile(), job) + const { transcodeType, videoFile } = await optimizeOriginalVideofile(video, video.getMaxQualityFile(), job) await retryTransactionWrapper(onVideoFileOptimizer, video, payload, transcodeType, user) 
+ + return { videoFile } } // --------------------------------------------------------------------------- diff --git a/server/lib/job-queue/job-queue.ts b/server/lib/job-queue/job-queue.ts index 5f2f1e54fd9..95b90c9296c 100644 --- a/server/lib/job-queue/job-queue.ts +++ b/server/lib/job-queue/job-queue.ts @@ -161,10 +161,11 @@ class JobQueue { return } if (obj.type === 'video-transcoding') { - // This value is decreased when the transcoding job is finished in ./handlers/video-transcoding.ts - // It's used by the move-to-object-storage job to detect when the last transcoding job is finished - VideoModel.increment('transcodeJobsRunning', { where: { uuid: obj.payload.videoUUID } }) - .catch(err => logger.error('Cannot increase transcodeJobsRunning.', { err })) + // This value is decreased when the move job is finished in ./handlers/move-to-object-storage.ts + // Because every transcode job starts a move job for the transcoded file, the value will only reach + // 0 again when all transcode jobs are finished and the last move job is running + VideoModel.increment('moveJobsRunning', { where: { uuid: obj.payload.videoUUID } }) + .catch(err => logger.error('Cannot increase moveJobsRunning.', { err })) } const jobArgs: Bull.JobOptions = { diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index a0cf11a82dc..25223044bec 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -68,6 +68,5 @@ export function generateUrl (filename: string, bucketInfo: BucketInfo) { if (!bucketInfo.url_template) { return `https://${bucketInfo.bucket}.${CONFIG.S3.ENDPOINT}/${bucketInfo.prefix}${filename}` } - const key = filename - return bucketInfo.url_template.replace('%path%', key) + return bucketInfo.url_template.replace('%path%', filename) } diff --git a/server/lib/transcoding/video-transcoding.ts b/server/lib/transcoding/video-transcoding.ts index d2a556360af..7330bc3d6bd 100644 --- a/server/lib/transcoding/video-transcoding.ts +++ b/server/lib/transcoding/video-transcoding.ts @@ -71,9 +71,9 @@ async function optimizeOriginalVideofile (video: MVideoFullLight, inputVideoFile const videoOutputPath = getVideoFilePath(video, inputVideoFile) - await onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath) + const { videoFile } = await onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath) - return transcodeType + return { transcodeType, videoFile } } catch (err) { // Auto destruction... 
video.destroy().catch(err => logger.error('Cannot destruct video after transcoding failure.', { err })) @@ -258,7 +258,7 @@ async function onWebTorrentVideoFileTranscoding ( await VideoFileModel.customUpsert(videoFile, 'video', undefined) video.VideoFiles = await video.$get('VideoFiles') - return video + return { video, videoFile } } async function generateHlsPlaylistCommon (options: { @@ -355,7 +355,7 @@ async function generateHlsPlaylistCommon (options: { await createTorrentAndSetInfoHash(playlist, newVideoFile) - await VideoFileModel.customUpsert(newVideoFile, 'streaming-playlist', undefined) + const savedVideoFile = await VideoFileModel.customUpsert(newVideoFile, 'streaming-playlist', undefined) const playlistWithFiles = playlist as MStreamingPlaylistFilesVideo playlistWithFiles.VideoFiles = await playlist.$get('VideoFiles') @@ -368,5 +368,5 @@ async function generateHlsPlaylistCommon (options: { await updateMasterHLSPlaylist(video, playlistWithFiles) await updateSha256VODSegments(video, playlistWithFiles) - return resolutionPlaylistPath + return { resolutionPlaylistPath, videoFile: savedVideoFile } } diff --git a/server/lib/video.ts b/server/lib/video.ts index 0e36f6bbbbd..21e947bdedd 100644 --- a/server/lib/video.ts +++ b/server/lib/video.ts @@ -131,10 +131,11 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: dataInput }, jobOptions) } -export function addMoveToObjectStorageJob (video: MVideoUUID) { +export function addMoveToObjectStorageJob (video: MVideoUUID, videoFile: MVideoFile) { if (CONFIG.S3.ENABLED) { const dataInput = { - videoUUID: video.uuid + videoUUID: video.uuid, + videoFileId: videoFile.id } return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload: dataInput }) } diff --git a/server/models/video/sql/shared/video-tables.ts b/server/models/video/sql/shared/video-tables.ts index 94069c1f2c1..75823864d35 100644 --- a/server/models/video/sql/shared/video-tables.ts +++ b/server/models/video/sql/shared/video-tables.ts @@ -261,7 +261,7 @@ export class VideoTables { 'channelId', 'createdAt', 'updatedAt', - 'transcodeJobsRunning' + 'moveJobsRunning' ] } } diff --git a/server/models/video/video.ts b/server/models/video/video.ts index dee8bb5667e..0de2c3bc896 100644 --- a/server/models/video/video.ts +++ b/server/models/video/video.ts @@ -571,7 +571,7 @@ export class VideoModel extends Model>> { @Default(0) @IsInt @Column - transcodeJobsRunning: number + moveJobsRunning: number @ForeignKey(() => VideoChannelModel) @Column @@ -1685,6 +1685,7 @@ export class VideoModel extends Model>> { const promises: Promise[] = [ remove(filePath) ] if (!isRedundancy) promises.push(videoFile.removeTorrent()) + if (videoFile.storage === VideoStorageType.OBJECT_STORAGE) { promises.push(removeObject(videoFile.filename, CONFIG.S3.VIDEOS_BUCKETINFO)) } diff --git a/shared/models/server/job.model.ts b/shared/models/server/job.model.ts index 973cacef3b8..8239f8cdff9 100644 --- a/shared/models/server/job.model.ts +++ b/shared/models/server/job.model.ts @@ -140,4 +140,5 @@ export interface ActorKeysPayload { export interface MoveObjectStoragePayload { videoUUID: string + videoFileId?: number } From 340dc0c62aec1f27431857b79403bfc74058c605 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Sun, 1 Aug 2021 16:38:44 +0200 Subject: [PATCH 04/23] Only publish after move to object storage is complete --- server/controllers/api/videos/upload.ts | 9 ++++++++- 
.../job-queue/handlers/move-to-object-storage.ts | 2 ++ .../lib/job-queue/handlers/video-transcoding.ts | 15 ++++++++++----- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index ad744615003..64930118b44 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -6,6 +6,7 @@ import { uuidToShort } from '@server/helpers/uuid' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url' import { + addMoveToObjectStorageJob, addOptimizeOrMergeAudioJob, buildLocalVideoFromReq, buildVideoThumbnailsFromReq, @@ -215,7 +216,13 @@ async function addVideo (options: { createTorrentFederate(video, videoFile) .then(() => { - if (video.state !== VideoState.TO_TRANSCODE) return + if (video.state !== VideoState.TO_TRANSCODE) { + return + } else { + // Video will be published before move is complete which may cause some video connections to drop + // But it's recommended to enable transcoding anyway, so this is the tradeoff + addMoveToObjectStorageJob(video, videoFile) + } return addOptimizeOrMergeAudioJob(videoCreated, videoFile, user) }) diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index c78403e2ce6..17bd4010849 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -9,6 +9,7 @@ import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants import { getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' import { MVideoWithAllFiles, VideoStorageType } from '@server/types/models' import { remove } from 'fs-extra' +import { publishAndFederateIfNeeded } from '@server/lib/video' export async function processMoveToObjectStorage (job: Bull.Job) { const payload = job.data as MoveObjectStoragePayload @@ -131,4 +132,5 @@ async function doAfterLastJob (video: MVideoWithAllFiles) { } await Promise.all(tasks) + await publishAndFederateIfNeeded(video) } diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index 554c96ba77a..7cd489dfe14 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -96,10 +96,9 @@ async function handleNewWebTorrentResolutionJob ( ) { const { videoFile } = await transcodeNewWebTorrentResolution(video, payload.resolution, payload.isPortraitMode || false, job) - // Create job to move the new files to object storage if enabled - await addMoveToObjectStorageJob(video, videoFile) - await retryTransactionWrapper(onNewWebTorrentFileResolution, video, user, payload) + + return { videoFile } } async function handleWebTorrentMergeAudioJob (job: Bull.Job, payload: MergeAudioTranscodingPayload, video: MVideoFullLight, user: MUserId) { @@ -136,7 +135,10 @@ async function onHlsPlaylistGeneration (video: MVideoFullLight, user: MUser, pay await createLowerResolutionsJobs(video, user, payload.resolution, payload.isPortraitMode, 'hls') } - return publishAndFederateIfNeeded(video) + // Publishing will be done by mvoe-to-object-storage if enabled + if (!CONFIG.S3.ENABLED) { + await publishAndFederateIfNeeded(video) + } } async function onVideoFileOptimizer ( @@ -185,7 +187,10 @@ async function onNewWebTorrentFileResolution ( user: MUserId, payload: 
NewResolutionTranscodingPayload | MergeAudioTranscodingPayload ) { - await publishAndFederateIfNeeded(video) + // Publishing will be done by mvoe-to-object-storage if enabled + if (!CONFIG.S3.ENABLED) { + await publishAndFederateIfNeeded(video) + } await createHlsJobIfEnabled(user, Object.assign({}, payload, { copyCodecs: true, isMaxQuality: false })) } From 943222217f4c7c4025ac5fb10b7b0636efab564d Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Mon, 2 Aug 2021 15:44:10 +0200 Subject: [PATCH 05/23] Use base url instead of url template --- config/default.yaml | 11 ++--------- server/initializers/checker-after-init.ts | 7 ------- server/initializers/config.ts | 4 ++-- server/lib/object-storage.ts | 6 +++--- 4 files changed, 7 insertions(+), 21 deletions(-) diff --git a/config/default.yaml b/config/default.yaml index d09e465a661..aef2c014a47 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -102,18 +102,11 @@ s3: streaming_playlists_bucket: '' # Allows setting all buckets to the same value but with a different prefix streaming_playlists_prefix: '' - # Optional, use to overide the default url generation that just gives the public url to s3 - # Use the string %path% in place where the (full) path to the file should be. The path - # does not include the streaming_playlists_prefix, so you should add it to your template if needed. - # For example: - # https://my-videos-cache.example.com/cdn-cache/%path%?thing=true - # Would turn into: - # https://my-videos-cache.example.com/cdn-cache/hls/9ffceb57-cbe3-41c5-80e4-dbb7e97c3958/b27d1892-9c5c-4bef-8bce-f68e54fb1208-240-fragmented.mp4?thing=true - streaming_playlists_url_template: '' + streaming_playlists_base_url: '' # Same settings but for webtorrent videos videos_bucket: '' videos_prefix: '' - videos_url_template: '' + videos_url_base_url: '' log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' diff --git a/server/initializers/checker-after-init.ts b/server/initializers/checker-after-init.ts index 384884ef756..2adb8a56217 100644 --- a/server/initializers/checker-after-init.ts +++ b/server/initializers/checker-after-init.ts @@ -169,13 +169,6 @@ function checkConfig () { return 'Object storage bucket prefixes should be set to different values when the same bucket is used for both types of video.' } } - if ( - (CONFIG.S3.VIDEOS_BUCKETINFO.url_template !== '' && - !CONFIG.S3.VIDEOS_BUCKETINFO.url_template.includes('%path%')) || - (CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.url_template !== '' && - !CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.url_template.includes('%path%'))) { - return 'Object storage url templates should include `%path%\' in place where the file path needs to be inserted.' 
- } } return null diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 36fdf07ead6..82f942296b6 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -79,12 +79,12 @@ const CONFIG = { VIDEOS_BUCKETINFO: { bucket: config.get('s3.videos_bucket'), prefix: config.get('s3.videos_prefix'), - url_template: config.get('s3.videos_url_template') + base_url: config.get('s3.videos_base_url') }, STREAMING_PLAYLISTS_BUCKETINFO: { bucket: config.get('s3.streaming_playlists_bucket'), prefix: config.get('s3.streaming_playlists_prefix'), - url_template: config.get('s3.streaming_playlists_url_template') + base_url: config.get('s3.streaming_playlists_base_url') } }, WEBSERVER: { diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index 25223044bec..458532cb1d1 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -3,7 +3,7 @@ import { DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command, PutObj import { CONFIG } from "@server/initializers/config" import { logger } from '@server/helpers/logger' -type BucketInfo = {bucket: string, prefix?: string, url_template?: string} +type BucketInfo = {bucket: string, prefix?: string, base_url?: string} function getS3Client () { return new S3Client({ endpoint: `https://${CONFIG.S3.ENDPOINT}` }) @@ -65,8 +65,8 @@ export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { } export function generateUrl (filename: string, bucketInfo: BucketInfo) { - if (!bucketInfo.url_template) { + if (!bucketInfo.base_url) { return `https://${bucketInfo.bucket}.${CONFIG.S3.ENDPOINT}/${bucketInfo.prefix}${filename}` } - return bucketInfo.url_template.replace('%path%', filename) + return bucketInfo.base_url + filename } From 70e1ef7be1b6f28954cd083f62c5845c34b201b9 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Tue, 3 Aug 2021 10:39:04 +0200 Subject: [PATCH 06/23] Fix mistyped config field --- config/default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/default.yaml b/config/default.yaml index aef2c014a47..de93aff9bc1 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -106,7 +106,7 @@ s3: # Same settings but for webtorrent videos videos_bucket: '' videos_prefix: '' - videos_url_base_url: '' + videos_base_url: '' log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' From 121f8b56aacb585af9e29d8352eec0633638644b Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Tue, 3 Aug 2021 16:39:27 +0200 Subject: [PATCH 07/23] Add rudenmentary way to download before transcode --- server/lib/object-storage.ts | 14 ++++++++++++++ server/lib/video-paths.ts | 25 +++++++++++++++++++++++++ server/models/video/video.ts | 6 +++--- 3 files changed, 42 insertions(+), 3 deletions(-) diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index 458532cb1d1..2a823b61a85 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -70,3 +70,17 @@ export function generateUrl (filename: string, bucketInfo: BucketInfo) { } return bucketInfo.base_url + filename } + +export async function makeAvailable (options: { filename: string, at: string }, bucketInfo: BucketInfo) { + await ensureDir(dirname(options.at)) + const key = bucketInfo.PREFIX + options.filename + const s3Client = getS3Client() + const command = new GetObjectCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: key + }) + const response = await s3Client.send(command) + const file = createWriteStream(options.at) + await pipeline(response.Body as 
Readable, file) + file.close() +} diff --git a/server/lib/video-paths.ts b/server/lib/video-paths.ts index 1e43821083a..23afb0b44af 100644 --- a/server/lib/video-paths.ts +++ b/server/lib/video-paths.ts @@ -5,6 +5,8 @@ import { HLS_REDUNDANCY_DIRECTORY, HLS_STREAMING_PLAYLIST_DIRECTORY, STATIC_PATH import { isStreamingPlaylist, MStreamingPlaylist, MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models' import { buildUUID } from '@server/helpers/uuid' import { removeFragmentedMP4Ext } from '@shared/core-utils' +import { makeAvailable } from './object-storage' +import { fileExistsSync } from 'tsconfig-paths/lib/filesystem' // ################## Video file name ################## @@ -30,6 +32,28 @@ function getVideoFilePath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, vi return join(baseDir, videoFile.filename) } +async function getVideoFilePathMakeAvailable ( + videoOrPlaylist: MVideo | MStreamingPlaylistVideo, + videoFile: MVideoFile +) { + const path = getVideoFilePath(videoOrPlaylist, videoFile) + if (fileExistsSync(path)) { + return path + } + + if (videoFile.isHLS()) { + const video = extractVideo(videoOrPlaylist) + await makeAvailable( + { filename: join((videoOrPlaylist as MStreamingPlaylistVideo).getStringType(), video.uuid, videoFile.filename), at: path }, + CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS + ) + return path + } + + await makeAvailable({ filename: videoFile.filename, at: path }, CONFIG.OBJECT_STORAGE.VIDEOS) + return path +} + // ################## Redundancy ################## function generateHLSRedundancyUrl (video: MVideo, playlist: MStreamingPlaylist) { @@ -100,6 +124,7 @@ export { generateWebTorrentVideoFilename, getVideoFilePath, + getVideoFilePathMakeAvailable, generateTorrentFileName, getTorrentFilePath, diff --git a/server/models/video/video.ts b/server/models/video/video.ts index 0de2c3bc896..f5d66a3fbf3 100644 --- a/server/models/video/video.ts +++ b/server/models/video/video.ts @@ -28,7 +28,7 @@ import { buildNSFWFilter } from '@server/helpers/express-utils' import { uuidToShort } from '@server/helpers/uuid' import { getPrivaciesForFederation, isPrivacyForFederation, isStateForFederation } from '@server/helpers/video' import { LiveManager } from '@server/lib/live/live-manager' -import { getHLSDirectory, getVideoFilePath } from '@server/lib/video-paths' +import { getHLSDirectory, getVideoFilePath, getVideoFilePathMakeAvailable } from '@server/lib/video-paths' import { getServerActor } from '@server/models/application/application' import { ModelCache } from '@server/models/model-cache' import { AttributesOnly, buildVideoEmbedPath, buildVideoWatchPath, pick } from '@shared/core-utils' @@ -1646,10 +1646,10 @@ export class VideoModel extends Model>> { return peertubeTruncate(this.description, { length: maxLength }) } - getMaxQualityResolution () { + async getMaxQualityResolution () { const file = this.getMaxQualityFile() const videoOrPlaylist = file.getVideoOrStreamingPlaylist() - const originalFilePath = getVideoFilePath(videoOrPlaylist, file) + const originalFilePath = await getVideoFilePathMakeAvailable(videoOrPlaylist, file) return getVideoFileResolution(originalFilePath) } From 2374244c6e8a178b9643fbc205acc411ad02d606 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Tue, 3 Aug 2021 16:48:45 +0200 Subject: [PATCH 08/23] Implement Chocobozzz suggestions https://github.com/Chocobozzz/PeerTube/pull/4290#issuecomment-891670478 The remarks in question: Try to use objectStorage prefix instead of s3 prefix for your 
function/variables/config names Prefer to use a tree for the config: s3.streaming_playlists_bucket -> object_storage.streaming_playlists.bucket Use uppercase for config: S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket -> OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET (maybe BUCKET_NAME instead of BUCKET) I suggest to rename moveJobsRunning to pendingMovingJobs (or better, create a dedicated videoJobInfo table with a pendingMove & videoId columns so we could also use this table to track pending transcoding jobs) https://github.com/Chocobozzz/PeerTube/pull/4290/files#diff-3e26d41ca4bda1de8e1747af70ca2af642abcc1e9e0bfb94239ff2165acfbde5R19 uses a string instead of an integer I think we should store the origin object storage URL in fileUrl, without base_url injection. Instead, inject the base_url at "runtime" so admins can easily change this configuration without running a script to update DB URLs --- config/default.yaml | 19 +++-- server/controllers/api/videos/upload.ts | 3 +- server/initializers/checker-after-init.ts | 12 +-- server/initializers/config.ts | 22 ++--- server/initializers/database.ts | 4 +- .../migrations/0660-object-storage.ts | 18 +++- .../handlers/move-to-object-storage.ts | 26 +++--- .../job-queue/handlers/video-transcoding.ts | 21 +++-- server/lib/job-queue/job-queue.ts | 8 +- server/lib/object-storage.ts | 51 ++++++----- server/lib/video.ts | 2 +- server/models/video/video-file.ts | 15 +++- server/models/video/video-job-info.ts | 85 +++++++++++++++++++ .../models/video/video-streaming-playlist.ts | 7 ++ server/models/video/video.ts | 10 +-- 15 files changed, 215 insertions(+), 88 deletions(-) create mode 100644 server/models/video/video-job-info.ts diff --git a/config/default.yaml b/config/default.yaml index de93aff9bc1..484ff75b6c5 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -95,18 +95,21 @@ storage: # If not, peertube will fallback to the default fil client_overrides: 'storage/client-overrides/' -s3: +object_storage: enabled: false # Will always use https with default URL generation (see below) endpoint: 's3.amazonaws.com' - streaming_playlists_bucket: '' - # Allows setting all buckets to the same value but with a different prefix - streaming_playlists_prefix: '' - streaming_playlists_base_url: '' + streaming_playlists: + bucket_name: '' + # Allows setting all buckets to the same value but with a different prefix + prefix: '' + # Base url for object URL generation, path in bucket is appended to this url + base_url: '' # Same settings but for webtorrent videos - videos_bucket: '' - videos_prefix: '' - videos_base_url: '' + videos: + bucket_name: '' + prefix: '' + base_url: '' log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index 64930118b44..2818728d4e5 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -217,11 +217,10 @@ async function addVideo (options: { createTorrentFederate(video, videoFile) .then(() => { if (video.state !== VideoState.TO_TRANSCODE) { - return - } else { // Video will be published before move is complete which may cause some video connections to drop // But it's recommended to enable transcoding anyway, so this is the tradeoff addMoveToObjectStorageJob(video, videoFile) + return } return addOptimizeOrMergeAudioJob(videoCreated, videoFile, user) diff --git a/server/initializers/checker-after-init.ts b/server/initializers/checker-after-init.ts index 2adb8a56217..7dfb8ff5182 100644 --- 
a/server/initializers/checker-after-init.ts +++ b/server/initializers/checker-after-init.ts @@ -154,16 +154,16 @@ function checkConfig () { } // Object storage - if (CONFIG.S3.ENABLED === true) { - if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED && !CONFIG.S3.VIDEOS_BUCKETINFO.bucket) { + if (CONFIG.OBJECT_STORAGE.ENABLED === true) { + if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED && !CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME) { return 'videos_bucket should be set when object storage support is enabled.' } - if (CONFIG.TRANSCODING.HLS.ENABLED && !CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket) { + if (CONFIG.TRANSCODING.HLS.ENABLED && !CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME) { return 'streaming_playlists_bucket should be set when object storage support is enabled.' } - if (CONFIG.S3.VIDEOS_BUCKETINFO.bucket === CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket && - CONFIG.S3.VIDEOS_BUCKETINFO.prefix === CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO.prefix) { - if (CONFIG.S3.VIDEOS_BUCKETINFO.prefix === '') { + if (CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME && + CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.PREFIX) { + if (CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === '') { return 'Object storage bucket prefixes should be set when the same bucket is used for both types of video.' } else { return 'Object storage bucket prefixes should be set to different values when the same bucket is used for both types of video.' diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 82f942296b6..0fbaa61315e 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -73,18 +73,18 @@ const CONFIG = { PLUGINS_DIR: buildPath(config.get('storage.plugins')), CLIENT_OVERRIDES_DIR: buildPath(config.get('storage.client_overrides')) }, - S3: { - ENABLED: config.get('s3.enabled'), - ENDPOINT: config.get('s3.endpoint'), - VIDEOS_BUCKETINFO: { - bucket: config.get('s3.videos_bucket'), - prefix: config.get('s3.videos_prefix'), - base_url: config.get('s3.videos_base_url') + OBJECT_STORAGE: { + ENABLED: config.get('object_storage.enabled'), + ENDPOINT: config.get('object_storage.endpoint'), + VIDEOS: { + BUCKET_NAME: config.get('object_storage.videos.bucket_name'), + PREFIX: config.get('object_storage.videos.prefix'), + BASE_URL: config.get('object_storage.videos.base_url') }, - STREAMING_PLAYLISTS_BUCKETINFO: { - bucket: config.get('s3.streaming_playlists_bucket'), - prefix: config.get('s3.streaming_playlists_prefix'), - base_url: config.get('s3.streaming_playlists_base_url') + STREAMING_PLAYLISTS: { + BUCKET_NAME: config.get('object_storage.streaming_playlists.bucket_name'), + PREFIX: config.get('object_storage.streaming_playlists.prefix'), + BASE_URL: config.get('object_storage.streaming_playlists.base_url') } }, WEBSERVER: { diff --git a/server/initializers/database.ts b/server/initializers/database.ts index 38e7a76d0b2..0e690f6ae9f 100644 --- a/server/initializers/database.ts +++ b/server/initializers/database.ts @@ -45,6 +45,7 @@ import { VideoTagModel } from '../models/video/video-tag' import { VideoViewModel } from '../models/video/video-view' import { CONFIG } from './config' import { ActorCustomPageModel } from '@server/models/account/actor-custom-page' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' require('pg').defaults.parseInt8 = true // Avoid BIGINT to be converted to string @@ -143,7 +144,8 @@ async function initDatabaseModels (silent: boolean) { 
TrackerModel, VideoTrackerModel, PluginModel, - ActorCustomPageModel + ActorCustomPageModel, + VideoJobInfoModel ]) // Check extensions exist in the database diff --git a/server/initializers/migrations/0660-object-storage.ts b/server/initializers/migrations/0660-object-storage.ts index ffe24002108..9d826f7883f 100644 --- a/server/initializers/migrations/0660-object-storage.ts +++ b/server/initializers/migrations/0660-object-storage.ts @@ -1,3 +1,4 @@ +import { VideoStorageType } from '@server/types/models' import * as Sequelize from 'sequelize' async function up (utils: { @@ -7,7 +8,18 @@ async function up (utils: { db: any }): Promise { { - await utils.queryInterface.addColumn('video', 'moveJobsRunning', { type: Sequelize.INTEGER, allowNull: false, defaultValue: 0 }) + const query = ` + CREATE TABLE IF NOT EXISTS "videoJobInfo" ( + "id" serial, + "pendingMove" INTEGER NOT NULL, + "videoUUID" uuid UNIQUE NOT NULL REFERENCES "video" ("uuid") ON DELETE CASCADE ON UPDATE CASCADE, + "createdAt" timestamp WITH time zone NOT NULL, + "updatedAt" timestamp WITH time zone NOT NULL, + PRIMARY KEY ("id") + ); + ` + + await utils.sequelize.query(query) } { @@ -16,7 +28,7 @@ async function up (utils: { { await utils.sequelize.query( - `UPDATE "videoFile" SET "storage" = 'local'` + `UPDATE "videoFile" SET "storage" = ${VideoStorageType.LOCAL}` ) } @@ -26,7 +38,7 @@ async function up (utils: { { await utils.sequelize.query( - `UPDATE "videoStreamingPlaylist" SET "storage" = 'local'` + `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorageType.LOCAL}` ) } } diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index 17bd4010849..b5cba227617 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -2,7 +2,7 @@ import * as Bull from 'bull' import { logger } from '@server/helpers/logger' import { MoveObjectStoragePayload } from '../../../../shared' import { VideoModel } from '@server/models/video/video' -import { generateUrl, storeObject } from '@server/lib/object-storage' +import { generateObjectStoreUrl, storeObject } from '@server/lib/object-storage' import { CONFIG } from '@server/initializers/config' import { join } from 'path' import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' @@ -10,6 +10,7 @@ import { getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' import { MVideoWithAllFiles, VideoStorageType } from '@server/types/models' import { remove } from 'fs-extra' import { publishAndFederateIfNeeded } from '@server/lib/video' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' export async function processMoveToObjectStorage (job: Bull.Job) { const payload = job.data as MoveObjectStoragePayload @@ -30,8 +31,9 @@ export async function processMoveToObjectStorage (job: Bull.Job) { await moveHLSFiles(video, payload.videoFileId) } - await video.decrement('moveJobsRunning') - if (video.moveJobsRunning === 0) { + const pendingMove = await VideoJobInfoModel.decreasePendingMove(video.uuid) + if (pendingMove === 0) { + logger.info("Running cleanup after moving files to object storage (video %s in job %d)", video.uuid, job.id) await doAfterLastJob(video) } @@ -46,11 +48,11 @@ async function moveWebTorrentFiles (video: MVideoWithAllFiles, videoFileId?: num const filename = file.filename await storeObject( { filename, path: join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) }, - 
CONFIG.S3.VIDEOS_BUCKETINFO + CONFIG.OBJECT_STORAGE.VIDEOS ) file.storage = VideoStorageType.OBJECT_STORAGE - file.fileUrl = generateUrl(filename, CONFIG.S3.VIDEOS_BUCKETINFO) + file.fileUrl = generateObjectStoreUrl(filename, CONFIG.OBJECT_STORAGE.VIDEOS) await file.save() } } @@ -70,7 +72,7 @@ async function moveHLSFiles (video: MVideoWithAllFiles, videoFileId: number) { filename: join(playlist.getStringType(), video.uuid, playlistFileName), path: join(baseHlsDirectory, playlistFileName) }, - CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS ) // Resolution fragmented file @@ -80,12 +82,12 @@ async function moveHLSFiles (video: MVideoWithAllFiles, videoFileId: number) { filename, path: join(baseHlsDirectory, file.filename) }, - CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS ) // Signals that the video file + playlist file were uploaded file.storage = VideoStorageType.OBJECT_STORAGE - file.fileUrl = generateUrl(filename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) + file.fileUrl = generateObjectStoreUrl(filename, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) await file.save() } } @@ -101,7 +103,7 @@ async function doAfterLastJob (video: MVideoWithAllFiles) { filename: masterPlaylistFilename, path: join(baseHlsDirectory, playlist.playlistFilename) }, - CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS ) // Sha256 segments file @@ -111,11 +113,11 @@ async function doAfterLastJob (video: MVideoWithAllFiles) { filename: segmentsFileName, path: join(baseHlsDirectory, playlist.segmentsSha256Filename) }, - CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO + CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS ) - playlist.playlistUrl = generateUrl(masterPlaylistFilename, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) - playlist.segmentsSha256Url = generateUrl(segmentsFileName, CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) + playlist.playlistUrl = generateObjectStoreUrl(masterPlaylistFilename, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) + playlist.segmentsSha256Url = generateObjectStoreUrl(segmentsFileName, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) playlist.storage = VideoStorageType.OBJECT_STORAGE await playlist.save() } diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index 7cd489dfe14..90409b7e874 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -136,7 +136,7 @@ async function onHlsPlaylistGeneration (video: MVideoFullLight, user: MUser, pay } // Publishing will be done by mvoe-to-object-storage if enabled - if (!CONFIG.S3.ENABLED) { + if (!CONFIG.OBJECT_STORAGE.ENABLED) { await publishAndFederateIfNeeded(video) } } @@ -171,15 +171,18 @@ async function onVideoFileOptimizer ( const hasNewResolutions = await createLowerResolutionsJobs(videoDatabase, user, resolution, isPortraitMode, 'webtorrent') - if (!hasHls && !hasNewResolutions) { - // No transcoding to do, it's now published - videoPublished = await videoDatabase.publishIfNeededAndSave(undefined) - } + // Publishing will be done after the move-to-object-storage-job if enabled + if (!CONFIG.OBJECT_STORAGE.ENABLED) { + if (!hasHls && !hasNewResolutions) { + // No transcoding to do, it's now published + videoPublished = await videoDatabase.publishIfNeededAndSave(undefined) + } - await federateVideoIfNeeded(videoDatabase, payload.isNewVideo) + await federateVideoIfNeeded(videoDatabase, payload.isNewVideo) - if (payload.isNewVideo) 
Notifier.Instance.notifyOnNewVideoIfNeeded(videoDatabase) - if (videoPublished) Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(videoDatabase) + if (payload.isNewVideo) Notifier.Instance.notifyOnNewVideoIfNeeded(videoDatabase) + if (videoPublished) Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(videoDatabase) + } } async function onNewWebTorrentFileResolution ( @@ -188,7 +191,7 @@ async function onNewWebTorrentFileResolution ( payload: NewResolutionTranscodingPayload | MergeAudioTranscodingPayload ) { // Publishing will be done by mvoe-to-object-storage if enabled - if (!CONFIG.S3.ENABLED) { + if (!CONFIG.OBJECT_STORAGE.ENABLED) { await publishAndFederateIfNeeded(video) } diff --git a/server/lib/job-queue/job-queue.ts b/server/lib/job-queue/job-queue.ts index 95b90c9296c..8e9d779078a 100644 --- a/server/lib/job-queue/job-queue.ts +++ b/server/lib/job-queue/job-queue.ts @@ -36,7 +36,7 @@ import { processVideoLiveEnding } from './handlers/video-live-ending' import { processVideoTranscoding } from './handlers/video-transcoding' import { processVideosViews } from './handlers/video-views' import { processMoveToObjectStorage } from './handlers/move-to-object-storage' -import { VideoModel } from '@server/models/video/video' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' type CreateJobArgument = { type: 'activitypub-http-broadcast', payload: ActivitypubHttpBroadcastPayload } | @@ -164,8 +164,10 @@ class JobQueue { // This value is decreased when the move job is finished in ./handlers/move-to-object-storage.ts // Because every transcode job starts a move job for the transcoded file, the value will only reach // 0 again when all transcode jobs are finished and the last move job is running - VideoModel.increment('moveJobsRunning', { where: { uuid: obj.payload.videoUUID } }) - .catch(err => logger.error('Cannot increase moveJobsRunning.', { err })) + // If object storage support is not enabled all the pendingMove values stay at the amount of transcode + // jobs that were started for that video. 
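For clarity, the counting scheme described in the comment above can be summarised with a standalone sketch (illustration only, not part of the patch: it swaps the SQL-backed VideoJobInfoModel for an in-memory Map, but shows the intended lifecycle — each queued transcoding job increments the counter, each finished move-to-object-storage job decrements it, and only the job that brings it back to zero runs the final cleanup and publish step):

// Illustration of the pendingMove lifecycle using an in-memory Map instead of VideoJobInfoModel
const pendingMove = new Map<string, number>()

function onTranscodingJobQueued (videoUUID: string) {
  // Every transcoding job will eventually enqueue one move-to-object-storage job
  pendingMove.set(videoUUID, (pendingMove.get(videoUUID) ?? 0) + 1)
}

async function onMoveJobFinished (videoUUID: string, doAfterLastJob: () => Promise<void>) {
  const remaining = (pendingMove.get(videoUUID) ?? 1) - 1
  pendingMove.set(videoUUID, remaining)

  // Only the last move job publishes/federates the video and cleans up local files
  if (remaining === 0) await doAfterLastJob()
}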
+ VideoJobInfoModel.increaseOrCreatePendingMove(obj.payload.videoUUID) + .catch(err => logger.error('Cannot increase pendingMove.', { err })) } const jobArgs: Bull.JobOptions = { diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index 2a823b61a85..708cc428a31 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -1,20 +1,30 @@ -import * as fs from 'fs' -import { DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command, PutObjectCommand, S3Client } from "@aws-sdk/client-s3" +import { + DeleteObjectCommand, + DeleteObjectsCommand, + GetObjectCommand, + ListObjectsV2Command, + PutObjectCommand, + S3Client +} from "@aws-sdk/client-s3" import { CONFIG } from "@server/initializers/config" import { logger } from '@server/helpers/logger' +import { createReadStream, createWriteStream, ensureDir, ReadStream } from "fs-extra" +import { Readable } from "stream" +import { pipeline } from "stream/promises" +import { dirname } from "path" -type BucketInfo = {bucket: string, prefix?: string, base_url?: string} +type BucketInfo = {BUCKET_NAME: string, PREFIX?: string, BASE_URL?: string} function getS3Client () { - return new S3Client({ endpoint: `https://${CONFIG.S3.ENDPOINT}` }) + return new S3Client({ endpoint: `https://${CONFIG.OBJECT_STORAGE.ENDPOINT}` }) } -async function s3Put (options: {filename: string, content: string | fs.ReadStream, bucketInfo: BucketInfo}) { +async function objectStoragePut (options: {filename: string, content: string | ReadStream, bucketInfo: BucketInfo}) { const { filename, content, bucketInfo } = options - const key = bucketInfo.prefix + filename + const key = bucketInfo.PREFIX + filename const s3Client = getS3Client() const command = new PutObjectCommand({ - Bucket: bucketInfo.bucket, + Bucket: bucketInfo.BUCKET_NAME, Key: key, Body: content }) @@ -22,21 +32,21 @@ async function s3Put (options: {filename: string, content: string | fs.ReadStrea } export async function storeObject (file: {path: string, filename: string}, bucketInfo: BucketInfo) { - logger.debug('Uploading file to %s/%s%s', bucketInfo.bucket, bucketInfo.prefix, file.filename) - const fileStream = fs.createReadStream(file.path) - return await s3Put({ filename: file.filename, content: fileStream, bucketInfo }) + logger.debug('Uploading file to %s/%s%s', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) + const fileStream = createReadStream(file.path) + return await objectStoragePut({ filename: file.filename, content: fileStream, bucketInfo }) } export async function writeObjectContents (file: {filename: string, content: string}, bucketInfo: BucketInfo) { - logger.debug('Writing object to %s/%s%s', bucketInfo.bucket, bucketInfo.prefix, file.filename) - return await s3Put({ filename: file.filename, content: file.content, bucketInfo }) + logger.debug('Writing object to %s/%s%s', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) + return await objectStoragePut({ filename: file.filename, content: file.content, bucketInfo }) } export async function removeObject (filename: string, bucketInfo: BucketInfo) { - const key = bucketInfo.prefix + filename + const key = bucketInfo.PREFIX + filename const s3Client = getS3Client() const command = new DeleteObjectCommand({ - Bucket: bucketInfo.bucket, + Bucket: bucketInfo.BUCKET_NAME, Key: key }) return await s3Client.send(command) @@ -45,13 +55,13 @@ export async function removeObject (filename: string, bucketInfo: BucketInfo) { export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { 
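As a usage illustration for the storeObject/generateObjectStoreUrl helpers introduced above (not part of the patch; the filename and local path below are made up, and CONFIG.OBJECT_STORAGE is assumed to be filled in from config/default.yaml):

import { storeObject, generateObjectStoreUrl } from '@server/lib/object-storage'
import { CONFIG } from '@server/initializers/config'

// Upload a local webtorrent file and compute the URL clients would be served
async function exampleUpload () {
  await storeObject(
    { filename: 'my-video-720.mp4', path: '/var/www/peertube/storage/videos/my-video-720.mp4' },
    CONFIG.OBJECT_STORAGE.VIDEOS
  )

  return generateObjectStoreUrl('my-video-720.mp4', CONFIG.OBJECT_STORAGE.VIDEOS)
}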
const s3Client = getS3Client() const listCommand = new ListObjectsV2Command({ - Bucket: bucketInfo.bucket, - Prefix: bucketInfo.prefix + prefix + Bucket: bucketInfo.BUCKET_NAME, + Prefix: bucketInfo.PREFIX + prefix }) const listedObjects = await s3Client.send(listCommand) const deleteParams = { - Bucket: bucketInfo.bucket, + Bucket: bucketInfo.BUCKET_NAME, Delete: { Objects: [] } } for (const object of listedObjects.Contents) { @@ -64,11 +74,8 @@ export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo) } -export function generateUrl (filename: string, bucketInfo: BucketInfo) { - if (!bucketInfo.base_url) { - return `https://${bucketInfo.bucket}.${CONFIG.S3.ENDPOINT}/${bucketInfo.prefix}${filename}` - } - return bucketInfo.base_url + filename +export function generateObjectStoreUrl (filename: string, bucketInfo: BucketInfo) { + return `https://${bucketInfo.BUCKET_NAME}.${CONFIG.OBJECT_STORAGE.ENDPOINT}/${bucketInfo.PREFIX}${filename}` } export async function makeAvailable (options: { filename: string, at: string }, bucketInfo: BucketInfo) { diff --git a/server/lib/video.ts b/server/lib/video.ts index 21e947bdedd..9404ed4cdb2 100644 --- a/server/lib/video.ts +++ b/server/lib/video.ts @@ -132,7 +132,7 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF } export function addMoveToObjectStorageJob (video: MVideoUUID, videoFile: MVideoFile) { - if (CONFIG.S3.ENABLED) { + if (CONFIG.OBJECT_STORAGE.ENABLED) { const dataInput = { videoUUID: video.uuid, videoFileId: videoFile.id diff --git a/server/models/video/video-file.ts b/server/models/video/video-file.ts index babf393f8d8..a2319cadce0 100644 --- a/server/models/video/video-file.ts +++ b/server/models/video/video-file.ts @@ -48,6 +48,7 @@ import { doesExist } from '../shared' import { parseAggregateResult, throwIfNotValid } from '../utils' import { VideoModel } from './video' import { VideoStreamingPlaylistModel } from './video-streaming-playlist' +import { CONFIG } from '@server/initializers/config' export enum ScopeNames { WITH_VIDEO = 'WITH_VIDEO', @@ -455,9 +456,19 @@ export class VideoFileModel extends Model return !!this.videoStreamingPlaylistId } + generateObjectUrl (video: MVideo) { + if (!this.isHLS() && CONFIG.OBJECT_STORAGE.VIDEOS.BASE_URL) { + return CONFIG.OBJECT_STORAGE.VIDEOS.BASE_URL + this.filename + } + if (this.isHLS() && CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BASE_URL) { + return CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BASE_URL + this.filename + } + return this.fileUrl + } + getFileUrl (video: MVideo) { if (this.storage === VideoStorageType.OBJECT_STORAGE) { - return this.fileUrl + return this.generateObjectUrl(video) } if (!this.Video) this.Video = video as VideoModel @@ -474,7 +485,7 @@ export class VideoFileModel extends Model getFileDownloadUrl (video: MVideoWithHost) { if (this.storage === VideoStorageType.OBJECT_STORAGE) { - return this.fileUrl + return this.generateObjectUrl(video) } const path = this.isHLS() ? 
join(STATIC_DOWNLOAD_PATHS.HLS_VIDEOS, `${video.uuid}-${this.resolution}-fragmented${this.extname}`) diff --git a/server/models/video/video-job-info.ts b/server/models/video/video-job-info.ts new file mode 100644 index 00000000000..0d828a5c360 --- /dev/null +++ b/server/models/video/video-job-info.ts @@ -0,0 +1,85 @@ +import { AttributesOnly } from "@shared/core-utils" +import { + AllowNull, + BelongsTo, + Column, + CreatedAt, + DataType, + Default, + ForeignKey, + IsInt, + IsUUID, + Model, + Table, + UpdatedAt +} from "sequelize-typescript" +import { Op, QueryTypes } from "sequelize" +import { VideoModel } from "./video" + +@Table({ + tableName: 'videoJobInfo', + indexes: [ + { + fields: [ 'videoUUID' ], + where: { + videoUUID: { + [Op.ne]: null + } + }, + unique: true + } + ] +}) + +export class VideoJobInfoModel extends Model>> { + @CreatedAt + createdAt: Date + + @UpdatedAt + updatedAt: Date + + @AllowNull(false) + @Default(0) + @IsInt + @Column + pendingMove: number + + @ForeignKey(() => VideoModel) + @IsUUID(4) + @Column(DataType.UUID) + videoUUID: string + + @BelongsTo(() => VideoModel, { + foreignKey: { + allowNull: false + }, + targetKey: 'uuid', + onDelete: 'cascade' + }) + Video: VideoModel + + static async increaseOrCreatePendingMove (videoUUID: string): Promise { + const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } } + + const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{pendingMove: number}>(` + INSERT INTO "videoJobInfo" ("videoUUID", "pendingMove", "createdAt", "updatedAt") + VALUES ($videoUUID, 1, NOW(), NOW()) + ON CONFLICT ("videoUUID") WHERE "videoUUID" = $videoUUID + DO UPDATE SET "pendingMove" = "videoJobInfo"."pendingMove" + 1, "updatedAt" = NOW() + RETURNING "pendingMove" + `, options) + + return pendingMove + } + + static async decreasePendingMove (videoUUID: string): Promise { + const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } } + + const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{pendingMove: number}>(` + UPDATE "videoJobInfo" SET "pendingMove" = "videoJobInfo"."pendingMove" - 1, "updatedAt" = NOW() + RETURNING "pendingMove" + `, options) + + return pendingMove + } +} diff --git a/server/models/video/video-streaming-playlist.ts b/server/models/video/video-streaming-playlist.ts index 163b4573fd0..1c440fe1f2d 100644 --- a/server/models/video/video-streaming-playlist.ts +++ b/server/models/video/video-streaming-playlist.ts @@ -35,6 +35,7 @@ import { VideoRedundancyModel } from '../redundancy/video-redundancy' import { doesExist } from '../shared' import { throwIfNotValid } from '../utils' import { VideoModel } from './video' +import { CONFIG } from '@server/initializers/config' @Table({ tableName: 'videoStreamingPlaylist', @@ -204,6 +205,9 @@ export class VideoStreamingPlaylistModel extends Model>> { @Column originallyPublishedAt: Date - @AllowNull(false) - @Default(0) - @IsInt - @Column - moveJobsRunning: number - @ForeignKey(() => VideoChannelModel) @Column channelId: number @@ -1687,7 +1681,7 @@ export class VideoModel extends Model>> { if (!isRedundancy) promises.push(videoFile.removeTorrent()) if (videoFile.storage === VideoStorageType.OBJECT_STORAGE) { - promises.push(removeObject(videoFile.filename, CONFIG.S3.VIDEOS_BUCKETINFO)) + promises.push(removeObject(videoFile.filename, CONFIG.OBJECT_STORAGE.VIDEOS)) } return Promise.all(promises) @@ -1698,7 +1692,7 @@ export class VideoModel extends Model>> { await remove(directoryPath) if 
(streamingPlaylist.storage === VideoStorageType.OBJECT_STORAGE) { - await removePrefix(join(streamingPlaylist.getStringType(), this.uuid), CONFIG.S3.STREAMING_PLAYLISTS_BUCKETINFO) + await removePrefix(join(streamingPlaylist.getStringType(), this.uuid), CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) } if (isRedundancy !== true) { From 7d788e785afb79d8e05bb0fb6d5577ebea89ba09 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Tue, 3 Aug 2021 16:51:42 +0200 Subject: [PATCH 09/23] Import correct function --- server/lib/video-paths.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/lib/video-paths.ts b/server/lib/video-paths.ts index 23afb0b44af..8a3115f00a4 100644 --- a/server/lib/video-paths.ts +++ b/server/lib/video-paths.ts @@ -6,7 +6,7 @@ import { isStreamingPlaylist, MStreamingPlaylist, MStreamingPlaylistVideo, MVide import { buildUUID } from '@server/helpers/uuid' import { removeFragmentedMP4Ext } from '@shared/core-utils' import { makeAvailable } from './object-storage' -import { fileExistsSync } from 'tsconfig-paths/lib/filesystem' +import { existsSync } from 'fs-extra' // ################## Video file name ################## @@ -37,7 +37,7 @@ async function getVideoFilePathMakeAvailable ( videoFile: MVideoFile ) { const path = getVideoFilePath(videoOrPlaylist, videoFile) - if (fileExistsSync(path)) { + if (existsSync(path)) { return path } From 14fb34b92cf778e316228af460e2f488cdd3d3ba Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Tue, 3 Aug 2021 23:21:07 +0200 Subject: [PATCH 10/23] Support multipart upload --- server/lib/object-storage.ts | 64 ++++++++++++++++++++++++++++++++++-- 1 file changed, 62 insertions(+), 2 deletions(-) diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index 708cc428a31..73be367059e 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -1,19 +1,27 @@ import { + CompletedPart, + CompleteMultipartUploadCommand, + CreateMultipartUploadCommand, DeleteObjectCommand, DeleteObjectsCommand, GetObjectCommand, ListObjectsV2Command, PutObjectCommand, - S3Client + S3Client, + UploadPartCommand } from "@aws-sdk/client-s3" import { CONFIG } from "@server/initializers/config" import { logger } from '@server/helpers/logger' -import { createReadStream, createWriteStream, ensureDir, ReadStream } from "fs-extra" +import { createReadStream, createWriteStream, ensureDir, open, close, ReadStream, stat, Stats } from "fs-extra" import { Readable } from "stream" import { pipeline } from "stream/promises" import { dirname } from "path" +import { min } from "lodash" type BucketInfo = {BUCKET_NAME: string, PREFIX?: string, BASE_URL?: string} +const ONE_MIB = 1024 * 1024 +const PART_SIZE = 100 * ONE_MIB +const MAX_PUT_SIZE = 100 * ONE_MIB function getS3Client () { return new S3Client({ endpoint: `https://${CONFIG.OBJECT_STORAGE.ENDPOINT}` }) @@ -31,8 +39,60 @@ async function objectStoragePut (options: {filename: string, content: string | R return await s3Client.send(command) } +async function multiPartUpload (file: {filename: string, path: string}, stats: Stats, bucketInfo: BucketInfo) { + const { filename, path } = file + const key = bucketInfo.PREFIX + filename + const s3Client = getS3Client() + const createMultipartCommand = new CreateMultipartUploadCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: key + }) + const createResponse = await s3Client.send(createMultipartCommand) + let partNumber = 1 + const parts: CompletedPart[] = [] + const fd = await open(path, 'r') + for (let start = 0; start < 
stats.size; start += PART_SIZE) { + logger.debug('Uploading part %d of file to %s/%s%s', partNumber, bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) + const stream: ReadStream & {byteLength: number} = + createReadStream( + path, + { fd: fd, autoClose: false, start: start, end: (start + PART_SIZE) - 1 } + ) as ReadStream & {byteLength: number} + // The s3 sdk needs to know the length of the http body beforehand, but doesn't support + // streams with start and end set, so it just tries to stat the file in stream.path. + // This fails for us because we only want to send part of the file. The stream type + // is modified so we can set the byteLength here, which s3 detects because array buffers + // have this field set + stream.byteLength = min([ stats.size - start, PART_SIZE ]) + const uploadPartCommand = new UploadPartCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: key, + UploadId: createResponse.UploadId, + PartNumber: partNumber, + Body: stream + }) + const uploadResponse = await s3Client.send(uploadPartCommand) + parts.push({ ETag: uploadResponse.ETag, PartNumber: partNumber }) + partNumber += 1 + } + await close(fd) + const completeUploadCommand = new CompleteMultipartUploadCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: key, + UploadId: createResponse.UploadId, + MultipartUpload: { Parts: parts } + }) + await s3Client.send(completeUploadCommand) + logger.debug('Completed in %d parts of file to %s/%s%s', partNumber - 1, bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) +} + export async function storeObject (file: {path: string, filename: string}, bucketInfo: BucketInfo) { logger.debug('Uploading file to %s/%s%s', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) + const stats = await stat(file.path) + // If bigger than 100 MiB we do a multipart upload + if (stats.size > MAX_PUT_SIZE) { + return await multiPartUpload(file, stats, bucketInfo) + } const fileStream = createReadStream(file.path) return await objectStoragePut({ filename: file.filename, content: fileStream, bucketInfo }) } From 9422fd9caa2b2f732bfdcf554cbaf97b732aa705 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 6 Aug 2021 14:01:54 +0200 Subject: [PATCH 11/23] Remove import of node 15.0 module stream/promises --- server/lib/object-storage.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index 73be367059e..d9460b3d51f 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -148,6 +148,10 @@ export async function makeAvailable (options: { filename: string, at: string }, }) const response = await s3Client.send(command) const file = createWriteStream(options.at) - await pipeline(response.Body as Readable, file) + await new Promise((resolve, reject) => { + const pipe = (response.Body as Readable).pipe(file) + pipe.on('end', () => resolve(options.at)) + pipe.on('error', reject) + }) file.close() } From 09cfb18e34915debf7c41baa7af728a624e9e44d Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 6 Aug 2021 14:02:41 +0200 Subject: [PATCH 12/23] Extend maximum upload job length Using the same value as for redundancy downloading seems logical --- server/initializers/constants.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts index b3dad828b5f..60e43887523 100644 --- a/server/initializers/constants.ts +++ b/server/initializers/constants.ts @@ -181,7 +181,7 @@ const JOB_TTL: { [id in JobType]: 
number } = { 'activitypub-refresher': 60000 * 10, // 10 minutes 'video-redundancy': 1000 * 3600 * 3, // 3 hours 'video-live-ending': 1000 * 60 * 10, // 10 minutes - 'move-to-object-storage': 1000 * 60 * 20 // 20 minutes + 'move-to-object-storage': 1000 * 60 * 60 * 3 // 3 hours } const REPEAT_JOBS: { [ id: string ]: EveryRepeatOptions | CronRepeatOptions } = { 'videos-views': { From 25d6608d34681d034357944a08743fcbd5951964 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 6 Aug 2021 14:05:12 +0200 Subject: [PATCH 13/23] Use dynamic part size for really large uploads Also adds very small part size for local testing --- server/lib/object-storage.ts | 41 +++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index d9460b3d51f..bf00db7c848 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -14,19 +14,28 @@ import { CONFIG } from "@server/initializers/config" import { logger } from '@server/helpers/logger' import { createReadStream, createWriteStream, ensureDir, open, close, ReadStream, stat, Stats } from "fs-extra" import { Readable } from "stream" -import { pipeline } from "stream/promises" import { dirname } from "path" import { min } from "lodash" type BucketInfo = {BUCKET_NAME: string, PREFIX?: string, BASE_URL?: string} const ONE_MIB = 1024 * 1024 -const PART_SIZE = 100 * ONE_MIB -const MAX_PUT_SIZE = 100 * ONE_MIB +const MAX_PUT_SIZE = process.env.NODE_ENV.includes("test") ? 10 * ONE_MIB : 100 * ONE_MIB function getS3Client () { return new S3Client({ endpoint: `https://${CONFIG.OBJECT_STORAGE.ENDPOINT}` }) } +function getPartSize (stats: Stats) { + if (process.env.NODE_ENV.includes("test")) { + return 10 * ONE_MIB + } + // Use parts of 1 GiB if the file is very large (it would take more than 1000 requests at 100 MiB per request) + if (stats.size / (100 * ONE_MIB) > 1000) { + return 1024 * ONE_MIB + } + return 100 * ONE_MIB +} + async function objectStoragePut (options: {filename: string, content: string | ReadStream, bucketInfo: BucketInfo}) { const { filename, content, bucketInfo } = options const key = bucketInfo.PREFIX + filename @@ -43,27 +52,32 @@ async function multiPartUpload (file: {filename: string, path: string}, stats: S const { filename, path } = file const key = bucketInfo.PREFIX + filename const s3Client = getS3Client() + const createMultipartCommand = new CreateMultipartUploadCommand({ Bucket: bucketInfo.BUCKET_NAME, Key: key }) const createResponse = await s3Client.send(createMultipartCommand) + + const fd = await open(path, 'r') let partNumber = 1 const parts: CompletedPart[] = [] - const fd = await open(path, 'r') - for (let start = 0; start < stats.size; start += PART_SIZE) { + const partSize = getPartSize(stats) + for (let start = 0; start < stats.size; start += partSize) { logger.debug('Uploading part %d of file to %s/%s%s', partNumber, bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) - const stream: ReadStream & {byteLength: number} = - createReadStream( - path, - { fd: fd, autoClose: false, start: start, end: (start + PART_SIZE) - 1 } - ) as ReadStream & {byteLength: number} + // The s3 sdk needs to know the length of the http body beforehand, but doesn't support // streams with start and end set, so it just tries to stat the file in stream.path. // This fails for us because we only want to send part of the file. 
The stream type // is modified so we can set the byteLength here, which s3 detects because array buffers // have this field set - stream.byteLength = min([ stats.size - start, PART_SIZE ]) + const stream: ReadStream & {byteLength: number} = + createReadStream( + path, + { fd: fd, autoClose: false, start: start, end: (start + partSize) - 1 } + ) as ReadStream & {byteLength: number} + // Calculate if the part size is more than what's left over, and in that case use left over bytes for byteLength + stream.byteLength = min([ stats.size - start, partSize ]) const uploadPartCommand = new UploadPartCommand({ Bucket: bucketInfo.BUCKET_NAME, Key: key, @@ -72,10 +86,12 @@ async function multiPartUpload (file: {filename: string, path: string}, stats: S Body: stream }) const uploadResponse = await s3Client.send(uploadPartCommand) + parts.push({ ETag: uploadResponse.ETag, PartNumber: partNumber }) partNumber += 1 } await close(fd) + const completeUploadCommand = new CompleteMultipartUploadCommand({ Bucket: bucketInfo.BUCKET_NAME, Key: key, @@ -83,7 +99,7 @@ async function multiPartUpload (file: {filename: string, path: string}, stats: S MultipartUpload: { Parts: parts } }) await s3Client.send(completeUploadCommand) - logger.debug('Completed in %d parts of file to %s/%s%s', partNumber - 1, bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) + logger.debug('Completed %s/%s%s in %d parts', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename, partNumber - 1) } export async function storeObject (file: {path: string, filename: string}, bucketInfo: BucketInfo) { @@ -93,6 +109,7 @@ export async function storeObject (file: {path: string, filename: string}, bucke if (stats.size > MAX_PUT_SIZE) { return await multiPartUpload(file, stats, bucketInfo) } + const fileStream = createReadStream(file.path) return await objectStoragePut({ filename: file.filename, content: fileStream, bucketInfo }) } From 6b5c9ca7636110cafcd7d3735d4243993522602d Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 6 Aug 2021 14:05:56 +0200 Subject: [PATCH 14/23] Fix decreasePendingMove query --- server/models/video/video-job-info.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/models/video/video-job-info.ts b/server/models/video/video-job-info.ts index 0d828a5c360..8122684a3f1 100644 --- a/server/models/video/video-job-info.ts +++ b/server/models/video/video-job-info.ts @@ -76,7 +76,8 @@ export class VideoJobInfoModel extends Model(` - UPDATE "videoJobInfo" SET "pendingMove" = "videoJobInfo"."pendingMove" - 1, "updatedAt" = NOW() + UPDATE "videoJobInfo" SET "pendingMove" = "videoJobInfo"."pendingMove" - 1, "updatedAt" = NOW() + WHERE "videoUUID" = $videoUUID RETURNING "pendingMove" `, options) From 2a26bb7c414262451ded6c2c61a9ec6b422f14ee Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 6 Aug 2021 16:41:43 +0200 Subject: [PATCH 15/23] Resolve various PR comments --- scripts/create-transcoding-job.ts | 3 +- server/controllers/api/videos/upload.ts | 4 +- server/initializers/checker-after-init.ts | 4 +- server/initializers/config.ts | 4 +- .../migrations/0660-object-storage.ts | 14 ++++-- .../handlers/move-to-object-storage.ts | 31 ++++-------- .../job-queue/handlers/video-transcoding.ts | 15 +++--- server/lib/job-queue/job-queue.ts | 12 +---- server/lib/object-storage.ts | 30 ++++++------ server/lib/video-paths.ts | 7 ++- server/lib/video.ts | 20 ++++++-- server/models/video/video-job-info.ts | 48 ++++++++++++------- server/models/video/video.ts | 10 ++++ 13 files changed, 113 
insertions(+), 89 deletions(-) diff --git a/scripts/create-transcoding-job.ts b/scripts/create-transcoding-job.ts index 3a552c19a3d..ba885d97583 100755 --- a/scripts/create-transcoding-job.ts +++ b/scripts/create-transcoding-job.ts @@ -9,6 +9,7 @@ import { computeResolutionsToTranscode } from '@server/helpers/ffprobe-utils' import { VideoTranscodingPayload } from '@shared/models' import { CONFIG } from '@server/initializers/config' import { isUUIDValid } from '@server/helpers/custom-validators/misc' +import { addTranscodingJob } from '@server/lib/video' program .option('-v, --video [videoUUID]', 'Video UUID') @@ -90,7 +91,7 @@ async function run () { await JobQueue.Instance.init() for (const d of dataInput) { - await JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: d }) + await addTranscodingJob(d, {}) console.log('Transcoding job for video %s created.', video.uuid) } } diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index 2818728d4e5..91f8f722267 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -6,7 +6,7 @@ import { uuidToShort } from '@server/helpers/uuid' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url' import { - addMoveToObjectStorageJob, + addMoveToObjectStorageJob as addMoveToObjectStorageJobIfNeeded, addOptimizeOrMergeAudioJob, buildLocalVideoFromReq, buildVideoThumbnailsFromReq, @@ -219,7 +219,7 @@ async function addVideo (options: { if (video.state !== VideoState.TO_TRANSCODE) { // Video will be published before move is complete which may cause some video connections to drop // But it's recommended to enable transcoding anyway, so this is the tradeoff - addMoveToObjectStorageJob(video, videoFile) + addMoveToObjectStorageJobIfNeeded(video, videoFile) return } diff --git a/server/initializers/checker-after-init.ts b/server/initializers/checker-after-init.ts index 7dfb8ff5182..dc77558b0ea 100644 --- a/server/initializers/checker-after-init.ts +++ b/server/initializers/checker-after-init.ts @@ -155,10 +155,10 @@ function checkConfig () { // Object storage if (CONFIG.OBJECT_STORAGE.ENABLED === true) { - if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED && !CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME) { + if (!CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME) { return 'videos_bucket should be set when object storage support is enabled.' } - if (CONFIG.TRANSCODING.HLS.ENABLED && !CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME) { + if (!CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME) { return 'streaming_playlists_bucket should be set when object storage support is enabled.' } if (CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME && diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 0fbaa61315e..4272a219bce 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -75,7 +75,9 @@ const CONFIG = { }, OBJECT_STORAGE: { ENABLED: config.get('object_storage.enabled'), - ENDPOINT: config.get('object_storage.endpoint'), + ENDPOINT: new URL(/^https?:\/\//i.test(config.get('object_storage.endpoint')) + ? 
config.get('object_storage.endpoint') + : 'https://' + config.get('object_storage.endpoint')), VIDEOS: { BUCKET_NAME: config.get('object_storage.videos.bucket_name'), PREFIX: config.get('object_storage.videos.prefix'), diff --git a/server/initializers/migrations/0660-object-storage.ts b/server/initializers/migrations/0660-object-storage.ts index 9d826f7883f..8b95ee205e1 100644 --- a/server/initializers/migrations/0660-object-storage.ts +++ b/server/initializers/migrations/0660-object-storage.ts @@ -12,7 +12,7 @@ async function up (utils: { CREATE TABLE IF NOT EXISTS "videoJobInfo" ( "id" serial, "pendingMove" INTEGER NOT NULL, - "videoUUID" uuid UNIQUE NOT NULL REFERENCES "video" ("uuid") ON DELETE CASCADE ON UPDATE CASCADE, + "videoId" serial UNIQUE NOT NULL REFERENCES "video" ("id") ON DELETE CASCADE ON UPDATE CASCADE, "createdAt" timestamp WITH time zone NOT NULL, "updatedAt" timestamp WITH time zone NOT NULL, PRIMARY KEY ("id") @@ -23,24 +23,28 @@ async function up (utils: { } { - await utils.queryInterface.addColumn('videoFile', 'storage', { type: Sequelize.INTEGER, allowNull: false }) + await utils.queryInterface.addColumn('videoFile', 'storage', { type: Sequelize.INTEGER, allowNull: true }) } - { await utils.sequelize.query( `UPDATE "videoFile" SET "storage" = ${VideoStorageType.LOCAL}` ) } - { - await utils.queryInterface.addColumn('videoStreamingPlaylist', 'storage', { type: Sequelize.INTEGER, allowNull: false }) + await utils.queryInterface.changeColumn('videoFile', 'storage', { type: Sequelize.INTEGER, allowNull: false }) } + { + await utils.queryInterface.addColumn('videoStreamingPlaylist', 'storage', { type: Sequelize.INTEGER, allowNull: true }) + } { await utils.sequelize.query( `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorageType.LOCAL}` ) } + { + await utils.queryInterface.changeColumn('videoStreamingPlaylist', 'storage', { type: Sequelize.INTEGER, allowNull: false }) + } } function down (options) { diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index b5cba227617..bc059b9dce3 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -23,7 +23,7 @@ export async function processMoveToObjectStorage (job: Bull.Job) { return undefined } - if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED && video.VideoFiles) { + if (video.VideoFiles) { await moveWebTorrentFiles(video, payload.videoFileId) } @@ -46,10 +46,8 @@ async function moveWebTorrentFiles (video: MVideoWithAllFiles, videoFileId?: num if (videoFileId !== null && file.id !== videoFileId) continue const filename = file.filename - await storeObject( - { filename, path: join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) }, - CONFIG.OBJECT_STORAGE.VIDEOS - ) + const path = join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) + await storeObject({ filename, path }, CONFIG.OBJECT_STORAGE.VIDEOS) file.storage = VideoStorageType.OBJECT_STORAGE file.fileUrl = generateObjectStoreUrl(filename, CONFIG.OBJECT_STORAGE.VIDEOS) @@ -67,23 +65,19 @@ async function moveHLSFiles (video: MVideoWithAllFiles, videoFileId: number) { // Resolution playlist const playlistFileName = getHlsResolutionPlaylistFilename(file.filename) + const playlistPath = join(baseHlsDirectory, playlistFileName) await storeObject( { filename: join(playlist.getStringType(), video.uuid, playlistFileName), - path: join(baseHlsDirectory, playlistFileName) + path: playlistPath }, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS ) // 
Resolution fragmented file const filename = join(playlist.getStringType(), video.uuid, file.filename) - await storeObject( - { - filename, - path: join(baseHlsDirectory, file.filename) - }, - CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS - ) + const path = join(baseHlsDirectory, file.filename) + await storeObject({ filename, path }, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) // Signals that the video file + playlist file were uploaded file.storage = VideoStorageType.OBJECT_STORAGE @@ -122,17 +116,10 @@ async function doAfterLastJob (video: MVideoWithAllFiles) { await playlist.save() } - // Remove files that were "moved" - const tasks: Promise[] = [] - - for (const file of video.VideoFiles) { - tasks.push(remove(join(CONFIG.STORAGE.VIDEOS_DIR, file.filename))) - } - + // Remove empty hls video directory if (video.VideoStreamingPlaylists) { - tasks.push(remove(join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid))) + await remove(join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)) } - await Promise.all(tasks) await publishAndFederateIfNeeded(video) } diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index 90409b7e874..a7825c60d2f 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -1,6 +1,6 @@ import * as Bull from 'bull' import { TranscodeOptionsType } from '@server/helpers/ffmpeg-utils' -import { addMoveToObjectStorageJob, getTranscodingJobPriority, publishAndFederateIfNeeded } from '@server/lib/video' +import { addMoveToObjectStorageJob, addTranscodingJob, getTranscodingJobPriority, publishAndFederateIfNeeded } from '@server/lib/video' import { getVideoFilePath } from '@server/lib/video-paths' import { UserModel } from '@server/models/user/user' import { MUser, MUserId, MVideoFullLight, MVideoUUID, MVideoWithFile } from '@server/types/models' @@ -24,7 +24,6 @@ import { optimizeOriginalVideofile, transcodeNewWebTorrentResolution } from '../../transcoding/video-transcoding' -import { JobQueue } from '../job-queue' type HandlerFunction = (job: Bull.Job, payload: VideoTranscodingPayload, video: MVideoFullLight, user: MUser) => Promise @@ -135,10 +134,10 @@ async function onHlsPlaylistGeneration (video: MVideoFullLight, user: MUser, pay await createLowerResolutionsJobs(video, user, payload.resolution, payload.isPortraitMode, 'hls') } - // Publishing will be done by mvoe-to-object-storage if enabled - if (!CONFIG.OBJECT_STORAGE.ENABLED) { - await publishAndFederateIfNeeded(video) - } + // Publishing will be done by move-to-object-storage if enabled + if (CONFIG.OBJECT_STORAGE.ENABLED) return + + await publishAndFederateIfNeeded(video) } async function onVideoFileOptimizer ( @@ -229,7 +228,7 @@ async function createHlsJobIfEnabled (user: MUserId, payload: { isMaxQuality: payload.isMaxQuality } - JobQueue.Instance.createJob({ type: 'video-transcoding', payload: hlsTranscodingPayload }, jobOptions) + await addTranscodingJob(hlsTranscodingPayload, jobOptions) return true } @@ -277,7 +276,7 @@ async function createLowerResolutionsJobs ( priority: await getTranscodingJobPriority(user) } - JobQueue.Instance.createJob({ type: 'video-transcoding', payload: dataInput }, jobOptions) + await addTranscodingJob(dataInput, jobOptions) } if (resolutionCreated.length === 0) { diff --git a/server/lib/job-queue/job-queue.ts b/server/lib/job-queue/job-queue.ts index 8e9d779078a..7a3a1bf8226 100644 --- a/server/lib/job-queue/job-queue.ts +++ b/server/lib/job-queue/job-queue.ts @@ -36,7 
+36,6 @@ import { processVideoLiveEnding } from './handlers/video-live-ending' import { processVideoTranscoding } from './handlers/video-transcoding' import { processVideosViews } from './handlers/video-views' import { processMoveToObjectStorage } from './handlers/move-to-object-storage' -import { VideoJobInfoModel } from '@server/models/video/video-job-info' type CreateJobArgument = { type: 'activitypub-http-broadcast', payload: ActivitypubHttpBroadcastPayload } | @@ -55,7 +54,7 @@ type CreateJobArgument = { type: 'video-redundancy', payload: VideoRedundancyPayload } | { type: 'move-to-object-storage', payload: MoveObjectStoragePayload } -type CreateJobOptions = { +export type CreateJobOptions = { delay?: number priority?: number } @@ -160,15 +159,6 @@ class JobQueue { logger.error('Unknown queue %s: cannot create job.', obj.type) return } - if (obj.type === 'video-transcoding') { - // This value is decreased when the move job is finished in ./handlers/move-to-object-storage.ts - // Because every transcode job starts a move job for the transcoded file, the value will only reach - // 0 again when all transcode jobs are finished and the last move job is running - // If object storage support is not enabled all the pendingMove values stay at the amount of transcode - // jobs that were started for that video. - VideoJobInfoModel.increaseOrCreatePendingMove(obj.payload.videoUUID) - .catch(err => logger.error('Cannot increase pendingMove.', { err })) - } const jobArgs: Bull.JobOptions = { backoff: { delay: 60 * 1000, type: 'exponential' }, diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index bf00db7c848..a67a3dc666a 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -12,17 +12,18 @@ import { } from "@aws-sdk/client-s3" import { CONFIG } from "@server/initializers/config" import { logger } from '@server/helpers/logger' -import { createReadStream, createWriteStream, ensureDir, open, close, ReadStream, stat, Stats } from "fs-extra" +import { createReadStream, createWriteStream, ensureDir, open, close, ReadStream, stat, Stats, remove } from "fs-extra" import { Readable } from "stream" import { dirname } from "path" import { min } from "lodash" +import { pipelinePromise } from "@server/helpers/core-utils" type BucketInfo = {BUCKET_NAME: string, PREFIX?: string, BASE_URL?: string} const ONE_MIB = 1024 * 1024 const MAX_PUT_SIZE = process.env.NODE_ENV.includes("test") ? 
10 * ONE_MIB : 100 * ONE_MIB function getS3Client () { - return new S3Client({ endpoint: `https://${CONFIG.OBJECT_STORAGE.ENDPOINT}` }) + return new S3Client({ endpoint: CONFIG.OBJECT_STORAGE.ENDPOINT.toString() }) } function getPartSize (stats: Stats) { @@ -45,7 +46,7 @@ async function objectStoragePut (options: {filename: string, content: string | R Key: key, Body: content }) - return await s3Client.send(command) + return s3Client.send(command) } async function multiPartUpload (file: {filename: string, path: string}, stats: Stats, bucketInfo: BucketInfo) { @@ -107,16 +108,19 @@ export async function storeObject (file: {path: string, filename: string}, bucke const stats = await stat(file.path) // If bigger than 100 MiB we do a multipart upload if (stats.size > MAX_PUT_SIZE) { - return await multiPartUpload(file, stats, bucketInfo) + await multiPartUpload(file, stats, bucketInfo) + } else { + const fileStream = createReadStream(file.path) + await objectStoragePut({ filename: file.filename, content: fileStream, bucketInfo }) } - const fileStream = createReadStream(file.path) - return await objectStoragePut({ filename: file.filename, content: fileStream, bucketInfo }) + logger.debug("Removing %s because it's now on object storage", file.path) + await remove(file.path) } export async function writeObjectContents (file: {filename: string, content: string}, bucketInfo: BucketInfo) { logger.debug('Writing object to %s/%s%s', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) - return await objectStoragePut({ filename: file.filename, content: file.content, bucketInfo }) + return objectStoragePut({ filename: file.filename, content: file.content, bucketInfo }) } export async function removeObject (filename: string, bucketInfo: BucketInfo) { @@ -126,7 +130,7 @@ export async function removeObject (filename: string, bucketInfo: BucketInfo) { Bucket: bucketInfo.BUCKET_NAME, Key: key }) - return await s3Client.send(command) + return s3Client.send(command) } export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { @@ -152,7 +156,9 @@ export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { } export function generateObjectStoreUrl (filename: string, bucketInfo: BucketInfo) { - return `https://${bucketInfo.BUCKET_NAME}.${CONFIG.OBJECT_STORAGE.ENDPOINT}/${bucketInfo.PREFIX}${filename}` + const endpoint = CONFIG.OBJECT_STORAGE.ENDPOINT + const port = endpoint.port ? 
`:${endpoint.port}` : '' + return `${endpoint.protocol}//${bucketInfo.BUCKET_NAME}.${endpoint.hostname}${port}/${bucketInfo.PREFIX}${filename}` } export async function makeAvailable (options: { filename: string, at: string }, bucketInfo: BucketInfo) { @@ -165,10 +171,6 @@ export async function makeAvailable (options: { filename: string, at: string }, }) const response = await s3Client.send(command) const file = createWriteStream(options.at) - await new Promise((resolve, reject) => { - const pipe = (response.Body as Readable).pipe(file) - pipe.on('end', () => resolve(options.at)) - pipe.on('error', reject) - }) + await pipelinePromise(response.Body as Readable, file) file.close() } diff --git a/server/lib/video-paths.ts b/server/lib/video-paths.ts index 8a3115f00a4..7a7835056c8 100644 --- a/server/lib/video-paths.ts +++ b/server/lib/video-paths.ts @@ -6,7 +6,7 @@ import { isStreamingPlaylist, MStreamingPlaylist, MStreamingPlaylistVideo, MVide import { buildUUID } from '@server/helpers/uuid' import { removeFragmentedMP4Ext } from '@shared/core-utils' import { makeAvailable } from './object-storage' -import { existsSync } from 'fs-extra' +import { stat } from 'fs-extra' // ################## Video file name ################## @@ -37,8 +37,11 @@ async function getVideoFilePathMakeAvailable ( videoFile: MVideoFile ) { const path = getVideoFilePath(videoOrPlaylist, videoFile) - if (existsSync(path)) { + try { + await stat(path) return path + } catch { + // Continue if path not available } if (videoFile.isHLS()) { diff --git a/server/lib/video.ts b/server/lib/video.ts index 9404ed4cdb2..e2252a7aaaa 100644 --- a/server/lib/video.ts +++ b/server/lib/video.ts @@ -8,10 +8,11 @@ import { FilteredModelAttributes } from '@server/types' import { MThumbnail, MUserId, MVideoFile, MVideoTag, MVideoThumbnail, MVideoUUID } from '@server/types/models' import { ThumbnailType, VideoCreate, VideoPrivacy, VideoTranscodingPayload } from '@shared/models' import { federateVideoIfNeeded } from './activitypub/videos' -import { JobQueue } from './job-queue/job-queue' +import { CreateJobOptions, JobQueue } from './job-queue/job-queue' import { Notifier } from './notifier' import { updateVideoMiniatureFromExisting } from './thumbnail' import { CONFIG } from '@server/initializers/config' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' function buildLocalVideoFromReq (videoInfo: VideoCreate, channelId: number): FilteredModelAttributes { return { @@ -128,10 +129,21 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF priority: await getTranscodingJobPriority(user) } - return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: dataInput }, jobOptions) + return addTranscodingJob(dataInput, jobOptions) } -export function addMoveToObjectStorageJob (video: MVideoUUID, videoFile: MVideoFile) { +async function addTranscodingJob (payload: VideoTranscodingPayload, options: CreateJobOptions) { + // This value is decreased when the move job is finished in ./handlers/move-to-object-storage.ts + // Because every transcode job starts a move job for the transcoded file, the value will only reach + // 0 again when all transcode jobs are finished and the last move job is running + // If object storage support is not enabled all the pendingMove values stay at the amount of transcode + // jobs that were started for that video. 
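To make the endpoint handling above concrete, the following self-contained snippet reproduces the URL shape that generateObjectStoreUrl builds after this change (the endpoint, bucket and prefix values are illustrative only; the real code reads them from CONFIG.OBJECT_STORAGE):

// Standalone copy of the URL construction from generateObjectStoreUrl, with made-up values
const endpoint = new URL('https://s3.us-east-1.amazonaws.com')
const bucketInfo = { BUCKET_NAME: 'videos', PREFIX: 'prod/' }
const filename = 'abc123-720.mp4'

const port = endpoint.port ? `:${endpoint.port}` : ''
const url = `${endpoint.protocol}//${bucketInfo.BUCKET_NAME}.${endpoint.hostname}${port}/${bucketInfo.PREFIX}${filename}`

console.log(url) // https://videos.s3.us-east-1.amazonaws.com/prod/abc123-720.mp4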
+ await VideoJobInfoModel.increaseOrCreatePendingMove(payload.videoUUID) + + return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: payload }, options) +} + +function addMoveToObjectStorageJob (video: MVideoUUID, videoFile: MVideoFile) { if (CONFIG.OBJECT_STORAGE.ENABLED) { const dataInput = { videoUUID: video.uuid, @@ -158,5 +170,7 @@ export { buildVideoThumbnailsFromReq, setVideoTags, addOptimizeOrMergeAudioJob, + addTranscodingJob, + addMoveToObjectStorageJob, getTranscodingJobPriority } diff --git a/server/models/video/video-job-info.ts b/server/models/video/video-job-info.ts index 8122684a3f1..3cb266aa5a7 100644 --- a/server/models/video/video-job-info.ts +++ b/server/models/video/video-job-info.ts @@ -4,13 +4,12 @@ import { BelongsTo, Column, CreatedAt, - DataType, Default, ForeignKey, IsInt, - IsUUID, Model, Table, + Unique, UpdatedAt } from "sequelize-typescript" import { Op, QueryTypes } from "sequelize" @@ -20,13 +19,12 @@ import { VideoModel } from "./video" tableName: 'videoJobInfo', indexes: [ { - fields: [ 'videoUUID' ], + fields: [ 'videoId' ], where: { - videoUUID: { + videoId: { [Op.ne]: null } - }, - unique: true + } } ] }) @@ -45,15 +43,14 @@ export class VideoJobInfoModel extends Model VideoModel) - @IsUUID(4) - @Column(DataType.UUID) - videoUUID: string + @Unique + @Column + videoId: number @BelongsTo(() => VideoModel, { foreignKey: { allowNull: false }, - targetKey: 'uuid', onDelete: 'cascade' }) Video: VideoModel @@ -62,11 +59,19 @@ export class VideoJobInfoModel extends Model(` - INSERT INTO "videoJobInfo" ("videoUUID", "pendingMove", "createdAt", "updatedAt") - VALUES ($videoUUID, 1, NOW(), NOW()) - ON CONFLICT ("videoUUID") WHERE "videoUUID" = $videoUUID - DO UPDATE SET "pendingMove" = "videoJobInfo"."pendingMove" + 1, "updatedAt" = NOW() - RETURNING "pendingMove" + INSERT INTO "videoJobInfo" ("videoId", "pendingMove", "createdAt", "updatedAt") + SELECT + "video"."id" AS "videoId", 1, NOW(), NOW() + FROM + "video" + WHERE + "video"."uuid" = $videoUUID + ON CONFLICT ("videoId") DO UPDATE + SET + "pendingMove" = "videoJobInfo"."pendingMove" + 1, + "updatedAt" = NOW() + RETURNING + "pendingMove" `, options) return pendingMove @@ -76,9 +81,16 @@ export class VideoJobInfoModel extends Model(` - UPDATE "videoJobInfo" SET "pendingMove" = "videoJobInfo"."pendingMove" - 1, "updatedAt" = NOW() - WHERE "videoUUID" = $videoUUID - RETURNING "pendingMove" + UPDATE + "videoJobInfo" + SET + "pendingMove" = "videoJobInfo"."pendingMove" - 1, + "updatedAt" = NOW() + FROM "video" + WHERE + "video"."id" = "videoJobInfo"."videoId" AND "video"."uuid" = $videoUUID + RETURNING + "pendingMove"; `, options) return pendingMove diff --git a/server/models/video/video.ts b/server/models/video/video.ts index 21bea7344e8..5dfb5241ab3 100644 --- a/server/models/video/video.ts +++ b/server/models/video/video.ts @@ -122,6 +122,7 @@ import { VideoStreamingPlaylistModel } from './video-streaming-playlist' import { VideoTagModel } from './video-tag' import { VideoViewModel } from './video-view' import { removeObject, removePrefix } from '@server/lib/object-storage' +import { VideoJobInfoModel } from './video-job-info' export enum ScopeNames { FOR_API = 'FOR_API', @@ -734,6 +735,15 @@ export class VideoModel extends Model>> { }) VideoCaptions: VideoCaptionModel[] + @HasOne(() => VideoJobInfoModel, { + foreignKey: { + name: 'videoId', + allowNull: false + }, + onDelete: 'cascade' + }) + VideoJobInfo: VideoJobInfoModel + @BeforeDestroy static async sendDelete (instance: 
MVideoAccountLight, options) { if (!instance.isOwned()) return undefined From 24c45a5ddd40f580bb2bffd023fa12fb1cb8b980 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Fri, 6 Aug 2021 17:04:42 +0200 Subject: [PATCH 16/23] Move to object storage after optimize --- scripts/optimize-old-videos.ts | 6 ++++-- server/lib/job-queue/handlers/move-to-object-storage.ts | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/optimize-old-videos.ts b/scripts/optimize-old-videos.ts index 9e66105ddeb..b5fe1ff5fa2 100644 --- a/scripts/optimize-old-videos.ts +++ b/scripts/optimize-old-videos.ts @@ -8,8 +8,9 @@ import { initDatabaseModels } from '../server/initializers/database' import { basename, dirname } from 'path' import { copy, move, remove } from 'fs-extra' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' -import { getVideoFilePath } from '@server/lib/video-paths' import { getMaxBitrate } from '@shared/core-utils' +import { getVideoFilePath, getVideoFilePathMakeAvailable } from '@server/lib/video-paths' +import { moveWebTorrentFiles } from '@server/lib/job-queue/handlers/move-to-object-storage' run() .then(() => process.exit(0)) @@ -39,7 +40,7 @@ async function run () { currentVideoId = video.id for (const file of video.VideoFiles) { - currentFilePath = getVideoFilePath(video, file) + currentFilePath = await getVideoFilePathMakeAvailable(video, file) const [ videoBitrate, fps, dataResolution ] = await Promise.all([ getVideoFileBitrate(currentFilePath), @@ -67,6 +68,7 @@ async function run () { if (originalDuration === newDuration) { console.log('Finished optimizing %s', basename(currentFilePath)) + await moveWebTorrentFiles(video, file.id) await remove(backupFile) continue } diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index bc059b9dce3..6ef5ed0a86d 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -40,7 +40,7 @@ export async function processMoveToObjectStorage (job: Bull.Job) { return payload.videoUUID } -async function moveWebTorrentFiles (video: MVideoWithAllFiles, videoFileId?: number) { +export async function moveWebTorrentFiles (video: MVideoWithAllFiles, videoFileId?: number) { for (const file of video.VideoFiles) { if (file.storage !== VideoStorageType.LOCAL) continue if (videoFileId !== null && file.id !== videoFileId) continue From 50d6556916e53c344b2510776b3437e5a4e79336 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Sat, 7 Aug 2021 12:42:15 +0200 Subject: [PATCH 17/23] Make upload size configurable and increase default --- config/default.yaml | 2 ++ server/initializers/config.ts | 1 + server/lib/object-storage.ts | 8 ++------ 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/config/default.yaml b/config/default.yaml index 484ff75b6c5..096d8ba7407 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -97,6 +97,8 @@ storage: object_storage: enabled: false + # Maximum amount of MiB to upload in one request to object storage + max_upload_part: 2048 # Will always use https with default URL generation (see below) endpoint: 's3.amazonaws.com' streaming_playlists: diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 4272a219bce..9a289abf56a 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -75,6 +75,7 @@ const CONFIG = { }, OBJECT_STORAGE: { ENABLED: config.get('object_storage.enabled'), + 
MAX_UPLOAD_PART: config.get('object_storage.max_upload_part'), ENDPOINT: new URL(/^https?:\/\//i.test(config.get('object_storage.endpoint')) ? config.get('object_storage.endpoint') : 'https://' + config.get('object_storage.endpoint')), diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts index a67a3dc666a..c39133665f0 100644 --- a/server/lib/object-storage.ts +++ b/server/lib/object-storage.ts @@ -20,7 +20,7 @@ import { pipelinePromise } from "@server/helpers/core-utils" type BucketInfo = {BUCKET_NAME: string, PREFIX?: string, BASE_URL?: string} const ONE_MIB = 1024 * 1024 -const MAX_PUT_SIZE = process.env.NODE_ENV.includes("test") ? 10 * ONE_MIB : 100 * ONE_MIB +const MAX_PUT_SIZE = process.env.NODE_ENV.includes("test") ? 10 * ONE_MIB : CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART * ONE_MIB function getS3Client () { return new S3Client({ endpoint: CONFIG.OBJECT_STORAGE.ENDPOINT.toString() }) @@ -30,11 +30,7 @@ function getPartSize (stats: Stats) { if (process.env.NODE_ENV.includes("test")) { return 10 * ONE_MIB } - // Use parts of 1 GiB if the file is very large (it would take more than 1000 requests at 100 MiB per request) - if (stats.size / (100 * ONE_MIB) > 1000) { - return 1024 * ONE_MIB - } - return 100 * ONE_MIB + return MAX_PUT_SIZE } async function objectStoragePut (options: {filename: string, content: string | ReadStream, bucketInfo: BucketInfo}) { From a54e28a1add3a9d38501c8474a652b33e92f3ed4 Mon Sep 17 00:00:00 2001 From: Jelle Besseling Date: Mon, 9 Aug 2021 14:17:01 +0200 Subject: [PATCH 18/23] Prune webtorrent files that are stored in object storage --- server/models/video/video-file.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/models/video/video-file.ts b/server/models/video/video-file.ts index a2319cadce0..a1c678b4d0d 100644 --- a/server/models/video/video-file.ts +++ b/server/models/video/video-file.ts @@ -279,7 +279,7 @@ export class VideoFileModel extends Model static async doesOwnedWebTorrentVideoFileExist (filename: string) { const query = 'SELECT 1 FROM "videoFile" INNER JOIN "video" ON "video"."id" = "videoFile"."videoId" AND "video"."remote" IS FALSE ' + - 'WHERE "filename" = $filename LIMIT 1' + `WHERE "filename" = $filename AND "storage" = ${VideoStorageType.LOCAL} LIMIT 1` return doesExist(query, { filename }) } From 72eb1470966f9329d45b84673190e0dd9842e373 Mon Sep 17 00:00:00 2001 From: Chocobozzz Date: Thu, 12 Aug 2021 14:45:49 +0200 Subject: [PATCH 19/23] Move files after transcoding jobs --- .github/workflows/test.yml | 5 + config/default.yaml | 31 +- scripts/ci.sh | 3 +- scripts/optimize-old-videos.ts | 27 +- server/controllers/api/videos/upload.ts | 19 +- server/controllers/download.ts | 10 +- server/initializers/checker-after-init.ts | 9 +- server/initializers/config.ts | 11 +- server/initializers/constants.ts | 3 +- .../migrations/0660-object-storage.ts | 7 +- .../handlers/move-to-object-storage.ts | 118 ++++---- .../job-queue/handlers/video-live-ending.ts | 4 +- .../job-queue/handlers/video-transcoding.ts | 70 ++--- server/lib/object-storage.ts | 172 ----------- server/lib/object-storage/index.ts | 3 + server/lib/object-storage/keys.ts | 19 ++ server/lib/object-storage/shared/client.ts | 36 +++ server/lib/object-storage/shared/index.ts | 3 + server/lib/object-storage/shared/logger.ts | 7 + .../shared/object-storage-helpers.ts | 221 ++++++++++++++ server/lib/object-storage/urls.ts | 40 +++ server/lib/object-storage/videos.ts | 39 +++ server/lib/video-paths.ts | 6 +- server/lib/video.ts | 105 +++++-- 
server/models/video/video-file.ts | 32 +- server/models/video/video-job-info.ts | 52 ++-- .../models/video/video-streaming-playlist.ts | 25 +- server/models/video/video.ts | 34 +-- server/tests/api/index.ts | 1 + server/tests/api/object-storage/index.ts | 1 + server/tests/api/object-storage/videos.ts | 273 ++++++++++++++++++ server/types/models/video/video.ts | 5 - shared/extra-utils/miscs/checks.ts | 7 +- shared/extra-utils/mock-servers/index.ts | 1 + .../mock-servers/mock-object-storage.ts | 78 +++++ shared/extra-utils/server/servers.ts | 4 +- shared/extra-utils/videos/videos-command.ts | 11 + shared/models/server/job.model.ts | 1 - shared/models/videos/index.ts | 1 + shared/models/videos/video-state.enum.ts | 3 +- shared/models/videos/video-storage.enum.ts | 4 + 41 files changed, 1054 insertions(+), 447 deletions(-) delete mode 100644 server/lib/object-storage.ts create mode 100644 server/lib/object-storage/index.ts create mode 100644 server/lib/object-storage/keys.ts create mode 100644 server/lib/object-storage/shared/client.ts create mode 100644 server/lib/object-storage/shared/index.ts create mode 100644 server/lib/object-storage/shared/logger.ts create mode 100644 server/lib/object-storage/shared/object-storage-helpers.ts create mode 100644 server/lib/object-storage/urls.ts create mode 100644 server/lib/object-storage/videos.ts create mode 100644 server/tests/api/object-storage/index.ts create mode 100644 server/tests/api/object-storage/videos.ts create mode 100644 shared/extra-utils/mock-servers/mock-object-storage.ts create mode 100644 shared/models/videos/video-storage.enum.ts diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c5bbd9e2c64..093a2495ede 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,6 +31,11 @@ jobs: ports: - 10389:10389 + s3ninja: + image: scireum/s3-ninja + ports: + - 9444:9000 + strategy: fail-fast: false matrix: diff --git a/config/default.yaml b/config/default.yaml index 096d8ba7407..2227abbd522 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -97,20 +97,33 @@ storage: object_storage: enabled: false - # Maximum amount of MiB to upload in one request to object storage - max_upload_part: 2048 - # Will always use https with default URL generation (see below) + + # Without protocol, will default to HTTPS endpoint: 's3.amazonaws.com' + + region: 'us-east-1' + + credentials: + access_key_id: 'access-key' + secret_access_key: 'secret-access-key' + + # Maximum amount to upload in one request to object storage + max_upload_part: 2MB + streaming_playlists: - bucket_name: '' + bucket_name: 'streaming-playlists' + # Allows setting all buckets to the same value but with a different prefix - prefix: '' - # Base url for object URL generation, path in bucket is appended to this url - base_url: '' + prefix: '' # Example: 'streaming-playlists:' + + # Base url for object URL generation, scheme and host will be replaced by this URL + # Useful when you want to use a CDN/external proxy + base_url: '' # Example: 'https://mirror.example.com' + # Same settings but for webtorrent videos videos: - bucket_name: '' - prefix: '' + bucket_name: 'videos' + prefix: '' base_url: '' log: diff --git a/scripts/ci.sh b/scripts/ci.sh index 71b1be53b78..f49dbe6adcc 100755 --- a/scripts/ci.sh +++ b/scripts/ci.sh @@ -89,9 +89,10 @@ elif [ "$1" = "api-4" ]; then moderationFiles=$(findTestFiles ./dist/server/tests/api/moderation) redundancyFiles=$(findTestFiles ./dist/server/tests/api/redundancy) + objectStorageFiles=$(findTestFiles 
./dist/server/tests/api/object-storage) activitypubFiles=$(findTestFiles ./dist/server/tests/api/activitypub) - MOCHA_PARALLEL=true TS_NODE_FILES=true runTest "$1" 2 $moderationFiles $redundancyFiles $activitypubFiles + MOCHA_PARALLEL=true TS_NODE_FILES=true runTest "$1" 2 $moderationFiles $redundancyFiles $activitypubFiles $objectStorageFiles elif [ "$1" = "external-plugins" ]; then npm run build:server diff --git a/scripts/optimize-old-videos.ts b/scripts/optimize-old-videos.ts index b5fe1ff5fa2..81594d72c0e 100644 --- a/scripts/optimize-old-videos.ts +++ b/scripts/optimize-old-videos.ts @@ -1,16 +1,18 @@ -import { registerTSPaths } from '../server/helpers/register-ts-paths' -registerTSPaths() - -import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffprobe-utils' -import { VideoModel } from '../server/models/video/video' -import { optimizeOriginalVideofile } from '../server/lib/transcoding/video-transcoding' -import { initDatabaseModels } from '../server/initializers/database' -import { basename, dirname } from 'path' import { copy, move, remove } from 'fs-extra' +import { basename, dirname } from 'path' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' -import { getMaxBitrate } from '@shared/core-utils' +import { CONFIG } from '@server/initializers/config' +import { processMoveToObjectStorage } from '@server/lib/job-queue/handlers/move-to-object-storage' import { getVideoFilePath, getVideoFilePathMakeAvailable } from '@server/lib/video-paths' -import { moveWebTorrentFiles } from '@server/lib/job-queue/handlers/move-to-object-storage' +import { getMaxBitrate } from '@shared/core-utils' +import { MoveObjectStoragePayload } from '@shared/models' +import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffprobe-utils' +import { registerTSPaths } from '../server/helpers/register-ts-paths' +import { initDatabaseModels } from '../server/initializers/database' +import { optimizeOriginalVideofile } from '../server/lib/transcoding/video-transcoding' +import { VideoModel } from '../server/models/video/video' + +registerTSPaths() run() .then(() => process.exit(0)) @@ -68,7 +70,6 @@ async function run () { if (originalDuration === newDuration) { console.log('Finished optimizing %s', basename(currentFilePath)) - await moveWebTorrentFiles(video, file.id) await remove(backupFile) continue } @@ -79,6 +80,10 @@ async function run () { await file.save() } } + + if (CONFIG.OBJECT_STORAGE.ENABLED === true) { + await processMoveToObjectStorage({ data: { videoUUID: video.uuid } as MoveObjectStoragePayload } as any) + } } console.log('Finished optimizing videos') diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index 91f8f722267..8d5b65f29cd 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -6,9 +6,10 @@ import { uuidToShort } from '@server/helpers/uuid' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url' import { - addMoveToObjectStorageJob as addMoveToObjectStorageJobIfNeeded, + addMoveToObjectStorageJob, addOptimizeOrMergeAudioJob, buildLocalVideoFromReq, + buildNextVideoState, buildVideoThumbnailsFromReq, setVideoTags } from '@server/lib/video' @@ -145,10 +146,7 @@ async function addVideo (options: { const videoData = buildLocalVideoFromReq(videoInfo, 
videoChannel.id) - videoData.state = CONFIG.TRANSCODING.ENABLED - ? VideoState.TO_TRANSCODE - : VideoState.PUBLISHED - + videoData.state = buildNextVideoState() videoData.duration = videoPhysicalFile.duration // duration was added by a previous middleware const video = new VideoModel(videoData) as MVideoFullLight @@ -216,14 +214,13 @@ async function addVideo (options: { createTorrentFederate(video, videoFile) .then(() => { - if (video.state !== VideoState.TO_TRANSCODE) { - // Video will be published before move is complete which may cause some video connections to drop - // But it's recommended to enable transcoding anyway, so this is the tradeoff - addMoveToObjectStorageJobIfNeeded(video, videoFile) - return + if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { + return addMoveToObjectStorageJob(video) } - return addOptimizeOrMergeAudioJob(videoCreated, videoFile, user) + if (video.state === VideoState.TO_TRANSCODE) { + return addOptimizeOrMergeAudioJob(videoCreated, videoFile, user) + } }) .catch(err => logger.error('Cannot add optimize/merge audio job for %s.', videoCreated.uuid, { err, ...lTags(videoCreated.uuid) })) diff --git a/server/controllers/download.ts b/server/controllers/download.ts index ddacc1b68ac..65aa53420e7 100644 --- a/server/controllers/download.ts +++ b/server/controllers/download.ts @@ -5,7 +5,7 @@ import { VideosTorrentCache } from '@server/lib/files-cache/videos-torrent-cache import { Hooks } from '@server/lib/plugins/hooks' import { getVideoFilePath } from '@server/lib/video-paths' import { MStreamingPlaylist, MVideo, MVideoFile, MVideoFullLight } from '@server/types/models' -import { HttpStatusCode, VideoStreamingPlaylistType } from '@shared/models' +import { HttpStatusCode, VideoStorage, VideoStreamingPlaylistType } from '@shared/models' import { STATIC_DOWNLOAD_PATHS } from '../initializers/constants' import { asyncMiddleware, videosDownloadValidator } from '../middlewares' @@ -81,6 +81,10 @@ async function downloadVideoFile (req: express.Request, res: express.Response) { if (!checkAllowResult(res, allowParameters, allowedResult)) return + if (videoFile.storage === VideoStorage.OBJECT_STORAGE) { + return res.redirect(videoFile.getObjectStorageUrl()) + } + return res.download(getVideoFilePath(video, videoFile), `${video.name}-${videoFile.resolution}p${videoFile.extname}`) } @@ -107,6 +111,10 @@ async function downloadHLSVideoFile (req: express.Request, res: express.Response if (!checkAllowResult(res, allowParameters, allowedResult)) return + if (videoFile.storage === VideoStorage.OBJECT_STORAGE) { + return res.redirect(videoFile.getObjectStorageUrl()) + } + const filename = `${video.name}-${videoFile.resolution}p-${streamingPlaylist.getStringType()}${videoFile.extname}` return res.download(getVideoFilePath(streamingPlaylist, videoFile), filename) } diff --git a/server/initializers/checker-after-init.ts b/server/initializers/checker-after-init.ts index dc77558b0ea..09f58727498 100644 --- a/server/initializers/checker-after-init.ts +++ b/server/initializers/checker-after-init.ts @@ -155,14 +155,19 @@ function checkConfig () { // Object storage if (CONFIG.OBJECT_STORAGE.ENABLED === true) { + if (!CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME) { return 'videos_bucket should be set when object storage support is enabled.' } + if (!CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME) { return 'streaming_playlists_bucket should be set when object storage support is enabled.' 
} - if (CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME && - CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.PREFIX) { + + if ( + CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME && + CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.PREFIX + ) { if (CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === '') { return 'Object storage bucket prefixes should be set when the same bucket is used for both types of video.' } else { diff --git a/server/initializers/config.ts b/server/initializers/config.ts index 9a289abf56a..0e684eef862 100644 --- a/server/initializers/config.ts +++ b/server/initializers/config.ts @@ -75,10 +75,13 @@ const CONFIG = { }, OBJECT_STORAGE: { ENABLED: config.get('object_storage.enabled'), - MAX_UPLOAD_PART: config.get('object_storage.max_upload_part'), - ENDPOINT: new URL(/^https?:\/\//i.test(config.get('object_storage.endpoint')) - ? config.get('object_storage.endpoint') - : 'https://' + config.get('object_storage.endpoint')), + MAX_UPLOAD_PART: bytes.parse(config.get('object_storage.max_upload_part')), + ENDPOINT: config.get('object_storage.endpoint'), + REGION: config.get('object_storage.region'), + CREDENTIALS: { + ACCESS_KEY_ID: config.get('object_storage.credentials.access_key_id'), + SECRET_ACCESS_KEY: config.get('object_storage.credentials.secret_access_key') + }, VIDEOS: { BUCKET_NAME: config.get('object_storage.videos.bucket_name'), PREFIX: config.get('object_storage.videos.prefix'), diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts index 60e43887523..8a1526ae88d 100644 --- a/server/initializers/constants.ts +++ b/server/initializers/constants.ts @@ -415,7 +415,8 @@ const VIDEO_STATES: { [ id in VideoState ]: string } = { [VideoState.TO_TRANSCODE]: 'To transcode', [VideoState.TO_IMPORT]: 'To import', [VideoState.WAITING_FOR_LIVE]: 'Waiting for livestream', - [VideoState.LIVE_ENDED]: 'Livestream ended' + [VideoState.LIVE_ENDED]: 'Livestream ended', + [VideoState.TO_MOVE_TO_EXTERNAL_STORAGE]: 'To move to an external storage' } const VIDEO_IMPORT_STATES: { [ id in VideoImportState ]: string } = { diff --git a/server/initializers/migrations/0660-object-storage.ts b/server/initializers/migrations/0660-object-storage.ts index 8b95ee205e1..1cc265bfbb3 100644 --- a/server/initializers/migrations/0660-object-storage.ts +++ b/server/initializers/migrations/0660-object-storage.ts @@ -1,5 +1,5 @@ -import { VideoStorageType } from '@server/types/models' import * as Sequelize from 'sequelize' +import { VideoStorage } from '@shared/models' async function up (utils: { transaction: Sequelize.Transaction @@ -12,6 +12,7 @@ async function up (utils: { CREATE TABLE IF NOT EXISTS "videoJobInfo" ( "id" serial, "pendingMove" INTEGER NOT NULL, + "pendingTranscoding" INTEGER NOT NULL, "videoId" serial UNIQUE NOT NULL REFERENCES "video" ("id") ON DELETE CASCADE ON UPDATE CASCADE, "createdAt" timestamp WITH time zone NOT NULL, "updatedAt" timestamp WITH time zone NOT NULL, @@ -27,7 +28,7 @@ async function up (utils: { } { await utils.sequelize.query( - `UPDATE "videoFile" SET "storage" = ${VideoStorageType.LOCAL}` + `UPDATE "videoFile" SET "storage" = ${VideoStorage.LOCAL}` ) } { @@ -39,7 +40,7 @@ async function up (utils: { } { await utils.sequelize.query( - `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorageType.LOCAL}` + `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorage.LOCAL}` 
) } { diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index 6ef5ed0a86d..c1fbdfc892b 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -1,16 +1,17 @@ import * as Bull from 'bull' +import { remove } from 'fs-extra' +import { join } from 'path' import { logger } from '@server/helpers/logger' -import { MoveObjectStoragePayload } from '../../../../shared' -import { VideoModel } from '@server/models/video/video' -import { generateObjectStoreUrl, storeObject } from '@server/lib/object-storage' +import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { CONFIG } from '@server/initializers/config' -import { join } from 'path' import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' -import { getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' -import { MVideoWithAllFiles, VideoStorageType } from '@server/types/models' -import { remove } from 'fs-extra' -import { publishAndFederateIfNeeded } from '@server/lib/video' +import { storeHLSFile, storeWebTorrentFile } from '@server/lib/object-storage' +import { moveToNextState } from '@server/lib/video' +import { getHLSDirectory, getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' +import { VideoModel } from '@server/models/video/video' import { VideoJobInfoModel } from '@server/models/video/video-job-info' +import { MVideoFile, MVideoWithAllFiles } from '@server/types/models' +import { MoveObjectStoragePayload, VideoStorage } from '../../../../shared' export async function processMoveToObjectStorage (job: Bull.Job) { const payload = job.data as MoveObjectStoragePayload @@ -24,95 +25,64 @@ export async function processMoveToObjectStorage (job: Bull.Job) { } if (video.VideoFiles) { - await moveWebTorrentFiles(video, payload.videoFileId) + await moveWebTorrentFiles(video) } if (CONFIG.TRANSCODING.HLS.ENABLED && video.VideoStreamingPlaylists) { - await moveHLSFiles(video, payload.videoFileId) + await moveHLSFiles(video) } - const pendingMove = await VideoJobInfoModel.decreasePendingMove(video.uuid) + const pendingMove = await VideoJobInfoModel.decrease(video.uuid, 'pendingMove') if (pendingMove === 0) { - logger.info("Running cleanup after moving files to object storage (video %s in job %d)", video.uuid, job.id) + logger.info('Running cleanup after moving files to object storage (video %s in job %d)', video.uuid, job.id) await doAfterLastJob(video) } return payload.videoUUID } -export async function moveWebTorrentFiles (video: MVideoWithAllFiles, videoFileId?: number) { +// --------------------------------------------------------------------------- + +async function moveWebTorrentFiles (video: MVideoWithAllFiles) { for (const file of video.VideoFiles) { - if (file.storage !== VideoStorageType.LOCAL) continue - if (videoFileId !== null && file.id !== videoFileId) continue + if (file.storage !== VideoStorage.LOCAL) continue - const filename = file.filename - const path = join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) - await storeObject({ filename, path }, CONFIG.OBJECT_STORAGE.VIDEOS) + const fileUrl = await storeWebTorrentFile(file.filename) - file.storage = VideoStorageType.OBJECT_STORAGE - file.fileUrl = generateObjectStoreUrl(filename, CONFIG.OBJECT_STORAGE.VIDEOS) - await file.save() + const oldPath = join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) + await onFileMoved({ video, file, fileUrl, oldPath }) } } -async 
function moveHLSFiles (video: MVideoWithAllFiles, videoFileId: number) { +async function moveHLSFiles (video: MVideoWithAllFiles) { for (const playlist of video.VideoStreamingPlaylists) { - const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) for (const file of playlist.VideoFiles) { - if (file.storage !== VideoStorageType.LOCAL) continue - if (videoFileId !== null && file.id !== videoFileId) continue + if (file.storage !== VideoStorage.LOCAL) continue // Resolution playlist - const playlistFileName = getHlsResolutionPlaylistFilename(file.filename) - const playlistPath = join(baseHlsDirectory, playlistFileName) - await storeObject( - { - filename: join(playlist.getStringType(), video.uuid, playlistFileName), - path: playlistPath - }, - CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS - ) + const playlistFilename = getHlsResolutionPlaylistFilename(file.filename) + await storeHLSFile(playlist, video, playlistFilename) // Resolution fragmented file - const filename = join(playlist.getStringType(), video.uuid, file.filename) - const path = join(baseHlsDirectory, file.filename) - await storeObject({ filename, path }, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) - - // Signals that the video file + playlist file were uploaded - file.storage = VideoStorageType.OBJECT_STORAGE - file.fileUrl = generateObjectStoreUrl(filename, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) - await file.save() + const fileUrl = await storeHLSFile(playlist, video, file.filename) + + const oldPath = join(getHLSDirectory(video), file.filename) + + await onFileMoved({ video, file, fileUrl, oldPath }) } } } async function doAfterLastJob (video: MVideoWithAllFiles) { for (const playlist of video.VideoStreamingPlaylists) { - const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) // Master playlist - const masterPlaylistFilename = join(playlist.getStringType(), video.uuid, playlist.playlistFilename) - await storeObject( - { - filename: masterPlaylistFilename, - path: join(baseHlsDirectory, playlist.playlistFilename) - }, - CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS - ) - + playlist.playlistUrl = await storeHLSFile(playlist, video, playlist.playlistFilename) // Sha256 segments file - const segmentsFileName = join(playlist.getStringType(), video.uuid, playlist.segmentsSha256Filename) - await storeObject( - { - filename: segmentsFileName, - path: join(baseHlsDirectory, playlist.segmentsSha256Filename) - }, - CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS - ) - - playlist.playlistUrl = generateObjectStoreUrl(masterPlaylistFilename, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) - playlist.segmentsSha256Url = generateObjectStoreUrl(segmentsFileName, CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) - playlist.storage = VideoStorageType.OBJECT_STORAGE + playlist.segmentsSha256Url = await storeHLSFile(playlist, video, playlist.segmentsSha256Filename) + + playlist.storage = VideoStorage.OBJECT_STORAGE + await playlist.save() } @@ -121,5 +91,23 @@ async function doAfterLastJob (video: MVideoWithAllFiles) { await remove(join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)) } - await publishAndFederateIfNeeded(video) + await moveToNextState(video) +} + +async function onFileMoved (options: { + video: MVideoWithAllFiles + file: MVideoFile + fileUrl: string + oldPath: string +}) { + const { video, file, fileUrl, oldPath } = options + + file.fileUrl = fileUrl + file.storage = VideoStorage.OBJECT_STORAGE + + await createTorrentAndSetInfoHash(video, file) + await file.save() + + logger.debug('Removing %s because it\'s now on 
object storage', oldPath) + await remove(oldPath) } diff --git a/server/lib/job-queue/handlers/video-live-ending.ts b/server/lib/job-queue/handlers/video-live-ending.ts index ed8d9b44114..5399ea3d173 100644 --- a/server/lib/job-queue/handlers/video-live-ending.ts +++ b/server/lib/job-queue/handlers/video-live-ending.ts @@ -6,7 +6,7 @@ import { VIDEO_LIVE } from '@server/initializers/constants' import { buildConcatenatedName, cleanupLive, LiveSegmentShaStore } from '@server/lib/live' import { generateVideoMiniature } from '@server/lib/thumbnail' import { generateHlsPlaylistResolutionFromTS } from '@server/lib/transcoding/video-transcoding' -import { publishAndFederateIfNeeded } from '@server/lib/video' +import { moveToNextState } from '@server/lib/video' import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getHLSDirectory } from '@server/lib/video-paths' import { VideoModel } from '@server/models/video/video' import { VideoFileModel } from '@server/models/video/video-file' @@ -133,7 +133,7 @@ async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MSt }) } - await publishAndFederateIfNeeded(videoWithFiles, true) + await moveToNextState(videoWithFiles, false) } async function cleanupLiveFiles (hlsDirectory: string) { diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index a7825c60d2f..1c1c329f10b 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -1,9 +1,9 @@ import * as Bull from 'bull' import { TranscodeOptionsType } from '@server/helpers/ffmpeg-utils' -import { addMoveToObjectStorageJob, addTranscodingJob, getTranscodingJobPriority, publishAndFederateIfNeeded } from '@server/lib/video' +import { addTranscodingJob, getTranscodingJobPriority, moveToNextState } from '@server/lib/video' import { getVideoFilePath } from '@server/lib/video-paths' import { UserModel } from '@server/models/user/user' -import { MUser, MUserId, MVideoFullLight, MVideoUUID, MVideoWithFile } from '@server/types/models' +import { MUser, MUserId, MVideo, MVideoFullLight, MVideoWithFile } from '@server/types/models' import { HLSTranscodingPayload, MergeAudioTranscodingPayload, @@ -16,16 +16,15 @@ import { computeResolutionsToTranscode } from '../../../helpers/ffprobe-utils' import { logger } from '../../../helpers/logger' import { CONFIG } from '../../../initializers/config' import { VideoModel } from '../../../models/video/video' -import { federateVideoIfNeeded } from '../../activitypub/videos' -import { Notifier } from '../../notifier' import { generateHlsPlaylistResolution, mergeAudioVideofile, optimizeOriginalVideofile, transcodeNewWebTorrentResolution } from '../../transcoding/video-transcoding' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' -type HandlerFunction = (job: Bull.Job, payload: VideoTranscodingPayload, video: MVideoFullLight, user: MUser) => Promise +type HandlerFunction = (job: Bull.Job, payload: VideoTranscodingPayload, video: MVideoFullLight, user: MUser) => Promise const handlers: { [ id in VideoTranscodingPayload['type'] ]: HandlerFunction } = { 'new-resolution-to-hls': handleHLSJob, @@ -53,10 +52,7 @@ async function processVideoTranscoding (job: Bull.Job) { throw new Error('Cannot find transcoding handler for ' + payload.type) } - const { videoFile } = await handler(job, payload, video, user) - - // Create job to move the new files to object storage if enabled - await 
addMoveToObjectStorageJob(video, videoFile) + await handler(job, payload, video, user) return video } @@ -73,7 +69,7 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide const videoOrStreamingPlaylist = videoFileInput.getVideoOrStreamingPlaylist() const videoInputPath = getVideoFilePath(videoOrStreamingPlaylist, videoFileInput) - const { videoFile } = await generateHlsPlaylistResolution({ + await generateHlsPlaylistResolution({ video, videoInputPath, resolution: payload.resolution, @@ -83,8 +79,6 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide }) await retryTransactionWrapper(onHlsPlaylistGeneration, video, user, payload) - - return { videoFile } } async function handleNewWebTorrentResolutionJob ( @@ -93,27 +87,21 @@ async function handleNewWebTorrentResolutionJob ( video: MVideoFullLight, user: MUserId ) { - const { videoFile } = await transcodeNewWebTorrentResolution(video, payload.resolution, payload.isPortraitMode || false, job) + await transcodeNewWebTorrentResolution(video, payload.resolution, payload.isPortraitMode || false, job) await retryTransactionWrapper(onNewWebTorrentFileResolution, video, user, payload) - - return { videoFile } } async function handleWebTorrentMergeAudioJob (job: Bull.Job, payload: MergeAudioTranscodingPayload, video: MVideoFullLight, user: MUserId) { - const { videoFile } = await mergeAudioVideofile(video, payload.resolution, job) + await mergeAudioVideofile(video, payload.resolution, job) await retryTransactionWrapper(onVideoFileOptimizer, video, payload, 'video', user) - - return { videoFile } } async function handleWebTorrentOptimizeJob (job: Bull.Job, payload: OptimizeTranscodingPayload, video: MVideoFullLight, user: MUserId) { - const { transcodeType, videoFile } = await optimizeOriginalVideofile(video, video.getMaxQualityFile(), job) + const { transcodeType } = await optimizeOriginalVideofile(video, video.getMaxQualityFile(), job) await retryTransactionWrapper(onVideoFileOptimizer, video, payload, transcodeType, user) - - return { videoFile } } // --------------------------------------------------------------------------- @@ -134,10 +122,8 @@ async function onHlsPlaylistGeneration (video: MVideoFullLight, user: MUser, pay await createLowerResolutionsJobs(video, user, payload.resolution, payload.isPortraitMode, 'hls') } - // Publishing will be done by move-to-object-storage if enabled - if (CONFIG.OBJECT_STORAGE.ENABLED) return - - await publishAndFederateIfNeeded(video) + await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscoding') + await moveToNextState(video) } async function onVideoFileOptimizer ( @@ -156,45 +142,35 @@ async function onVideoFileOptimizer ( // Video does not exist anymore if (!videoDatabase) return undefined - let videoPublished = false - // Generate HLS version of the original file - const originalFileHLSPayload = Object.assign({}, payload, { + const originalFileHLSPayload = { + ...payload, + isPortraitMode, resolution: videoDatabase.getMaxQualityFile().resolution, // If we quick transcoded original file, force transcoding for HLS to avoid some weird playback issues copyCodecs: transcodeType !== 'quick-transcode', isMaxQuality: true - }) + } const hasHls = await createHlsJobIfEnabled(user, originalFileHLSPayload) - const hasNewResolutions = await createLowerResolutionsJobs(videoDatabase, user, resolution, isPortraitMode, 'webtorrent') + await VideoJobInfoModel.decrease(videoDatabase.uuid, 'pendingTranscoding') - // Publishing will be done after the 
move-to-object-storage-job if enabled - if (!CONFIG.OBJECT_STORAGE.ENABLED) { - if (!hasHls && !hasNewResolutions) { - // No transcoding to do, it's now published - videoPublished = await videoDatabase.publishIfNeededAndSave(undefined) - } - - await federateVideoIfNeeded(videoDatabase, payload.isNewVideo) - - if (payload.isNewVideo) Notifier.Instance.notifyOnNewVideoIfNeeded(videoDatabase) - if (videoPublished) Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(videoDatabase) + // Move to next state if there are no other resolutions to generate + if (!hasHls && !hasNewResolutions) { + await moveToNextState(videoDatabase) } } async function onNewWebTorrentFileResolution ( - video: MVideoUUID, + video: MVideo, user: MUserId, payload: NewResolutionTranscodingPayload | MergeAudioTranscodingPayload ) { - // Publishing will be done by mvoe-to-object-storage if enabled - if (!CONFIG.OBJECT_STORAGE.ENABLED) { - await publishAndFederateIfNeeded(video) - } + await createHlsJobIfEnabled(user, { ...payload, copyCodecs: true, isMaxQuality: false }) + await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscoding') - await createHlsJobIfEnabled(user, Object.assign({}, payload, { copyCodecs: true, isMaxQuality: false })) + await moveToNextState(video) } // --------------------------------------------------------------------------- diff --git a/server/lib/object-storage.ts b/server/lib/object-storage.ts deleted file mode 100644 index c39133665f0..00000000000 --- a/server/lib/object-storage.ts +++ /dev/null @@ -1,172 +0,0 @@ -import { - CompletedPart, - CompleteMultipartUploadCommand, - CreateMultipartUploadCommand, - DeleteObjectCommand, - DeleteObjectsCommand, - GetObjectCommand, - ListObjectsV2Command, - PutObjectCommand, - S3Client, - UploadPartCommand -} from "@aws-sdk/client-s3" -import { CONFIG } from "@server/initializers/config" -import { logger } from '@server/helpers/logger' -import { createReadStream, createWriteStream, ensureDir, open, close, ReadStream, stat, Stats, remove } from "fs-extra" -import { Readable } from "stream" -import { dirname } from "path" -import { min } from "lodash" -import { pipelinePromise } from "@server/helpers/core-utils" - -type BucketInfo = {BUCKET_NAME: string, PREFIX?: string, BASE_URL?: string} -const ONE_MIB = 1024 * 1024 -const MAX_PUT_SIZE = process.env.NODE_ENV.includes("test") ? 
10 * ONE_MIB : CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART * ONE_MIB - -function getS3Client () { - return new S3Client({ endpoint: CONFIG.OBJECT_STORAGE.ENDPOINT.toString() }) -} - -function getPartSize (stats: Stats) { - if (process.env.NODE_ENV.includes("test")) { - return 10 * ONE_MIB - } - return MAX_PUT_SIZE -} - -async function objectStoragePut (options: {filename: string, content: string | ReadStream, bucketInfo: BucketInfo}) { - const { filename, content, bucketInfo } = options - const key = bucketInfo.PREFIX + filename - const s3Client = getS3Client() - const command = new PutObjectCommand({ - Bucket: bucketInfo.BUCKET_NAME, - Key: key, - Body: content - }) - return s3Client.send(command) -} - -async function multiPartUpload (file: {filename: string, path: string}, stats: Stats, bucketInfo: BucketInfo) { - const { filename, path } = file - const key = bucketInfo.PREFIX + filename - const s3Client = getS3Client() - - const createMultipartCommand = new CreateMultipartUploadCommand({ - Bucket: bucketInfo.BUCKET_NAME, - Key: key - }) - const createResponse = await s3Client.send(createMultipartCommand) - - const fd = await open(path, 'r') - let partNumber = 1 - const parts: CompletedPart[] = [] - const partSize = getPartSize(stats) - for (let start = 0; start < stats.size; start += partSize) { - logger.debug('Uploading part %d of file to %s/%s%s', partNumber, bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) - - // The s3 sdk needs to know the length of the http body beforehand, but doesn't support - // streams with start and end set, so it just tries to stat the file in stream.path. - // This fails for us because we only want to send part of the file. The stream type - // is modified so we can set the byteLength here, which s3 detects because array buffers - // have this field set - const stream: ReadStream & {byteLength: number} = - createReadStream( - path, - { fd: fd, autoClose: false, start: start, end: (start + partSize) - 1 } - ) as ReadStream & {byteLength: number} - // Calculate if the part size is more than what's left over, and in that case use left over bytes for byteLength - stream.byteLength = min([ stats.size - start, partSize ]) - const uploadPartCommand = new UploadPartCommand({ - Bucket: bucketInfo.BUCKET_NAME, - Key: key, - UploadId: createResponse.UploadId, - PartNumber: partNumber, - Body: stream - }) - const uploadResponse = await s3Client.send(uploadPartCommand) - - parts.push({ ETag: uploadResponse.ETag, PartNumber: partNumber }) - partNumber += 1 - } - await close(fd) - - const completeUploadCommand = new CompleteMultipartUploadCommand({ - Bucket: bucketInfo.BUCKET_NAME, - Key: key, - UploadId: createResponse.UploadId, - MultipartUpload: { Parts: parts } - }) - await s3Client.send(completeUploadCommand) - logger.debug('Completed %s/%s%s in %d parts', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename, partNumber - 1) -} - -export async function storeObject (file: {path: string, filename: string}, bucketInfo: BucketInfo) { - logger.debug('Uploading file to %s/%s%s', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) - const stats = await stat(file.path) - // If bigger than 100 MiB we do a multipart upload - if (stats.size > MAX_PUT_SIZE) { - await multiPartUpload(file, stats, bucketInfo) - } else { - const fileStream = createReadStream(file.path) - await objectStoragePut({ filename: file.filename, content: fileStream, bucketInfo }) - } - - logger.debug("Removing %s because it's now on object storage", file.path) - await remove(file.path) -} - 
-export async function writeObjectContents (file: {filename: string, content: string}, bucketInfo: BucketInfo) { - logger.debug('Writing object to %s/%s%s', bucketInfo.BUCKET_NAME, bucketInfo.PREFIX, file.filename) - return objectStoragePut({ filename: file.filename, content: file.content, bucketInfo }) -} - -export async function removeObject (filename: string, bucketInfo: BucketInfo) { - const key = bucketInfo.PREFIX + filename - const s3Client = getS3Client() - const command = new DeleteObjectCommand({ - Bucket: bucketInfo.BUCKET_NAME, - Key: key - }) - return s3Client.send(command) -} - -export async function removePrefix (prefix: string, bucketInfo: BucketInfo) { - const s3Client = getS3Client() - const listCommand = new ListObjectsV2Command({ - Bucket: bucketInfo.BUCKET_NAME, - Prefix: bucketInfo.PREFIX + prefix - }) - - const listedObjects = await s3Client.send(listCommand) - const deleteParams = { - Bucket: bucketInfo.BUCKET_NAME, - Delete: { Objects: [] } - } - for (const object of listedObjects.Contents) { - deleteParams.Delete.Objects.push({ Key: object.Key }) - } - const deleteCommand = new DeleteObjectsCommand(deleteParams) - await s3Client.send(deleteCommand) - - // Repeat if not all objects could be listed at once (limit of 1000?) - if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo) -} - -export function generateObjectStoreUrl (filename: string, bucketInfo: BucketInfo) { - const endpoint = CONFIG.OBJECT_STORAGE.ENDPOINT - const port = endpoint.port ? `:${endpoint.port}` : '' - return `${endpoint.protocol}//${bucketInfo.BUCKET_NAME}.${endpoint.hostname}${port}/${bucketInfo.PREFIX}${filename}` -} - -export async function makeAvailable (options: { filename: string, at: string }, bucketInfo: BucketInfo) { - await ensureDir(dirname(options.at)) - const key = bucketInfo.PREFIX + options.filename - const s3Client = getS3Client() - const command = new GetObjectCommand({ - Bucket: bucketInfo.BUCKET_NAME, - Key: key - }) - const response = await s3Client.send(command) - const file = createWriteStream(options.at) - await pipelinePromise(response.Body as Readable, file) - file.close() -} diff --git a/server/lib/object-storage/index.ts b/server/lib/object-storage/index.ts new file mode 100644 index 00000000000..8b413a40ef0 --- /dev/null +++ b/server/lib/object-storage/index.ts @@ -0,0 +1,3 @@ +export * from './keys' +export * from './urls' +export * from './videos' diff --git a/server/lib/object-storage/keys.ts b/server/lib/object-storage/keys.ts new file mode 100644 index 00000000000..998139964d7 --- /dev/null +++ b/server/lib/object-storage/keys.ts @@ -0,0 +1,19 @@ +import { join } from 'path' +import { MStreamingPlaylist, MVideoUUID } from '@server/types/models' + +function generateHLSObjectStorageKey (playlist: MStreamingPlaylist, video: MVideoUUID, filename?: string) { + const base = playlist.getStringType() + '_' + video.uuid + + if (!filename) return base + + return join(base, filename) +} + +function generateWebTorrentObjectStorageKey (filename: string) { + return filename +} + +export { + generateHLSObjectStorageKey, + generateWebTorrentObjectStorageKey +} diff --git a/server/lib/object-storage/shared/client.ts b/server/lib/object-storage/shared/client.ts new file mode 100644 index 00000000000..7a306410a5c --- /dev/null +++ b/server/lib/object-storage/shared/client.ts @@ -0,0 +1,36 @@ +import { S3Client } from '@aws-sdk/client-s3' +import { logger } from '@server/helpers/logger' +import { CONFIG } from '@server/initializers/config' +import { lTags } from 
'./logger' + +const endpointConfig = CONFIG.OBJECT_STORAGE.ENDPOINT +const endpoint = endpointConfig.startsWith('http://') || endpointConfig.startsWith('https://') + ? CONFIG.OBJECT_STORAGE.ENDPOINT + : 'https://' + CONFIG.OBJECT_STORAGE.ENDPOINT +const endpointParsed = new URL(endpoint) + +let s3Client: S3Client +function getClient () { + if (s3Client) return s3Client + + const OBJECT_STORAGE = CONFIG.OBJECT_STORAGE + + s3Client = new S3Client({ + endpoint, + region: OBJECT_STORAGE.REGION, + credentials: { + accessKeyId: OBJECT_STORAGE.CREDENTIALS.ACCESS_KEY_ID, + secretAccessKey: OBJECT_STORAGE.CREDENTIALS.SECRET_ACCESS_KEY + } + }) + + logger.info('Initialized S3 client %s with region %s.', endpoint, OBJECT_STORAGE.REGION, lTags()) + + return s3Client +} + +export { + endpoint, + endpointParsed, + getClient +} diff --git a/server/lib/object-storage/shared/index.ts b/server/lib/object-storage/shared/index.ts new file mode 100644 index 00000000000..11e10aa9f76 --- /dev/null +++ b/server/lib/object-storage/shared/index.ts @@ -0,0 +1,3 @@ +export * from './client' +export * from './logger' +export * from './object-storage-helpers' diff --git a/server/lib/object-storage/shared/logger.ts b/server/lib/object-storage/shared/logger.ts new file mode 100644 index 00000000000..8ab7cbd71fc --- /dev/null +++ b/server/lib/object-storage/shared/logger.ts @@ -0,0 +1,7 @@ +import { loggerTagsFactory } from '@server/helpers/logger' + +const lTags = loggerTagsFactory('object-storage') + +export { + lTags +} diff --git a/server/lib/object-storage/shared/object-storage-helpers.ts b/server/lib/object-storage/shared/object-storage-helpers.ts new file mode 100644 index 00000000000..513c4afcb00 --- /dev/null +++ b/server/lib/object-storage/shared/object-storage-helpers.ts @@ -0,0 +1,221 @@ +import { close, createReadStream, createWriteStream, ensureDir, open, ReadStream, stat } from 'fs-extra' +import { min } from 'lodash' +import { dirname } from 'path' +import { Readable } from 'stream' +import { + CompletedPart, + CompleteMultipartUploadCommand, + CreateMultipartUploadCommand, + DeleteObjectCommand, + GetObjectCommand, + ListObjectsV2Command, + PutObjectCommand, + UploadPartCommand +} from '@aws-sdk/client-s3' +import { pipelinePromise } from '@server/helpers/core-utils' +import { isArray } from '@server/helpers/custom-validators/misc' +import { logger } from '@server/helpers/logger' +import { CONFIG } from '@server/initializers/config' +import { getPrivateUrl } from '../urls' +import { getClient } from './client' +import { lTags } from './logger' + +type BucketInfo = { + BUCKET_NAME: string + PREFIX?: string +} + +async function storeObject (options: { + inputPath: string + objectStorageKey: string + bucketInfo: BucketInfo +}): Promise { + const { inputPath, objectStorageKey, bucketInfo } = options + + logger.debug('Uploading file %s to %s%s in bucket %s', inputPath, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags()) + + const stats = await stat(inputPath) + + // If bigger than max allowed size we do a multipart upload + if (stats.size > CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART) { + return multiPartUpload({ inputPath, objectStorageKey, bucketInfo }) + } + + const fileStream = createReadStream(inputPath) + return objectStoragePut({ objectStorageKey, content: fileStream, bucketInfo }) +} + +async function removeObject (filename: string, bucketInfo: BucketInfo) { + const command = new DeleteObjectCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: buildKey(filename, bucketInfo) + }) + + return 
getClient().send(command) +} + +async function removePrefix (prefix: string, bucketInfo: BucketInfo) { + const s3Client = getClient() + + const commandPrefix = bucketInfo.PREFIX + prefix + const listCommand = new ListObjectsV2Command({ + Bucket: bucketInfo.BUCKET_NAME, + Prefix: commandPrefix + }) + + const listedObjects = await s3Client.send(listCommand) + + // FIXME: use bulk delete when s3ninja will support this operation + // const deleteParams = { + // Bucket: bucketInfo.BUCKET_NAME, + // Delete: { Objects: [] } + // } + + if (isArray(listedObjects.Contents) !== true) { + const message = `Cannot remove ${commandPrefix} prefix in bucket ${bucketInfo.BUCKET_NAME}: no files listed.` + + logger.error(message, { response: listedObjects, ...lTags() }) + throw new Error(message) + } + + for (const object of listedObjects.Contents) { + const command = new DeleteObjectCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: object.Key + }) + + await s3Client.send(command) + + // FIXME: use bulk delete when s3ninja will support this operation + // deleteParams.Delete.Objects.push({ Key: object.Key }) + } + + // FIXME: use bulk delete when s3ninja will support this operation + // const deleteCommand = new DeleteObjectsCommand(deleteParams) + // await s3Client.send(deleteCommand) + + // Repeat if not all objects could be listed at once (limit of 1000?) + if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo) +} + +async function makeAvailable (options: { filename: string, at: string }, bucketInfo: BucketInfo) { + await ensureDir(dirname(options.at)) + + const command = new GetObjectCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: buildKey(options.filename, bucketInfo) + }) + const response = await getClient().send(command) + + const file = createWriteStream(options.at) + await pipelinePromise(response.Body as Readable, file) + file.close() +} + +function buildKey (key: string, bucketInfo: BucketInfo) { + return bucketInfo.PREFIX + key +} + +// --------------------------------------------------------------------------- + +export { + BucketInfo, + buildKey, + storeObject, + removeObject, + removePrefix, + makeAvailable +} + +// --------------------------------------------------------------------------- + +async function objectStoragePut (options: { + objectStorageKey: string + content: ReadStream + bucketInfo: BucketInfo +}) { + const { objectStorageKey, content, bucketInfo } = options + + const command = new PutObjectCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: buildKey(objectStorageKey, bucketInfo), + Body: content + }) + + await getClient().send(command) + + return getPrivateUrl(bucketInfo, objectStorageKey) +} + +async function multiPartUpload (options: { + inputPath: string + objectStorageKey: string + bucketInfo: BucketInfo +}) { + const { objectStorageKey, inputPath, bucketInfo } = options + + const key = buildKey(objectStorageKey, bucketInfo) + const s3Client = getClient() + + const statResult = await stat(inputPath) + + const createMultipartCommand = new CreateMultipartUploadCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: key + }) + const createResponse = await s3Client.send(createMultipartCommand) + + const fd = await open(inputPath, 'r') + let partNumber = 1 + const parts: CompletedPart[] = [] + const partSize = CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART + + for (let start = 0; start < statResult.size; start += partSize) { + logger.debug( + 'Uploading part %d of file to %s%s in bucket %s', + partNumber, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags() + 
) + + // The s3 sdk needs to know the length of the http body beforehand, but doesn't support + // streams with start and end set, so it just tries to stat the file in stream.path. + // This fails for us because we only want to send part of the file. The stream type + // is modified so we can set the byteLength here, which s3 detects because array buffers + // have this field set + const stream: ReadStream & { byteLength: number } = + createReadStream( + inputPath, + { fd, autoClose: false, start, end: (start + partSize) - 1 } + ) as ReadStream & { byteLength: number } + + // Calculate if the part size is more than what's left over, and in that case use left over bytes for byteLength + stream.byteLength = min([ statResult.size - start, partSize ]) + + const uploadPartCommand = new UploadPartCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: key, + UploadId: createResponse.UploadId, + PartNumber: partNumber, + Body: stream + }) + const uploadResponse = await s3Client.send(uploadPartCommand) + + parts.push({ ETag: uploadResponse.ETag, PartNumber: partNumber }) + partNumber += 1 + } + await close(fd) + + const completeUploadCommand = new CompleteMultipartUploadCommand({ + Bucket: bucketInfo.BUCKET_NAME, + Key: objectStorageKey, + UploadId: createResponse.UploadId, + MultipartUpload: { Parts: parts } + }) + await s3Client.send(completeUploadCommand) + + logger.debug( + 'Completed %s%s in bucket %s in %d parts', + bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, partNumber - 1, lTags() + ) + + return getPrivateUrl(bucketInfo, objectStorageKey) +} diff --git a/server/lib/object-storage/urls.ts b/server/lib/object-storage/urls.ts new file mode 100644 index 00000000000..a9d8516ec0f --- /dev/null +++ b/server/lib/object-storage/urls.ts @@ -0,0 +1,40 @@ +import { CONFIG } from '@server/initializers/config' +import { BucketInfo, buildKey, endpointParsed } from './shared' + +function getPrivateUrl (config: BucketInfo, keyWithoutPrefix: string) { + return getBaseUrl(config) + buildKey(keyWithoutPrefix, config) +} + +function getWebTorrentPublicFileUrl (fileUrl: string) { + const baseUrl = CONFIG.OBJECT_STORAGE.VIDEOS.BASE_URL + if (!baseUrl) return fileUrl + + return replaceByBaseUrl(fileUrl, baseUrl) +} + +function getHLSPublicFileUrl (fileUrl: string) { + const baseUrl = CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BASE_URL + if (!baseUrl) return fileUrl + + return replaceByBaseUrl(fileUrl, baseUrl) +} + +export { + getPrivateUrl, + getWebTorrentPublicFileUrl, + replaceByBaseUrl, + getHLSPublicFileUrl +} + +// --------------------------------------------------------------------------- + +function getBaseUrl (bucketInfo: BucketInfo, baseUrl?: string) { + if (baseUrl) return baseUrl + + return `${endpointParsed.protocol}//${bucketInfo.BUCKET_NAME}.${endpointParsed.host}/` +} + +const regex = new RegExp('https?://[^/]+') +function replaceByBaseUrl (fileUrl: string, baseUrl: string) { + return fileUrl.replace(regex, baseUrl) +} diff --git a/server/lib/object-storage/videos.ts b/server/lib/object-storage/videos.ts new file mode 100644 index 00000000000..6e5535db0c3 --- /dev/null +++ b/server/lib/object-storage/videos.ts @@ -0,0 +1,39 @@ +import { join } from 'path' +import { CONFIG } from '@server/initializers/config' +import { MStreamingPlaylist, MVideoFile, MVideoUUID } from '@server/types/models' +import { getHLSDirectory } from '../video-paths' +import { generateHLSObjectStorageKey, generateWebTorrentObjectStorageKey } from './keys' +import { removeObject, removePrefix, storeObject } from 
'./shared' + +function storeHLSFile (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string) { + const baseHlsDirectory = getHLSDirectory(video) + + return storeObject({ + inputPath: join(baseHlsDirectory, filename), + objectStorageKey: generateHLSObjectStorageKey(playlist, video, filename), + bucketInfo: CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS + }) +} + +function storeWebTorrentFile (filename: string) { + return storeObject({ + inputPath: join(CONFIG.STORAGE.VIDEOS_DIR, filename), + objectStorageKey: generateWebTorrentObjectStorageKey(filename), + bucketInfo: CONFIG.OBJECT_STORAGE.VIDEOS + }) +} + +function removeHLSObjectStorage (playlist: MStreamingPlaylist, video: MVideoUUID) { + return removePrefix(generateHLSObjectStorageKey(playlist, video), CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) +} + +function removeWebTorrentObjectStorage (videoFile: MVideoFile) { + return removeObject(generateWebTorrentObjectStorageKey(videoFile.filename), CONFIG.OBJECT_STORAGE.VIDEOS) +} + +export { + storeWebTorrentFile, + storeHLSFile, + removeHLSObjectStorage, + removeWebTorrentObjectStorage +} diff --git a/server/lib/video-paths.ts b/server/lib/video-paths.ts index 7a7835056c8..3bff6c0bd55 100644 --- a/server/lib/video-paths.ts +++ b/server/lib/video-paths.ts @@ -1,12 +1,12 @@ +import { stat } from 'fs-extra' import { join } from 'path' +import { buildUUID } from '@server/helpers/uuid' import { extractVideo } from '@server/helpers/video' import { CONFIG } from '@server/initializers/config' import { HLS_REDUNDANCY_DIRECTORY, HLS_STREAMING_PLAYLIST_DIRECTORY, STATIC_PATHS, WEBSERVER } from '@server/initializers/constants' import { isStreamingPlaylist, MStreamingPlaylist, MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models' -import { buildUUID } from '@server/helpers/uuid' import { removeFragmentedMP4Ext } from '@shared/core-utils' -import { makeAvailable } from './object-storage' -import { stat } from 'fs-extra' +import { makeAvailable } from './object-storage/shared/object-storage-helpers' // ################## Video file name ################## diff --git a/server/lib/video.ts b/server/lib/video.ts index e2252a7aaaa..722c5901218 100644 --- a/server/lib/video.ts +++ b/server/lib/video.ts @@ -1,18 +1,19 @@ import { UploadFiles } from 'express' import { Transaction } from 'sequelize/types' +import { logger } from '@server/helpers/logger' +import { CONFIG } from '@server/initializers/config' import { DEFAULT_AUDIO_RESOLUTION, JOB_PRIORITY } from '@server/initializers/constants' import { sequelizeTypescript } from '@server/initializers/database' import { TagModel } from '@server/models/video/tag' import { VideoModel } from '@server/models/video/video' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' import { FilteredModelAttributes } from '@server/types' import { MThumbnail, MUserId, MVideoFile, MVideoTag, MVideoThumbnail, MVideoUUID } from '@server/types/models' -import { ThumbnailType, VideoCreate, VideoPrivacy, VideoTranscodingPayload } from '@shared/models' +import { ThumbnailType, VideoCreate, VideoPrivacy, VideoState, VideoTranscodingPayload } from '@shared/models' import { federateVideoIfNeeded } from './activitypub/videos' import { CreateJobOptions, JobQueue } from './job-queue/job-queue' import { Notifier } from './notifier' import { updateVideoMiniatureFromExisting } from './thumbnail' -import { CONFIG } from '@server/initializers/config' -import { VideoJobInfoModel } from '@server/models/video/video-job-info' function 
buildLocalVideoFromReq (videoInfo: VideoCreate, channelId: number): FilteredModelAttributes { return { @@ -84,27 +85,53 @@ async function setVideoTags (options: { video.Tags = tagInstances } -async function publishAndFederateIfNeeded (video: MVideoUUID, wasLive = false) { - const result = await sequelizeTypescript.transaction(async t => { +function moveToNextState (video: MVideoUUID, isNewVideo = true) { + return sequelizeTypescript.transaction(async t => { // Maybe the video changed in database, refresh it const videoDatabase = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.uuid, t) // Video does not exist anymore if (!videoDatabase) return undefined - // We transcoded the video file in another format, now we can publish it - const videoPublished = await videoDatabase.publishIfNeededAndSave(t) + const previousState = videoDatabase.state - // If the video was not published, we consider it is a new one for other instances - // Live videos are always federated, so it's not a new video - await federateVideoIfNeeded(videoDatabase, !wasLive && videoPublished, t) + // Already in its final state + if (previousState === VideoState.PUBLISHED) return - return { videoDatabase, videoPublished } - }) + const newState = buildNextVideoState(previousState) - if (result?.videoPublished) { - Notifier.Instance.notifyOnNewVideoIfNeeded(result.videoDatabase) - Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(result.videoDatabase) - } + if (newState === VideoState.PUBLISHED) { + logger.info('Publishing video %s.', video.uuid, { tags: [ video.uuid ] }) + + await videoDatabase.setNewState(newState, t) + + // If the video was not published, we consider it is a new one for other instances + // Live videos are always federated, so it's not a new video + await federateVideoIfNeeded(videoDatabase, isNewVideo, t) + + Notifier.Instance.notifyOnNewVideoIfNeeded(videoDatabase) + + if (previousState === VideoState.TO_TRANSCODE) { + Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(videoDatabase) + } + + return + } + + if (newState === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { + const videoJobInfo = await VideoJobInfoModel.load(videoDatabase.id, t) + const pendingTranscoding = videoJobInfo?.pendingTranscoding || 0 + + // We want to wait all transcoding jobs before moving the video on an external storage + if (pendingTranscoding !== 0) return + + await videoDatabase.setNewState(newState, t) + + logger.info('Creating external storage move job for video %s.', video.uuid, { tags: [ video.uuid ] }) + + addMoveToObjectStorageJob(video) + .catch(err => logger.error('Cannot add move to object storage job', { err })) + } + }) } async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoFile, user: MUserId) { @@ -133,24 +160,16 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF } async function addTranscodingJob (payload: VideoTranscodingPayload, options: CreateJobOptions) { - // This value is decreased when the move job is finished in ./handlers/move-to-object-storage.ts - // Because every transcode job starts a move job for the transcoded file, the value will only reach - // 0 again when all transcode jobs are finished and the last move job is running - // If object storage support is not enabled all the pendingMove values stay at the amount of transcode - // jobs that were started for that video. 
- await VideoJobInfoModel.increaseOrCreatePendingMove(payload.videoUUID) + await VideoJobInfoModel.increaseOrCreate(payload.videoUUID, 'pendingTranscoding') return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: payload }, options) } -function addMoveToObjectStorageJob (video: MVideoUUID, videoFile: MVideoFile) { - if (CONFIG.OBJECT_STORAGE.ENABLED) { - const dataInput = { - videoUUID: video.uuid, - videoFileId: videoFile.id - } - return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload: dataInput }) - } +async function addMoveToObjectStorageJob (video: MVideoUUID) { + await VideoJobInfoModel.increaseOrCreate(video.uuid, 'pendingMove') + + const dataInput = { videoUUID: video.uuid } + return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload: dataInput }) } async function getTranscodingJobPriority (user: MUserId) { @@ -162,14 +181,38 @@ async function getTranscodingJobPriority (user: MUserId) { return JOB_PRIORITY.TRANSCODING + videoUploadedByUser } +function buildNextVideoState (currentState?: VideoState) { + if (currentState === VideoState.PUBLISHED) { + throw new Error('Video is already in its final state') + } + + if ( + currentState !== VideoState.TO_TRANSCODE && + currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE && + CONFIG.TRANSCODING.ENABLED + ) { + return VideoState.TO_TRANSCODE + } + + if ( + currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE && + CONFIG.OBJECT_STORAGE.ENABLED + ) { + return VideoState.TO_MOVE_TO_EXTERNAL_STORAGE + } + + return VideoState.PUBLISHED +} + // --------------------------------------------------------------------------- export { buildLocalVideoFromReq, - publishAndFederateIfNeeded, buildVideoThumbnailsFromReq, setVideoTags, + moveToNextState, addOptimizeOrMergeAudioJob, + buildNextVideoState, addTranscodingJob, addMoveToObjectStorageJob, getTranscodingJobPriority diff --git a/server/models/video/video-file.ts b/server/models/video/video-file.ts index a1c678b4d0d..ccfbc817d4e 100644 --- a/server/models/video/video-file.ts +++ b/server/models/video/video-file.ts @@ -23,9 +23,11 @@ import validator from 'validator' import { buildRemoteVideoBaseUrl } from '@server/helpers/activitypub' import { logger } from '@server/helpers/logger' import { extractVideo } from '@server/helpers/video' +import { getHLSPublicFileUrl, getWebTorrentPublicFileUrl } from '@server/lib/object-storage' import { getTorrentFilePath } from '@server/lib/video-paths' -import { MStreamingPlaylistVideo, MVideo, MVideoWithHost, VideoStorageType } from '@server/types/models' +import { MStreamingPlaylistVideo, MVideo, MVideoWithHost } from '@server/types/models' import { AttributesOnly } from '@shared/core-utils' +import { VideoStorage } from '@shared/models' import { isVideoFileExtnameValid, isVideoFileInfoHashValid, @@ -48,7 +50,6 @@ import { doesExist } from '../shared' import { parseAggregateResult, throwIfNotValid } from '../utils' import { VideoModel } from './video' import { VideoStreamingPlaylistModel } from './video-streaming-playlist' -import { CONFIG } from '@server/initializers/config' export enum ScopeNames { WITH_VIDEO = 'WITH_VIDEO', @@ -216,9 +217,9 @@ export class VideoFileModel extends Model videoId: number @AllowNull(false) - @Default(VideoStorageType.LOCAL) + @Default(VideoStorage.LOCAL) @Column - storage: VideoStorageType + storage: VideoStorage @BelongsTo(() => VideoModel, { foreignKey: { @@ -279,7 +280,7 @@ export class VideoFileModel extends Model static async 
doesOwnedWebTorrentVideoFileExist (filename: string) { const query = 'SELECT 1 FROM "videoFile" INNER JOIN "video" ON "video"."id" = "videoFile"."videoId" AND "video"."remote" IS FALSE ' + - `WHERE "filename" = $filename AND "storage" = ${VideoStorageType.LOCAL} LIMIT 1` + `WHERE "filename" = $filename AND "storage" = ${VideoStorage.LOCAL} LIMIT 1` return doesExist(query, { filename }) } @@ -456,22 +457,20 @@ export class VideoFileModel extends Model return !!this.videoStreamingPlaylistId } - generateObjectUrl (video: MVideo) { - if (!this.isHLS() && CONFIG.OBJECT_STORAGE.VIDEOS.BASE_URL) { - return CONFIG.OBJECT_STORAGE.VIDEOS.BASE_URL + this.filename + getObjectStorageUrl () { + if (this.isHLS()) { + return getHLSPublicFileUrl(this.fileUrl) } - if (this.isHLS() && CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BASE_URL) { - return CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BASE_URL + this.filename - } - return this.fileUrl + + return getWebTorrentPublicFileUrl(this.fileUrl) } getFileUrl (video: MVideo) { - if (this.storage === VideoStorageType.OBJECT_STORAGE) { - return this.generateObjectUrl(video) + if (this.storage === VideoStorage.OBJECT_STORAGE) { + return this.getObjectStorageUrl() } - if (!this.Video) this.Video = video as VideoModel + if (!this.Video) this.Video = video as VideoModel if (video.isOwned()) return WEBSERVER.URL + this.getFileStaticPath(video) return this.fileUrl @@ -484,9 +483,6 @@ export class VideoFileModel extends Model } getFileDownloadUrl (video: MVideoWithHost) { - if (this.storage === VideoStorageType.OBJECT_STORAGE) { - return this.generateObjectUrl(video) - } const path = this.isHLS() ? join(STATIC_DOWNLOAD_PATHS.HLS_VIDEOS, `${video.uuid}-${this.resolution}-fragmented${this.extname}`) : join(STATIC_DOWNLOAD_PATHS.VIDEOS, `${video.uuid}-${this.resolution}${this.extname}`) diff --git a/server/models/video/video-job-info.ts b/server/models/video/video-job-info.ts index 3cb266aa5a7..766695b22b1 100644 --- a/server/models/video/video-job-info.ts +++ b/server/models/video/video-job-info.ts @@ -1,19 +1,7 @@ -import { AttributesOnly } from "@shared/core-utils" -import { - AllowNull, - BelongsTo, - Column, - CreatedAt, - Default, - ForeignKey, - IsInt, - Model, - Table, - Unique, - UpdatedAt -} from "sequelize-typescript" -import { Op, QueryTypes } from "sequelize" -import { VideoModel } from "./video" +import { Op, QueryTypes, Transaction } from 'sequelize' +import { AllowNull, BelongsTo, Column, CreatedAt, Default, ForeignKey, IsInt, Model, Table, Unique, UpdatedAt } from 'sequelize-typescript' +import { AttributesOnly } from '@shared/core-utils' +import { VideoModel } from './video' @Table({ tableName: 'videoJobInfo', @@ -42,6 +30,12 @@ export class VideoJobInfoModel extends Model VideoModel) @Unique @Column @@ -55,11 +49,19 @@ export class VideoJobInfoModel extends Model { + static load (videoId: number, transaction: Transaction) { + const where = { + videoId + } + + return VideoJobInfoModel.findOne({ where, transaction }) + } + + static async increaseOrCreate (videoUUID: string, column: 'pendingMove' | 'pendingTranscoding'): Promise { const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } } - const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{pendingMove: number}>(` - INSERT INTO "videoJobInfo" ("videoId", "pendingMove", "createdAt", "updatedAt") + const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{ pendingMove: number }>(` + INSERT INTO "videoJobInfo" ("videoId", "${column}", "createdAt", "updatedAt") 
SELECT "video"."id" AS "videoId", 1, NOW(), NOW() FROM @@ -68,29 +70,29 @@ export class VideoJobInfoModel extends Model { + static async decrease (videoUUID: string, column: 'pendingMove' | 'pendingTranscoding'): Promise { const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } } - const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{pendingMove: number}>(` + const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{ pendingMove: number }>(` UPDATE "videoJobInfo" SET - "pendingMove" = "videoJobInfo"."pendingMove" - 1, + "${column}" = "videoJobInfo"."${column}" - 1, "updatedAt" = NOW() FROM "video" WHERE "video"."id" = "videoJobInfo"."videoId" AND "video"."uuid" = $videoUUID RETURNING - "pendingMove"; + "${column}"; `, options) return pendingMove diff --git a/server/models/video/video-streaming-playlist.ts b/server/models/video/video-streaming-playlist.ts index 1c440fe1f2d..66277d396c7 100644 --- a/server/models/video/video-streaming-playlist.ts +++ b/server/models/video/video-streaming-playlist.ts @@ -15,9 +15,11 @@ import { Table, UpdatedAt } from 'sequelize-typescript' +import { getHLSPublicFileUrl } from '@server/lib/object-storage' import { VideoFileModel } from '@server/models/video/video-file' -import { MStreamingPlaylist, MVideo, VideoStorageType } from '@server/types/models' +import { MStreamingPlaylist, MVideo } from '@server/types/models' import { AttributesOnly } from '@shared/core-utils' +import { VideoStorage } from '@shared/models' import { VideoStreamingPlaylistType } from '../../../shared/models/videos/video-streaming-playlist.type' import { sha1 } from '../../helpers/core-utils' import { isActivityPubUrlValid } from '../../helpers/custom-validators/activitypub/misc' @@ -35,7 +37,6 @@ import { VideoRedundancyModel } from '../redundancy/video-redundancy' import { doesExist } from '../shared' import { throwIfNotValid } from '../utils' import { VideoModel } from './video' -import { CONFIG } from '@server/initializers/config' @Table({ tableName: 'videoStreamingPlaylist', @@ -96,9 +97,9 @@ export class VideoStreamingPlaylistModel extends Model VideoModel, { foreignKey: { @@ -204,24 +205,20 @@ export class VideoStreamingPlaylistModel extends Model>> { const promises: Promise[] = [ remove(filePath) ] if (!isRedundancy) promises.push(videoFile.removeTorrent()) - if (videoFile.storage === VideoStorageType.OBJECT_STORAGE) { - promises.push(removeObject(videoFile.filename, CONFIG.OBJECT_STORAGE.VIDEOS)) + if (videoFile.storage === VideoStorage.OBJECT_STORAGE) { + promises.push(removeWebTorrentObjectStorage(videoFile)) } return Promise.all(promises) @@ -1701,9 +1700,6 @@ export class VideoModel extends Model>> { const directoryPath = getHLSDirectory(this, isRedundancy) await remove(directoryPath) - if (streamingPlaylist.storage === VideoStorageType.OBJECT_STORAGE) { - await removePrefix(join(streamingPlaylist.getStringType(), this.uuid), CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) - } if (isRedundancy !== true) { const streamingPlaylistWithFiles = streamingPlaylist as MStreamingPlaylistFilesVideo @@ -1717,6 +1713,10 @@ export class VideoModel extends Model>> { await Promise.all( streamingPlaylistWithFiles.VideoFiles.map(file => file.removeTorrent()) ) + + if (streamingPlaylist.storage === VideoStorage.OBJECT_STORAGE) { + await removeHLSObjectStorage(streamingPlaylist, this) + } } } @@ -1760,16 +1760,16 @@ export class VideoModel extends Model>> { this.privacy === VideoPrivacy.INTERNAL } - async publishIfNeededAndSave (t: 
Transaction) { - if (this.state !== VideoState.PUBLISHED) { - this.state = VideoState.PUBLISHED - this.publishedAt = new Date() - await this.save({ transaction: t }) + async setNewState (newState: VideoState, transaction: Transaction) { + if (this.state === newState) throw new Error('Cannot use same state ' + newState) - return true + this.state = newState + + if (this.state === VideoState.PUBLISHED) { + this.publishedAt = new Date() } - return false + await this.save({ transaction }) } getBandwidthBits (videoFile: MVideoFile) { diff --git a/server/tests/api/index.ts b/server/tests/api/index.ts index b62e2f5f756..19301c0b936 100644 --- a/server/tests/api/index.ts +++ b/server/tests/api/index.ts @@ -2,6 +2,7 @@ import './activitypub' import './check-params' import './moderation' +import './object-storage' import './notifications' import './redundancy' import './search' diff --git a/server/tests/api/object-storage/index.ts b/server/tests/api/object-storage/index.ts new file mode 100644 index 00000000000..e29a9b7670f --- /dev/null +++ b/server/tests/api/object-storage/index.ts @@ -0,0 +1 @@ +export * from './videos' diff --git a/server/tests/api/object-storage/videos.ts b/server/tests/api/object-storage/videos.ts new file mode 100644 index 00000000000..847b7283d4d --- /dev/null +++ b/server/tests/api/object-storage/videos.ts @@ -0,0 +1,273 @@ +/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ + +import 'mocha' +import * as chai from 'chai' +import { + cleanupTests, + createMultipleServers, + doubleFollow, + expectStartWith, + makeRawRequest, + MockObjectStorage, + PeerTubeServer, + setAccessTokensToServers, + waitJobs, + webtorrentAdd +} from '@shared/extra-utils' +import { HttpStatusCode, VideoDetails } from '@shared/models' + +const expect = chai.expect + +async function checkFiles (options: { + video: VideoDetails + + mockObjectStorage: MockObjectStorage + + playlistBucket: string + playlistPrefix?: string + baseMockUrl?: string + + webtorrentBucket: string + webtorrentPrefix?: string +}) { + const { + mockObjectStorage, + video, + playlistBucket, + webtorrentBucket, + baseMockUrl, + playlistPrefix, + webtorrentPrefix + } = options + + let allFiles = video.files + + for (const file of video.files) { + const baseUrl = baseMockUrl + ? `${baseMockUrl}/${webtorrentBucket}/` + : `http://${webtorrentBucket}.${mockObjectStorage.getEndpointHost()}/` + + const prefix = webtorrentPrefix || '' + const start = baseUrl + prefix + + expectStartWith(file.fileUrl, start) + + const res = await makeRawRequest(file.fileDownloadUrl, HttpStatusCode.FOUND_302) + const location = res.headers['location'] + expectStartWith(location, start) + + await makeRawRequest(location, HttpStatusCode.OK_200) + } + + const hls = video.streamingPlaylists[0] + + if (hls) { + allFiles = allFiles.concat(hls.files) + + const baseUrl = baseMockUrl + ? 
`${baseMockUrl}/${playlistBucket}/` + : `http://${playlistBucket}.${mockObjectStorage.getEndpointHost()}/` + + const prefix = playlistPrefix || '' + const start = baseUrl + prefix + + expectStartWith(hls.playlistUrl, start) + expectStartWith(hls.segmentsSha256Url, start) + + await makeRawRequest(hls.playlistUrl, HttpStatusCode.OK_200) + const resSha = await makeRawRequest(hls.segmentsSha256Url, HttpStatusCode.OK_200) + expect(JSON.stringify(resSha.body)).to.not.throw + + for (const file of hls.files) { + expectStartWith(file.fileUrl, start) + + const res = await makeRawRequest(file.fileDownloadUrl, HttpStatusCode.FOUND_302) + const location = res.headers['location'] + expectStartWith(location, start) + + await makeRawRequest(location, HttpStatusCode.OK_200) + } + } + + for (const file of allFiles) { + const torrent = await webtorrentAdd(file.magnetUri, true) + + expect(torrent.files).to.be.an('array') + expect(torrent.files.length).to.equal(1) + expect(torrent.files[0].path).to.exist.and.to.not.equal('') + + const res = await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) + expect(res.body).to.have.length.above(100) + } + + return allFiles.map(f => f.fileUrl) +} + +function runTestSuite (options: { + playlistBucket: string + playlistPrefix?: string + + webtorrentBucket: string + webtorrentPrefix?: string + + useMockBaseUrl?: boolean + + maxUploadPart?: string +}) { + const mockObjectStorage = new MockObjectStorage() + let baseMockUrl: string + + let servers: PeerTubeServer[] + + let keptUrls: string[] = [] + + const uuidsToDelete: string[] = [] + let deletedUrls: string[] = [] + + before(async function () { + this.timeout(120000) + + const port = await mockObjectStorage.initialize() + baseMockUrl = options.useMockBaseUrl ? `http://localhost:${port}` : undefined + + await mockObjectStorage.createBucket(options.playlistBucket) + await mockObjectStorage.createBucket(options.webtorrentBucket) + + const config = { + object_storage: { + enabled: true, + endpoint: 'http://' + mockObjectStorage.getEndpointHost(), + region: mockObjectStorage.getRegion(), + + credentials: mockObjectStorage.getCrendentialsConfig(), + + max_upload_part: options.maxUploadPart || '2MB', + + streaming_playlists: { + bucket_name: options.playlistBucket, + prefix: options.playlistPrefix, + base_url: baseMockUrl + ? `${baseMockUrl}/${options.playlistBucket}` + : undefined + }, + + videos: { + bucket_name: options.webtorrentBucket, + prefix: options.webtorrentPrefix, + base_url: baseMockUrl + ? 
`${baseMockUrl}/${options.webtorrentBucket}` + : undefined + } + } + } + + servers = await createMultipleServers(2, config) + + await setAccessTokensToServers(servers) + await doubleFollow(servers[0], servers[1]) + + for (const server of servers) { + const { uuid } = await server.videos.quickUpload({ name: 'video to keep' }) + await waitJobs(servers) + + const files = await server.videos.listFiles({ id: uuid }) + keptUrls = keptUrls.concat(files.map(f => f.fileUrl)) + } + }) + + it('Should upload a video and move it to the object storage without transcoding', async function () { + this.timeout(20000) + + const { uuid } = await servers[0].videos.quickUpload({ name: 'video 1' }) + uuidsToDelete.push(uuid) + + await waitJobs(servers) + + for (const server of servers) { + const video = await server.videos.get({ id: uuid }) + const files = await checkFiles({ ...options, mockObjectStorage, video, baseMockUrl }) + + deletedUrls = deletedUrls.concat(files) + } + }) + + it('Should upload a video and move it to the object storage with transcoding', async function () { + this.timeout(40000) + + const { uuid } = await servers[1].videos.quickUpload({ name: 'video 2' }) + uuidsToDelete.push(uuid) + + await waitJobs(servers) + + for (const server of servers) { + const video = await server.videos.get({ id: uuid }) + const files = await checkFiles({ ...options, mockObjectStorage, video, baseMockUrl }) + + deletedUrls = deletedUrls.concat(files) + } + }) + + it('Should correctly delete the files', async function () { + await servers[0].videos.remove({ id: uuidsToDelete[0] }) + await servers[1].videos.remove({ id: uuidsToDelete[1] }) + + await waitJobs(servers) + + for (const url of deletedUrls) { + await makeRawRequest(url, HttpStatusCode.NOT_FOUND_404) + } + }) + + it('Should have kept other files', async function () { + for (const url of keptUrls) { + await makeRawRequest(url, HttpStatusCode.OK_200) + } + }) + + after(async function () { + mockObjectStorage.terminate() + + await cleanupTests(servers) + }) +} + +describe('Object storage', function () { + + describe('Test simple object storage', function () { + runTestSuite({ + playlistBucket: 'streaming-playlists', + webtorrentBucket: 'videos' + }) + }) + + describe('Test object storage with prefix', function () { + runTestSuite({ + playlistBucket: 'mybucket', + webtorrentBucket: 'mybucket', + + playlistPrefix: 'streaming-playlists_', + webtorrentPrefix: 'webtorrent_' + }) + }) + + describe('Test object storage with prefix and base URL', function () { + runTestSuite({ + playlistBucket: 'mybucket', + webtorrentBucket: 'mybucket', + + playlistPrefix: 'streaming-playlists_', + webtorrentPrefix: 'webtorrent_', + + useMockBaseUrl: true + }) + }) + + describe('Test object storage with small upload part', function () { + runTestSuite({ + playlistBucket: 'streaming-playlists', + webtorrentBucket: 'videos', + + maxUploadPart: '5KB' + }) + }) +}) diff --git a/server/types/models/video/video.ts b/server/types/models/video/video.ts index 3ca3db45a70..16ddaf740e8 100644 --- a/server/types/models/video/video.ts +++ b/server/types/models/video/video.ts @@ -219,8 +219,3 @@ export type MVideoFormattableDetails = Use<'VideoStreamingPlaylists', MStreamingPlaylistRedundanciesOpt[]> & Use<'VideoFiles', MVideoFileRedundanciesOpt[]> & PickWithOpt - -export enum VideoStorageType { - LOCAL, - OBJECT_STORAGE, -} diff --git a/shared/extra-utils/miscs/checks.ts b/shared/extra-utils/miscs/checks.ts index 7fc92f804e5..aa2c8e8fa47 100644 --- a/shared/extra-utils/miscs/checks.ts +++ 
b/shared/extra-utils/miscs/checks.ts @@ -16,6 +16,10 @@ function dateIsValid (dateString: string, interval = 300000) { return Math.abs(now.getTime() - dateToCheck.getTime()) <= interval } +function expectStartWith (str: string, start: string) { + expect(str.startsWith(start), `${str} does not start with ${start}`).to.be.true +} + async function testImage (url: string, imageName: string, imagePath: string, extension = '.jpg') { const res = await makeGetRequest({ url, @@ -42,5 +46,6 @@ async function testFileExistsOrNot (server: PeerTubeServer, directory: string, f export { dateIsValid, testImage, - testFileExistsOrNot + testFileExistsOrNot, + expectStartWith } diff --git a/shared/extra-utils/mock-servers/index.ts b/shared/extra-utils/mock-servers/index.ts index 0ec07f68518..93c00c78827 100644 --- a/shared/extra-utils/mock-servers/index.ts +++ b/shared/extra-utils/mock-servers/index.ts @@ -2,3 +2,4 @@ export * from './mock-email' export * from './mock-instances-index' export * from './mock-joinpeertube-versions' export * from './mock-plugin-blocklist' +export * from './mock-object-storage' diff --git a/shared/extra-utils/mock-servers/mock-object-storage.ts b/shared/extra-utils/mock-servers/mock-object-storage.ts new file mode 100644 index 00000000000..a6a52d87863 --- /dev/null +++ b/shared/extra-utils/mock-servers/mock-object-storage.ts @@ -0,0 +1,78 @@ +import * as express from 'express' +import got, { RequestError } from 'got' +import { Server } from 'http' +import { pipeline } from 'stream' +import { randomInt } from '@shared/core-utils' +import { HttpStatusCode } from '@shared/models' +import { makePostBodyRequest } from '../requests' + +export class MockObjectStorage { + private server: Server + + initialize () { + return new Promise(res => { + const app = express() + + app.get('/:bucketName/:path(*)', (req: express.Request, res: express.Response, next: express.NextFunction) => { + const url = `http://${req.params.bucketName}.${this.getEndpointHost()}/${req.params.path}` + + if (process.env.DEBUG) { + console.log('Receiving request on mocked server %s.', req.url) + console.log('Proxifying request to %s', url) + } + + return pipeline( + got.stream(url, { throwHttpErrors: false }), + res, + (err: RequestError) => { + if (!err) return + + console.error('Pipeline failed.', err) + } + ) + }) + + const port = 42301 + randomInt(1, 100) + this.server = app.listen(port, () => res(port)) + }) + } + + getCrendentialsConfig () { + return { + access_key_id: 'AKIAIOSFODNN7EXAMPLE', + secret_access_key: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' + } + } + + getEndpointHost () { + return 'localhost:9444' + } + + getRegion () { + return 'us-east-1' + } + + async createBucket (name: string) { + await makePostBodyRequest({ + url: this.getEndpointHost(), + path: '/ui/' + name + '?delete', + expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 + }) + + await makePostBodyRequest({ + url: this.getEndpointHost(), + path: '/ui/' + name + '?create', + expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 + }) + + await makePostBodyRequest({ + url: this.getEndpointHost(), + path: '/ui/' + name + '?make-public', + expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 + }) + } + + terminate () { + if (this.server) this.server.close() + } +} diff --git a/shared/extra-utils/server/servers.ts b/shared/extra-utils/server/servers.ts index f0622feb067..21ab9405b59 100644 --- a/shared/extra-utils/server/servers.ts +++ b/shared/extra-utils/server/servers.ts @@ -10,11 +10,11 @@ async function createSingleServer 
(serverNumber: number, configOverride?: Object return server } -function createMultipleServers (totalServers: number, configOverride?: Object) { +function createMultipleServers (totalServers: number, configOverride?: Object, options: RunServerOptions = {}) { const serverPromises: Promise[] = [] for (let i = 1; i <= totalServers; i++) { - serverPromises.push(createSingleServer(i, configOverride)) + serverPromises.push(createSingleServer(i, configOverride, options)) } return Promise.all(serverPromises) diff --git a/shared/extra-utils/videos/videos-command.ts b/shared/extra-utils/videos/videos-command.ts index 33725bfdcce..d35339c8d47 100644 --- a/shared/extra-utils/videos/videos-command.ts +++ b/shared/extra-utils/videos/videos-command.ts @@ -188,6 +188,17 @@ export class VideosCommand extends AbstractCommand { return id } + async listFiles (options: OverrideCommandOptions & { + id: number | string + }) { + const video = await this.get(options) + + const files = video.files || [] + const hlsFiles = video.streamingPlaylists[0]?.files || [] + + return files.concat(hlsFiles) + } + // --------------------------------------------------------------------------- listMyVideos (options: OverrideCommandOptions & { diff --git a/shared/models/server/job.model.ts b/shared/models/server/job.model.ts index 8239f8cdff9..973cacef3b8 100644 --- a/shared/models/server/job.model.ts +++ b/shared/models/server/job.model.ts @@ -140,5 +140,4 @@ export interface ActorKeysPayload { export interface MoveObjectStoragePayload { videoUUID: string - videoFileId?: number } diff --git a/shared/models/videos/index.ts b/shared/models/videos/index.ts index faa9b986868..733c433a09d 100644 --- a/shared/models/videos/index.ts +++ b/shared/models/videos/index.ts @@ -26,6 +26,7 @@ export * from './video-resolution.enum' export * from './video-schedule-update.model' export * from './video-sort-field.type' export * from './video-state.enum' +export * from './video-storage.enum' export * from './video-streaming-playlist.model' export * from './video-streaming-playlist.type' diff --git a/shared/models/videos/video-state.enum.ts b/shared/models/videos/video-state.enum.ts index 49d997f240d..c6af481e71a 100644 --- a/shared/models/videos/video-state.enum.ts +++ b/shared/models/videos/video-state.enum.ts @@ -3,5 +3,6 @@ export const enum VideoState { TO_TRANSCODE = 2, TO_IMPORT = 3, WAITING_FOR_LIVE = 4, - LIVE_ENDED = 5 + LIVE_ENDED = 5, + TO_MOVE_TO_EXTERNAL_STORAGE = 6 } diff --git a/shared/models/videos/video-storage.enum.ts b/shared/models/videos/video-storage.enum.ts new file mode 100644 index 00000000000..d9f52ff931b --- /dev/null +++ b/shared/models/videos/video-storage.enum.ts @@ -0,0 +1,4 @@ +export const enum VideoStorage { + LOCAL, + OBJECT_STORAGE, +} From 831f9734cc174e67f9cf2c0c9fcfb3149b819d73 Mon Sep 17 00:00:00 2001 From: Chocobozzz Date: Fri, 13 Aug 2021 14:30:14 +0200 Subject: [PATCH 20/23] Fix federation --- server/controllers/api/videos/upload.ts | 2 +- .../handlers/move-to-object-storage.ts | 2 +- .../job-queue/handlers/video-live-ending.ts | 2 +- .../job-queue/handlers/video-transcoding.ts | 5 +- server/lib/video-state.ts | 99 +++++++++++++++++++ server/lib/video.ts | 81 +-------------- 6 files changed, 106 insertions(+), 85 deletions(-) create mode 100644 server/lib/video-state.ts diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index 8d5b65f29cd..22de0d66230 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -9,11 +9,11 
@@ import { addMoveToObjectStorageJob, addOptimizeOrMergeAudioJob, buildLocalVideoFromReq, - buildNextVideoState, buildVideoThumbnailsFromReq, setVideoTags } from '@server/lib/video' import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths' +import { buildNextVideoState } from '@server/lib/video-state' import { openapiOperationDoc } from '@server/middlewares/doc' import { MVideo, MVideoFile, MVideoFullLight } from '@server/types/models' import { uploadx } from '@uploadx/core' diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index c1fbdfc892b..2b1dca3d18e 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -6,8 +6,8 @@ import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { CONFIG } from '@server/initializers/config' import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' import { storeHLSFile, storeWebTorrentFile } from '@server/lib/object-storage' -import { moveToNextState } from '@server/lib/video' import { getHLSDirectory, getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' +import { moveToNextState } from '@server/lib/video-state' import { VideoModel } from '@server/models/video/video' import { VideoJobInfoModel } from '@server/models/video/video-job-info' import { MVideoFile, MVideoWithAllFiles } from '@server/types/models' diff --git a/server/lib/job-queue/handlers/video-live-ending.ts b/server/lib/job-queue/handlers/video-live-ending.ts index 5399ea3d173..38523c75245 100644 --- a/server/lib/job-queue/handlers/video-live-ending.ts +++ b/server/lib/job-queue/handlers/video-live-ending.ts @@ -6,8 +6,8 @@ import { VIDEO_LIVE } from '@server/initializers/constants' import { buildConcatenatedName, cleanupLive, LiveSegmentShaStore } from '@server/lib/live' import { generateVideoMiniature } from '@server/lib/thumbnail' import { generateHlsPlaylistResolutionFromTS } from '@server/lib/transcoding/video-transcoding' -import { moveToNextState } from '@server/lib/video' import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getHLSDirectory } from '@server/lib/video-paths' +import { moveToNextState } from '@server/lib/video-state' import { VideoModel } from '@server/models/video/video' import { VideoFileModel } from '@server/models/video/video-file' import { VideoLiveModel } from '@server/models/video/video-live' diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index 1c1c329f10b..6070c1899fd 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -1,8 +1,10 @@ import * as Bull from 'bull' import { TranscodeOptionsType } from '@server/helpers/ffmpeg-utils' -import { addTranscodingJob, getTranscodingJobPriority, moveToNextState } from '@server/lib/video' +import { addTranscodingJob, getTranscodingJobPriority } from '@server/lib/video' import { getVideoFilePath } from '@server/lib/video-paths' +import { moveToNextState } from '@server/lib/video-state' import { UserModel } from '@server/models/user/user' +import { VideoJobInfoModel } from '@server/models/video/video-job-info' import { MUser, MUserId, MVideo, MVideoFullLight, MVideoWithFile } from '@server/types/models' import { HLSTranscodingPayload, @@ -22,7 +24,6 @@ import { optimizeOriginalVideofile, transcodeNewWebTorrentResolution } 
from '../../transcoding/video-transcoding'
-import { VideoJobInfoModel } from '@server/models/video/video-job-info'
 
 type HandlerFunction = (job: Bull.Job, payload: VideoTranscodingPayload, video: MVideoFullLight, user: MUser) => Promise
 
diff --git a/server/lib/video-state.ts b/server/lib/video-state.ts
new file mode 100644
index 00000000000..ee28f7e4884
--- /dev/null
+++ b/server/lib/video-state.ts
@@ -0,0 +1,99 @@
+import { Transaction } from 'sequelize'
+import { logger } from '@server/helpers/logger'
+import { CONFIG } from '@server/initializers/config'
+import { sequelizeTypescript } from '@server/initializers/database'
+import { VideoModel } from '@server/models/video/video'
+import { VideoJobInfoModel } from '@server/models/video/video-job-info'
+import { MVideoFullLight, MVideoUUID } from '@server/types/models'
+import { VideoState } from '@shared/models'
+import { federateVideoIfNeeded } from './activitypub/videos'
+import { Notifier } from './notifier'
+import { addMoveToObjectStorageJob } from './video'
+
+function buildNextVideoState (currentState?: VideoState) {
+  if (currentState === VideoState.PUBLISHED) {
+    throw new Error('Video is already in its final state')
+  }
+
+  if (
+    currentState !== VideoState.TO_TRANSCODE &&
+    currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE &&
+    CONFIG.TRANSCODING.ENABLED
+  ) {
+    return VideoState.TO_TRANSCODE
+  }
+
+  if (
+    currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE &&
+    CONFIG.OBJECT_STORAGE.ENABLED
+  ) {
+    return VideoState.TO_MOVE_TO_EXTERNAL_STORAGE
+  }
+
+  return VideoState.PUBLISHED
+}
+
+function moveToNextState (video: MVideoUUID, isNewVideo = true) {
+  return sequelizeTypescript.transaction(async t => {
+    // Maybe the video changed in database, refresh it
+    const videoDatabase = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.uuid, t)
+    // Video does not exist anymore
+    if (!videoDatabase) return undefined
+
+    // Already in its final state
+    if (videoDatabase.state === VideoState.PUBLISHED) {
+      return federateVideoIfNeeded(videoDatabase, false, t)
+    }
+
+    const newState = buildNextVideoState(videoDatabase.state)
+
+    if (newState === VideoState.PUBLISHED) {
+      return moveToPublishedState(videoDatabase, isNewVideo, t)
+    }
+
+    if (newState === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) {
+      return moveToExternalStorageState(videoDatabase, t)
+    }
+  })
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+  buildNextVideoState,
+  moveToNextState
+}
+
+// ---------------------------------------------------------------------------
+
+async function moveToPublishedState (video: MVideoFullLight, isNewVideo: boolean, transaction: Transaction) {
+  logger.info('Publishing video %s.', video.uuid, { tags: [ video.uuid ] })
+
+  const previousState = video.state
+  await video.setNewState(VideoState.PUBLISHED, transaction)
+
+  // If the video was not published, we consider it is a new one for other instances
+  // Live videos are always federated, so it's not a new video
+  await federateVideoIfNeeded(video, isNewVideo, transaction)
+
+  Notifier.Instance.notifyOnNewVideoIfNeeded(video)
+
+  if (previousState === VideoState.TO_TRANSCODE) {
+    Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(video)
+  }
+}
+
+async function moveToExternalStorageState (video: MVideoFullLight, transaction: Transaction) {
+  const videoJobInfo = await VideoJobInfoModel.load(video.id, transaction)
+  const pendingTranscoding = videoJobInfo?.pendingTranscoding || 0
+
+  // We want to wait all transcoding
jobs before moving the video on an external storage + if (pendingTranscoding !== 0) return + + await video.setNewState(VideoState.TO_MOVE_TO_EXTERNAL_STORAGE, transaction) + + logger.info('Creating external storage move job for video %s.', video.uuid, { tags: [ video.uuid ] }) + + addMoveToObjectStorageJob(video) + .catch(err => logger.error('Cannot add move to object storage job', { err })) +} diff --git a/server/lib/video.ts b/server/lib/video.ts index 722c5901218..30575125444 100644 --- a/server/lib/video.ts +++ b/server/lib/video.ts @@ -1,18 +1,13 @@ import { UploadFiles } from 'express' import { Transaction } from 'sequelize/types' -import { logger } from '@server/helpers/logger' -import { CONFIG } from '@server/initializers/config' import { DEFAULT_AUDIO_RESOLUTION, JOB_PRIORITY } from '@server/initializers/constants' -import { sequelizeTypescript } from '@server/initializers/database' import { TagModel } from '@server/models/video/tag' import { VideoModel } from '@server/models/video/video' import { VideoJobInfoModel } from '@server/models/video/video-job-info' import { FilteredModelAttributes } from '@server/types' import { MThumbnail, MUserId, MVideoFile, MVideoTag, MVideoThumbnail, MVideoUUID } from '@server/types/models' -import { ThumbnailType, VideoCreate, VideoPrivacy, VideoState, VideoTranscodingPayload } from '@shared/models' -import { federateVideoIfNeeded } from './activitypub/videos' +import { ThumbnailType, VideoCreate, VideoPrivacy, VideoTranscodingPayload } from '@shared/models' import { CreateJobOptions, JobQueue } from './job-queue/job-queue' -import { Notifier } from './notifier' import { updateVideoMiniatureFromExisting } from './thumbnail' function buildLocalVideoFromReq (videoInfo: VideoCreate, channelId: number): FilteredModelAttributes { @@ -85,55 +80,6 @@ async function setVideoTags (options: { video.Tags = tagInstances } -function moveToNextState (video: MVideoUUID, isNewVideo = true) { - return sequelizeTypescript.transaction(async t => { - // Maybe the video changed in database, refresh it - const videoDatabase = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.uuid, t) - // Video does not exist anymore - if (!videoDatabase) return undefined - - const previousState = videoDatabase.state - - // Already in its final state - if (previousState === VideoState.PUBLISHED) return - - const newState = buildNextVideoState(previousState) - - if (newState === VideoState.PUBLISHED) { - logger.info('Publishing video %s.', video.uuid, { tags: [ video.uuid ] }) - - await videoDatabase.setNewState(newState, t) - - // If the video was not published, we consider it is a new one for other instances - // Live videos are always federated, so it's not a new video - await federateVideoIfNeeded(videoDatabase, isNewVideo, t) - - Notifier.Instance.notifyOnNewVideoIfNeeded(videoDatabase) - - if (previousState === VideoState.TO_TRANSCODE) { - Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(videoDatabase) - } - - return - } - - if (newState === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { - const videoJobInfo = await VideoJobInfoModel.load(videoDatabase.id, t) - const pendingTranscoding = videoJobInfo?.pendingTranscoding || 0 - - // We want to wait all transcoding jobs before moving the video on an external storage - if (pendingTranscoding !== 0) return - - await videoDatabase.setNewState(newState, t) - - logger.info('Creating external storage move job for video %s.', video.uuid, { tags: [ video.uuid ] }) - - addMoveToObjectStorageJob(video) - .catch(err => 
logger.error('Cannot add move to object storage job', { err })) - } - }) -} - async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoFile, user: MUserId) { let dataInput: VideoTranscodingPayload @@ -181,38 +127,13 @@ async function getTranscodingJobPriority (user: MUserId) { return JOB_PRIORITY.TRANSCODING + videoUploadedByUser } -function buildNextVideoState (currentState?: VideoState) { - if (currentState === VideoState.PUBLISHED) { - throw new Error('Video is already in its final state') - } - - if ( - currentState !== VideoState.TO_TRANSCODE && - currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE && - CONFIG.TRANSCODING.ENABLED - ) { - return VideoState.TO_TRANSCODE - } - - if ( - currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE && - CONFIG.OBJECT_STORAGE.ENABLED - ) { - return VideoState.TO_MOVE_TO_EXTERNAL_STORAGE - } - - return VideoState.PUBLISHED -} - // --------------------------------------------------------------------------- export { buildLocalVideoFromReq, buildVideoThumbnailsFromReq, setVideoTags, - moveToNextState, addOptimizeOrMergeAudioJob, - buildNextVideoState, addTranscodingJob, addMoveToObjectStorageJob, getTranscodingJobPriority From 2b71af9a5b933fbf38ad3a1ce18a86c4f9bcfab3 Mon Sep 17 00:00:00 2001 From: Chocobozzz Date: Mon, 16 Aug 2021 11:49:34 +0200 Subject: [PATCH 21/23] Add video path manager --- .github/workflows/test.yml | 1 + config/default.yaml | 10 +- config/production.yaml.example | 33 +++ scripts/create-transcoding-job.ts | 10 +- scripts/optimize-old-videos.ts | 82 +++---- server/controllers/api/videos/upload.ts | 12 +- server/controllers/download.ts | 15 +- server/helpers/webtorrent.ts | 31 +-- .../migrations/0065-video-file-size.ts | 28 +-- .../migrations/0660-object-storage.ts | 6 +- .../shared/object-to-model-attributes.ts | 2 +- server/lib/hls.ts | 82 +++---- .../handlers/move-to-object-storage.ts | 31 +-- .../job-queue/handlers/video-file-import.ts | 27 ++- server/lib/job-queue/handlers/video-import.ts | 16 +- .../job-queue/handlers/video-live-ending.ts | 12 +- .../job-queue/handlers/video-transcoding.ts | 96 +++++--- server/lib/live/live-manager.ts | 2 +- server/lib/live/live-utils.ts | 4 +- server/lib/live/shared/muxing-session.ts | 4 +- server/lib/object-storage/keys.ts | 11 +- server/lib/object-storage/shared/client.ts | 46 +++- .../shared/object-storage-helpers.ts | 16 +- server/lib/object-storage/urls.ts | 4 +- server/lib/object-storage/videos.ts | 43 +++- server/lib/paths.ts | 82 +++++++ .../schedulers/videos-redundancy-scheduler.ts | 2 +- server/lib/thumbnail.ts | 30 +-- server/lib/transcoding/video-transcoding.ts | 222 +++++++++--------- server/lib/video-path-manager.ts | 139 +++++++++++ server/lib/video-paths.ts | 144 ------------ server/lib/video-state.ts | 10 +- server/lib/video-urls.ts | 31 +++ server/lib/video.ts | 6 +- .../video/formatter/video-format-utils.ts | 2 +- server/models/video/video-file.ts | 8 +- server/models/video/video-job-info.ts | 6 +- .../models/video/video-streaming-playlist.ts | 2 +- server/models/video/video.ts | 18 +- server/tests/api/live/live-save-replay.ts | 30 +-- server/tests/api/object-storage/index.ts | 2 + server/tests/api/object-storage/live.ts | 136 +++++++++++ .../tests/api/object-storage/video-imports.ts | 112 +++++++++ server/tests/api/object-storage/videos.ts | 144 +++++++++++- server/tests/api/redundancy/redundancy.ts | 6 +- server/tests/api/videos/video-hls.ts | 73 ++++-- .../tests/cli/create-import-video-file-job.ts | 56 ++++- 
server/tests/cli/create-transcoding-job.ts | 95 +++++--- server/tests/helpers/request.ts | 8 +- shared/extra-utils/miscs/tests.ts | 15 +- .../mock-servers/mock-object-storage.ts | 40 +--- shared/extra-utils/requests/requests.ts | 16 ++ shared/extra-utils/server/config-command.ts | 72 ++++++ shared/extra-utils/server/index.ts | 1 + shared/extra-utils/server/jobs-command.ts | 10 + shared/extra-utils/server/jobs.ts | 6 +- .../server/object-storage-command.ts | 77 ++++++ shared/extra-utils/server/server.ts | 18 +- shared/extra-utils/videos/live.ts | 7 + .../videos/streaming-playlists-command.ts | 6 +- .../extra-utils/videos/streaming-playlists.ts | 7 +- shared/models/server/job.model.ts | 1 + shared/models/videos/video-storage.enum.ts | 2 +- .../config/custom-environment-variables.yaml | 23 ++ 64 files changed, 1601 insertions(+), 688 deletions(-) create mode 100644 server/lib/paths.ts create mode 100644 server/lib/video-path-manager.ts delete mode 100644 server/lib/video-paths.ts create mode 100644 server/lib/video-urls.ts create mode 100644 server/tests/api/object-storage/live.ts create mode 100644 server/tests/api/object-storage/video-imports.ts create mode 100644 shared/extra-utils/server/object-storage-command.ts diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 093a2495ede..35c91bf85bc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -45,6 +45,7 @@ jobs: PGUSER: peertube PGHOST: localhost NODE_PENDING_JOB_WAIT: 250 + ENABLE_OBJECT_STORAGE_TESTS: true steps: - uses: actions/checkout@v2 diff --git a/config/default.yaml b/config/default.yaml index 2227abbd522..3865ab5cf19 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -99,16 +99,18 @@ object_storage: enabled: false # Without protocol, will default to HTTPS - endpoint: 's3.amazonaws.com' + endpoint: '' # 's3.amazonaws.com' or 's3.fr-par.scw.cloud' for example region: 'us-east-1' credentials: - access_key_id: 'access-key' - secret_access_key: 'secret-access-key' + # You can also use AWS_ACCESS_KEY_ID env variable + access_key_id: '' + # You can also use AWS_SECRET_ACCESS_KEY env variable + secret_access_key: '' # Maximum amount to upload in one request to object storage - max_upload_part: 2MB + max_upload_part: 2GB streaming_playlists: bucket_name: 'streaming-playlists' diff --git a/config/production.yaml.example b/config/production.yaml.example index 514ab99a47c..94238fad00f 100644 --- a/config/production.yaml.example +++ b/config/production.yaml.example @@ -93,6 +93,39 @@ storage: # If not, peertube will fallback to the default file client_overrides: '/var/www/peertube/storage/client-overrides/' +object_storage: + enabled: false + + # Without protocol, will default to HTTPS + endpoint: '' # 's3.amazonaws.com' or 's3.fr-par.scw.cloud' for example + + region: 'us-east-1' + + credentials: + # You can also use AWS_ACCESS_KEY_ID env variable + access_key_id: '' + # You can also use AWS_SECRET_ACCESS_KEY env variable + secret_access_key: '' + + # Maximum amount to upload in one request to object storage + max_upload_part: 2GB + + streaming_playlists: + bucket_name: 'streaming-playlists' + + # Allows setting all buckets to the same value but with a different prefix + prefix: '' # Example: 'streaming-playlists:' + + # Base url for object URL generation, scheme and host will be replaced by this URL + # Useful when you want to use a CDN/external proxy + base_url: '' # Example: 'https://mirror.example.com' + + # Same settings but for webtorrent videos + videos: + bucket_name: 'videos' 
+ prefix: '' + base_url: '' + log: level: 'info' # 'debug' | 'info' | 'warn' | 'error' rotation: diff --git a/scripts/create-transcoding-job.ts b/scripts/create-transcoding-job.ts index ba885d97583..0bb9bfeab8d 100755 --- a/scripts/create-transcoding-job.ts +++ b/scripts/create-transcoding-job.ts @@ -6,7 +6,7 @@ import { VideoModel } from '../server/models/video/video' import { initDatabaseModels } from '../server/initializers/database' import { JobQueue } from '../server/lib/job-queue' import { computeResolutionsToTranscode } from '@server/helpers/ffprobe-utils' -import { VideoTranscodingPayload } from '@shared/models' +import { VideoState, VideoTranscodingPayload } from '@shared/models' import { CONFIG } from '@server/initializers/config' import { isUUIDValid } from '@server/helpers/custom-validators/misc' import { addTranscodingJob } from '@server/lib/video' @@ -48,7 +48,7 @@ async function run () { if (!video) throw new Error('Video not found.') const dataInput: VideoTranscodingPayload[] = [] - const { resolution } = await video.getMaxQualityResolution() + const resolution = video.getMaxQualityFile().resolution // Generate HLS files if (options.generateHls || CONFIG.TRANSCODING.WEBTORRENT.ENABLED === false) { @@ -63,6 +63,7 @@ async function run () { resolution, isPortraitMode: false, copyCodecs: false, + isNewVideo: false, isMaxQuality: false }) } @@ -88,7 +89,10 @@ async function run () { } } - await JobQueue.Instance.init() + JobQueue.Instance.init() + + video.state = VideoState.TO_TRANSCODE + await video.save() for (const d of dataInput) { await addTranscodingJob(d, {}) diff --git a/scripts/optimize-old-videos.ts b/scripts/optimize-old-videos.ts index 81594d72c0e..245e4cf284f 100644 --- a/scripts/optimize-old-videos.ts +++ b/scripts/optimize-old-videos.ts @@ -1,19 +1,19 @@ +import { registerTSPaths } from '../server/helpers/register-ts-paths' +registerTSPaths() + import { copy, move, remove } from 'fs-extra' import { basename, dirname } from 'path' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { CONFIG } from '@server/initializers/config' import { processMoveToObjectStorage } from '@server/lib/job-queue/handlers/move-to-object-storage' -import { getVideoFilePath, getVideoFilePathMakeAvailable } from '@server/lib/video-paths' +import { VideoPathManager } from '@server/lib/video-path-manager' import { getMaxBitrate } from '@shared/core-utils' import { MoveObjectStoragePayload } from '@shared/models' import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffprobe-utils' -import { registerTSPaths } from '../server/helpers/register-ts-paths' import { initDatabaseModels } from '../server/initializers/database' import { optimizeOriginalVideofile } from '../server/lib/transcoding/video-transcoding' import { VideoModel } from '../server/models/video/video' -registerTSPaths() - run() .then(() => process.exit(0)) .catch(err => { @@ -42,43 +42,45 @@ async function run () { currentVideoId = video.id for (const file of video.VideoFiles) { - currentFilePath = await getVideoFilePathMakeAvailable(video, file) - - const [ videoBitrate, fps, dataResolution ] = await Promise.all([ - getVideoFileBitrate(currentFilePath), - getVideoFileFPS(currentFilePath), - getVideoFileResolution(currentFilePath) - ]) - - const maxBitrate = getMaxBitrate({ ...dataResolution, fps }) - const isMaxBitrateExceeded = videoBitrate > maxBitrate - if (isMaxBitrateExceeded) { - console.log( - 'Optimizing video file %s with 
bitrate %s kbps (max: %s kbps)', - basename(currentFilePath), videoBitrate / 1000, maxBitrate / 1000 - ) - - const backupFile = `${currentFilePath}_backup` - await copy(currentFilePath, backupFile) - - await optimizeOriginalVideofile(video, file) - // Update file path, the video filename changed - currentFilePath = getVideoFilePath(video, file) - - const originalDuration = await getDurationFromVideoFile(backupFile) - const newDuration = await getDurationFromVideoFile(currentFilePath) - - if (originalDuration === newDuration) { - console.log('Finished optimizing %s', basename(currentFilePath)) - await remove(backupFile) - continue + await VideoPathManager.Instance.makeAvailableVideoFile(video, file, async path => { + currentFilePath = path + + const [ videoBitrate, fps, dataResolution ] = await Promise.all([ + getVideoFileBitrate(currentFilePath), + getVideoFileFPS(currentFilePath), + getVideoFileResolution(currentFilePath) + ]) + + const maxBitrate = getMaxBitrate({ ...dataResolution, fps }) + const isMaxBitrateExceeded = videoBitrate > maxBitrate + if (isMaxBitrateExceeded) { + console.log( + 'Optimizing video file %s with bitrate %s kbps (max: %s kbps)', + basename(currentFilePath), videoBitrate / 1000, maxBitrate / 1000 + ) + + const backupFile = `${currentFilePath}_backup` + await copy(currentFilePath, backupFile) + + await optimizeOriginalVideofile(video, file) + // Update file path, the video filename changed + currentFilePath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, file) + + const originalDuration = await getDurationFromVideoFile(backupFile) + const newDuration = await getDurationFromVideoFile(currentFilePath) + + if (originalDuration === newDuration) { + console.log('Finished optimizing %s', basename(currentFilePath)) + await remove(backupFile) + return + } + + console.log('Failed to optimize %s, restoring original', basename(currentFilePath)) + await move(backupFile, currentFilePath, { overwrite: true }) + await createTorrentAndSetInfoHash(video, file) + await file.save() } - - console.log('Failed to optimize %s, restoring original', basename(currentFilePath)) - await move(backupFile, currentFilePath, { overwrite: true }) - await createTorrentAndSetInfoHash(video, file) - await file.save() - } + }) } if (CONFIG.OBJECT_STORAGE.ENABLED === true) { diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts index 22de0d66230..5c740c0410e 100644 --- a/server/controllers/api/videos/upload.ts +++ b/server/controllers/api/videos/upload.ts @@ -1,10 +1,12 @@ import * as express from 'express' import { move } from 'fs-extra' +import { basename } from 'path' import { getLowercaseExtension } from '@server/helpers/core-utils' import { deleteResumableUploadMetaFile, getResumableUploadPath } from '@server/helpers/upload' import { uuidToShort } from '@server/helpers/uuid' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url' +import { generateWebTorrentVideoFilename } from '@server/lib/paths' import { addMoveToObjectStorageJob, addOptimizeOrMergeAudioJob, @@ -12,7 +14,7 @@ import { buildVideoThumbnailsFromReq, setVideoTags } from '@server/lib/video' -import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths' +import { VideoPathManager } from '@server/lib/video-path-manager' import { buildNextVideoState } from '@server/lib/video-state' import { openapiOperationDoc } from '@server/middlewares/doc' import { MVideo, 
MVideoFile, MVideoFullLight } from '@server/types/models' @@ -153,13 +155,13 @@ async function addVideo (options: { video.VideoChannel = videoChannel video.url = getLocalVideoActivityPubUrl(video) // We use the UUID, so set the URL after building the object - const videoFile = await buildNewFile(video, videoPhysicalFile) + const videoFile = await buildNewFile(videoPhysicalFile) // Move physical file - const destination = getVideoFilePath(video, videoFile) + const destination = VideoPathManager.Instance.getFSVideoFileOutputPath(video, videoFile) await move(videoPhysicalFile.path, destination) // This is important in case if there is another attempt in the retry process - videoPhysicalFile.filename = getVideoFilePath(video, videoFile) + videoPhysicalFile.filename = basename(destination) videoPhysicalFile.path = destination const [ thumbnailModel, previewModel ] = await buildVideoThumbnailsFromReq({ @@ -235,7 +237,7 @@ async function addVideo (options: { }) } -async function buildNewFile (video: MVideo, videoPhysicalFile: express.VideoUploadFile) { +async function buildNewFile (videoPhysicalFile: express.VideoUploadFile) { const videoFile = new VideoFileModel({ extname: getLowercaseExtension(videoPhysicalFile.filename), size: videoPhysicalFile.size, diff --git a/server/controllers/download.ts b/server/controllers/download.ts index 65aa53420e7..ffe40d57e3c 100644 --- a/server/controllers/download.ts +++ b/server/controllers/download.ts @@ -3,7 +3,7 @@ import * as express from 'express' import { logger } from '@server/helpers/logger' import { VideosTorrentCache } from '@server/lib/files-cache/videos-torrent-cache' import { Hooks } from '@server/lib/plugins/hooks' -import { getVideoFilePath } from '@server/lib/video-paths' +import { VideoPathManager } from '@server/lib/video-path-manager' import { MStreamingPlaylist, MVideo, MVideoFile, MVideoFullLight } from '@server/types/models' import { HttpStatusCode, VideoStorage, VideoStreamingPlaylistType } from '@shared/models' import { STATIC_DOWNLOAD_PATHS } from '../initializers/constants' @@ -85,7 +85,11 @@ async function downloadVideoFile (req: express.Request, res: express.Response) { return res.redirect(videoFile.getObjectStorageUrl()) } - return res.download(getVideoFilePath(video, videoFile), `${video.name}-${videoFile.resolution}p${videoFile.extname}`) + await VideoPathManager.Instance.makeAvailableVideoFile(video, videoFile, path => { + const filename = `${video.name}-${videoFile.resolution}p${videoFile.extname}` + + return res.download(path, filename) + }) } async function downloadHLSVideoFile (req: express.Request, res: express.Response) { @@ -115,8 +119,11 @@ async function downloadHLSVideoFile (req: express.Request, res: express.Response return res.redirect(videoFile.getObjectStorageUrl()) } - const filename = `${video.name}-${videoFile.resolution}p-${streamingPlaylist.getStringType()}${videoFile.extname}` - return res.download(getVideoFilePath(streamingPlaylist, videoFile), filename) + await VideoPathManager.Instance.makeAvailableVideoFile(streamingPlaylist, videoFile, path => { + const filename = `${video.name}-${videoFile.resolution}p-${streamingPlaylist.getStringType()}${videoFile.extname}` + + return res.download(path, filename) + }) } function getVideoFile (req: express.Request, files: MVideoFile[]) { diff --git a/server/helpers/webtorrent.ts b/server/helpers/webtorrent.ts index ecf63e93e74..c8437630445 100644 --- a/server/helpers/webtorrent.ts +++ b/server/helpers/webtorrent.ts @@ -6,7 +6,8 @@ import { dirname, join } from 'path' 
import * as WebTorrent from 'webtorrent' import { isArray } from '@server/helpers/custom-validators/misc' import { WEBSERVER } from '@server/initializers/constants' -import { generateTorrentFileName, getVideoFilePath } from '@server/lib/video-paths' +import { generateTorrentFileName } from '@server/lib/paths' +import { VideoPathManager } from '@server/lib/video-path-manager' import { MVideo } from '@server/types/models/video/video' import { MVideoFile, MVideoFileRedundanciesOpt } from '@server/types/models/video/video-file' import { MStreamingPlaylistVideo } from '@server/types/models/video/video-streaming-playlist' @@ -78,7 +79,7 @@ async function downloadWebTorrentVideo (target: { magnetUri: string, torrentName }) } -async function createTorrentAndSetInfoHash ( +function createTorrentAndSetInfoHash ( videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile ) { @@ -95,22 +96,24 @@ async function createTorrentAndSetInfoHash ( urlList: [ videoFile.getFileUrl(video) ] } - const torrent = await createTorrentPromise(getVideoFilePath(videoOrPlaylist, videoFile), options) + return VideoPathManager.Instance.makeAvailableVideoFile(videoOrPlaylist, videoFile, async videoPath => { + const torrent = await createTorrentPromise(videoPath, options) - const torrentFilename = generateTorrentFileName(videoOrPlaylist, videoFile.resolution) - const torrentPath = join(CONFIG.STORAGE.TORRENTS_DIR, torrentFilename) - logger.info('Creating torrent %s.', torrentPath) + const torrentFilename = generateTorrentFileName(videoOrPlaylist, videoFile.resolution) + const torrentPath = join(CONFIG.STORAGE.TORRENTS_DIR, torrentFilename) + logger.info('Creating torrent %s.', torrentPath) - await writeFile(torrentPath, torrent) + await writeFile(torrentPath, torrent) - // Remove old torrent file if it existed - if (videoFile.hasTorrent()) { - await remove(join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename)) - } + // Remove old torrent file if it existed + if (videoFile.hasTorrent()) { + await remove(join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename)) + } - const parsedTorrent = parseTorrent(torrent) - videoFile.infoHash = parsedTorrent.infoHash - videoFile.torrentFilename = torrentFilename + const parsedTorrent = parseTorrent(torrent) + videoFile.infoHash = parsedTorrent.infoHash + videoFile.torrentFilename = torrentFilename + }) } function generateMagnetUri ( diff --git a/server/initializers/migrations/0065-video-file-size.ts b/server/initializers/migrations/0065-video-file-size.ts index 1aeb27f2dce..ac952a98cc5 100644 --- a/server/initializers/migrations/0065-video-file-size.ts +++ b/server/initializers/migrations/0065-video-file-size.ts @@ -1,7 +1,4 @@ import * as Sequelize from 'sequelize' -import { stat } from 'fs-extra' -import { VideoModel } from '../../models/video/video' -import { getVideoFilePath } from '@server/lib/video-paths' function up (utils: { transaction: Sequelize.Transaction @@ -9,30 +6,7 @@ function up (utils: { sequelize: Sequelize.Sequelize db: any }): Promise { - return utils.db.Video.listOwnedAndPopulateAuthorAndTags() - .then((videos: VideoModel[]) => { - const tasks: Promise[] = [] - - videos.forEach(video => { - video.VideoFiles.forEach(videoFile => { - const p = new Promise((res, rej) => { - stat(getVideoFilePath(video, videoFile), (err, stats) => { - if (err) return rej(err) - - videoFile.size = stats.size - videoFile.save().then(res).catch(rej) - }) - }) - - tasks.push(p) - }) - }) - - return tasks - }) - .then((tasks: Promise[]) => { - return 
Promise.all(tasks) - }) + throw new Error('Removed, please upgrade from a previous version first.') } function down (options) { diff --git a/server/initializers/migrations/0660-object-storage.ts b/server/initializers/migrations/0660-object-storage.ts index 1cc265bfbb3..c815c71c65d 100644 --- a/server/initializers/migrations/0660-object-storage.ts +++ b/server/initializers/migrations/0660-object-storage.ts @@ -12,7 +12,7 @@ async function up (utils: { CREATE TABLE IF NOT EXISTS "videoJobInfo" ( "id" serial, "pendingMove" INTEGER NOT NULL, - "pendingTranscoding" INTEGER NOT NULL, + "pendingTranscode" INTEGER NOT NULL, "videoId" serial UNIQUE NOT NULL REFERENCES "video" ("id") ON DELETE CASCADE ON UPDATE CASCADE, "createdAt" timestamp WITH time zone NOT NULL, "updatedAt" timestamp WITH time zone NOT NULL, @@ -28,7 +28,7 @@ async function up (utils: { } { await utils.sequelize.query( - `UPDATE "videoFile" SET "storage" = ${VideoStorage.LOCAL}` + `UPDATE "videoFile" SET "storage" = ${VideoStorage.FILE_SYSTEM}` ) } { @@ -40,7 +40,7 @@ async function up (utils: { } { await utils.sequelize.query( - `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorage.LOCAL}` + `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorage.FILE_SYSTEM}` ) } { diff --git a/server/lib/activitypub/videos/shared/object-to-model-attributes.ts b/server/lib/activitypub/videos/shared/object-to-model-attributes.ts index 1fa16295d44..bd9ed45a9d7 100644 --- a/server/lib/activitypub/videos/shared/object-to-model-attributes.ts +++ b/server/lib/activitypub/videos/shared/object-to-model-attributes.ts @@ -6,7 +6,7 @@ import { isVideoFileInfoHashValid } from '@server/helpers/custom-validators/vide import { logger } from '@server/helpers/logger' import { getExtFromMimetype } from '@server/helpers/video' import { ACTIVITY_PUB, MIMETYPES, P2P_MEDIA_LOADER_PEER_VERSION, PREVIEWS_SIZE, THUMBNAILS_SIZE } from '@server/initializers/constants' -import { generateTorrentFileName } from '@server/lib/video-paths' +import { generateTorrentFileName } from '@server/lib/paths' import { VideoCaptionModel } from '@server/models/video/video-caption' import { VideoFileModel } from '@server/models/video/video-file' import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist' diff --git a/server/lib/hls.ts b/server/lib/hls.ts index 0e77ab9fa0d..0828a2d0fda 100644 --- a/server/lib/hls.ts +++ b/server/lib/hls.ts @@ -1,4 +1,4 @@ -import { close, ensureDir, move, open, outputJSON, pathExists, read, readFile, remove, stat, writeFile } from 'fs-extra' +import { close, ensureDir, move, open, outputJSON, read, readFile, remove, stat, writeFile } from 'fs-extra' import { flatten, uniq } from 'lodash' import { basename, dirname, join } from 'path' import { MStreamingPlaylistFilesVideo, MVideoWithFile } from '@server/types/models' @@ -8,11 +8,12 @@ import { logger } from '../helpers/logger' import { doRequest, doRequestAndSaveToFile } from '../helpers/requests' import { generateRandomString } from '../helpers/utils' import { CONFIG } from '../initializers/config' -import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION } from '../initializers/constants' +import { P2P_MEDIA_LOADER_PEER_VERSION } from '../initializers/constants' import { sequelizeTypescript } from '../initializers/database' import { VideoFileModel } from '../models/video/video-file' import { VideoStreamingPlaylistModel } from '../models/video/video-streaming-playlist' -import { getHlsResolutionPlaylistFilename, getVideoFilePath } from 
'./video-paths' +import { getHlsResolutionPlaylistFilename } from './paths' +import { VideoPathManager } from './video-path-manager' async function updateStreamingPlaylistsInfohashesIfNeeded () { const playlistsToUpdate = await VideoStreamingPlaylistModel.listByIncorrectPeerVersion() @@ -31,75 +32,66 @@ async function updateStreamingPlaylistsInfohashesIfNeeded () { } async function updateMasterHLSPlaylist (video: MVideoWithFile, playlist: MStreamingPlaylistFilesVideo) { - const directory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) - const masterPlaylists: string[] = [ '#EXTM3U', '#EXT-X-VERSION:3' ] - const masterPlaylistPath = join(directory, playlist.playlistFilename) - for (const file of playlist.VideoFiles) { const playlistFilename = getHlsResolutionPlaylistFilename(file.filename) - // If we did not generated a playlist for this resolution, skip - const filePlaylistPath = join(directory, playlistFilename) - if (await pathExists(filePlaylistPath) === false) continue - - const videoFilePath = getVideoFilePath(playlist, file) + await VideoPathManager.Instance.makeAvailableVideoFile(playlist, file, async videoFilePath => { + const size = await getVideoStreamSize(videoFilePath) - const size = await getVideoStreamSize(videoFilePath) + const bandwidth = 'BANDWIDTH=' + video.getBandwidthBits(file) + const resolution = `RESOLUTION=${size.width}x${size.height}` - const bandwidth = 'BANDWIDTH=' + video.getBandwidthBits(file) - const resolution = `RESOLUTION=${size.width}x${size.height}` + let line = `#EXT-X-STREAM-INF:${bandwidth},${resolution}` + if (file.fps) line += ',FRAME-RATE=' + file.fps - let line = `#EXT-X-STREAM-INF:${bandwidth},${resolution}` - if (file.fps) line += ',FRAME-RATE=' + file.fps + const codecs = await Promise.all([ + getVideoStreamCodec(videoFilePath), + getAudioStreamCodec(videoFilePath) + ]) - const codecs = await Promise.all([ - getVideoStreamCodec(videoFilePath), - getAudioStreamCodec(videoFilePath) - ]) + line += `,CODECS="${codecs.filter(c => !!c).join(',')}"` - line += `,CODECS="${codecs.filter(c => !!c).join(',')}"` - - masterPlaylists.push(line) - masterPlaylists.push(playlistFilename) + masterPlaylists.push(line) + masterPlaylists.push(playlistFilename) + }) } - await writeFile(masterPlaylistPath, masterPlaylists.join('\n') + '\n') + await VideoPathManager.Instance.makeAvailablePlaylistFile(playlist, playlist.playlistFilename, masterPlaylistPath => { + return writeFile(masterPlaylistPath, masterPlaylists.join('\n') + '\n') + }) } async function updateSha256VODSegments (video: MVideoWithFile, playlist: MStreamingPlaylistFilesVideo) { const json: { [filename: string]: { [range: string]: string } } = {} - const playlistDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) - // For all the resolutions available for this video for (const file of playlist.VideoFiles) { const rangeHashes: { [range: string]: string } = {} - const videoPath = getVideoFilePath(playlist, file) - const resolutionPlaylistPath = join(playlistDirectory, getHlsResolutionPlaylistFilename(file.filename)) - - // Maybe the playlist is not generated for this resolution yet - if (!await pathExists(resolutionPlaylistPath)) continue + await VideoPathManager.Instance.makeAvailableVideoFile(playlist, file, videoPath => { - const playlistContent = await readFile(resolutionPlaylistPath) - const ranges = getRangesFromPlaylist(playlistContent.toString()) + return VideoPathManager.Instance.makeAvailableResolutionPlaylistFile(playlist, file, async resolutionPlaylistPath => { + const 
playlistContent = await readFile(resolutionPlaylistPath) + const ranges = getRangesFromPlaylist(playlistContent.toString()) - const fd = await open(videoPath, 'r') - for (const range of ranges) { - const buf = Buffer.alloc(range.length) - await read(fd, buf, 0, range.length, range.offset) + const fd = await open(videoPath, 'r') + for (const range of ranges) { + const buf = Buffer.alloc(range.length) + await read(fd, buf, 0, range.length, range.offset) - rangeHashes[`${range.offset}-${range.offset + range.length - 1}`] = sha256(buf) - } - await close(fd) + rangeHashes[`${range.offset}-${range.offset + range.length - 1}`] = sha256(buf) + } + await close(fd) - const videoFilename = file.filename - json[videoFilename] = rangeHashes + const videoFilename = file.filename + json[videoFilename] = rangeHashes + }) + }) } - const outputPath = join(playlistDirectory, playlist.segmentsSha256Filename) + const outputPath = VideoPathManager.Instance.getFSHLSOutputPath(video, playlist.segmentsSha256Filename) await outputJSON(outputPath, json) } diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts index 2b1dca3d18e..a0c58d21143 100644 --- a/server/lib/job-queue/handlers/move-to-object-storage.ts +++ b/server/lib/job-queue/handlers/move-to-object-storage.ts @@ -4,13 +4,12 @@ import { join } from 'path' import { logger } from '@server/helpers/logger' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { CONFIG } from '@server/initializers/config' -import { HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' import { storeHLSFile, storeWebTorrentFile } from '@server/lib/object-storage' -import { getHLSDirectory, getHlsResolutionPlaylistFilename } from '@server/lib/video-paths' +import { getHLSDirectory, getHlsResolutionPlaylistFilename } from '@server/lib/paths' import { moveToNextState } from '@server/lib/video-state' import { VideoModel } from '@server/models/video/video' import { VideoJobInfoModel } from '@server/models/video/video-job-info' -import { MVideoFile, MVideoWithAllFiles } from '@server/types/models' +import { MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoWithAllFiles } from '@server/types/models' import { MoveObjectStoragePayload, VideoStorage } from '../../../../shared' export async function processMoveToObjectStorage (job: Bull.Job) { @@ -28,14 +27,14 @@ export async function processMoveToObjectStorage (job: Bull.Job) { await moveWebTorrentFiles(video) } - if (CONFIG.TRANSCODING.HLS.ENABLED && video.VideoStreamingPlaylists) { + if (video.VideoStreamingPlaylists) { await moveHLSFiles(video) } const pendingMove = await VideoJobInfoModel.decrease(video.uuid, 'pendingMove') if (pendingMove === 0) { logger.info('Running cleanup after moving files to object storage (video %s in job %d)', video.uuid, job.id) - await doAfterLastJob(video) + await doAfterLastJob(video, payload.isNewVideo) } return payload.videoUUID @@ -45,12 +44,12 @@ export async function processMoveToObjectStorage (job: Bull.Job) { async function moveWebTorrentFiles (video: MVideoWithAllFiles) { for (const file of video.VideoFiles) { - if (file.storage !== VideoStorage.LOCAL) continue + if (file.storage !== VideoStorage.FILE_SYSTEM) continue const fileUrl = await storeWebTorrentFile(file.filename) const oldPath = join(CONFIG.STORAGE.VIDEOS_DIR, file.filename) - await onFileMoved({ video, file, fileUrl, oldPath }) + await onFileMoved({ videoOrPlaylist: video, file, fileUrl, oldPath }) } } @@ -58,7 +57,7 @@ 
async function moveHLSFiles (video: MVideoWithAllFiles) { for (const playlist of video.VideoStreamingPlaylists) { for (const file of playlist.VideoFiles) { - if (file.storage !== VideoStorage.LOCAL) continue + if (file.storage !== VideoStorage.FILE_SYSTEM) continue // Resolution playlist const playlistFilename = getHlsResolutionPlaylistFilename(file.filename) @@ -69,13 +68,15 @@ async function moveHLSFiles (video: MVideoWithAllFiles) { const oldPath = join(getHLSDirectory(video), file.filename) - await onFileMoved({ video, file, fileUrl, oldPath }) + await onFileMoved({ videoOrPlaylist: Object.assign(playlist, { Video: video }), file, fileUrl, oldPath }) } } } -async function doAfterLastJob (video: MVideoWithAllFiles) { +async function doAfterLastJob (video: MVideoWithAllFiles, isNewVideo: boolean) { for (const playlist of video.VideoStreamingPlaylists) { + if (playlist.storage === VideoStorage.OBJECT_STORAGE) continue + // Master playlist playlist.playlistUrl = await storeHLSFile(playlist, video, playlist.playlistFilename) // Sha256 segments file @@ -88,24 +89,24 @@ async function doAfterLastJob (video: MVideoWithAllFiles) { // Remove empty hls video directory if (video.VideoStreamingPlaylists) { - await remove(join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)) + await remove(getHLSDirectory(video)) } - await moveToNextState(video) + await moveToNextState(video, isNewVideo) } async function onFileMoved (options: { - video: MVideoWithAllFiles + videoOrPlaylist: MVideo | MStreamingPlaylistVideo file: MVideoFile fileUrl: string oldPath: string }) { - const { video, file, fileUrl, oldPath } = options + const { videoOrPlaylist, file, fileUrl, oldPath } = options file.fileUrl = fileUrl file.storage = VideoStorage.OBJECT_STORAGE - await createTorrentAndSetInfoHash(video, file) + await createTorrentAndSetInfoHash(videoOrPlaylist, file) await file.save() logger.debug('Removing %s because it\'s now on object storage', oldPath) diff --git a/server/lib/job-queue/handlers/video-file-import.ts b/server/lib/job-queue/handlers/video-file-import.ts index 2f4abf73064..e8ee1f7596a 100644 --- a/server/lib/job-queue/handlers/video-file-import.ts +++ b/server/lib/job-queue/handlers/video-file-import.ts @@ -2,15 +2,19 @@ import * as Bull from 'bull' import { copy, stat } from 'fs-extra' import { getLowercaseExtension } from '@server/helpers/core-utils' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' -import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths' +import { CONFIG } from '@server/initializers/config' +import { federateVideoIfNeeded } from '@server/lib/activitypub/videos' +import { generateWebTorrentVideoFilename } from '@server/lib/paths' +import { addMoveToObjectStorageJob } from '@server/lib/video' +import { VideoPathManager } from '@server/lib/video-path-manager' import { UserModel } from '@server/models/user/user' import { MVideoFullLight } from '@server/types/models' -import { VideoFileImportPayload } from '@shared/models' +import { VideoFileImportPayload, VideoStorage } from '@shared/models' import { getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffprobe-utils' import { logger } from '../../../helpers/logger' import { VideoModel } from '../../../models/video/video' import { VideoFileModel } from '../../../models/video/video-file' -import { onNewWebTorrentFileResolution } from './video-transcoding' +import { createHlsJobIfEnabled } from './video-transcoding' async function processVideoFileImport (job: Bull.Job) { const 
payload = job.data as VideoFileImportPayload @@ -29,15 +33,19 @@ async function processVideoFileImport (job: Bull.Job) { const user = await UserModel.loadByChannelActorId(video.VideoChannel.actorId) - const newResolutionPayload = { - type: 'new-resolution-to-webtorrent' as 'new-resolution-to-webtorrent', + await createHlsJobIfEnabled(user, { videoUUID: video.uuid, resolution: data.resolution, isPortraitMode: data.isPortraitMode, - copyCodecs: false, - isNewVideo: false + copyCodecs: true, + isMaxQuality: false + }) + + if (CONFIG.OBJECT_STORAGE.ENABLED) { + await addMoveToObjectStorageJob(video) + } else { + await federateVideoIfNeeded(video, false) } - await onNewWebTorrentFileResolution(video, user, newResolutionPayload) return video } @@ -72,12 +80,13 @@ async function updateVideoFile (video: MVideoFullLight, inputFilePath: string) { resolution, extname: fileExt, filename: generateWebTorrentVideoFilename(resolution, fileExt), + storage: VideoStorage.FILE_SYSTEM, size, fps, videoId: video.id }) - const outputPath = getVideoFilePath(video, newVideoFile) + const outputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, newVideoFile) await copy(inputFilePath, outputPath) video.VideoFiles.push(newVideoFile) diff --git a/server/lib/job-queue/handlers/video-import.ts b/server/lib/job-queue/handlers/video-import.ts index fec553f2b1e..a5fa204f55a 100644 --- a/server/lib/job-queue/handlers/video-import.ts +++ b/server/lib/job-queue/handlers/video-import.ts @@ -4,11 +4,13 @@ import { getLowercaseExtension } from '@server/helpers/core-utils' import { retryTransactionWrapper } from '@server/helpers/database-utils' import { YoutubeDL } from '@server/helpers/youtube-dl' import { isPostImportVideoAccepted } from '@server/lib/moderation' +import { generateWebTorrentVideoFilename } from '@server/lib/paths' import { Hooks } from '@server/lib/plugins/hooks' import { ServerConfigManager } from '@server/lib/server-config-manager' import { isAbleToUploadVideo } from '@server/lib/user' -import { addOptimizeOrMergeAudioJob } from '@server/lib/video' -import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths' +import { addMoveToObjectStorageJob, addOptimizeOrMergeAudioJob } from '@server/lib/video' +import { VideoPathManager } from '@server/lib/video-path-manager' +import { buildNextVideoState } from '@server/lib/video-state' import { ThumbnailModel } from '@server/models/video/thumbnail' import { MVideoImportDefault, MVideoImportDefaultFiles, MVideoImportVideo } from '@server/types/models/video/video-import' import { @@ -25,7 +27,6 @@ import { getDurationFromVideoFile, getVideoFileFPS, getVideoFileResolution } fro import { logger } from '../../../helpers/logger' import { getSecureTorrentName } from '../../../helpers/utils' import { createTorrentAndSetInfoHash, downloadWebTorrentVideo } from '../../../helpers/webtorrent' -import { CONFIG } from '../../../initializers/config' import { VIDEO_IMPORT_TIMEOUT } from '../../../initializers/constants' import { sequelizeTypescript } from '../../../initializers/database' import { VideoModel } from '../../../models/video/video' @@ -100,7 +101,6 @@ type ProcessFileOptions = { } async function processFile (downloader: () => Promise, videoImport: MVideoImportDefault, options: ProcessFileOptions) { let tempVideoPath: string - let videoDestFile: string let videoFile: VideoFileModel try { @@ -159,7 +159,7 @@ async function processFile (downloader: () => Promise, videoImport: MVid const videoImportWithFiles: MVideoImportDefaultFiles 
= Object.assign(videoImport, { Video: videoWithFiles }) // Move file - videoDestFile = getVideoFilePath(videoImportWithFiles.Video, videoFile) + const videoDestFile = VideoPathManager.Instance.getFSVideoFileOutputPath(videoImportWithFiles.Video, videoFile) await move(tempVideoPath, videoDestFile) tempVideoPath = null // This path is not used anymore @@ -204,7 +204,7 @@ async function processFile (downloader: () => Promise, videoImport: MVid // Update video DB object video.duration = duration - video.state = CONFIG.TRANSCODING.ENABLED ? VideoState.TO_TRANSCODE : VideoState.PUBLISHED + video.state = buildNextVideoState(video.state) await video.save({ transaction: t }) if (thumbnailModel) await video.addAndSaveThumbnail(thumbnailModel, t) @@ -245,6 +245,10 @@ async function processFile (downloader: () => Promise, videoImport: MVid Notifier.Instance.notifyOnNewVideoIfNeeded(video) } + if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { + return addMoveToObjectStorageJob(videoImportUpdated.Video) + } + // Create transcoding jobs? if (video.state === VideoState.TO_TRANSCODE) { await addOptimizeOrMergeAudioJob(videoImportUpdated.Video, videoFile, videoImport.User) diff --git a/server/lib/job-queue/handlers/video-live-ending.ts b/server/lib/job-queue/handlers/video-live-ending.ts index 38523c75245..9ccf724c2dc 100644 --- a/server/lib/job-queue/handlers/video-live-ending.ts +++ b/server/lib/job-queue/handlers/video-live-ending.ts @@ -4,9 +4,10 @@ import { join } from 'path' import { ffprobePromise, getAudioStream, getDurationFromVideoFile, getVideoFileResolution } from '@server/helpers/ffprobe-utils' import { VIDEO_LIVE } from '@server/initializers/constants' import { buildConcatenatedName, cleanupLive, LiveSegmentShaStore } from '@server/lib/live' +import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getLiveDirectory } from '@server/lib/paths' import { generateVideoMiniature } from '@server/lib/thumbnail' import { generateHlsPlaylistResolutionFromTS } from '@server/lib/transcoding/video-transcoding' -import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getHLSDirectory } from '@server/lib/video-paths' +import { VideoPathManager } from '@server/lib/video-path-manager' import { moveToNextState } from '@server/lib/video-state' import { VideoModel } from '@server/models/video/video' import { VideoFileModel } from '@server/models/video/video-file' @@ -55,16 +56,15 @@ export { // --------------------------------------------------------------------------- async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MStreamingPlaylist) { - const hlsDirectory = getHLSDirectory(video, false) - const replayDirectory = join(hlsDirectory, VIDEO_LIVE.REPLAY_DIRECTORY) + const replayDirectory = VideoPathManager.Instance.getFSHLSOutputPath(video, VIDEO_LIVE.REPLAY_DIRECTORY) - const rootFiles = await readdir(hlsDirectory) + const rootFiles = await readdir(getLiveDirectory(video)) const playlistFiles = rootFiles.filter(file => { return file.endsWith('.m3u8') && file !== streamingPlaylist.playlistFilename }) - await cleanupLiveFiles(hlsDirectory) + await cleanupTMPLiveFiles(getLiveDirectory(video)) await live.destroy() @@ -136,7 +136,7 @@ async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MSt await moveToNextState(videoWithFiles, false) } -async function cleanupLiveFiles (hlsDirectory: string) { +async function cleanupTMPLiveFiles (hlsDirectory: string) { if (!await pathExists(hlsDirectory)) return const files = 
await readdir(hlsDirectory) diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts index 6070c1899fd..b3149dde896 100644 --- a/server/lib/job-queue/handlers/video-transcoding.ts +++ b/server/lib/job-queue/handlers/video-transcoding.ts @@ -1,7 +1,7 @@ import * as Bull from 'bull' import { TranscodeOptionsType } from '@server/helpers/ffmpeg-utils' import { addTranscodingJob, getTranscodingJobPriority } from '@server/lib/video' -import { getVideoFilePath } from '@server/lib/video-paths' +import { VideoPathManager } from '@server/lib/video-path-manager' import { moveToNextState } from '@server/lib/video-state' import { UserModel } from '@server/models/user/user' import { VideoJobInfoModel } from '@server/models/video/video-job-info' @@ -68,15 +68,16 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide : video.getMaxQualityFile() const videoOrStreamingPlaylist = videoFileInput.getVideoOrStreamingPlaylist() - const videoInputPath = getVideoFilePath(videoOrStreamingPlaylist, videoFileInput) - await generateHlsPlaylistResolution({ - video, - videoInputPath, - resolution: payload.resolution, - copyCodecs: payload.copyCodecs, - isPortraitMode: payload.isPortraitMode || false, - job + await VideoPathManager.Instance.makeAvailableVideoFile(videoOrStreamingPlaylist, videoFileInput, videoInputPath => { + return generateHlsPlaylistResolution({ + video, + videoInputPath, + resolution: payload.resolution, + copyCodecs: payload.copyCodecs, + isPortraitMode: payload.isPortraitMode || false, + job + }) }) await retryTransactionWrapper(onHlsPlaylistGeneration, video, user, payload) @@ -120,11 +121,18 @@ async function onHlsPlaylistGeneration (video: MVideoFullLight, user: MUser, pay video.VideoFiles = [] // Create HLS new resolution jobs - await createLowerResolutionsJobs(video, user, payload.resolution, payload.isPortraitMode, 'hls') + await createLowerResolutionsJobs({ + video, + user, + videoFileResolution: payload.resolution, + isPortraitMode: payload.isPortraitMode, + isNewVideo: payload.isNewVideo ?? true, + type: 'hls' + }) } - await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscoding') - await moveToNextState(video) + await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscode') + await moveToNextState(video, payload.isNewVideo) } async function onVideoFileOptimizer ( @@ -154,12 +162,20 @@ async function onVideoFileOptimizer ( isMaxQuality: true } const hasHls = await createHlsJobIfEnabled(user, originalFileHLSPayload) - const hasNewResolutions = await createLowerResolutionsJobs(videoDatabase, user, resolution, isPortraitMode, 'webtorrent') - await VideoJobInfoModel.decrease(videoDatabase.uuid, 'pendingTranscoding') + const hasNewResolutions = await createLowerResolutionsJobs({ + video: videoDatabase, + user, + videoFileResolution: resolution, + isPortraitMode, + type: 'webtorrent', + isNewVideo: payload.isNewVideo ?? 
true + }) + + await VideoJobInfoModel.decrease(videoDatabase.uuid, 'pendingTranscode') // Move to next state if there are no other resolutions to generate if (!hasHls && !hasNewResolutions) { - await moveToNextState(videoDatabase) + await moveToNextState(videoDatabase, payload.isNewVideo) } } @@ -169,28 +185,20 @@ async function onNewWebTorrentFileResolution ( payload: NewResolutionTranscodingPayload | MergeAudioTranscodingPayload ) { await createHlsJobIfEnabled(user, { ...payload, copyCodecs: true, isMaxQuality: false }) - await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscoding') + await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscode') - await moveToNextState(video) + await moveToNextState(video, payload.isNewVideo) } -// --------------------------------------------------------------------------- - -export { - processVideoTranscoding, - onNewWebTorrentFileResolution -} - -// --------------------------------------------------------------------------- - async function createHlsJobIfEnabled (user: MUserId, payload: { videoUUID: string resolution: number isPortraitMode?: boolean copyCodecs: boolean isMaxQuality: boolean + isNewVideo?: boolean }) { - if (!payload || CONFIG.TRANSCODING.HLS.ENABLED !== true) return false + if (!payload || CONFIG.TRANSCODING.ENABLED !== true || CONFIG.TRANSCODING.HLS.ENABLED !== true) return false const jobOptions = { priority: await getTranscodingJobPriority(user) @@ -202,7 +210,8 @@ async function createHlsJobIfEnabled (user: MUserId, payload: { resolution: payload.resolution, isPortraitMode: payload.isPortraitMode, copyCodecs: payload.copyCodecs, - isMaxQuality: payload.isMaxQuality + isMaxQuality: payload.isMaxQuality, + isNewVideo: payload.isNewVideo } await addTranscodingJob(hlsTranscodingPayload, jobOptions) @@ -210,13 +219,26 @@ async function createHlsJobIfEnabled (user: MUserId, payload: { return true } -async function createLowerResolutionsJobs ( - video: MVideoFullLight, - user: MUserId, - videoFileResolution: number, - isPortraitMode: boolean, +// --------------------------------------------------------------------------- + +export { + processVideoTranscoding, + createHlsJobIfEnabled, + onNewWebTorrentFileResolution +} + +// --------------------------------------------------------------------------- + +async function createLowerResolutionsJobs (options: { + video: MVideoFullLight + user: MUserId + videoFileResolution: number + isPortraitMode: boolean + isNewVideo: boolean type: 'hls' | 'webtorrent' -) { +}) { + const { video, user, videoFileResolution, isPortraitMode, isNewVideo, type } = options + // Create transcoding jobs if there are enabled resolutions const resolutionsEnabled = computeResolutionsToTranscode(videoFileResolution, 'vod') const resolutionCreated: number[] = [] @@ -230,7 +252,8 @@ async function createLowerResolutionsJobs ( type: 'new-resolution-to-webtorrent', videoUUID: video.uuid, resolution, - isPortraitMode + isPortraitMode, + isNewVideo } } @@ -241,7 +264,8 @@ async function createLowerResolutionsJobs ( resolution, isPortraitMode, copyCodecs: false, - isMaxQuality: false + isMaxQuality: false, + isNewVideo } } diff --git a/server/lib/live/live-manager.ts b/server/lib/live/live-manager.ts index 2a429fb3378..d7dc841d929 100644 --- a/server/lib/live/live-manager.ts +++ b/server/lib/live/live-manager.ts @@ -20,7 +20,7 @@ import { VideoState, VideoStreamingPlaylistType } from '@shared/models' import { federateVideoIfNeeded } from '../activitypub/videos' import { JobQueue } from '../job-queue' import { 
PeerTubeSocket } from '../peertube-socket' -import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename } from '../video-paths' +import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename } from '../paths' import { LiveQuotaStore } from './live-quota-store' import { LiveSegmentShaStore } from './live-segment-sha-store' import { cleanupLive } from './live-utils' diff --git a/server/lib/live/live-utils.ts b/server/lib/live/live-utils.ts index e4526c7a574..3bf723b9816 100644 --- a/server/lib/live/live-utils.ts +++ b/server/lib/live/live-utils.ts @@ -1,7 +1,7 @@ import { remove } from 'fs-extra' import { basename } from 'path' import { MStreamingPlaylist, MVideo } from '@server/types/models' -import { getHLSDirectory } from '../video-paths' +import { getLiveDirectory } from '../paths' function buildConcatenatedName (segmentOrPlaylistPath: string) { const num = basename(segmentOrPlaylistPath).match(/^(\d+)(-|\.)/) @@ -10,7 +10,7 @@ function buildConcatenatedName (segmentOrPlaylistPath: string) { } async function cleanupLive (video: MVideo, streamingPlaylist: MStreamingPlaylist) { - const hlsDirectory = getHLSDirectory(video) + const hlsDirectory = getLiveDirectory(video) await remove(hlsDirectory) diff --git a/server/lib/live/shared/muxing-session.ts b/server/lib/live/shared/muxing-session.ts index a80abc843e3..9b5b6c4fc99 100644 --- a/server/lib/live/shared/muxing-session.ts +++ b/server/lib/live/shared/muxing-session.ts @@ -11,9 +11,9 @@ import { CONFIG } from '@server/initializers/config' import { MEMOIZE_TTL, VIDEO_LIVE } from '@server/initializers/constants' import { VideoFileModel } from '@server/models/video/video-file' import { MStreamingPlaylistVideo, MUserId, MVideoLiveVideo } from '@server/types/models' +import { getLiveDirectory } from '../../paths' import { VideoTranscodingProfilesManager } from '../../transcoding/video-transcoding-profiles' import { isAbleToUploadVideo } from '../../user' -import { getHLSDirectory } from '../../video-paths' import { LiveQuotaStore } from '../live-quota-store' import { LiveSegmentShaStore } from '../live-segment-sha-store' import { buildConcatenatedName } from '../live-utils' @@ -282,7 +282,7 @@ class MuxingSession extends EventEmitter { } private async prepareDirectories () { - const outPath = getHLSDirectory(this.videoLive.Video) + const outPath = getLiveDirectory(this.videoLive.Video) await ensureDir(outPath) const replayDirectory = join(outPath, VIDEO_LIVE.REPLAY_DIRECTORY) diff --git a/server/lib/object-storage/keys.ts b/server/lib/object-storage/keys.ts index 998139964d7..51947477513 100644 --- a/server/lib/object-storage/keys.ts +++ b/server/lib/object-storage/keys.ts @@ -1,12 +1,12 @@ import { join } from 'path' import { MStreamingPlaylist, MVideoUUID } from '@server/types/models' -function generateHLSObjectStorageKey (playlist: MStreamingPlaylist, video: MVideoUUID, filename?: string) { - const base = playlist.getStringType() + '_' + video.uuid - - if (!filename) return base +function generateHLSObjectStorageKey (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string) { + return join(generateHLSObjectBaseStorageKey(playlist, video), filename) +} - return join(base, filename) +function generateHLSObjectBaseStorageKey (playlist: MStreamingPlaylist, video: MVideoUUID) { + return playlist.getStringType() + '_' + video.uuid } function generateWebTorrentObjectStorageKey (filename: string) { @@ -15,5 +15,6 @@ function generateWebTorrentObjectStorageKey (filename: string) { export { 
generateHLSObjectStorageKey, + generateHLSObjectBaseStorageKey, generateWebTorrentObjectStorageKey } diff --git a/server/lib/object-storage/shared/client.ts b/server/lib/object-storage/shared/client.ts index 7a306410a5c..c9a61459336 100644 --- a/server/lib/object-storage/shared/client.ts +++ b/server/lib/object-storage/shared/client.ts @@ -3,11 +3,14 @@ import { logger } from '@server/helpers/logger' import { CONFIG } from '@server/initializers/config' import { lTags } from './logger' -const endpointConfig = CONFIG.OBJECT_STORAGE.ENDPOINT -const endpoint = endpointConfig.startsWith('http://') || endpointConfig.startsWith('https://') - ? CONFIG.OBJECT_STORAGE.ENDPOINT - : 'https://' + CONFIG.OBJECT_STORAGE.ENDPOINT -const endpointParsed = new URL(endpoint) +let endpointParsed: URL +function getEndpointParsed () { + if (endpointParsed) return endpointParsed + + endpointParsed = new URL(getEndpoint()) + + return endpointParsed +} let s3Client: S3Client function getClient () { @@ -16,21 +19,38 @@ function getClient () { const OBJECT_STORAGE = CONFIG.OBJECT_STORAGE s3Client = new S3Client({ - endpoint, + endpoint: getEndpoint(), region: OBJECT_STORAGE.REGION, - credentials: { - accessKeyId: OBJECT_STORAGE.CREDENTIALS.ACCESS_KEY_ID, - secretAccessKey: OBJECT_STORAGE.CREDENTIALS.SECRET_ACCESS_KEY - } + credentials: OBJECT_STORAGE.CREDENTIALS.ACCESS_KEY_ID + ? { + accessKeyId: OBJECT_STORAGE.CREDENTIALS.ACCESS_KEY_ID, + secretAccessKey: OBJECT_STORAGE.CREDENTIALS.SECRET_ACCESS_KEY + } + : undefined }) - logger.info('Initialized S3 client %s with region %s.', endpoint, OBJECT_STORAGE.REGION, lTags()) + logger.info('Initialized S3 client %s with region %s.', getEndpoint(), OBJECT_STORAGE.REGION, lTags()) return s3Client } +// --------------------------------------------------------------------------- + export { - endpoint, - endpointParsed, + getEndpointParsed, getClient } + +// --------------------------------------------------------------------------- + +let endpoint: string +function getEndpoint () { + if (endpoint) return endpoint + + const endpointConfig = CONFIG.OBJECT_STORAGE.ENDPOINT + endpoint = endpointConfig.startsWith('http://') || endpointConfig.startsWith('https://') + ? 
CONFIG.OBJECT_STORAGE.ENDPOINT + : 'https://' + CONFIG.OBJECT_STORAGE.ENDPOINT + + return endpoint +} diff --git a/server/lib/object-storage/shared/object-storage-helpers.ts b/server/lib/object-storage/shared/object-storage-helpers.ts index 513c4afcb00..e2321690740 100644 --- a/server/lib/object-storage/shared/object-storage-helpers.ts +++ b/server/lib/object-storage/shared/object-storage-helpers.ts @@ -98,17 +98,24 @@ async function removePrefix (prefix: string, bucketInfo: BucketInfo) { if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo) } -async function makeAvailable (options: { filename: string, at: string }, bucketInfo: BucketInfo) { - await ensureDir(dirname(options.at)) +async function makeAvailable (options: { + key: string + destination: string + bucketInfo: BucketInfo +}) { + const { key, destination, bucketInfo } = options + + await ensureDir(dirname(options.destination)) const command = new GetObjectCommand({ Bucket: bucketInfo.BUCKET_NAME, - Key: buildKey(options.filename, bucketInfo) + Key: buildKey(key, bucketInfo) }) const response = await getClient().send(command) - const file = createWriteStream(options.at) + const file = createWriteStream(destination) await pipelinePromise(response.Body as Readable, file) + file.close() } @@ -176,6 +183,7 @@ async function multiPartUpload (options: { partNumber, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags() ) + // FIXME: Remove when https://github.com/aws/aws-sdk-js-v3/pull/2637 is released // The s3 sdk needs to know the length of the http body beforehand, but doesn't support // streams with start and end set, so it just tries to stat the file in stream.path. // This fails for us because we only want to send part of the file. The stream type diff --git a/server/lib/object-storage/urls.ts b/server/lib/object-storage/urls.ts index a9d8516ec0f..2a889190b9f 100644 --- a/server/lib/object-storage/urls.ts +++ b/server/lib/object-storage/urls.ts @@ -1,5 +1,5 @@ import { CONFIG } from '@server/initializers/config' -import { BucketInfo, buildKey, endpointParsed } from './shared' +import { BucketInfo, buildKey, getEndpointParsed } from './shared' function getPrivateUrl (config: BucketInfo, keyWithoutPrefix: string) { return getBaseUrl(config) + buildKey(keyWithoutPrefix, config) @@ -31,7 +31,7 @@ export { function getBaseUrl (bucketInfo: BucketInfo, baseUrl?: string) { if (baseUrl) return baseUrl - return `${endpointParsed.protocol}//${bucketInfo.BUCKET_NAME}.${endpointParsed.host}/` + return `${getEndpointParsed().protocol}//${bucketInfo.BUCKET_NAME}.${getEndpointParsed().host}/` } const regex = new RegExp('https?://[^/]+') diff --git a/server/lib/object-storage/videos.ts b/server/lib/object-storage/videos.ts index 6e5535db0c3..15b8f58d5e0 100644 --- a/server/lib/object-storage/videos.ts +++ b/server/lib/object-storage/videos.ts @@ -1,9 +1,10 @@ import { join } from 'path' +import { logger } from '@server/helpers/logger' import { CONFIG } from '@server/initializers/config' import { MStreamingPlaylist, MVideoFile, MVideoUUID } from '@server/types/models' -import { getHLSDirectory } from '../video-paths' -import { generateHLSObjectStorageKey, generateWebTorrentObjectStorageKey } from './keys' -import { removeObject, removePrefix, storeObject } from './shared' +import { getHLSDirectory } from '../paths' +import { generateHLSObjectBaseStorageKey, generateHLSObjectStorageKey, generateWebTorrentObjectStorageKey } from './keys' +import { lTags, makeAvailable, removeObject, removePrefix, storeObject } from 
'./shared' function storeHLSFile (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string) { const baseHlsDirectory = getHLSDirectory(video) @@ -24,16 +25,48 @@ function storeWebTorrentFile (filename: string) { } function removeHLSObjectStorage (playlist: MStreamingPlaylist, video: MVideoUUID) { - return removePrefix(generateHLSObjectStorageKey(playlist, video), CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) + return removePrefix(generateHLSObjectBaseStorageKey(playlist, video), CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS) } function removeWebTorrentObjectStorage (videoFile: MVideoFile) { return removeObject(generateWebTorrentObjectStorageKey(videoFile.filename), CONFIG.OBJECT_STORAGE.VIDEOS) } +async function makeHLSFileAvailable (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string, destination: string) { + const key = generateHLSObjectStorageKey(playlist, video, filename) + + logger.info('Fetching HLS file %s from object storage to %s.', key, destination, lTags()) + + await makeAvailable({ + key, + destination, + bucketInfo: CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS + }) + + return destination +} + +async function makeWebTorrentFileAvailable (filename: string, destination: string) { + const key = generateWebTorrentObjectStorageKey(filename) + + logger.info('Fetching WebTorrent file %s from object storage to %s.', key, destination, lTags()) + + await makeAvailable({ + key, + destination, + bucketInfo: CONFIG.OBJECT_STORAGE.VIDEOS + }) + + return destination +} + export { storeWebTorrentFile, storeHLSFile, + removeHLSObjectStorage, - removeWebTorrentObjectStorage + removeWebTorrentObjectStorage, + + makeWebTorrentFileAvailable, + makeHLSFileAvailable } diff --git a/server/lib/paths.ts b/server/lib/paths.ts new file mode 100644 index 00000000000..434e637c6f9 --- /dev/null +++ b/server/lib/paths.ts @@ -0,0 +1,82 @@ +import { join } from 'path' +import { buildUUID } from '@server/helpers/uuid' +import { CONFIG } from '@server/initializers/config' +import { HLS_REDUNDANCY_DIRECTORY, HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants' +import { isStreamingPlaylist, MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models' +import { removeFragmentedMP4Ext } from '@shared/core-utils' + +// ################## Video file name ################## + +function generateWebTorrentVideoFilename (resolution: number, extname: string) { + return buildUUID() + '-' + resolution + extname +} + +function generateHLSVideoFilename (resolution: number) { + return `${buildUUID()}-${resolution}-fragmented.mp4` +} + +// ################## Streaming playlist ################## + +function getLiveDirectory (video: MVideoUUID) { + return getHLSDirectory(video) +} + +function getHLSDirectory (video: MVideoUUID) { + return join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) +} + +function getHLSRedundancyDirectory (video: MVideoUUID) { + return join(HLS_REDUNDANCY_DIRECTORY, video.uuid) +} + +function getHlsResolutionPlaylistFilename (videoFilename: string) { + // Video file name already contain resolution + return removeFragmentedMP4Ext(videoFilename) + '.m3u8' +} + +function generateHLSMasterPlaylistFilename (isLive = false) { + if (isLive) return 'master.m3u8' + + return buildUUID() + '-master.m3u8' +} + +function generateHlsSha256SegmentsFilename (isLive = false) { + if (isLive) return 'segments-sha256.json' + + return buildUUID() + '-segments-sha256.json' +} + +// ################## Torrents ################## + +function generateTorrentFileName 
(videoOrPlaylist: MVideo | MStreamingPlaylistVideo, resolution: number) { + const extension = '.torrent' + const uuid = buildUUID() + + if (isStreamingPlaylist(videoOrPlaylist)) { + return `${uuid}-${resolution}-${videoOrPlaylist.getStringType()}${extension}` + } + + return uuid + '-' + resolution + extension +} + +function getFSTorrentFilePath (videoFile: MVideoFile) { + return join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename) +} + +// --------------------------------------------------------------------------- + +export { + generateHLSVideoFilename, + generateWebTorrentVideoFilename, + + generateTorrentFileName, + getFSTorrentFilePath, + + getHLSDirectory, + getLiveDirectory, + getHLSRedundancyDirectory, + + generateHLSMasterPlaylistFilename, + generateHlsSha256SegmentsFilename, + getHlsResolutionPlaylistFilename +} diff --git a/server/lib/schedulers/videos-redundancy-scheduler.ts b/server/lib/schedulers/videos-redundancy-scheduler.ts index 137ae53a039..ebfd015b5eb 100644 --- a/server/lib/schedulers/videos-redundancy-scheduler.ts +++ b/server/lib/schedulers/videos-redundancy-scheduler.ts @@ -24,7 +24,7 @@ import { getLocalVideoCacheFileActivityPubUrl, getLocalVideoCacheStreamingPlayli import { getOrCreateAPVideo } from '../activitypub/videos' import { downloadPlaylistSegments } from '../hls' import { removeVideoRedundancy } from '../redundancy' -import { generateHLSRedundancyUrl, generateWebTorrentRedundancyUrl } from '../video-paths' +import { generateHLSRedundancyUrl, generateWebTorrentRedundancyUrl } from '../video-urls' import { AbstractScheduler } from './abstract-scheduler' type CandidateToDuplicate = { diff --git a/server/lib/thumbnail.ts b/server/lib/thumbnail.ts index c085239880d..d2384f53cf1 100644 --- a/server/lib/thumbnail.ts +++ b/server/lib/thumbnail.ts @@ -1,5 +1,4 @@ import { join } from 'path' - import { ThumbnailType } from '../../shared/models/videos/thumbnail.type' import { generateImageFromVideoFile } from '../helpers/ffmpeg-utils' import { generateImageFilename, processImage } from '../helpers/image-utils' @@ -10,7 +9,7 @@ import { ThumbnailModel } from '../models/video/thumbnail' import { MVideoFile, MVideoThumbnail, MVideoUUID } from '../types/models' import { MThumbnail } from '../types/models/video/thumbnail' import { MVideoPlaylistThumbnail } from '../types/models/video/video-playlist' -import { getVideoFilePath } from './video-paths' +import { VideoPathManager } from './video-path-manager' type ImageSize = { height?: number, width?: number } @@ -116,21 +115,22 @@ function generateVideoMiniature (options: { }) { const { video, videoFile, type } = options - const input = getVideoFilePath(video, videoFile) + return VideoPathManager.Instance.makeAvailableVideoFile(video, videoFile, input => { + const { filename, basePath, height, width, existingThumbnail, outputPath } = buildMetadataFromVideo(video, type) - const { filename, basePath, height, width, existingThumbnail, outputPath } = buildMetadataFromVideo(video, type) - const thumbnailCreator = videoFile.isAudio() - ? () => processImage(ASSETS_PATH.DEFAULT_AUDIO_BACKGROUND, outputPath, { width, height }, true) - : () => generateImageFromVideoFile(input, basePath, filename, { height, width }) + const thumbnailCreator = videoFile.isAudio() + ? 
() => processImage(ASSETS_PATH.DEFAULT_AUDIO_BACKGROUND, outputPath, { width, height }, true) + : () => generateImageFromVideoFile(input, basePath, filename, { height, width }) - return updateThumbnailFromFunction({ - thumbnailCreator, - filename, - height, - width, - type, - automaticallyGenerated: true, - existingThumbnail + return updateThumbnailFromFunction({ + thumbnailCreator, + filename, + height, + width, + type, + automaticallyGenerated: true, + existingThumbnail + }) }) } diff --git a/server/lib/transcoding/video-transcoding.ts b/server/lib/transcoding/video-transcoding.ts index 7330bc3d6bd..ee228c01114 100644 --- a/server/lib/transcoding/video-transcoding.ts +++ b/server/lib/transcoding/video-transcoding.ts @@ -4,13 +4,13 @@ import { basename, extname as extnameUtil, join } from 'path' import { toEven } from '@server/helpers/core-utils' import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' import { MStreamingPlaylistFilesVideo, MVideoFile, MVideoFullLight } from '@server/types/models' -import { VideoResolution } from '../../../shared/models/videos' +import { VideoResolution, VideoStorage } from '../../../shared/models/videos' import { VideoStreamingPlaylistType } from '../../../shared/models/videos/video-streaming-playlist.type' import { transcode, TranscodeOptions, TranscodeOptionsType } from '../../helpers/ffmpeg-utils' import { canDoQuickTranscode, getDurationFromVideoFile, getMetadataFromFile, getVideoFileFPS } from '../../helpers/ffprobe-utils' import { logger } from '../../helpers/logger' import { CONFIG } from '../../initializers/config' -import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION } from '../../initializers/constants' +import { P2P_MEDIA_LOADER_PEER_VERSION } from '../../initializers/constants' import { VideoFileModel } from '../../models/video/video-file' import { VideoStreamingPlaylistModel } from '../../models/video/video-streaming-playlist' import { updateMasterHLSPlaylist, updateSha256VODSegments } from '../hls' @@ -19,9 +19,9 @@ import { generateHlsSha256SegmentsFilename, generateHLSVideoFilename, generateWebTorrentVideoFilename, - getHlsResolutionPlaylistFilename, - getVideoFilePath -} from '../video-paths' + getHlsResolutionPlaylistFilename +} from '../paths' +import { VideoPathManager } from '../video-path-manager' import { VideoTranscodingProfilesManager } from './video-transcoding-profiles' /** @@ -32,159 +32,162 @@ import { VideoTranscodingProfilesManager } from './video-transcoding-profiles' */ // Optimize the original video file and replace it. The resolution is not changed. -async function optimizeOriginalVideofile (video: MVideoFullLight, inputVideoFile: MVideoFile, job?: Job) { +function optimizeOriginalVideofile (video: MVideoFullLight, inputVideoFile: MVideoFile, job?: Job) { const transcodeDirectory = CONFIG.STORAGE.TMP_DIR const newExtname = '.mp4' - const videoInputPath = getVideoFilePath(video, inputVideoFile) - const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname) + return VideoPathManager.Instance.makeAvailableVideoFile(video, inputVideoFile, async videoInputPath => { + const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname) - const transcodeType: TranscodeOptionsType = await canDoQuickTranscode(videoInputPath) - ? 'quick-transcode' - : 'video' + const transcodeType: TranscodeOptionsType = await canDoQuickTranscode(videoInputPath) + ? 
'quick-transcode' + : 'video' - const resolution = toEven(inputVideoFile.resolution) + const resolution = toEven(inputVideoFile.resolution) - const transcodeOptions: TranscodeOptions = { - type: transcodeType, + const transcodeOptions: TranscodeOptions = { + type: transcodeType, - inputPath: videoInputPath, - outputPath: videoTranscodedPath, + inputPath: videoInputPath, + outputPath: videoTranscodedPath, - availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), - profile: CONFIG.TRANSCODING.PROFILE, + availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), + profile: CONFIG.TRANSCODING.PROFILE, - resolution, + resolution, - job - } + job + } - // Could be very long! - await transcode(transcodeOptions) + // Could be very long! + await transcode(transcodeOptions) - try { - await remove(videoInputPath) + try { + await remove(videoInputPath) - // Important to do this before getVideoFilename() to take in account the new filename - inputVideoFile.extname = newExtname - inputVideoFile.filename = generateWebTorrentVideoFilename(resolution, newExtname) + // Important to do this before getVideoFilename() to take in account the new filename + inputVideoFile.extname = newExtname + inputVideoFile.filename = generateWebTorrentVideoFilename(resolution, newExtname) + inputVideoFile.storage = VideoStorage.FILE_SYSTEM - const videoOutputPath = getVideoFilePath(video, inputVideoFile) + const videoOutputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, inputVideoFile) - const { videoFile } = await onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath) + const { videoFile } = await onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath) - return { transcodeType, videoFile } - } catch (err) { - // Auto destruction... - video.destroy().catch(err => logger.error('Cannot destruct video after transcoding failure.', { err })) + return { transcodeType, videoFile } + } catch (err) { + // Auto destruction... + video.destroy().catch(err => logger.error('Cannot destruct video after transcoding failure.', { err })) - throw err - } + throw err + } + }) } -// Transcode the original video file to a lower resolution. 
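For readers following the optimizeOriginalVideofile rewrite above: the patch replaces direct path building with VideoPathManager.Instance.makeAvailableVideoFile(), which hands the callback a locally readable path and, when the file lives on object storage, first downloads it to the temporary directory and removes that copy once the callback settles. A minimal usage sketch, for illustration only (probeMaxQualityFPS is a hypothetical helper, not part of this patch; the imports mirror the ones used elsewhere in the series):

import { getVideoFileFPS } from '@server/helpers/ffprobe-utils'
import { VideoPathManager } from '@server/lib/video-path-manager'
import { MVideoFullLight } from '@server/types/models'

// Hypothetical caller: probe the FPS of the max quality file, wherever it is stored.
// With VideoStorage.FILE_SYSTEM the callback receives the on-disk path directly;
// with VideoStorage.OBJECT_STORAGE the file is fetched into CONFIG.STORAGE.TMP_DIR
// and the temporary copy is removed after the callback resolves or throws.
function probeMaxQualityFPS (video: MVideoFullLight) {
  return VideoPathManager.Instance.makeAvailableVideoFile(video, video.getMaxQualityFile(), videoPath => {
    return getVideoFileFPS(videoPath)
  })
}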
-async function transcodeNewWebTorrentResolution (video: MVideoFullLight, resolution: VideoResolution, isPortrait: boolean, job: Job) { +// Transcode the original video file to a lower resolution +// We are sure it's x264 in mp4 because optimizeOriginalVideofile was already executed +function transcodeNewWebTorrentResolution (video: MVideoFullLight, resolution: VideoResolution, isPortrait: boolean, job: Job) { const transcodeDirectory = CONFIG.STORAGE.TMP_DIR const extname = '.mp4' - // We are sure it's x264 in mp4 because optimizeOriginalVideofile was already executed - const videoInputPath = getVideoFilePath(video, video.getMaxQualityFile()) + return VideoPathManager.Instance.makeAvailableVideoFile(video, video.getMaxQualityFile(), async videoInputPath => { + const newVideoFile = new VideoFileModel({ + resolution, + extname, + filename: generateWebTorrentVideoFilename(resolution, extname), + size: 0, + videoId: video.id + }) - const newVideoFile = new VideoFileModel({ - resolution, - extname, - filename: generateWebTorrentVideoFilename(resolution, extname), - size: 0, - videoId: video.id - }) + const videoOutputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, newVideoFile) + const videoTranscodedPath = join(transcodeDirectory, newVideoFile.filename) - const videoOutputPath = getVideoFilePath(video, newVideoFile) - const videoTranscodedPath = join(transcodeDirectory, newVideoFile.filename) + const transcodeOptions = resolution === VideoResolution.H_NOVIDEO + ? { + type: 'only-audio' as 'only-audio', - const transcodeOptions = resolution === VideoResolution.H_NOVIDEO - ? { - type: 'only-audio' as 'only-audio', + inputPath: videoInputPath, + outputPath: videoTranscodedPath, - inputPath: videoInputPath, - outputPath: videoTranscodedPath, + availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), + profile: CONFIG.TRANSCODING.PROFILE, - availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), - profile: CONFIG.TRANSCODING.PROFILE, + resolution, - resolution, + job + } + : { + type: 'video' as 'video', + inputPath: videoInputPath, + outputPath: videoTranscodedPath, - job - } - : { - type: 'video' as 'video', - inputPath: videoInputPath, - outputPath: videoTranscodedPath, + availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), + profile: CONFIG.TRANSCODING.PROFILE, - availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), - profile: CONFIG.TRANSCODING.PROFILE, + resolution, + isPortraitMode: isPortrait, - resolution, - isPortraitMode: isPortrait, + job + } - job - } - - await transcode(transcodeOptions) + await transcode(transcodeOptions) - return onWebTorrentVideoFileTranscoding(video, newVideoFile, videoTranscodedPath, videoOutputPath) + return onWebTorrentVideoFileTranscoding(video, newVideoFile, videoTranscodedPath, videoOutputPath) + }) } // Merge an image with an audio file to create a video -async function mergeAudioVideofile (video: MVideoFullLight, resolution: VideoResolution, job: Job) { +function mergeAudioVideofile (video: MVideoFullLight, resolution: VideoResolution, job: Job) { const transcodeDirectory = CONFIG.STORAGE.TMP_DIR const newExtname = '.mp4' const inputVideoFile = video.getMinQualityFile() - const audioInputPath = getVideoFilePath(video, inputVideoFile) - const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname) + return VideoPathManager.Instance.makeAvailableVideoFile(video, inputVideoFile, async 
audioInputPath => { + const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname) - // If the user updates the video preview during transcoding - const previewPath = video.getPreview().getPath() - const tmpPreviewPath = join(CONFIG.STORAGE.TMP_DIR, basename(previewPath)) - await copyFile(previewPath, tmpPreviewPath) + // If the user updates the video preview during transcoding + const previewPath = video.getPreview().getPath() + const tmpPreviewPath = join(CONFIG.STORAGE.TMP_DIR, basename(previewPath)) + await copyFile(previewPath, tmpPreviewPath) - const transcodeOptions = { - type: 'merge-audio' as 'merge-audio', + const transcodeOptions = { + type: 'merge-audio' as 'merge-audio', - inputPath: tmpPreviewPath, - outputPath: videoTranscodedPath, + inputPath: tmpPreviewPath, + outputPath: videoTranscodedPath, - availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), - profile: CONFIG.TRANSCODING.PROFILE, + availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(), + profile: CONFIG.TRANSCODING.PROFILE, - audioPath: audioInputPath, - resolution, + audioPath: audioInputPath, + resolution, - job - } + job + } - try { - await transcode(transcodeOptions) + try { + await transcode(transcodeOptions) - await remove(audioInputPath) - await remove(tmpPreviewPath) - } catch (err) { - await remove(tmpPreviewPath) - throw err - } + await remove(audioInputPath) + await remove(tmpPreviewPath) + } catch (err) { + await remove(tmpPreviewPath) + throw err + } - // Important to do this before getVideoFilename() to take in account the new file extension - inputVideoFile.extname = newExtname - inputVideoFile.filename = generateWebTorrentVideoFilename(inputVideoFile.resolution, newExtname) + // Important to do this before getVideoFilename() to take in account the new file extension + inputVideoFile.extname = newExtname + inputVideoFile.filename = generateWebTorrentVideoFilename(inputVideoFile.resolution, newExtname) - const videoOutputPath = getVideoFilePath(video, inputVideoFile) - // ffmpeg generated a new video file, so update the video duration - // See https://trac.ffmpeg.org/ticket/5456 - video.duration = await getDurationFromVideoFile(videoTranscodedPath) - await video.save() + const videoOutputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, inputVideoFile) + // ffmpeg generated a new video file, so update the video duration + // See https://trac.ffmpeg.org/ticket/5456 + video.duration = await getDurationFromVideoFile(videoTranscodedPath) + await video.save() - return onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath) + return onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath) + }) } // Concat TS segments from a live video to a fragmented mp4 HLS playlist @@ -335,14 +338,13 @@ async function generateHlsPlaylistCommon (options: { videoStreamingPlaylistId: playlist.id }) - const videoFilePath = getVideoFilePath(playlist, newVideoFile) + const videoFilePath = VideoPathManager.Instance.getFSVideoFileOutputPath(playlist, newVideoFile) // Move files from tmp transcoded directory to the appropriate place - const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid) - await ensureDir(baseHlsDirectory) + await ensureDir(VideoPathManager.Instance.getFSHLSOutputPath(video)) // Move playlist file - const resolutionPlaylistPath = join(baseHlsDirectory, resolutionPlaylistFilename) + const resolutionPlaylistPath = 
VideoPathManager.Instance.getFSHLSOutputPath(video, resolutionPlaylistFilename) await move(resolutionPlaylistFileTranscodePath, resolutionPlaylistPath, { overwrite: true }) // Move video file await move(join(videoTranscodedBasePath, videoFilename), videoFilePath, { overwrite: true }) diff --git a/server/lib/video-path-manager.ts b/server/lib/video-path-manager.ts new file mode 100644 index 00000000000..4c5d0c89d17 --- /dev/null +++ b/server/lib/video-path-manager.ts @@ -0,0 +1,139 @@ +import { remove } from 'fs-extra' +import { extname, join } from 'path' +import { buildUUID } from '@server/helpers/uuid' +import { extractVideo } from '@server/helpers/video' +import { CONFIG } from '@server/initializers/config' +import { MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models' +import { VideoStorage } from '@shared/models' +import { makeHLSFileAvailable, makeWebTorrentFileAvailable } from './object-storage' +import { getHLSDirectory, getHLSRedundancyDirectory, getHlsResolutionPlaylistFilename } from './paths' + +type MakeAvailableCB = (path: string) => Promise | T + +class VideoPathManager { + + private static instance: VideoPathManager + + private constructor () {} + + getFSHLSOutputPath (video: MVideoUUID, filename?: string) { + const base = getHLSDirectory(video) + if (!filename) return base + + return join(base, filename) + } + + getFSRedundancyVideoFilePath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile) { + if (videoFile.isHLS()) { + const video = extractVideo(videoOrPlaylist) + + return join(getHLSRedundancyDirectory(video), videoFile.filename) + } + + return join(CONFIG.STORAGE.REDUNDANCY_DIR, videoFile.filename) + } + + getFSVideoFileOutputPath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile) { + if (videoFile.isHLS()) { + const video = extractVideo(videoOrPlaylist) + + return join(getHLSDirectory(video), videoFile.filename) + } + + return join(CONFIG.STORAGE.VIDEOS_DIR, videoFile.filename) + } + + async makeAvailableVideoFile (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile, cb: MakeAvailableCB) { + if (videoFile.storage === VideoStorage.FILE_SYSTEM) { + return this.makeAvailableFactory( + () => this.getFSVideoFileOutputPath(videoOrPlaylist, videoFile), + false, + cb + ) + } + + const destination = this.buildTMPDestination(videoFile.filename) + + if (videoFile.isHLS()) { + const video = extractVideo(videoOrPlaylist) + + return this.makeAvailableFactory( + () => makeHLSFileAvailable(videoOrPlaylist as MStreamingPlaylistVideo, video, videoFile.filename, destination), + true, + cb + ) + } + + return this.makeAvailableFactory( + () => makeWebTorrentFileAvailable(videoFile.filename, destination), + true, + cb + ) + } + + async makeAvailableResolutionPlaylistFile (playlist: MStreamingPlaylistVideo, videoFile: MVideoFile, cb: MakeAvailableCB) { + const filename = getHlsResolutionPlaylistFilename(videoFile.filename) + + if (videoFile.storage === VideoStorage.FILE_SYSTEM) { + return this.makeAvailableFactory( + () => join(getHLSDirectory(playlist.Video), filename), + false, + cb + ) + } + + return this.makeAvailableFactory( + () => makeHLSFileAvailable(playlist, playlist.Video, filename, this.buildTMPDestination(filename)), + true, + cb + ) + } + + async makeAvailablePlaylistFile (playlist: MStreamingPlaylistVideo, filename: string, cb: MakeAvailableCB) { + if (playlist.storage === VideoStorage.FILE_SYSTEM) { + return this.makeAvailableFactory( + () => 
join(getHLSDirectory(playlist.Video), filename), + false, + cb + ) + } + + return this.makeAvailableFactory( + () => makeHLSFileAvailable(playlist, playlist.Video, filename, this.buildTMPDestination(filename)), + true, + cb + ) + } + + private async makeAvailableFactory (method: () => Promise | string, clean: boolean, cb: MakeAvailableCB) { + let result: T + + const destination = await method() + + try { + result = await cb(destination) + } catch (err) { + if (destination && clean) await remove(destination) + throw err + } + + if (clean) await remove(destination) + + return result + } + + private buildTMPDestination (filename: string) { + return join(CONFIG.STORAGE.TMP_DIR, buildUUID() + extname(filename)) + + } + + static get Instance () { + return this.instance || (this.instance = new this()) + } +} + +// --------------------------------------------------------------------------- + +export { + VideoPathManager +} diff --git a/server/lib/video-paths.ts b/server/lib/video-paths.ts deleted file mode 100644 index 3bff6c0bd55..00000000000 --- a/server/lib/video-paths.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { stat } from 'fs-extra' -import { join } from 'path' -import { buildUUID } from '@server/helpers/uuid' -import { extractVideo } from '@server/helpers/video' -import { CONFIG } from '@server/initializers/config' -import { HLS_REDUNDANCY_DIRECTORY, HLS_STREAMING_PLAYLIST_DIRECTORY, STATIC_PATHS, WEBSERVER } from '@server/initializers/constants' -import { isStreamingPlaylist, MStreamingPlaylist, MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models' -import { removeFragmentedMP4Ext } from '@shared/core-utils' -import { makeAvailable } from './object-storage/shared/object-storage-helpers' - -// ################## Video file name ################## - -function generateWebTorrentVideoFilename (resolution: number, extname: string) { - return buildUUID() + '-' + resolution + extname -} - -function generateHLSVideoFilename (resolution: number) { - return `${buildUUID()}-${resolution}-fragmented.mp4` -} - -function getVideoFilePath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile, isRedundancy = false) { - if (videoFile.isHLS()) { - const video = extractVideo(videoOrPlaylist) - - return join(getHLSDirectory(video), videoFile.filename) - } - - const baseDir = isRedundancy - ? 
CONFIG.STORAGE.REDUNDANCY_DIR - : CONFIG.STORAGE.VIDEOS_DIR - - return join(baseDir, videoFile.filename) -} - -async function getVideoFilePathMakeAvailable ( - videoOrPlaylist: MVideo | MStreamingPlaylistVideo, - videoFile: MVideoFile -) { - const path = getVideoFilePath(videoOrPlaylist, videoFile) - try { - await stat(path) - return path - } catch { - // Continue if path not available - } - - if (videoFile.isHLS()) { - const video = extractVideo(videoOrPlaylist) - await makeAvailable( - { filename: join((videoOrPlaylist as MStreamingPlaylistVideo).getStringType(), video.uuid, videoFile.filename), at: path }, - CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS - ) - return path - } - - await makeAvailable({ filename: videoFile.filename, at: path }, CONFIG.OBJECT_STORAGE.VIDEOS) - return path -} - -// ################## Redundancy ################## - -function generateHLSRedundancyUrl (video: MVideo, playlist: MStreamingPlaylist) { - // Base URL used by our HLS player - return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + playlist.getStringType() + '/' + video.uuid -} - -function generateWebTorrentRedundancyUrl (file: MVideoFile) { - return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + file.filename -} - -// ################## Streaming playlist ################## - -function getHLSDirectory (video: MVideoUUID, isRedundancy = false) { - const baseDir = isRedundancy - ? HLS_REDUNDANCY_DIRECTORY - : HLS_STREAMING_PLAYLIST_DIRECTORY - - return join(baseDir, video.uuid) -} - -function getHlsResolutionPlaylistFilename (videoFilename: string) { - // Video file name already contain resolution - return removeFragmentedMP4Ext(videoFilename) + '.m3u8' -} - -function generateHLSMasterPlaylistFilename (isLive = false) { - if (isLive) return 'master.m3u8' - - return buildUUID() + '-master.m3u8' -} - -function generateHlsSha256SegmentsFilename (isLive = false) { - if (isLive) return 'segments-sha256.json' - - return buildUUID() + '-segments-sha256.json' -} - -// ################## Torrents ################## - -function generateTorrentFileName (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, resolution: number) { - const extension = '.torrent' - const uuid = buildUUID() - - if (isStreamingPlaylist(videoOrPlaylist)) { - return `${uuid}-${resolution}-${videoOrPlaylist.getStringType()}${extension}` - } - - return uuid + '-' + resolution + extension -} - -function getTorrentFilePath (videoFile: MVideoFile) { - return join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename) -} - -// ################## Meta data ################## - -function getLocalVideoFileMetadataUrl (video: MVideoUUID, videoFile: MVideoFile) { - const path = '/api/v1/videos/' - - return WEBSERVER.URL + path + video.uuid + '/metadata/' + videoFile.id -} - -// --------------------------------------------------------------------------- - -export { - generateHLSVideoFilename, - generateWebTorrentVideoFilename, - - getVideoFilePath, - getVideoFilePathMakeAvailable, - - generateTorrentFileName, - getTorrentFilePath, - - getHLSDirectory, - generateHLSMasterPlaylistFilename, - generateHlsSha256SegmentsFilename, - getHlsResolutionPlaylistFilename, - - getLocalVideoFileMetadataUrl, - - generateWebTorrentRedundancyUrl, - generateHLSRedundancyUrl -} diff --git a/server/lib/video-state.ts b/server/lib/video-state.ts index ee28f7e4884..0613d94bfa2 100644 --- a/server/lib/video-state.ts +++ b/server/lib/video-state.ts @@ -52,7 +52,7 @@ function moveToNextState (video: MVideoUUID, isNewVideo = true) { } if (newState === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { 
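    // Illustrative summary, not part of the patch: when object storage is enabled the
    // publication flow may gain an extra hop,
    //   TO_TRANSCODE -> TO_MOVE_TO_EXTERNAL_STORAGE -> PUBLISHED,
    // and moveToExternalStorageState() below only queues the 'move-to-object-storage'
    // job once "pendingTranscode" has dropped to 0. The isNewVideo flag added here is
    // carried inside that job payload so the final state transition still knows whether
    // it is handling a brand-new upload.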
-      return moveToExternalStorageState(videoDatabase, t)
+      return moveToExternalStorageState(videoDatabase, isNewVideo, t)
     }
   })
 }
 
@@ -83,17 +83,17 @@ async function moveToPublishedState (video: MVideoFullLight, isNewVideo: boolean
   }
 }
 
-async function moveToExternalStorageState (video: MVideoFullLight, transaction: Transaction) {
+async function moveToExternalStorageState (video: MVideoFullLight, isNewVideo: boolean, transaction: Transaction) {
   const videoJobInfo = await VideoJobInfoModel.load(video.id, transaction)
-  const pendingTranscoding = videoJobInfo?.pendingTranscoding || 0
+  const pendingTranscode = videoJobInfo?.pendingTranscode || 0
 
   // We want to wait all transcoding jobs before moving the video on an external storage
-  if (pendingTranscoding !== 0) return
+  if (pendingTranscode !== 0) return
 
   await video.setNewState(VideoState.TO_MOVE_TO_EXTERNAL_STORAGE, transaction)
 
   logger.info('Creating external storage move job for video %s.', video.uuid, { tags: [ video.uuid ] })
 
-  addMoveToObjectStorageJob(video)
+  addMoveToObjectStorageJob(video, isNewVideo)
     .catch(err => logger.error('Cannot add move to object storage job', { err }))
 }
diff --git a/server/lib/video-urls.ts b/server/lib/video-urls.ts
new file mode 100644
index 00000000000..64c2c9bf94a
--- /dev/null
+++ b/server/lib/video-urls.ts
@@ -0,0 +1,31 @@
+
+import { STATIC_PATHS, WEBSERVER } from '@server/initializers/constants'
+import { MStreamingPlaylist, MVideo, MVideoFile, MVideoUUID } from '@server/types/models'
+
+// ################## Redundancy ##################
+
+function generateHLSRedundancyUrl (video: MVideo, playlist: MStreamingPlaylist) {
+  // Base URL used by our HLS player
+  return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + playlist.getStringType() + '/' + video.uuid
+}
+
+function generateWebTorrentRedundancyUrl (file: MVideoFile) {
+  return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + file.filename
+}
+
+// ################## Meta data ##################
+
+function getLocalVideoFileMetadataUrl (video: MVideoUUID, videoFile: MVideoFile) {
+  const path = '/api/v1/videos/'
+
+  return WEBSERVER.URL + path + video.uuid + '/metadata/' + videoFile.id
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+  getLocalVideoFileMetadataUrl,
+
+  generateWebTorrentRedundancyUrl,
+  generateHLSRedundancyUrl
+}
diff --git a/server/lib/video.ts b/server/lib/video.ts
index 30575125444..0a2b93cc029 100644
--- a/server/lib/video.ts
+++ b/server/lib/video.ts
@@ -106,15 +106,15 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF
 }
 
 async function addTranscodingJob (payload: VideoTranscodingPayload, options: CreateJobOptions) {
-  await VideoJobInfoModel.increaseOrCreate(payload.videoUUID, 'pendingTranscoding')
+  await VideoJobInfoModel.increaseOrCreate(payload.videoUUID, 'pendingTranscode')
 
   return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: payload }, options)
 }
 
-async function addMoveToObjectStorageJob (video: MVideoUUID) {
+async function addMoveToObjectStorageJob (video: MVideoUUID, isNewVideo = true) {
   await VideoJobInfoModel.increaseOrCreate(video.uuid, 'pendingMove')
 
-  const dataInput = { videoUUID: video.uuid }
+  const dataInput = { videoUUID: video.uuid, isNewVideo }
 
   return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload: dataInput })
 }
diff --git a/server/models/video/formatter/video-format-utils.ts b/server/models/video/formatter/video-format-utils.ts
index 8a54de3b031..b3c4f390d1e 100644
---
a/server/models/video/formatter/video-format-utils.ts +++ b/server/models/video/formatter/video-format-utils.ts @@ -1,6 +1,6 @@ import { uuidToShort } from '@server/helpers/uuid' import { generateMagnetUri } from '@server/helpers/webtorrent' -import { getLocalVideoFileMetadataUrl } from '@server/lib/video-paths' +import { getLocalVideoFileMetadataUrl } from '@server/lib/video-urls' import { VideoFile } from '@shared/models/videos/video-file.model' import { ActivityTagObject, ActivityUrlObject, VideoObject } from '../../../../shared/models/activitypub/objects' import { Video, VideoDetails } from '../../../../shared/models/videos' diff --git a/server/models/video/video-file.ts b/server/models/video/video-file.ts index ccfbc817d4e..627c957635b 100644 --- a/server/models/video/video-file.ts +++ b/server/models/video/video-file.ts @@ -24,7 +24,7 @@ import { buildRemoteVideoBaseUrl } from '@server/helpers/activitypub' import { logger } from '@server/helpers/logger' import { extractVideo } from '@server/helpers/video' import { getHLSPublicFileUrl, getWebTorrentPublicFileUrl } from '@server/lib/object-storage' -import { getTorrentFilePath } from '@server/lib/video-paths' +import { getFSTorrentFilePath } from '@server/lib/paths' import { MStreamingPlaylistVideo, MVideo, MVideoWithHost } from '@server/types/models' import { AttributesOnly } from '@shared/core-utils' import { VideoStorage } from '@shared/models' @@ -217,7 +217,7 @@ export class VideoFileModel extends Model videoId: number @AllowNull(false) - @Default(VideoStorage.LOCAL) + @Default(VideoStorage.FILE_SYSTEM) @Column storage: VideoStorage @@ -280,7 +280,7 @@ export class VideoFileModel extends Model static async doesOwnedWebTorrentVideoFileExist (filename: string) { const query = 'SELECT 1 FROM "videoFile" INNER JOIN "video" ON "video"."id" = "videoFile"."videoId" AND "video"."remote" IS FALSE ' + - `WHERE "filename" = $filename AND "storage" = ${VideoStorage.LOCAL} LIMIT 1` + `WHERE "filename" = $filename AND "storage" = ${VideoStorage.FILE_SYSTEM} LIMIT 1` return doesExist(query, { filename }) } @@ -521,7 +521,7 @@ export class VideoFileModel extends Model removeTorrent () { if (!this.torrentFilename) return null - const torrentPath = getTorrentFilePath(this) + const torrentPath = getFSTorrentFilePath(this) return remove(torrentPath) .catch(err => logger.warn('Cannot delete torrent %s.', torrentPath, { err })) } diff --git a/server/models/video/video-job-info.ts b/server/models/video/video-job-info.ts index 766695b22b1..7c1fe67345b 100644 --- a/server/models/video/video-job-info.ts +++ b/server/models/video/video-job-info.ts @@ -34,7 +34,7 @@ export class VideoJobInfoModel extends Model VideoModel) @Unique @@ -57,7 +57,7 @@ export class VideoJobInfoModel extends Model { + static async increaseOrCreate (videoUUID: string, column: 'pendingMove' | 'pendingTranscode'): Promise { const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } } const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{ pendingMove: number }>(` @@ -79,7 +79,7 @@ export class VideoJobInfoModel extends Model { + static async decrease (videoUUID: string, column: 'pendingMove' | 'pendingTranscode'): Promise { const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } } const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{ pendingMove: number }>(` diff --git a/server/models/video/video-streaming-playlist.ts b/server/models/video/video-streaming-playlist.ts index 66277d396c7..3e9fd97c72e 
100644 --- a/server/models/video/video-streaming-playlist.ts +++ b/server/models/video/video-streaming-playlist.ts @@ -97,7 +97,7 @@ export class VideoStreamingPlaylistModel extends Model>> { return peertubeTruncate(this.description, { length: maxLength }) } - async getMaxQualityResolution () { + getMaxQualityResolution () { const file = this.getMaxQualityFile() const videoOrPlaylist = file.getVideoOrStreamingPlaylist() - const originalFilePath = await getVideoFilePathMakeAvailable(videoOrPlaylist, file) - return getVideoFileResolution(originalFilePath) + return VideoPathManager.Instance.makeAvailableVideoFile(videoOrPlaylist, file, originalFilePath => { + return getVideoFileResolution(originalFilePath) + }) } getDescriptionAPIPath () { @@ -1684,7 +1686,9 @@ export class VideoModel extends Model>> { } removeFileAndTorrent (videoFile: MVideoFile, isRedundancy = false) { - const filePath = getVideoFilePath(this, videoFile, isRedundancy) + const filePath = isRedundancy + ? VideoPathManager.Instance.getFSRedundancyVideoFilePath(this, videoFile) + : VideoPathManager.Instance.getFSVideoFileOutputPath(this, videoFile) const promises: Promise[] = [ remove(filePath) ] if (!isRedundancy) promises.push(videoFile.removeTorrent()) @@ -1697,7 +1701,9 @@ export class VideoModel extends Model>> { } async removeStreamingPlaylistFiles (streamingPlaylist: MStreamingPlaylist, isRedundancy = false) { - const directoryPath = getHLSDirectory(this, isRedundancy) + const directoryPath = isRedundancy + ? getHLSRedundancyDirectory(this) + : getHLSDirectory(this) await remove(directoryPath) diff --git a/server/tests/api/live/live-save-replay.ts b/server/tests/api/live/live-save-replay.ts index 8f1fb78a5c3..6c4ea90ca88 100644 --- a/server/tests/api/live/live-save-replay.ts +++ b/server/tests/api/live/live-save-replay.ts @@ -15,7 +15,9 @@ import { stopFfmpeg, testFfmpegStreamError, wait, - waitJobs + waitJobs, + waitUntilLivePublishedOnAllServers, + waitUntilLiveSavedOnAllServers } from '@shared/extra-utils' import { HttpStatusCode, LiveVideoCreate, VideoPrivacy, VideoState } from '@shared/models' @@ -66,18 +68,6 @@ describe('Save replay setting', function () { } } - async function waitUntilLivePublishedOnAllServers (videoId: string) { - for (const server of servers) { - await server.live.waitUntilPublished({ videoId }) - } - } - - async function waitUntilLiveSavedOnAllServers (videoId: string) { - for (const server of servers) { - await server.live.waitUntilSaved({ videoId }) - } - } - before(async function () { this.timeout(120000) @@ -127,7 +117,7 @@ describe('Save replay setting', function () { ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) - await waitUntilLivePublishedOnAllServers(liveVideoUUID) + await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) @@ -160,7 +150,7 @@ describe('Save replay setting', function () { ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) - await waitUntilLivePublishedOnAllServers(liveVideoUUID) + await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) @@ -189,7 +179,7 @@ describe('Save replay setting', function () { ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) - await waitUntilLivePublishedOnAllServers(liveVideoUUID) + await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) await 
checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) @@ -224,7 +214,7 @@ describe('Save replay setting', function () { this.timeout(20000) ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) - await waitUntilLivePublishedOnAllServers(liveVideoUUID) + await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) @@ -237,7 +227,7 @@ describe('Save replay setting', function () { await stopFfmpeg(ffmpegCommand) - await waitUntilLiveSavedOnAllServers(liveVideoUUID) + await waitUntilLiveSavedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) // Live has been transcoded @@ -268,7 +258,7 @@ describe('Save replay setting', function () { liveVideoUUID = await createLiveWrapper(true) ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) - await waitUntilLivePublishedOnAllServers(liveVideoUUID) + await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) @@ -296,7 +286,7 @@ describe('Save replay setting', function () { liveVideoUUID = await createLiveWrapper(true) ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) - await waitUntilLivePublishedOnAllServers(liveVideoUUID) + await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) await waitJobs(servers) await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) diff --git a/server/tests/api/object-storage/index.ts b/server/tests/api/object-storage/index.ts index e29a9b7670f..f319d6ef58d 100644 --- a/server/tests/api/object-storage/index.ts +++ b/server/tests/api/object-storage/index.ts @@ -1 +1,3 @@ +export * from './live' +export * from './video-imports' export * from './videos' diff --git a/server/tests/api/object-storage/live.ts b/server/tests/api/object-storage/live.ts new file mode 100644 index 00000000000..d3e6777f293 --- /dev/null +++ b/server/tests/api/object-storage/live.ts @@ -0,0 +1,136 @@ +/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ + +import 'mocha' +import * as chai from 'chai' +import { FfmpegCommand } from 'fluent-ffmpeg' +import { + areObjectStorageTestsDisabled, + createMultipleServers, + doubleFollow, + expectStartWith, + killallServers, + makeRawRequest, + ObjectStorageCommand, + PeerTubeServer, + setAccessTokensToServers, + setDefaultVideoChannel, + stopFfmpeg, + waitJobs, + waitUntilLivePublishedOnAllServers, + waitUntilLiveSavedOnAllServers +} from '@shared/extra-utils' +import { HttpStatusCode, LiveVideoCreate, VideoFile, VideoPrivacy } from '@shared/models' + +const expect = chai.expect + +async function createLive (server: PeerTubeServer) { + const attributes: LiveVideoCreate = { + channelId: server.store.channel.id, + privacy: VideoPrivacy.PUBLIC, + name: 'my super live', + saveReplay: true + } + + const { uuid } = await server.live.create({ fields: attributes }) + + return uuid +} + +async function checkFiles (files: VideoFile[]) { + for (const file of files) { + expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl()) + + await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) + } +} + +describe('Object storage for lives', function () { + if (areObjectStorageTestsDisabled()) return + + let ffmpegCommand: FfmpegCommand + let servers: PeerTubeServer[] + let videoUUID: string + + before(async function () { + this.timeout(120000) + + await ObjectStorageCommand.prepareDefaultBuckets() + + servers = 
await createMultipleServers(2, ObjectStorageCommand.getDefaultConfig()) + + await setAccessTokensToServers(servers) + await setDefaultVideoChannel(servers) + await doubleFollow(servers[0], servers[1]) + + await servers[0].config.enableTranscoding() + }) + + describe('Without live transcoding', async function () { + + before(async function () { + await servers[0].config.enableLive({ transcoding: false }) + + videoUUID = await createLive(servers[0]) + }) + + it('Should create a live and save the replay on object storage', async function () { + this.timeout(220000) + + ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID }) + await waitUntilLivePublishedOnAllServers(servers, videoUUID) + + await stopFfmpeg(ffmpegCommand) + + await waitUntilLiveSavedOnAllServers(servers, videoUUID) + await waitJobs(servers) + + for (const server of servers) { + const video = await server.videos.get({ id: videoUUID }) + + expect(video.files).to.have.lengthOf(0) + expect(video.streamingPlaylists).to.have.lengthOf(1) + + const files = video.streamingPlaylists[0].files + + await checkFiles(files) + } + }) + }) + + describe('With live transcoding', async function () { + + before(async function () { + await servers[0].config.enableLive({ transcoding: true }) + + videoUUID = await createLive(servers[0]) + }) + + it('Should import a video and have sent it to object storage', async function () { + this.timeout(240000) + + ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID }) + await waitUntilLivePublishedOnAllServers(servers, videoUUID) + + await stopFfmpeg(ffmpegCommand) + + await waitUntilLiveSavedOnAllServers(servers, videoUUID) + await waitJobs(servers) + + for (const server of servers) { + const video = await server.videos.get({ id: videoUUID }) + + expect(video.files).to.have.lengthOf(0) + expect(video.streamingPlaylists).to.have.lengthOf(1) + + const files = video.streamingPlaylists[0].files + expect(files).to.have.lengthOf(4) + + await checkFiles(files) + } + }) + }) + + after(async function () { + await killallServers(servers) + }) +}) diff --git a/server/tests/api/object-storage/video-imports.ts b/server/tests/api/object-storage/video-imports.ts new file mode 100644 index 00000000000..efc01f55041 --- /dev/null +++ b/server/tests/api/object-storage/video-imports.ts @@ -0,0 +1,112 @@ +/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ + +import 'mocha' +import * as chai from 'chai' +import { + areObjectStorageTestsDisabled, + createSingleServer, + expectStartWith, + FIXTURE_URLS, + killallServers, + makeRawRequest, + ObjectStorageCommand, + PeerTubeServer, + setAccessTokensToServers, + setDefaultVideoChannel, + waitJobs +} from '@shared/extra-utils' +import { HttpStatusCode, VideoPrivacy } from '@shared/models' + +const expect = chai.expect + +async function importVideo (server: PeerTubeServer) { + const attributes = { + name: 'import 2', + privacy: VideoPrivacy.PUBLIC, + channelId: server.store.channel.id, + targetUrl: FIXTURE_URLS.goodVideo720 + } + + const { video: { uuid } } = await server.imports.importVideo({ attributes }) + + return uuid +} + +describe('Object storage for video import', function () { + if (areObjectStorageTestsDisabled()) return + + let server: PeerTubeServer + + before(async function () { + this.timeout(120000) + + await ObjectStorageCommand.prepareDefaultBuckets() + + server = await createSingleServer(1, ObjectStorageCommand.getDefaultConfig()) + + await setAccessTokensToServers([ 
server ]) + await setDefaultVideoChannel([ server ]) + + await server.config.enableImports() + }) + + describe('Without transcoding', async function () { + + before(async function () { + await server.config.disableTranscoding() + }) + + it('Should import a video and have sent it to object storage', async function () { + this.timeout(120000) + + const uuid = await importVideo(server) + await waitJobs(server) + + const video = await server.videos.get({ id: uuid }) + + expect(video.files).to.have.lengthOf(1) + expect(video.streamingPlaylists).to.have.lengthOf(0) + + const fileUrl = video.files[0].fileUrl + expectStartWith(fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) + + await makeRawRequest(fileUrl, HttpStatusCode.OK_200) + }) + }) + + describe('With transcoding', async function () { + + before(async function () { + await server.config.enableTranscoding() + }) + + it('Should import a video and have sent it to object storage', async function () { + this.timeout(120000) + + const uuid = await importVideo(server) + await waitJobs(server) + + const video = await server.videos.get({ id: uuid }) + + expect(video.files).to.have.lengthOf(4) + expect(video.streamingPlaylists).to.have.lengthOf(1) + expect(video.streamingPlaylists[0].files).to.have.lengthOf(4) + + for (const file of video.files) { + expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) + + await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) + } + + for (const file of video.streamingPlaylists[0].files) { + expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl()) + + await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) + } + }) + }) + + after(async function () { + await killallServers([ server ]) + }) +}) diff --git a/server/tests/api/object-storage/videos.ts b/server/tests/api/object-storage/videos.ts index 847b7283d4d..3958bd3d7aa 100644 --- a/server/tests/api/object-storage/videos.ts +++ b/server/tests/api/object-storage/videos.ts @@ -2,13 +2,19 @@ import 'mocha' import * as chai from 'chai' +import { merge } from 'lodash' import { + areObjectStorageTestsDisabled, + checkTmpIsEmpty, cleanupTests, createMultipleServers, + createSingleServer, doubleFollow, expectStartWith, + killallServers, makeRawRequest, MockObjectStorage, + ObjectStorageCommand, PeerTubeServer, setAccessTokensToServers, waitJobs, @@ -21,17 +27,15 @@ const expect = chai.expect async function checkFiles (options: { video: VideoDetails - mockObjectStorage: MockObjectStorage + baseMockUrl?: string playlistBucket: string playlistPrefix?: string - baseMockUrl?: string webtorrentBucket: string webtorrentPrefix?: string }) { const { - mockObjectStorage, video, playlistBucket, webtorrentBucket, @@ -45,7 +49,7 @@ async function checkFiles (options: { for (const file of video.files) { const baseUrl = baseMockUrl ? `${baseMockUrl}/${webtorrentBucket}/` - : `http://${webtorrentBucket}.${mockObjectStorage.getEndpointHost()}/` + : `http://${webtorrentBucket}.${ObjectStorageCommand.getEndpointHost()}/` const prefix = webtorrentPrefix || '' const start = baseUrl + prefix @@ -66,7 +70,7 @@ async function checkFiles (options: { const baseUrl = baseMockUrl ? 
`${baseMockUrl}/${playlistBucket}/` - : `http://${playlistBucket}.${mockObjectStorage.getEndpointHost()}/` + : `http://${playlistBucket}.${ObjectStorageCommand.getEndpointHost()}/` const prefix = playlistPrefix || '' const start = baseUrl + prefix @@ -75,6 +79,7 @@ async function checkFiles (options: { expectStartWith(hls.segmentsSha256Url, start) await makeRawRequest(hls.playlistUrl, HttpStatusCode.OK_200) + const resSha = await makeRawRequest(hls.segmentsSha256Url, HttpStatusCode.OK_200) expect(JSON.stringify(resSha.body)).to.not.throw @@ -130,16 +135,16 @@ function runTestSuite (options: { const port = await mockObjectStorage.initialize() baseMockUrl = options.useMockBaseUrl ? `http://localhost:${port}` : undefined - await mockObjectStorage.createBucket(options.playlistBucket) - await mockObjectStorage.createBucket(options.webtorrentBucket) + await ObjectStorageCommand.createBucket(options.playlistBucket) + await ObjectStorageCommand.createBucket(options.webtorrentBucket) const config = { object_storage: { enabled: true, - endpoint: 'http://' + mockObjectStorage.getEndpointHost(), - region: mockObjectStorage.getRegion(), + endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(), + region: ObjectStorageCommand.getRegion(), - credentials: mockObjectStorage.getCrendentialsConfig(), + credentials: ObjectStorageCommand.getCredentialsConfig(), max_upload_part: options.maxUploadPart || '2MB', @@ -185,7 +190,7 @@ function runTestSuite (options: { for (const server of servers) { const video = await server.videos.get({ id: uuid }) - const files = await checkFiles({ ...options, mockObjectStorage, video, baseMockUrl }) + const files = await checkFiles({ ...options, video, baseMockUrl }) deletedUrls = deletedUrls.concat(files) } @@ -201,7 +206,7 @@ function runTestSuite (options: { for (const server of servers) { const video = await server.videos.get({ id: uuid }) - const files = await checkFiles({ ...options, mockObjectStorage, video, baseMockUrl }) + const files = await checkFiles({ ...options, video, baseMockUrl }) deletedUrls = deletedUrls.concat(files) } @@ -224,6 +229,12 @@ function runTestSuite (options: { } }) + it('Should have an empty tmp directory', async function () { + for (const server of servers) { + await checkTmpIsEmpty(server) + } + }) + after(async function () { mockObjectStorage.terminate() @@ -231,7 +242,114 @@ function runTestSuite (options: { }) } -describe('Object storage', function () { +describe('Object storage for videos', function () { + if (areObjectStorageTestsDisabled()) return + + describe('Test config', function () { + let server: PeerTubeServer + + const baseConfig = { + object_storage: { + enabled: true, + endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(), + region: ObjectStorageCommand.getRegion(), + + credentials: ObjectStorageCommand.getCredentialsConfig(), + + streaming_playlists: { + bucket_name: ObjectStorageCommand.DEFAULT_PLAYLIST_BUCKET + }, + + videos: { + bucket_name: ObjectStorageCommand.DEFAULT_WEBTORRENT_BUCKET + } + } + } + + const badCredentials = { + access_key_id: 'AKIAIOSFODNN7EXAMPLE', + secret_access_key: 'aJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' + } + + it('Should fail with same bucket names without prefix', function (done) { + const config = merge({}, baseConfig, { + object_storage: { + streaming_playlists: { + bucket_name: 'aaa' + }, + + videos: { + bucket_name: 'aaa' + } + } + }) + + createSingleServer(1, config) + .then(() => done(new Error('Did not throw'))) + .catch(() => done()) + }) + + it('Should fail with bad 
credentials', async function () { + this.timeout(60000) + + await ObjectStorageCommand.prepareDefaultBuckets() + + const config = merge({}, baseConfig, { + object_storage: { + credentials: badCredentials + } + }) + + server = await createSingleServer(1, config) + await setAccessTokensToServers([ server ]) + + const { uuid } = await server.videos.quickUpload({ name: 'video' }) + + await waitJobs([ server ], true) + const video = await server.videos.get({ id: uuid }) + + expectStartWith(video.files[0].fileUrl, server.url) + + await killallServers([ server ]) + }) + + it('Should succeed with credentials from env', async function () { + this.timeout(60000) + + await ObjectStorageCommand.prepareDefaultBuckets() + + const config = merge({}, baseConfig, { + object_storage: { + credentials: { + access_key_id: '', + secret_access_key: '' + } + } + }) + + const goodCredentials = ObjectStorageCommand.getCredentialsConfig() + + server = await createSingleServer(1, config, { + env: { + AWS_ACCESS_KEY_ID: goodCredentials.access_key_id, + AWS_SECRET_ACCESS_KEY: goodCredentials.secret_access_key + } + }) + + await setAccessTokensToServers([ server ]) + + const { uuid } = await server.videos.quickUpload({ name: 'video' }) + + await waitJobs([ server ], true) + const video = await server.videos.get({ id: uuid }) + + expectStartWith(video.files[0].fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) + }) + + after(async function () { + await killallServers([ server ]) + }) + }) describe('Test simple object storage', function () { runTestSuite({ diff --git a/server/tests/api/redundancy/redundancy.ts b/server/tests/api/redundancy/redundancy.ts index e1a12f5f8c5..3400b1d9adc 100644 --- a/server/tests/api/redundancy/redundancy.ts +++ b/server/tests/api/redundancy/redundancy.ts @@ -207,14 +207,14 @@ async function check1PlaylistRedundancies (videoUUID?: string) { expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID) } - const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls' - const baseUrlSegment = servers[0].url + '/static/redundancy/hls' + const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID + const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID const video = await servers[0].videos.get({ id: videoUUID }) const hlsPlaylist = video.streamingPlaylists[0] for (const resolution of [ 240, 360, 480, 720 ]) { - await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist }) + await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist }) } const { hlsFilenames } = await ensureSameFilenames(videoUUID) diff --git a/server/tests/api/videos/video-hls.ts b/server/tests/api/videos/video-hls.ts index 961f0e617fc..2c829f53289 100644 --- a/server/tests/api/videos/video-hls.ts +++ b/server/tests/api/videos/video-hls.ts @@ -5,6 +5,7 @@ import * as chai from 'chai' import { basename, join } from 'path' import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils' import { + areObjectStorageTestsDisabled, checkDirectoryIsEmpty, checkResolutionsInMasterPlaylist, checkSegmentHash, @@ -12,7 +13,9 @@ import { cleanupTests, createMultipleServers, doubleFollow, + expectStartWith, makeRawRequest, + ObjectStorageCommand, PeerTubeServer, setAccessTokensToServers, waitJobs, @@ -23,8 +26,19 @@ import { DEFAULT_AUDIO_RESOLUTION } from '../../../initializers/constants' const expect = chai.expect -async function 
checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, hlsOnly: boolean, resolutions = [ 240, 360, 480, 720 ]) { - for (const server of servers) { +async function checkHlsPlaylist (options: { + servers: PeerTubeServer[] + videoUUID: string + hlsOnly: boolean + + resolutions?: number[] + objectStorageBaseUrl: string +}) { + const { videoUUID, hlsOnly, objectStorageBaseUrl } = options + + const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ] + + for (const server of options.servers) { const videoDetails = await server.videos.get({ id: videoUUID }) const baseUrl = `http://${videoDetails.account.host}` @@ -48,9 +62,15 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h expect(file.torrentUrl).to.match( new RegExp(`http://${server.host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`) ) - expect(file.fileUrl).to.match( - new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`) - ) + + if (objectStorageBaseUrl) { + expectStartWith(file.fileUrl, objectStorageBaseUrl) + } else { + expect(file.fileUrl).to.match( + new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`) + ) + } + expect(file.resolution.label).to.equal(resolution + 'p') await makeRawRequest(file.torrentUrl, HttpStatusCode.OK_200) @@ -80,9 +100,11 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h const file = hlsFiles.find(f => f.resolution.id === resolution) const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8' - const subPlaylist = await server.streamingPlaylists.get({ - url: `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}` - }) + const url = objectStorageBaseUrl + ? `${objectStorageBaseUrl}hls_${videoUUID}/${playlistName}` + : `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}` + + const subPlaylist = await server.streamingPlaylists.get({ url }) expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`)) expect(subPlaylist).to.contain(basename(file.fileUrl)) @@ -90,14 +112,15 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h } { - const baseUrlAndPath = baseUrl + '/static/streaming-playlists/hls' + const baseUrlAndPath = objectStorageBaseUrl + ? 
objectStorageBaseUrl + 'hls_' + videoUUID + : baseUrl + '/static/streaming-playlists/hls/' + videoUUID for (const resolution of resolutions) { await checkSegmentHash({ server, baseUrlPlaylist: baseUrlAndPath, baseUrlSegment: baseUrlAndPath, - videoUUID, resolution, hlsPlaylist }) @@ -111,7 +134,7 @@ describe('Test HLS videos', function () { let videoUUID = '' let videoAudioUUID = '' - function runTestSuite (hlsOnly: boolean) { + function runTestSuite (hlsOnly: boolean, objectStorageBaseUrl?: string) { it('Should upload a video and transcode it to HLS', async function () { this.timeout(120000) @@ -121,7 +144,7 @@ describe('Test HLS videos', function () { await waitJobs(servers) - await checkHlsPlaylist(servers, videoUUID, hlsOnly) + await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl }) }) it('Should upload an audio file and transcode it to HLS', async function () { @@ -132,7 +155,13 @@ describe('Test HLS videos', function () { await waitJobs(servers) - await checkHlsPlaylist(servers, videoAudioUUID, hlsOnly, [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ]) + await checkHlsPlaylist({ + servers, + videoUUID: videoAudioUUID, + hlsOnly, + resolutions: [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ], + objectStorageBaseUrl + }) }) it('Should update the video', async function () { @@ -142,7 +171,7 @@ describe('Test HLS videos', function () { await waitJobs(servers) - await checkHlsPlaylist(servers, videoUUID, hlsOnly) + await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl }) }) it('Should delete videos', async function () { @@ -229,6 +258,22 @@ describe('Test HLS videos', function () { runTestSuite(true) }) + describe('With object storage enabled', function () { + if (areObjectStorageTestsDisabled()) return + + before(async function () { + this.timeout(120000) + + const configOverride = ObjectStorageCommand.getDefaultConfig() + await ObjectStorageCommand.prepareDefaultBuckets() + + await servers[0].kill() + await servers[0].run(configOverride) + }) + + runTestSuite(true, ObjectStorageCommand.getPlaylistBaseUrl()) + }) + after(async function () { await cleanupTests(servers) }) diff --git a/server/tests/cli/create-import-video-file-job.ts b/server/tests/cli/create-import-video-file-job.ts index bddcff5e705..9f1b57a2e04 100644 --- a/server/tests/cli/create-import-video-file-job.ts +++ b/server/tests/cli/create-import-video-file-job.ts @@ -2,8 +2,19 @@ import 'mocha' import * as chai from 'chai' -import { cleanupTests, createMultipleServers, doubleFollow, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/extra-utils' -import { VideoFile } from '@shared/models' +import { + areObjectStorageTestsDisabled, + cleanupTests, + createMultipleServers, + doubleFollow, + expectStartWith, + makeRawRequest, + ObjectStorageCommand, + PeerTubeServer, + setAccessTokensToServers, + waitJobs +} from '@shared/extra-utils' +import { HttpStatusCode, VideoDetails, VideoFile } from '@shared/models' const expect = chai.expect @@ -17,22 +28,35 @@ function assertVideoProperties (video: VideoFile, resolution: number, extname: s if (size) expect(video.size).to.equal(size) } -describe('Test create import video jobs', function () { - this.timeout(60000) +async function checkFiles (video: VideoDetails, objectStorage: boolean) { + for (const file of video.files) { + if (objectStorage) expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) - let servers: PeerTubeServer[] = [] + await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) + } +} + +function runTests 
(objectStorage: boolean) { let video1UUID: string let video2UUID: string + let servers: PeerTubeServer[] = [] + before(async function () { this.timeout(90000) + const config = objectStorage + ? ObjectStorageCommand.getDefaultConfig() + : {} + // Run server 2 to have transcoding enabled - servers = await createMultipleServers(2) + servers = await createMultipleServers(2, config) await setAccessTokensToServers(servers) await doubleFollow(servers[0], servers[1]) + if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets() + // Upload two videos for our needs { const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video1' } }) @@ -44,7 +68,6 @@ describe('Test create import video jobs', function () { video2UUID = uuid } - // Transcoding await waitJobs(servers) }) @@ -65,6 +88,8 @@ describe('Test create import video jobs', function () { const [ originalVideo, transcodedVideo ] = videoDetails.files assertVideoProperties(originalVideo, 720, 'webm', 218910) assertVideoProperties(transcodedVideo, 480, 'webm', 69217) + + await checkFiles(videoDetails, objectStorage) } }) @@ -87,6 +112,8 @@ describe('Test create import video jobs', function () { assertVideoProperties(transcodedVideo420, 480, 'mp4') assertVideoProperties(transcodedVideo320, 360, 'mp4') assertVideoProperties(transcodedVideo240, 240, 'mp4') + + await checkFiles(videoDetails, objectStorage) } }) @@ -107,10 +134,25 @@ describe('Test create import video jobs', function () { const [ video720, video480 ] = videoDetails.files assertVideoProperties(video720, 720, 'webm', 942961) assertVideoProperties(video480, 480, 'webm', 69217) + + await checkFiles(videoDetails, objectStorage) } }) after(async function () { await cleanupTests(servers) }) +} + +describe('Test create import video jobs', function () { + + describe('On filesystem', function () { + runTests(false) + }) + + describe('On object storage', function () { + if (areObjectStorageTestsDisabled()) return + + runTests(true) + }) }) diff --git a/server/tests/cli/create-transcoding-job.ts b/server/tests/cli/create-transcoding-job.ts index df787ccdcab..3313a492fdf 100644 --- a/server/tests/cli/create-transcoding-job.ts +++ b/server/tests/cli/create-transcoding-job.ts @@ -2,10 +2,15 @@ import 'mocha' import * as chai from 'chai' +import { HttpStatusCode, VideoFile } from '@shared/models' import { + areObjectStorageTestsDisabled, cleanupTests, createMultipleServers, doubleFollow, + expectStartWith, + makeRawRequest, + ObjectStorageCommand, PeerTubeServer, setAccessTokensToServers, waitJobs @@ -13,39 +18,39 @@ import { const expect = chai.expect -describe('Test create transcoding jobs', function () { - let servers: PeerTubeServer[] = [] - const videosUUID: string[] = [] +async function checkFilesInObjectStorage (files: VideoFile[], type: 'webtorrent' | 'playlist') { + for (const file of files) { + const shouldStartWith = type === 'webtorrent' + ? ObjectStorageCommand.getWebTorrentBaseUrl() + : ObjectStorageCommand.getPlaylistBaseUrl() - const config = { - transcoding: { - enabled: false, - resolutions: { - '240p': true, - '360p': true, - '480p': true, - '720p': true, - '1080p': true, - '1440p': true, - '2160p': true - }, - hls: { - enabled: false - } - } + expectStartWith(file.fileUrl, shouldStartWith) + + await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) } +} + +function runTests (objectStorage: boolean) { + let servers: PeerTubeServer[] = [] + const videosUUID: string[] = [] before(async function () { this.timeout(60000) + const config = objectStorage + ? 
ObjectStorageCommand.getDefaultConfig() + : {} + // Run server 2 to have transcoding enabled - servers = await createMultipleServers(2) + servers = await createMultipleServers(2, config) await setAccessTokensToServers(servers) - await servers[0].config.updateCustomSubConfig({ newConfig: config }) + await servers[0].config.disableTranscoding() await doubleFollow(servers[0], servers[1]) + if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets() + for (let i = 1; i <= 5; i++) { const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video' + i } }) videosUUID.push(uuid) @@ -81,27 +86,29 @@ describe('Test create transcoding jobs', function () { let infoHashes: { [id: number]: string } for (const video of data) { - const videoDetail = await server.videos.get({ id: video.uuid }) + const videoDetails = await server.videos.get({ id: video.uuid }) if (video.uuid === videosUUID[1]) { - expect(videoDetail.files).to.have.lengthOf(4) - expect(videoDetail.streamingPlaylists).to.have.lengthOf(0) + expect(videoDetails.files).to.have.lengthOf(4) + expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) + + if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') if (!infoHashes) { infoHashes = {} - for (const file of videoDetail.files) { + for (const file of videoDetails.files) { infoHashes[file.resolution.id.toString()] = file.magnetUri } } else { for (const resolution of Object.keys(infoHashes)) { - const file = videoDetail.files.find(f => f.resolution.id.toString() === resolution) + const file = videoDetails.files.find(f => f.resolution.id.toString() === resolution) expect(file.magnetUri).to.equal(infoHashes[resolution]) } } } else { - expect(videoDetail.files).to.have.lengthOf(1) - expect(videoDetail.streamingPlaylists).to.have.lengthOf(0) + expect(videoDetails.files).to.have.lengthOf(1) + expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) } } } @@ -125,6 +132,8 @@ describe('Test create transcoding jobs', function () { expect(videoDetails.files[1].resolution.id).to.equal(480) expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) + + if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') } }) @@ -139,11 +148,15 @@ describe('Test create transcoding jobs', function () { const videoDetails = await server.videos.get({ id: videosUUID[2] }) expect(videoDetails.files).to.have.lengthOf(1) + if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') + expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) const files = videoDetails.streamingPlaylists[0].files expect(files).to.have.lengthOf(1) expect(files[0].resolution.id).to.equal(480) + + if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') } }) @@ -160,6 +173,8 @@ describe('Test create transcoding jobs', function () { const files = videoDetails.streamingPlaylists[0].files expect(files).to.have.lengthOf(1) expect(files[0].resolution.id).to.equal(480) + + if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') } }) @@ -178,15 +193,15 @@ describe('Test create transcoding jobs', function () { const files = videoDetails.streamingPlaylists[0].files expect(files).to.have.lengthOf(4) + + if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') } }) it('Should optimize the video file and generate HLS videos if enabled in config', async function () { this.timeout(120000) - config.transcoding.hls.enabled = true - await servers[0].config.updateCustomSubConfig({ newConfig: config }) - + await 
servers[0].config.enableTranscoding() await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[4]}`) await waitJobs(servers) @@ -197,10 +212,28 @@ describe('Test create transcoding jobs', function () { expect(videoDetails.files).to.have.lengthOf(4) expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(4) + + if (objectStorage) { + await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') + await checkFilesInObjectStorage(videoDetails.streamingPlaylists[0].files, 'playlist') + } } }) after(async function () { await cleanupTests(servers) }) +} + +describe('Test create transcoding jobs', function () { + + describe('On filesystem', function () { + runTests(false) + }) + + describe('On object storage', function () { + if (areObjectStorageTestsDisabled()) return + + runTests(true) + }) }) diff --git a/server/tests/helpers/request.ts b/server/tests/helpers/request.ts index 7f7873df349..c9a2eb8310e 100644 --- a/server/tests/helpers/request.ts +++ b/server/tests/helpers/request.ts @@ -13,7 +13,7 @@ describe('Request helpers', function () { it('Should throw an error when the bytes limit is exceeded for request', async function () { try { - await doRequest(FIXTURE_URLS.video4K, { bodyKBLimit: 3 }) + await doRequest(FIXTURE_URLS.file4K, { bodyKBLimit: 3 }) } catch { return } @@ -23,7 +23,7 @@ describe('Request helpers', function () { it('Should throw an error when the bytes limit is exceeded for request and save file', async function () { try { - await doRequestAndSaveToFile(FIXTURE_URLS.video4K, destPath1, { bodyKBLimit: 3 }) + await doRequestAndSaveToFile(FIXTURE_URLS.file4K, destPath1, { bodyKBLimit: 3 }) } catch { await wait(500) @@ -35,8 +35,8 @@ describe('Request helpers', function () { }) it('Should succeed if the file is below the limit', async function () { - await doRequest(FIXTURE_URLS.video4K, { bodyKBLimit: 5 }) - await doRequestAndSaveToFile(FIXTURE_URLS.video4K, destPath2, { bodyKBLimit: 5 }) + await doRequest(FIXTURE_URLS.file4K, { bodyKBLimit: 5 }) + await doRequestAndSaveToFile(FIXTURE_URLS.file4K, destPath2, { bodyKBLimit: 5 }) expect(await pathExists(destPath2)).to.be.true }) diff --git a/shared/extra-utils/miscs/tests.ts b/shared/extra-utils/miscs/tests.ts index 3dfb2487edf..dd86041fef9 100644 --- a/shared/extra-utils/miscs/tests.ts +++ b/shared/extra-utils/miscs/tests.ts @@ -28,7 +28,9 @@ const FIXTURE_URLS = { badVideo: 'https://download.cpy.re/peertube/bad_video.mp4', goodVideo: 'https://download.cpy.re/peertube/good_video.mp4', - video4K: 'https://download.cpy.re/peertube/4k_file.txt' + goodVideo720: 'https://download.cpy.re/peertube/good_video_720.mp4', + + file4K: 'https://download.cpy.re/peertube/4k_file.txt' } function parallelTests () { @@ -42,7 +44,15 @@ function isGithubCI () { function areHttpImportTestsDisabled () { const disabled = process.env.DISABLE_HTTP_IMPORT_TESTS === 'true' - if (disabled) console.log('Import tests are disabled') + if (disabled) console.log('DISABLE_HTTP_IMPORT_TESTS env set to "true" so import tests are disabled') + + return disabled +} + +function areObjectStorageTestsDisabled () { + const disabled = process.env.ENABLE_OBJECT_STORAGE_TESTS !== 'true' + + if (disabled) console.log('ENABLE_OBJECT_STORAGE_TESTS env is not set to "true" so object storage tests are disabled') return disabled } @@ -89,6 +99,7 @@ export { buildAbsoluteFixturePath, getFileSize, buildRequestStub, + areObjectStorageTestsDisabled, wait, root } diff --git 
a/shared/extra-utils/mock-servers/mock-object-storage.ts b/shared/extra-utils/mock-servers/mock-object-storage.ts index a6a52d87863..19ea7c87c03 100644 --- a/shared/extra-utils/mock-servers/mock-object-storage.ts +++ b/shared/extra-utils/mock-servers/mock-object-storage.ts @@ -3,8 +3,7 @@ import got, { RequestError } from 'got' import { Server } from 'http' import { pipeline } from 'stream' import { randomInt } from '@shared/core-utils' -import { HttpStatusCode } from '@shared/models' -import { makePostBodyRequest } from '../requests' +import { ObjectStorageCommand } from '../server' export class MockObjectStorage { private server: Server @@ -14,7 +13,7 @@ export class MockObjectStorage { const app = express() app.get('/:bucketName/:path(*)', (req: express.Request, res: express.Response, next: express.NextFunction) => { - const url = `http://${req.params.bucketName}.${this.getEndpointHost()}/${req.params.path}` + const url = `http://${req.params.bucketName}.${ObjectStorageCommand.getEndpointHost()}/${req.params.path}` if (process.env.DEBUG) { console.log('Receiving request on mocked server %s.', req.url) @@ -37,41 +36,6 @@ export class MockObjectStorage { }) } - getCrendentialsConfig () { - return { - access_key_id: 'AKIAIOSFODNN7EXAMPLE', - secret_access_key: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' - } - } - - getEndpointHost () { - return 'localhost:9444' - } - - getRegion () { - return 'us-east-1' - } - - async createBucket (name: string) { - await makePostBodyRequest({ - url: this.getEndpointHost(), - path: '/ui/' + name + '?delete', - expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 - }) - - await makePostBodyRequest({ - url: this.getEndpointHost(), - path: '/ui/' + name + '?create', - expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 - }) - - await makePostBodyRequest({ - url: this.getEndpointHost(), - path: '/ui/' + name + '?make-public', - expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 - }) - } - terminate () { if (this.server) this.server.close() } diff --git a/shared/extra-utils/requests/requests.ts b/shared/extra-utils/requests/requests.ts index 70f7902222d..e3ecd1af246 100644 --- a/shared/extra-utils/requests/requests.ts +++ b/shared/extra-utils/requests/requests.ts @@ -121,6 +121,20 @@ function unwrapText (test: request.Test): Promise { return test.then(res => res.text) } +function unwrapBodyOrDecodeToJSON (test: request.Test): Promise { + return test.then(res => { + if (res.body instanceof Buffer) { + return JSON.parse(new TextDecoder().decode(res.body)) + } + + return res.body + }) +} + +function unwrapTextOrDecode (test: request.Test): Promise { + return test.then(res => res.text || new TextDecoder().decode(res.body)) +} + // --------------------------------------------------------------------------- export { @@ -134,6 +148,8 @@ export { makeRawRequest, makeActivityPubGetRequest, unwrapBody, + unwrapTextOrDecode, + unwrapBodyOrDecodeToJSON, unwrapText } diff --git a/shared/extra-utils/server/config-command.ts b/shared/extra-utils/server/config-command.ts index 11148aa4665..51d04fa6389 100644 --- a/shared/extra-utils/server/config-command.ts +++ b/shared/extra-utils/server/config-command.ts @@ -18,6 +18,70 @@ export class ConfigCommand extends AbstractCommand { } } + enableImports () { + return this.updateExistingSubConfig({ + newConfig: { + import: { + videos: { + http: { + enabled: true + }, + + torrent: { + enabled: true + } + } + } + } + }) + } + + enableLive (options: { + allowReplay?: boolean + transcoding?: boolean + } = {}) { + return 
this.updateExistingSubConfig({ + newConfig: { + live: { + enabled: true, + allowReplay: options.allowReplay ?? true, + transcoding: { + enabled: options.transcoding ?? true, + resolutions: ConfigCommand.getCustomConfigResolutions(true) + } + } + } + }) + } + + disableTranscoding () { + return this.updateExistingSubConfig({ + newConfig: { + transcoding: { + enabled: false + } + } + }) + } + + enableTranscoding (webtorrent = true, hls = true) { + return this.updateExistingSubConfig({ + newConfig: { + transcoding: { + enabled: true, + resolutions: ConfigCommand.getCustomConfigResolutions(true), + + webtorrent: { + enabled: webtorrent + }, + hls: { + enabled: hls + } + } + } + }) + } + getConfig (options: OverrideCommandOptions = {}) { const path = '/api/v1/config' @@ -81,6 +145,14 @@ export class ConfigCommand extends AbstractCommand { }) } + async updateExistingSubConfig (options: OverrideCommandOptions & { + newConfig: DeepPartial + }) { + const existing = await this.getCustomConfig(options) + + return this.updateCustomConfig({ ...options, newCustomConfig: merge({}, existing, options.newConfig) }) + } + updateCustomSubConfig (options: OverrideCommandOptions & { newConfig: DeepPartial }) { diff --git a/shared/extra-utils/server/index.ts b/shared/extra-utils/server/index.ts index 9055dfc573a..92ff7a0f9b3 100644 --- a/shared/extra-utils/server/index.ts +++ b/shared/extra-utils/server/index.ts @@ -6,6 +6,7 @@ export * from './follows-command' export * from './follows' export * from './jobs' export * from './jobs-command' +export * from './object-storage-command' export * from './plugins-command' export * from './plugins' export * from './redundancy-command' diff --git a/shared/extra-utils/server/jobs-command.ts b/shared/extra-utils/server/jobs-command.ts index c4eb12dc252..91771c17600 100644 --- a/shared/extra-utils/server/jobs-command.ts +++ b/shared/extra-utils/server/jobs-command.ts @@ -5,6 +5,16 @@ import { AbstractCommand, OverrideCommandOptions } from '../shared' export class JobsCommand extends AbstractCommand { + async getLatest (options: OverrideCommandOptions & { + jobType: JobType + }) { + const { data } = await this.getJobsList({ ...options, start: 0, count: 1, sort: '-createdAt' }) + + if (data.length === 0) return undefined + + return data[0] + } + getJobsList (options: OverrideCommandOptions & { state?: JobState jobType?: JobType diff --git a/shared/extra-utils/server/jobs.ts b/shared/extra-utils/server/jobs.ts index 64a0353eba5..27104bfdfbf 100644 --- a/shared/extra-utils/server/jobs.ts +++ b/shared/extra-utils/server/jobs.ts @@ -3,7 +3,7 @@ import { JobState } from '../../models' import { wait } from '../miscs' import { PeerTubeServer } from './server' -async function waitJobs (serversArg: PeerTubeServer[] | PeerTubeServer) { +async function waitJobs (serversArg: PeerTubeServer[] | PeerTubeServer, skipDelayed = false) { const pendingJobWait = process.env.NODE_PENDING_JOB_WAIT ? 
parseInt(process.env.NODE_PENDING_JOB_WAIT, 10) : 250 @@ -13,7 +13,9 @@ async function waitJobs (serversArg: PeerTubeServer[] | PeerTubeServer) { if (Array.isArray(serversArg) === false) servers = [ serversArg as PeerTubeServer ] else servers = serversArg as PeerTubeServer[] - const states: JobState[] = [ 'waiting', 'active', 'delayed' ] + const states: JobState[] = [ 'waiting', 'active' ] + if (!skipDelayed) states.push('delayed') + const repeatableJobs = [ 'videos-views', 'activitypub-cleaner' ] let pendingRequests: boolean diff --git a/shared/extra-utils/server/object-storage-command.ts b/shared/extra-utils/server/object-storage-command.ts new file mode 100644 index 00000000000..b4de8f4cbbb --- /dev/null +++ b/shared/extra-utils/server/object-storage-command.ts @@ -0,0 +1,77 @@ + +import { HttpStatusCode } from '@shared/models' +import { makePostBodyRequest } from '../requests' +import { AbstractCommand } from '../shared' + +export class ObjectStorageCommand extends AbstractCommand { + static readonly DEFAULT_PLAYLIST_BUCKET = 'streaming-playlists' + static readonly DEFAULT_WEBTORRENT_BUCKET = 'videos' + + static getDefaultConfig () { + return { + object_storage: { + enabled: true, + endpoint: 'http://' + this.getEndpointHost(), + region: this.getRegion(), + + credentials: this.getCredentialsConfig(), + + streaming_playlists: { + bucket_name: this.DEFAULT_PLAYLIST_BUCKET + }, + + videos: { + bucket_name: this.DEFAULT_WEBTORRENT_BUCKET + } + } + } + } + + static getCredentialsConfig () { + return { + access_key_id: 'AKIAIOSFODNN7EXAMPLE', + secret_access_key: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' + } + } + + static getEndpointHost () { + return 'localhost:9444' + } + + static getRegion () { + return 'us-east-1' + } + + static getWebTorrentBaseUrl () { + return `http://${this.DEFAULT_WEBTORRENT_BUCKET}.${this.getEndpointHost()}/` + } + + static getPlaylistBaseUrl () { + return `http://${this.DEFAULT_PLAYLIST_BUCKET}.${this.getEndpointHost()}/` + } + + static async prepareDefaultBuckets () { + await this.createBucket(this.DEFAULT_PLAYLIST_BUCKET) + await this.createBucket(this.DEFAULT_WEBTORRENT_BUCKET) + } + + static async createBucket (name: string) { + await makePostBodyRequest({ + url: this.getEndpointHost(), + path: '/ui/' + name + '?delete', + expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 + }) + + await makePostBodyRequest({ + url: this.getEndpointHost(), + path: '/ui/' + name + '?create', + expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 + }) + + await makePostBodyRequest({ + url: this.getEndpointHost(), + path: '/ui/' + name + '?make-public', + expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307 + }) + } +} diff --git a/shared/extra-utils/server/server.ts b/shared/extra-utils/server/server.ts index 3c335b8e448..bc5e1cd5fa1 100644 --- a/shared/extra-utils/server/server.ts +++ b/shared/extra-utils/server/server.ts @@ -38,11 +38,13 @@ import { PluginsCommand } from './plugins-command' import { RedundancyCommand } from './redundancy-command' import { ServersCommand } from './servers-command' import { StatsCommand } from './stats-command' +import { ObjectStorageCommand } from './object-storage-command' export type RunServerOptions = { hideLogs?: boolean nodeArgs?: string[] peertubeArgs?: string[] + env?: { [ id: string ]: string } } export class PeerTubeServer { @@ -121,6 +123,7 @@ export class PeerTubeServer { servers?: ServersCommand login?: LoginCommand users?: UsersCommand + objectStorage?: ObjectStorageCommand videos?: VideosCommand constructor (options: { 
serverNumber: number } | { url: string }) { @@ -202,6 +205,10 @@ export class PeerTubeServer { env['NODE_APP_INSTANCE'] = this.internalServerNumber.toString() env['NODE_CONFIG'] = JSON.stringify(configOverride) + if (options.env) { + Object.assign(env, options.env) + } + const forkOptions = { silent: true, env, @@ -209,10 +216,17 @@ export class PeerTubeServer { execArgv: options.nodeArgs || [] } - return new Promise(res => { + return new Promise((res, rej) => { const self = this this.app = fork(join(root(), 'dist', 'server.js'), options.peertubeArgs || [], forkOptions) + + const onExit = function () { + return rej(new Error('Process exited')) + } + + this.app.on('exit', onExit) + this.app.stdout.on('data', function onStdout (data) { let dontContinue = false @@ -241,6 +255,7 @@ export class PeerTubeServer { console.log(data.toString()) } else { self.app.stdout.removeListener('data', onStdout) + self.app.removeListener('exit', onExit) } process.on('exit', () => { @@ -365,5 +380,6 @@ export class PeerTubeServer { this.login = new LoginCommand(this) this.users = new UsersCommand(this) this.videos = new VideosCommand(this) + this.objectStorage = new ObjectStorageCommand(this) } } diff --git a/shared/extra-utils/videos/live.ts b/shared/extra-utils/videos/live.ts index 9a6df07a87f..29f99ed6d0b 100644 --- a/shared/extra-utils/videos/live.ts +++ b/shared/extra-utils/videos/live.ts @@ -89,6 +89,12 @@ async function waitUntilLivePublishedOnAllServers (servers: PeerTubeServer[], vi } } +async function waitUntilLiveSavedOnAllServers (servers: PeerTubeServer[], videoId: string) { + for (const server of servers) { + await server.live.waitUntilSaved({ videoId }) + } +} + async function checkLiveCleanupAfterSave (server: PeerTubeServer, videoUUID: string, resolutions: number[] = []) { const basePath = server.servers.buildDirectory('streaming-playlists') const hlsPath = join(basePath, 'hls', videoUUID) @@ -126,5 +132,6 @@ export { testFfmpegStreamError, stopFfmpeg, waitUntilLivePublishedOnAllServers, + waitUntilLiveSavedOnAllServers, checkLiveCleanupAfterSave } diff --git a/shared/extra-utils/videos/streaming-playlists-command.ts b/shared/extra-utils/videos/streaming-playlists-command.ts index 9662685da1a..5d40d35cb9e 100644 --- a/shared/extra-utils/videos/streaming-playlists-command.ts +++ b/shared/extra-utils/videos/streaming-playlists-command.ts @@ -1,5 +1,5 @@ import { HttpStatusCode } from '@shared/models' -import { unwrapBody, unwrapText } from '../requests' +import { unwrapBody, unwrapTextOrDecode, unwrapBodyOrDecodeToJSON } from '../requests' import { AbstractCommand, OverrideCommandOptions } from '../shared' export class StreamingPlaylistsCommand extends AbstractCommand { @@ -7,7 +7,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand { get (options: OverrideCommandOptions & { url: string }) { - return unwrapText(this.getRawRequest({ + return unwrapTextOrDecode(this.getRawRequest({ ...options, url: options.url, @@ -33,7 +33,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand { getSegmentSha256 (options: OverrideCommandOptions & { url: string }) { - return unwrapBody<{ [ id: string ]: string }>(this.getRawRequest({ + return unwrapBodyOrDecodeToJSON<{ [ id: string ]: string }>(this.getRawRequest({ ...options, url: options.url, diff --git a/shared/extra-utils/videos/streaming-playlists.ts b/shared/extra-utils/videos/streaming-playlists.ts index a224b8f5f67..6671e3fa6bc 100644 --- a/shared/extra-utils/videos/streaming-playlists.ts +++ 
b/shared/extra-utils/videos/streaming-playlists.ts @@ -9,17 +9,16 @@ async function checkSegmentHash (options: { server: PeerTubeServer baseUrlPlaylist: string baseUrlSegment: string - videoUUID: string resolution: number hlsPlaylist: VideoStreamingPlaylist }) { - const { server, baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist } = options + const { server, baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist } = options const command = server.streamingPlaylists const file = hlsPlaylist.files.find(f => f.resolution.id === resolution) const videoName = basename(file.fileUrl) - const playlist = await command.get({ url: `${baseUrlPlaylist}/${videoUUID}/${removeFragmentedMP4Ext(videoName)}.m3u8` }) + const playlist = await command.get({ url: `${baseUrlPlaylist}/${removeFragmentedMP4Ext(videoName)}.m3u8` }) const matches = /#EXT-X-BYTERANGE:(\d+)@(\d+)/.exec(playlist) @@ -28,7 +27,7 @@ async function checkSegmentHash (options: { const range = `${offset}-${offset + length - 1}` const segmentBody = await command.getSegment({ - url: `${baseUrlSegment}/${videoUUID}/${videoName}`, + url: `${baseUrlSegment}/${videoName}`, expectedStatus: HttpStatusCode.PARTIAL_CONTENT_206, range: `bytes=${range}` }) diff --git a/shared/models/server/job.model.ts b/shared/models/server/job.model.ts index 973cacef3b8..ff96283a4f5 100644 --- a/shared/models/server/job.model.ts +++ b/shared/models/server/job.model.ts @@ -140,4 +140,5 @@ export interface ActorKeysPayload { export interface MoveObjectStoragePayload { videoUUID: string + isNewVideo: boolean } diff --git a/shared/models/videos/video-storage.enum.ts b/shared/models/videos/video-storage.enum.ts index d9f52ff931b..7c6690db2a6 100644 --- a/shared/models/videos/video-storage.enum.ts +++ b/shared/models/videos/video-storage.enum.ts @@ -1,4 +1,4 @@ export const enum VideoStorage { - LOCAL, + FILE_SYSTEM, OBJECT_STORAGE, } diff --git a/support/docker/production/config/custom-environment-variables.yaml b/support/docker/production/config/custom-environment-variables.yaml index ce0f89d7b8a..1b474582a94 100644 --- a/support/docker/production/config/custom-environment-variables.yaml +++ b/support/docker/production/config/custom-environment-variables.yaml @@ -45,6 +45,29 @@ smtp: __format: "json" from_address: "PEERTUBE_SMTP_FROM" +object_storage: + enabled: + __name: "PEERTUBE_OBJECT_STORAGE_ENABLED" + __format: "json" + + endpoint: "PEERTUBE_OBJECT_STORAGE_ENDPOINT" + + region: "PEERTUBE_OBJECT_STORAGE_REGION" + + max_upload_part: + __name: "PEERTUBE_OBJECT_STORAGE_MAX_UPLOAD_PART" + __format: "json" + + streaming_playlists: + bucket_name: "PEERTUBE_OBJECT_STORAGE_STREAMING_PLAYLISTS_BUCKET_NAME" + prefix: "PEERTUBE_OBJECT_STORAGE_STREAMING_PLAYLISTS_PREFIX" + base_url: "PEERTUBE_OBJECT_STORAGE_STREAMING_PLAYLISTS_BASE_URL" + + videos: + bucket_name: "PEERTUBE_OBJECT_STORAGE_VIDEOS_BUCKET_NAME" + prefix: "PEERTUBE_OBJECT_STORAGE_VIDEOS_PREFIX" + base_url: "PEERTUBE_OBJECT_STORAGE_VIDEOS_BASE_URL" + log: level: "PEERTUBE_LOG_LEVEL" log_ping_requests: From ffa0c8c53ed80bba19329336d6fb2a94aefa0fbd Mon Sep 17 00:00:00 2001 From: Chocobozzz Date: Mon, 16 Aug 2021 17:48:37 +0200 Subject: [PATCH 22/23] Support move to external storage job in client --- client/src/app/+admin/system/jobs/jobs.component.ts | 3 ++- .../shared/information/video-alert.component.html | 4 ++++ .../+video-watch/shared/information/video-alert.component.ts | 4 ++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/client/src/app/+admin/system/jobs/jobs.component.ts 
b/client/src/app/+admin/system/jobs/jobs.component.ts index 29ba95c5c2a..4b02e1bc121 100644 --- a/client/src/app/+admin/system/jobs/jobs.component.ts +++ b/client/src/app/+admin/system/jobs/jobs.component.ts @@ -36,7 +36,8 @@ export class JobsComponent extends RestTable implements OnInit { 'video-live-ending', 'video-redundancy', 'video-transcoding', - 'videos-views' + 'videos-views', + 'move-to-object-storage' ] jobs: Job[] = [] diff --git a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html index 3480d3656de..e2dd44bf7cb 100644 --- a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html +++ b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html @@ -6,6 +6,10 @@ The video is being transcoded, it may not work properly. +
+  The video is being moved to an external server, it may not work properly.
+
   This video will be published on {{ video.scheduledUpdate.updateAt | date: 'full' }}.
diff --git a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts index 8a46ba0d59e..0072492ac5b 100644 --- a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts +++ b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts @@ -18,6 +18,10 @@ export class VideoAlertComponent { return this.video && this.video.state.id === VideoState.TO_IMPORT } + isVideoToMoveToExternalStorage () { + return this.video && this.video.state.id === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE + } + hasVideoScheduledPublication () { return this.video && this.video.scheduledUpdate !== undefined } From 6b7c3bc5cf39f69f36843f378c312aa916d57563 Mon Sep 17 00:00:00 2001 From: Chocobozzz Date: Mon, 16 Aug 2021 18:07:53 +0200 Subject: [PATCH 23/23] Fix live object storage tests --- shared/extra-utils/videos/live-command.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shared/extra-utils/videos/live-command.ts b/shared/extra-utils/videos/live-command.ts index 81ae458e01e..74f5d30899c 100644 --- a/shared/extra-utils/videos/live-command.ts +++ b/shared/extra-utils/videos/live-command.ts @@ -126,7 +126,7 @@ export class LiveCommand extends AbstractCommand { video = await this.server.videos.getWithToken({ token: options.token, id: options.videoId }) await wait(500) - } while (video.isLive === true && video.state.id !== VideoState.PUBLISHED) + } while (video.isLive === true || video.state.id !== VideoState.PUBLISHED) } async countPlaylists (options: OverrideCommandOptions & {
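To tie the test-utility patches above together, here is a minimal, hedged sketch of how the new ObjectStorageCommand helpers are meant to be used from a test. It assumes the existing extra-utils helpers createSingleServer, setAccessTokensToServers, waitJobs and videos.quickUpload with roughly the signatures they have elsewhere in the test suite; the function name and option shapes below are illustrative and not taken from these patches.

// Illustrative sketch only -- not part of the patch series above.
// Assumes createSingleServer, setAccessTokensToServers and waitJobs exist in extra-utils.
import {
  createSingleServer,
  ObjectStorageCommand,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/extra-utils'

async function objectStorageSmokeTest () {
  // Create the 'videos' and 'streaming-playlists' buckets on the local S3-compatible endpoint
  await ObjectStorageCommand.prepareDefaultBuckets()

  // Start a server whose object_storage section is overridden to point at those buckets
  const server: PeerTubeServer = await createSingleServer(1, ObjectStorageCommand.getDefaultConfig())
  await setAccessTokensToServers([ server ])

  // Upload a video, then wait for transcoding and the move-to-object-storage job to finish
  const { uuid } = await server.videos.quickUpload({ name: 'video in object storage' })
  await waitJobs([ server ])

  // File URLs should now point at the bucket vhost, e.g. http://videos.localhost:9444/...
  const video = await server.videos.get({ id: uuid })
  console.log(video.files.map(f => f.fileUrl))

  await server.kill()
}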