feat: allow to customise max file size and mime types at bucket level #277

Merged · 5 commits · Feb 21, 2023
2 changes: 2 additions & 0 deletions src/http/routes/bucket/createBucket.ts
@@ -9,6 +9,8 @@ const createBucketBodySchema = {
name: { type: 'string', examples: ['avatars'] },
id: { type: 'string', examples: ['avatars'] },
public: { type: 'boolean', examples: [false] },
max_file_size_kb: { type: 'integer', minimum: 1 },
allowed_mime_types: { type: 'array', items: { type: 'string' } },
},
required: ['name'],
} as const
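
With these two schema fields, a create-bucket request can pin both limits up front. A minimal sketch, assuming the route is mounted at POST /bucket and that a service-role key is available; neither detail appears in this diff:

// Sketch (not part of the diff): create a bucket capped at 1 MB that accepts
// only JPEG and PNG. Base URL, route path, and SERVICE_KEY are assumptions.
const serviceKey = process.env.SERVICE_KEY ?? ''

async function createAvatarsBucket(): Promise<void> {
  const res = await fetch('http://localhost:5000/bucket', {
    method: 'POST',
    headers: {
      authorization: `Bearer ${serviceKey}`,
      'content-type': 'application/json',
    },
    body: JSON.stringify({
      name: 'avatars',
      public: false,
      max_file_size_kb: 1000, // snake_case, per createBucketBodySchema above
      allowed_mime_types: ['image/jpeg', 'image/png'],
    }),
  })
  if (!res.ok) throw new Error(`create bucket failed: ${res.status}`)
}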
10 changes: 8 additions & 2 deletions src/http/routes/bucket/updateBucket.ts
@@ -7,6 +7,8 @@ const updateBucketBodySchema = {
type: 'object',
properties: {
public: { type: 'boolean', examples: [false] },
maxFileSizeKb: { type: 'integer', minimum: 1, examples: [1000] },
allowedMimeTypes: { type: 'array', items: { type: 'string' } },
},
} as const
const updateBucketParamsSchema = {
@@ -44,9 +46,13 @@ export default async function routes(fastify: FastifyInstance) {
async (request, response) => {
const { bucketId } = request.params

const { public: isPublic } = request.body
const { public: isPublic, maxFileSizeKb, allowedMimeTypes } = request.body

await request.storage.updateBucket(bucketId, isPublic)
await request.storage.updateBucket(bucketId, {
public: isPublic,
max_file_size_kb: maxFileSizeKb,
allowed_mime_types: allowedMimeTypes,
})

return response.status(200).send(createResponse('Successfully updated'))
}
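
The update route accepts the same limits, though note the casing difference: updateBucketBodySchema takes camelCase maxFileSizeKb and allowedMimeTypes, while the create schema above uses snake_case. A matching request sketch, again assuming the route shape (PUT /bucket/:id) and the service key:

// Sketch (not part of the diff): tighten limits on an existing bucket.
const serviceKey = process.env.SERVICE_KEY ?? ''

async function restrictAvatarsBucket(): Promise<void> {
  const res = await fetch('http://localhost:5000/bucket/avatars', {
    method: 'PUT',
    headers: {
      authorization: `Bearer ${serviceKey}`,
      'content-type': 'application/json',
    },
    body: JSON.stringify({
      public: false,
      maxFileSizeKb: 500, // camelCase here, unlike the create schema
      allowedMimeTypes: ['image/jpeg'],
    }),
  })
  if (!res.ok) throw new Error(`update bucket failed: ${res.status}`)
}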
2 changes: 2 additions & 0 deletions src/monitoring/logger.ts
@@ -16,6 +16,8 @@ export const logger = pino({
res(reply) {
return {
statusCode: reply.statusCode,
contentLength: reply.headers['content-length'],
contentType: reply.headers['content-type'],
}
},
req(request) {
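
These two fields surface the response payload size and type in request logs. Given the serializer above, a logged res object would look roughly like this (values invented; header values are strings):

// Illustrative shape produced by the res() serializer above, not a captured log.
const resLogEntry = {
  statusCode: 200,
  contentLength: '512034', // reply.headers['content-length']
  contentType: 'image/jpeg', // reply.headers['content-type']
}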
18 changes: 15 additions & 3 deletions src/storage/database.ts
@@ -53,7 +53,12 @@ export class Database {
return new Database(this.options.superAdmin, this.options)
}

async createBucket(data: Pick<Bucket, 'id' | 'name' | 'public' | 'owner'>) {
async createBucket(
data: Pick<
Bucket,
'id' | 'name' | 'public' | 'owner' | 'max_file_size_kb' | 'allowed_mime_types'
>
) {
const {
data: results,
error,
@@ -67,6 +72,8 @@
name: data.name,
owner: data.owner,
public: data.public,
allowed_mime_types: data.allowed_mime_types,
max_file_size_kb: data.max_file_size_kb,
},
],
{
@@ -157,11 +164,16 @@
return data as Bucket[]
}

async updateBucket(bucketId: string, isPublic?: boolean) {
async updateBucket(
bucketId: string,
fields: Pick<Bucket, 'public' | 'max_file_size_kb' | 'allowed_mime_types'>
) {
const { error, status, data } = await this.postgrest
.from<Bucket>('buckets')
.update({
public: isPublic,
public: fields.public,
max_file_size_kb: fields.max_file_size_kb,
allowed_mime_types: fields.allowed_mime_types,
})
.match({ id: bucketId })
.single()
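
Call sites now pass a fields object instead of a bare boolean. A sketch of the new call shape, given a Database instance db; keys left undefined are dropped when the update body is JSON-serialized, so only the columns actually provided should change:

// Sketch (not part of the diff): bucket id and values are invented.
await db.updateBucket('avatars', {
  public: true,
  max_file_size_kb: 512,
  allowed_mime_types: ['image/png', 'image/jpeg'],
})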
12 changes: 12 additions & 0 deletions src/storage/object.ts
@@ -40,6 +40,10 @@ export class ObjectStorage {
* @param options
*/
async uploadNewObject(request: FastifyRequest, options: UploadObjectOptions) {
const bucket = await this.db
.asSuperUser()
.findBucketById(this.bucketId, 'id, max_file_size_kb, allowed_mime_types')

await this.createObject(
{
name: options.objectName,
@@ -55,6 +59,8 @@
const uploader = new Uploader(this.backend)
const objectMetadata = await uploader.upload(request, {
key: s3Key,
maxFileSizeKb: bucket.max_file_size_kb,
allowedMimeTypes: bucket.allowed_mime_types,
})

await this.db
@@ -86,6 +92,10 @@
request: FastifyRequest,
options: Omit<UploadObjectOptions, 'isUpsert'>
) {
const bucket = await this.db
.asSuperUser()
.findBucketById(this.bucketId, 'id, max_file_size_kb, allowed_mime_types')

await this.updateObjectOwner(options.objectName, options.owner)

const path = `${this.bucketId}/${options.objectName}`
@@ -95,6 +105,8 @@
const uploader = new Uploader(this.backend)
const objectMetadata = await uploader.upload(request, {
key: s3Key,
maxFileSizeKb: bucket.max_file_size_kb,
allowedMimeTypes: bucket.allowed_mime_types,
})

await this.db
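
Both upload paths read the limit columns with asSuperUser() before streaming, presumably so the lookup succeeds regardless of the caller's row-level-security policies. Buckets created before this change carry NULL in both columns; a self-contained sketch of what that means downstream:

// Sketch (not part of the diff): how NULL limit columns behave downstream.
interface BucketLimits {
  max_file_size_kb?: number | null
  allowed_mime_types?: string[] | null
}

function describeLimits(b: BucketLimits): string {
  const size = b.max_file_size_kb != null ? `${b.max_file_size_kb} KB` : 'tenant default'
  const mimes = b.allowed_mime_types?.length ? b.allowed_mime_types.join(', ') : 'any'
  return `max size: ${size}; mime types: ${mimes}`
}

console.log(describeLimits({})) // "max size: tenant default; mime types: any"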
2 changes: 2 additions & 0 deletions src/storage/schemas/bucket.ts
@@ -8,6 +8,8 @@ export const bucketSchema = {
name: { type: 'string' },
owner: { type: 'string' },
public: { type: 'boolean' },
max_file_size_kb: { type: 'integer' },
allowed_mime_types: { type: 'array', items: { type: 'string' } },
created_at: { type: 'string' },
updated_at: { type: 'string' },
},
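
For reference, a bucket record under the extended schema would look like this (all values invented for illustration):

// Illustrative record matching bucketSchema above; not real data.
const exampleBucket = {
  id: 'avatars',
  name: 'avatars',
  owner: '317eadce-631a-4429-a0bb-f19a7a517b4a',
  public: true,
  max_file_size_kb: 1000,
  allowed_mime_types: ['image/jpeg', 'image/png'],
  created_at: '2023-02-21T00:00:00.000Z',
  updated_at: '2023-02-21T00:00:00.000Z',
}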
33 changes: 28 additions & 5 deletions src/storage/storage.ts
@@ -2,7 +2,7 @@ import { StorageBackendAdapter } from './backend'
import { Database, FindBucketFilters } from './database'
import { StorageBackendError } from './errors'
import { ImageRenderer, AssetRenderer, HeadRenderer } from './renderer'
import { mustBeValidBucketName } from './limits'
import { getFileSizeLimit, mustBeValidBucketName } from './limits'
import { Uploader } from './uploader'
import { getConfig } from '../config'
import { ObjectStorage } from './object'
@@ -81,19 +81,29 @@
* Creates a bucket
* @param data
*/
createBucket(data: Parameters<Database['createBucket']>[0]) {
async createBucket(data: Parameters<Database['createBucket']>[0]) {
mustBeValidBucketName(data.name, 'Bucket name invalid')

if (data.max_file_size_kb) {
await this.validateMaxSizeLimit(data.max_file_size_kb)
}

return this.db.createBucket(data)
}

/**
* Updates a bucket
* @param id
* @param isPublic
* @param data
*/
updateBucket(id: string, isPublic: boolean | undefined) {
async updateBucket(id: string, data: Parameters<Database['updateBucket']>[1]) {
mustBeValidBucketName(id, 'Bucket name invalid')
return this.db.updateBucket(id, isPublic)

if (data.max_file_size_kb) {
await this.validateMaxSizeLimit(data.max_file_size_kb)
}

return this.db.updateBucket(id, data)
}

/**
@@ -170,4 +180,17 @@
}
}
}

protected async validateMaxSizeLimit(maxFileLimit: number) {
const globalMaxLimit = await getFileSizeLimit(this.db.tenantId)
const globalMaxLimitKb = globalMaxLimit / 1000 // getFileSizeLimit returns bytes (see uploader.ts)

if (maxFileLimit > globalMaxLimitKb) {
throw new StorageBackendError(
'max_file_size',
422,
'the requested max_file_size exceeds the global limit'
)
}
}
}
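
validateMaxSizeLimit compares the requested per-bucket cap (in KB) against the tenant-wide limit, which getFileSizeLimit returns in bytes (the uploader below compares it against maxFileSizeKb * 1000), hence the byte-to-KB conversion. A worked example, assuming a 50 MB tenant limit:

// Worked example (not part of the diff), assuming a 52,428,800-byte tenant limit.
const globalMaxLimit = 52_428_800 // bytes
const globalMaxLimitKb = globalMaxLimit / 1000 // 52,428.8 KB

console.log(1_000 > globalMaxLimitKb) // false: a 1,000 KB bucket cap is accepted
console.log(100_000 > globalMaxLimitKb) // true: a 100,000 KB cap throws the 422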
23 changes: 20 additions & 3 deletions src/storage/uploader.ts
@@ -6,6 +6,8 @@ import { StorageBackendError } from './errors'

interface UploaderOptions {
key: string
maxFileSizeKb?: number
allowedMimeTypes?: string[]
}

const { globalS3Bucket } = getConfig()
@@ -24,7 +26,13 @@
* @param options
*/
async upload(request: FastifyRequest, options: UploaderOptions) {
const file = await this.incomingFileInfo(request)
const file = await this.incomingFileInfo(request, options)

if (options.allowedMimeTypes && options.allowedMimeTypes.length > 0) {
if (!options.allowedMimeTypes.includes(file.mimeType)) {
throw new StorageBackendError('invalid_mime_type', 422, 'mime type not supported')
}
}

const objectMetadata = await this.backend.uploadObject(
globalS3Bucket,
@@ -45,9 +53,18 @@
return objectMetadata
}

protected async incomingFileInfo(request: FastifyRequest) {
protected async incomingFileInfo(
request: FastifyRequest,
options?: Pick<UploaderOptions, 'maxFileSizeKb'>
) {
const contentType = request.headers['content-type']
const fileSizeLimit = await getFileSizeLimit(request.tenantId)
let fileSizeLimit = await getFileSizeLimit(request.tenantId)

if (options?.maxFileSizeKb) {
if (options.maxFileSizeKb * 1000 <= fileSizeLimit) {
fileSizeLimit = options.maxFileSizeKb * 1000
}
}

let body: NodeJS.ReadableStream
let mimeType: string
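
Taken together: a bucket cap can only lower the effective size limit below the tenant-wide one, never raise it, and the mime-type check rejects a request before any bytes reach the backend. A condensed, self-contained restatement of the decision logic (local names, not the module's exported API):

// Sketch (not part of the diff): the two checks above in isolation.
function effectiveLimitBytes(globalBytes: number, maxFileSizeKb?: number): number {
  // The bucket cap applies only when it is at or below the tenant-wide cap.
  if (maxFileSizeKb && maxFileSizeKb * 1000 <= globalBytes) {
    return maxFileSizeKb * 1000
  }
  return globalBytes
}

function assertMimeAllowed(mimeType: string, allowedMimeTypes?: string[]): void {
  if (allowedMimeTypes?.length && !allowedMimeTypes.includes(mimeType)) {
    throw new Error('mime type not supported') // surfaced as a 422 by the route
  }
}

// e.g. a 10 KB bucket cap under a 50 MB tenant limit: 10,000 bytes effective
console.log(effectiveLimitBytes(52_428_800, 10)) // 10000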
2 changes: 1 addition & 1 deletion src/test/bucket.test.ts
@@ -97,7 +97,7 @@ describe('testing GET all buckets', () => {
})
expect(response.statusCode).toBe(200)
const responseJSON = JSON.parse(response.body)
expect(responseJSON.length).toBe(7)
expect(responseJSON.length).toBe(10)
})

test('checking RLS: anon user is not able to get all buckets', async () => {
19 changes: 11 additions & 8 deletions src/test/db/04-dummy-data.sql.sample
@@ -5,14 +5,17 @@ INSERT INTO "auth"."users" ("instance_id", "id", "aud", "role", "email", "encryp
('00000000-0000-0000-0000-000000000000', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', 'authenticated', 'authenticated', 'inian+admin@supabase.io', '', NULL, '2021-02-17 04:40:42.901743+00', '3EG99GjT_e3NC4eGEBXOjw', '2021-02-17 04:40:42.901743+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:40:42.890632+00', '2021-02-17 04:40:42.890637+00');

-- insert buckets
INSERT INTO "storage"."buckets" ("id", "name", "owner", "created_at", "updated_at", "public") VALUES
('bucket2', 'bucket2', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00', false),
('bucket3', 'bucket3', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00', false),
('bucket4', 'bucket4', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-25 09:23:01.58385+00', '2021-02-25 09:23:01.58385+00', false),
('bucket5', 'bucket5', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', false),
('bucket6', 'bucket6', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', false),
('public-bucket', 'public-bucket', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true),
('public-bucket-2', 'public-bucket-2', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true);
INSERT INTO "storage"."buckets" ("id", "name", "owner", "created_at", "updated_at", "public", "max_file_size_kb", "allowed_mime_types") VALUES
('bucket2', 'bucket2', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00', false, NULL, NULL),
('bucket3', 'bucket3', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00', false, NULL, NULL),
('bucket4', 'bucket4', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-25 09:23:01.58385+00', '2021-02-25 09:23:01.58385+00', false, NULL, NULL),
('bucket5', 'bucket5', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', false, NULL, NULL),
('bucket6', 'bucket6', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', false, NULL, NULL),
('public-bucket', 'public-bucket', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true, NULL, NULL),
('public-bucket-2', 'public-bucket-2', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true, NULL, NULL),
('public-limit-max-size', 'public-limit-max-size', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true, 10, NULL),
('public-limit-max-size-2', 'public-limit-max-size-2', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true, 3746, NULL),
('public-limit-mime-types', 'public-limit-mime-types', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00', true, NULL, '{"image/jpeg"}');


-- insert objects
85 changes: 84 additions & 1 deletion src/test/object.test.ts
@@ -275,6 +275,90 @@ describe('testing POST object via multipart upload', () => {
expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
})

test('return 200 when uploading an object within bucket max size limit', async () => {
const form = new FormData()
form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
const headers = Object.assign({}, form.getHeaders(), {
authorization: `Bearer ${serviceKey}`,
'x-upsert': 'true',
})

const response = await app().inject({
method: 'POST',
url: '/object/public-limit-max-size-2/sadcat-upload25.png',
headers,
payload: form,
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
})

test('return 400 when uploading an object that exceeds the bucket-level max size', async () => {
const form = new FormData()
form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
const headers = Object.assign({}, form.getHeaders(), {
authorization: `Bearer ${serviceKey}`,
'x-upsert': 'true',
})

const response = await app().inject({
method: 'POST',
url: '/object/public-limit-max-size/sadcat-upload23.png',
headers,
payload: form,
})
expect(response.statusCode).toBe(400)
expect(await response.json()).toEqual({
error: 'Payload too large',
message: 'The object exceeded the maximum allowed size',
statusCode: '413',
})
expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
})

test('successfully uploading an object with an allowed mime-type', async () => {
const form = new FormData()
form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
const headers = Object.assign({}, form.getHeaders(), {
authorization: `Bearer ${serviceKey}`,
'x-upsert': 'true',
'content-type': 'image/jpeg',
})

const response = await app().inject({
method: 'POST',
url: '/object/public-limit-mime-types/sadcat-upload23.png',
headers,
payload: form,
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
})

test('return 422 when uploading an object with a disallowed mime-type', async () => {
const form = new FormData()
form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
const headers = Object.assign({}, form.getHeaders(), {
authorization: `Bearer ${serviceKey}`,
'x-upsert': 'true',
'content-type': 'image/png',
})

const response = await app().inject({
method: 'POST',
url: '/object/public-limit-mime-types/sadcat-upload23.png',
headers,
payload: form,
})
expect(response.statusCode).toBe(400)
expect(await response.json()).toEqual({
error: 'invalid_mime_type',
message: 'mime type not supported',
statusCode: '422',
})
expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
})

test('return 200 when upserting duplicate object', async () => {
const form = new FormData()
form.append('file', fs.createReadStream(`./src/test/assets/sadcat.jpg`))
@@ -290,7 +374,6 @@
payload: form,
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
})

test('return 400 when exceeding file size limit', async () => {