feat: custom-metadata, exists, info methods
fenos committed Jul 16, 2024
1 parent 03ad444 commit 9b2da42
Showing 6 changed files with 234 additions and 10 deletions.
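
A minimal usage sketch of the surface this commit adds, assuming an initialized `storage` client and an existing bucket (the bucket name, path, and `file` below are hypothetical):

// Upload with custom metadata (sent as a multipart field, or as a base64 `x-metadata` header for plain bodies)
await storage.from('avatars').upload('folder/avatar.png', file, {
  metadata: { owner: 'user-123' },
})

// Read object details; snake_case fields come back camelized (cacheControl, contentType, ...)
const { data: info, error } = await storage.from('avatars').info('folder/avatar.png')

// Check existence via a HEAD request; 400/404 responses resolve to { data: false }
const { data: exists } = await storage.from('avatars').exists('folder/avatar.png')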
2 changes: 1 addition & 1 deletion infra/storage/Dockerfile
@@ -1,3 +1,3 @@
FROM supabase/storage-api:v1.2.1
FROM supabase/storage-api:v1.7.1

RUN apk add curl --no-cache
35 changes: 30 additions & 5 deletions src/lib/fetch.ts
@@ -11,15 +11,19 @@ export interface FetchOptions {
noResolveJson?: boolean
}

export type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE'
export type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'HEAD'

const _getErrorMessage = (err: any): string =>
err.msg || err.message || err.error_description || err.error || JSON.stringify(err)

const handleError = async (error: unknown, reject: (reason?: any) => void) => {
const handleError = async (
error: unknown,
reject: (reason?: any) => void,
options?: FetchOptions
) => {
const Res = await resolveResponse()

if (error instanceof Res) {
if (error instanceof Res && !options?.noResolveJson) {
error
.json()
.then((err) => {
@@ -46,7 +50,10 @@ const _getRequestParams = (
}

params.headers = { 'Content-Type': 'application/json', ...options?.headers }
params.body = JSON.stringify(body)

if (body) {
params.body = JSON.stringify(body)
}
return { ...params, ...parameters }
}

@@ -66,7 +73,7 @@ async function _handleRequest(
return result.json()
})
.then((data) => resolve(data))
.catch((error) => handleError(error, reject))
.catch((error) => handleError(error, reject, options))
})
}

@@ -99,6 +106,24 @@ export async function put(
return _handleRequest(fetcher, 'PUT', url, options, parameters, body)
}

export async function head(
fetcher: Fetch,
url: string,
options?: FetchOptions,
parameters?: FetchParameters
): Promise<any> {
return _handleRequest(
fetcher,
'HEAD',
url,
{
...options,
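// HEAD responses have no body, so skip JSON parsing of the result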
noResolveJson: true,
},
parameters
)
}

export async function remove(
fetcher: Fetch,
url: string,
14 changes: 14 additions & 0 deletions src/lib/helpers.ts
@@ -21,3 +21,17 @@ export const resolveResponse = async (): Promise<typeof Response> => {

return Response
}

export const recursiveToCamel = (item: unknown): unknown => {
if (Array.isArray(item)) {
return item.map((el: unknown) => recursiveToCamel(el))
} else if (typeof item === 'function' || item !== Object(item)) {
return item
}
return Object.fromEntries(
Object.entries(item as Record<string, unknown>).map(([key, value]: [string, unknown]) => [
key.replace(/([-_][a-z])/gi, (c) => c.toUpperCase().replace(/[-_]/g, '')),
recursiveToCamel(value),
])
)
}
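
For illustration, a rough sketch of how recursiveToCamel is expected to behave on a nested snake_case payload (the sample object is made up):

const raw = {
  bucket_id: 'avatars',
  cache_control: 'max-age=3600',
  metadata: { custom_field: 'value' },
}

// Primitives and functions pass through untouched; arrays and plain objects are
// walked recursively and every key is converted to camelCase.
recursiveToCamel(raw)
// => { bucketId: 'avatars', cacheControl: 'max-age=3600', metadata: { customField: 'value' } }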
34 changes: 34 additions & 0 deletions src/lib/types.ts
@@ -21,6 +21,22 @@ export interface FileObject {
buckets: Bucket
}

export interface FileObjectV2 {
id: string
version: string
name: string
bucket_id: string
updated_at: string
created_at: string
last_accessed_at: string
size?: number
cache_control?: string
content_type?: string
etag?: string
last_modified?: string
metadata?: Record<string, any>
}

export interface SortBy {
column?: string
order?: string
@@ -43,6 +59,16 @@ export interface FileOptions {
* The duplex option is a string parameter that enables or disables duplex streaming, allowing for both reading and writing data in the same stream. It can be passed as an option to the fetch() method.
*/
duplex?: string

/**
* The metadata option is an object that allows you to store additional information about the file. This information can be used to filter and search for files. The metadata object can contain any key-value pairs you want to store.
*/
metadata?: Record<string, any>

/**
* Optionally add extra headers
*/
headers?: Record<string, string>
}

export interface DestinationOptions {
@@ -113,3 +139,11 @@ export interface TransformOptions {
*/
format?: 'origin'
}

type CamelCase<S extends string> = S extends `${infer P1}_${infer P2}${infer P3}`
? `${Lowercase<P1>}${Uppercase<P2>}${CamelCase<P3>}`
: S

export type Camelize<T> = {
[K in keyof T as CamelCase<Extract<K, string>>]: T[K]
}
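
A small sketch of what the Camelize mapped type yields when applied to FileObjectV2 (the alias and sample values are illustrative):

// Keys are renamed at the type level: bucket_id -> bucketId, created_at -> createdAt, and so on.
type FileInfo = Camelize<FileObjectV2>

const sample: Pick<FileInfo, 'bucketId' | 'createdAt'> = {
  bucketId: 'avatars',
  createdAt: '2024-07-16T00:00:00Z',
}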
108 changes: 104 additions & 4 deletions src/packages/StorageFileApi.ts
@@ -1,13 +1,15 @@
import { isStorageError, StorageError } from '../lib/errors'
import { Fetch, get, post, remove } from '../lib/fetch'
import { resolveFetch } from '../lib/helpers'
import { isStorageError, StorageError, StorageUnknownError } from '../lib/errors'
import { Fetch, get, head, post, remove } from '../lib/fetch'
import { recursiveToCamel, resolveFetch } from '../lib/helpers'
import {
FileObject,
FileOptions,
SearchOptions,
FetchParameters,
TransformOptions,
DestinationOptions,
FileObjectV2,
Camelize,
} from '../lib/types'

const DEFAULT_SEARCH_OPTIONS = {
@@ -80,22 +82,39 @@ export default class StorageFileApi {
try {
let body
const options = { ...DEFAULT_FILE_OPTIONS, ...fileOptions }
const headers: Record<string, string> = {
let headers: Record<string, string> = {
...this.headers,
...(method === 'POST' && { 'x-upsert': String(options.upsert as boolean) }),
}

const userMetadata = options.metadata

if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {
body = new FormData()
body.append('cacheControl', options.cacheControl as string)
body.append('', fileBody)

if (userMetadata) {
body.append('userMetadata', this.encodeMetadata(userMetadata))
}
} else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {
body = fileBody
body.append('cacheControl', options.cacheControl as string)
if (userMetadata) {
body.append('userMetadata', this.encodeMetadata(userMetadata))
}
} else {
body = fileBody
headers['cache-control'] = `max-age=${options.cacheControl}`
headers['content-type'] = options.contentType as string

if (userMetadata) {
headers['x-metadata'] = this.toBase64(this.encodeMetadata(userMetadata))
}
}

if (fileOptions?.headers) {
headers = { ...headers, ...fileOptions.headers }
}

const cleanPath = this._removeEmptyFolders(path)
@@ -525,6 +544,76 @@ export default class StorageFileApi {
}
}

/**
* Retrieves the details of an existing file.
* @param path
*/
async info(
path: string
): Promise<
| {
data: Camelize<FileObjectV2>
error: null
}
| {
data: null
error: StorageError
}
> {
const _path = this._getFinalPath(path)

try {
const data = await get(this.fetch, `${this.url}/object/info/${_path}`, {
headers: this.headers,
})

return { data: recursiveToCamel(data) as Camelize<FileObjectV2>, error: null }
} catch (error) {
if (isStorageError(error)) {
return { data: null, error }
}

throw error
}
}

/**
* Checks the existence of a file.
* @param path
*/
async exists(
path: string
): Promise<
| {
data: boolean
error: null
}
| {
data: boolean
error: StorageError
}
> {
const _path = this._getFinalPath(path)

try {
await head(this.fetch, `${this.url}/object/${_path}`, {
headers: this.headers,
})

return { data: true, error: null }
} catch (error) {
if (isStorageError(error) && error instanceof StorageUnknownError) {
const originalError = (error.originalError as unknown) as { status: number }

if ([400, 404].includes(originalError?.status)) {
return { data: false, error }
}
}

throw error
}
}

/**
* A simple convenience function to get the URL for an asset in a public bucket. If you do not want to use this function, you can construct the public URL by concatenating the bucket URL with the path to the asset.
* This function does not verify if the bucket is public. If a public URL is created for a bucket which is not public, you will not be able to download the asset.
@@ -700,6 +789,17 @@
}
}

protected encodeMetadata(metadata: Record<string, any>) {
return JSON.stringify(metadata)
}

toBase64(data: string) {
if (typeof Buffer !== 'undefined') {
return Buffer.from(data).toString('base64')
}
return btoa(data)
}

private _getFinalPath(path: string) {
return `${this.bucketId}/${path}`
}
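
As a rough sketch of what encodeMetadata and toBase64 produce for the x-metadata header used on non-FormData uploads (the metadata value is hypothetical):

const metadata = { custom: 'metadata' }
const encoded = JSON.stringify(metadata) // encodeMetadata
const headerValue =
  typeof Buffer !== 'undefined'
    ? Buffer.from(encoded).toString('base64') // Node
    : btoa(encoded) // browsers
// headers['x-metadata'] === 'eyJjdXN0b20iOiJtZXRhZGF0YSJ9'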
51 changes: 51 additions & 0 deletions test/storageFileApi.test.ts
@@ -163,6 +163,25 @@ describe('Object API', () => {
expect(updateRes.data?.path).toEqual(uploadPath)
})

test('can upload with custom metadata', async () => {
const res = await storage.from(bucketName).upload(uploadPath, file, {
metadata: {
custom: 'metadata',
second: 'second',
third: 'third',
},
})
expect(res.error).toBeNull()

const infoRes = await storage.from(bucketName).info(uploadPath)
expect(infoRes.error).toBeNull()
expect(infoRes.data?.metadata).toEqual({
custom: 'metadata',
second: 'second',
third: 'third',
})
})

test('can upload a file within the file size limit', async () => {
const bucketName = 'with-limit' + Date.now()
await storage.createBucket(bucketName, {
@@ -368,6 +387,38 @@
}),
])
})

test('get object info', async () => {
await storage.from(bucketName).upload(uploadPath, file)
const res = await storage.from(bucketName).info(uploadPath)

expect(res.error).toBeNull()
expect(res.data).toEqual(
expect.objectContaining({
id: expect.any(String),
name: expect.any(String),
createdAt: expect.any(String),
cacheControl: expect.any(String),
size: expect.any(Number),
etag: expect.any(String),
lastModified: expect.any(String),
contentType: expect.any(String),
metadata: {},
version: expect.any(String),
})
)
})

test('check if object exists', async () => {
await storage.from(bucketName).upload(uploadPath, file)
const res = await storage.from(bucketName).exists(uploadPath)

expect(res.error).toBeNull()
expect(res.data).toEqual(true)

const resNotExists = await storage.from(bucketName).exists('do-not-exists')
expect(resNotExists.data).toEqual(false)
})
})

describe('Transformations', () => {
