From 5b74ace55750871e36aaf97b6c47172e4219b67f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Tue, 24 Oct 2023 16:52:33 +0100 Subject: [PATCH 1/7] feat: add `list()` method --- README.md | 33 +++- src/backend/list.ts | 11 ++ src/client.ts | 79 ++++++--- src/list.test.ts | 388 ++++++++++++++++++++++++++++++++++++++++++++ src/main.test.ts | 22 +-- src/store.ts | 139 ++++++++++++++-- src/util.ts | 8 + 7 files changed, 627 insertions(+), 53 deletions(-) create mode 100644 src/backend/list.ts create mode 100644 src/list.test.ts diff --git a/README.md b/README.md index fbd837f..a79187c 100644 --- a/README.md +++ b/README.md @@ -186,7 +186,7 @@ second parameter, with one of the following values: If an object with the given key is not found, `null` is returned. ```javascript -const entry = await blobs.get('some-key', { type: 'json' }) +const entry = await store.get('some-key', { type: 'json' }) console.log(entry) ``` @@ -209,7 +209,7 @@ second parameter, with one of the following values: If an object with the given key is not found, `null` is returned. ```javascript -const blob = await blobs.getWithMetadata('some-key', { type: 'json' }) +const blob = await store.getWithMetadata('some-key', { type: 'json' }) console.log(blob.data, blob.etag, blob.metadata) ``` @@ -223,7 +223,7 @@ const cachedETag = getFromMockCache('my-key') // Get entry from the blob store only if its ETag is different from the one you // have locally, which means the entry has changed since you last obtained it -const { data, etag, fresh } = await blobs.getWithMetadata('some-key', { etag: cachedETag }) +const { data, etag, fresh } = await store.getWithMetadata('some-key', { etag: cachedETag }) if (fresh) { // `data` is `null` because the local blob is fresh @@ -240,7 +240,7 @@ Creates an object with the given key and value. If an entry with the given key already exists, its value is overwritten. 
```javascript -await blobs.set('some-key', 'This is a string value') +await store.set('some-key', 'This is a string value') ``` ### `setJSON(key: string, value: any, { metadata?: object }): Promise` @@ -250,7 +250,7 @@ Convenience method for creating a JSON-serialized object with the given key. If an entry with the given key already exists, its value is overwritten. ```javascript -await blobs.setJSON('some-key', { +await store.setJSON('some-key', { foo: 'bar', }) ``` @@ -260,7 +260,28 @@ await blobs.setJSON('some-key', { Deletes an object with the given key, if one exists. ```javascript -await blobs.delete('my-key') +await store.delete('my-key') +``` + +### `list(options?: { cursor?: string, paginate?: boolean, prefix?: string }): Promise<{ blobs: BlobResult[] }>` + +Returns a list of blobs in a given store. + +```javascript +const { blobs } = await store.list() + +// [ { etag: 'etag1', key: 'some-key' }, { etag: 'etag2', key: 'another-key' } ] +console.log(blobs) +``` + +To filter down the entries that should be returned, an optional `prefix` parameter can be supplied. When used, only the +entries whose key starts with that prefix are returned. 
+ +```javascript +const { blobs } = await store.list({ prefix: 'some' }) + +// [ { etag: 'etag1', key: 'some-key' } ] +console.log(blobs) ``` ## Contributing diff --git a/src/backend/list.ts b/src/backend/list.ts new file mode 100644 index 0000000..06d4713 --- /dev/null +++ b/src/backend/list.ts @@ -0,0 +1,11 @@ +export interface ListResponse { + blobs?: ListResponseBlob[] + next_cursor?: string +} + +export interface ListResponseBlob { + etag: string + last_modified: string + size: number + key: string +} diff --git a/src/client.ts b/src/client.ts index 9631d48..5c8a4a8 100644 --- a/src/client.ts +++ b/src/client.ts @@ -6,9 +6,10 @@ import { BlobInput, Fetcher, HTTPMethod } from './types.ts' interface MakeStoreRequestOptions { body?: BlobInput | null headers?: Record - key: string + key?: string metadata?: Metadata method: HTTPMethod + parameters?: Record storeName: string } @@ -20,6 +21,14 @@ export interface ClientOptions { token: string } +interface GetFinalRequestOptions { + key: string | undefined + metadata?: Metadata + method: string + parameters?: Record + storeName: string +} + export class Client { private apiURL?: string private edgeURL?: string @@ -41,7 +50,7 @@ export class Client { } } - private async getFinalRequest(storeName: string, key: string, method: string, metadata?: Metadata) { + private async getFinalRequest({ key, metadata, method, parameters = {}, storeName }: GetFinalRequestOptions) { const encodedMetadata = encodeMetadata(metadata) if (this.edgeURL) { @@ -53,38 +62,72 @@ export class Client { headers[METADATA_HEADER_EXTERNAL] = encodedMetadata } + const path = key ? `/${this.siteID}/${storeName}/${key}` : `/${this.siteID}/${storeName}` + const url = new URL(path, this.edgeURL) + + for (const key in parameters) { + url.searchParams.set(key, parameters[key]) + } + return { headers, - url: `${this.edgeURL}/${this.siteID}/${storeName}/${key}`, + url: url.toString(), } } - const apiURL = `${this.apiURL ?? 
'https://api.netlify.com'}/api/v1/sites/${ - this.siteID - }/blobs/${key}?context=${storeName}` const apiHeaders: Record = { authorization: `Bearer ${this.token}` } + const url = new URL(`/api/v1/sites/${this.siteID}/blobs`, this.apiURL ?? 'https://api.netlify.com') + + for (const key in parameters) { + url.searchParams.set(key, parameters[key]) + } + + url.searchParams.set('context', storeName) + + if (key === undefined) { + return { + headers: apiHeaders, + url: url.toString(), + } + } + + url.pathname += `/${key}` if (encodedMetadata) { apiHeaders[METADATA_HEADER_EXTERNAL] = encodedMetadata } - const res = await this.fetch(apiURL, { headers: apiHeaders, method }) + const res = await this.fetch(url.toString(), { headers: apiHeaders, method }) if (res.status !== 200) { - throw new Error(`${method} operation has failed: API returned a ${res.status} response`) + throw new Error(`Netlify Blobs has generated an internal error: ${res.status} response`) } - const { url } = await res.json() + const { url: signedURL } = await res.json() const userHeaders = encodedMetadata ? 
{ [METADATA_HEADER_INTERNAL]: encodedMetadata } : undefined return { headers: userHeaders, - url, + url: signedURL, } } - async makeRequest({ body, headers: extraHeaders, key, metadata, method, storeName }: MakeStoreRequestOptions) { - const { headers: baseHeaders = {}, url } = await this.getFinalRequest(storeName, key, method, metadata) + async makeRequest({ + body, + headers: extraHeaders, + key, + metadata, + method, + parameters, + storeName, + }: MakeStoreRequestOptions) { + const { headers: baseHeaders = {}, url } = await this.getFinalRequest({ + key, + metadata, + method, + parameters, + storeName, + }) const headers: Record = { ...baseHeaders, ...extraHeaders, @@ -106,17 +149,7 @@ export class Client { options.duplex = 'half' } - const res = await fetchAndRetry(this.fetch, url, options) - - if (res.status === 404 && method === HTTPMethod.GET) { - return null - } - - if (res.status !== 200 && res.status !== 304) { - throw new Error(`${method} operation has failed: store returned a ${res.status} response`) - } - - return res + return fetchAndRetry(this.fetch, url, options) } } diff --git a/src/list.test.ts b/src/list.test.ts new file mode 100644 index 0000000..0c0725a --- /dev/null +++ b/src/list.test.ts @@ -0,0 +1,388 @@ +import { env, version as nodeVersion } from 'node:process' + +import semver from 'semver' +import { describe, test, expect, beforeAll, afterEach } from 'vitest' + +import { MockFetch } from '../test/mock_fetch.js' + +import { getStore } from './main.js' + +beforeAll(async () => { + if (semver.lt(nodeVersion, '18.0.0')) { + const nodeFetch = await import('node-fetch') + + // @ts-expect-error Expected type mismatch between native implementation and node-fetch + globalThis.fetch = nodeFetch.default + // @ts-expect-error Expected type mismatch between native implementation and node-fetch + globalThis.Request = nodeFetch.Request + // @ts-expect-error Expected type mismatch between native implementation and node-fetch + globalThis.Response = 
nodeFetch.Response + // @ts-expect-error Expected type mismatch between native implementation and node-fetch + globalThis.Headers = nodeFetch.Headers + } +}) + +afterEach(() => { + delete env.NETLIFY_BLOBS_CONTEXT +}) + +const siteID = '9a003659-aaaa-0000-aaaa-63d3720d8621' +const storeName = 'mystore' +const apiToken = 'some token' +const edgeToken = 'some other token' +const edgeURL = 'https://cloudfront.url' + +describe('list', () => { + describe('With API credentials', () => { + test('Lists entries and handles pagination', async () => { + const mockStore = new MockFetch() + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag1', + key: 'key1', + size: 1, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag2', + key: 'key2', + size: 2, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + next_cursor: 'cursor_1', + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?context=${storeName}`, + }) + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag3', + key: 'key3', + size: 3, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag4', + key: 'key4', + size: 4, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + next_cursor: 'cursor_2', + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?cursor=cursor_1&context=${storeName}`, + }) + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag5', + key: 'key5', + size: 5, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?cursor=cursor_2&context=${storeName}`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + name: 'mystore', + token: apiToken, + siteID, + }) + + const { blobs } = await store.list() + + expect(blobs).toEqual([ + { etag: 
'etag1', key: 'key1' }, + { etag: 'etag2', key: 'key2' }, + { etag: 'etag3', key: 'key3' }, + { etag: 'etag4', key: 'key4' }, + { etag: 'etag5', key: 'key5' }, + ]) + expect(mockStore.fulfilled).toBeTruthy() + }) + + test('Accepts a `prefix` property', async () => { + const mockStore = new MockFetch().get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag1', + key: 'group/key1', + size: 1, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag2', + key: 'group/key2', + size: 2, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?prefix=group%2F&context=${storeName}`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + name: 'mystore', + token: apiToken, + siteID, + }) + + const { blobs } = await store.list({ + prefix: 'group/', + }) + + expect(blobs).toEqual([ + { etag: 'etag1', key: 'group/key1' }, + { etag: 'etag2', key: 'group/key2' }, + ]) + expect(mockStore.fulfilled).toBeTruthy() + }) + + test('Paginates manually with `cursor` if `paginate: false`', async () => { + const mockStore = new MockFetch().get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag3', + key: 'key3', + size: 3, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag4', + key: 'key4', + size: 4, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + next_cursor: 'cursor_2', + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?cursor=cursor_1&context=${storeName}`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + name: 'mystore', + token: apiToken, + siteID, + }) + + const { blobs } = await store.list({ + cursor: 'cursor_1', + paginate: false, + }) + + expect(blobs).toEqual([ + { etag: 'etag3', key: 'key3' }, + { etag: 'etag4', key: 'key4' }, + ]) + expect(mockStore.fulfilled).toBeTruthy() 
+ }) + }) + + describe('With edge credentials', () => { + test('Lists entries and handles pagination', async () => { + const mockStore = new MockFetch() + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag1', + key: 'key1', + size: 1, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag2', + key: 'key2', + size: 2, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + next_cursor: 'cursor_1', + }), + ), + url: `${edgeURL}/${siteID}/${storeName}`, + }) + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag3', + key: 'key3', + size: 3, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag4', + key: 'key4', + size: 4, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + next_cursor: 'cursor_2', + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?cursor=cursor_1`, + }) + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag5', + key: 'key5', + size: 5, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?cursor=cursor_2`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + edgeURL, + name: storeName, + token: edgeToken, + siteID, + }) + + const { blobs } = await store.list() + + expect(blobs).toEqual([ + { etag: 'etag1', key: 'key1' }, + { etag: 'etag2', key: 'key2' }, + { etag: 'etag3', key: 'key3' }, + { etag: 'etag4', key: 'key4' }, + { etag: 'etag5', key: 'key5' }, + ]) + expect(mockStore.fulfilled).toBeTruthy() + }) + + test('Accepts a `prefix` property', async () => { + const mockStore = new MockFetch().get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag1', + key: 'group/key1', + size: 1, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 
'etag2', + key: 'group/key2', + size: 2, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?prefix=group%2F`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + edgeURL, + name: storeName, + token: edgeToken, + siteID, + }) + + const { blobs } = await store.list({ + prefix: 'group/', + }) + + expect(blobs).toEqual([ + { etag: 'etag1', key: 'group/key1' }, + { etag: 'etag2', key: 'group/key2' }, + ]) + expect(mockStore.fulfilled).toBeTruthy() + }) + + test('Paginates manually with `cursor` if `paginate: false`', async () => { + const mockStore = new MockFetch().get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag3', + key: 'key3', + size: 3, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag4', + key: 'key4', + size: 4, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + next_cursor: 'cursor_2', + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?cursor=cursor_1`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + edgeURL, + name: storeName, + token: edgeToken, + siteID, + }) + + const { blobs } = await store.list({ + cursor: 'cursor_1', + paginate: false, + }) + + expect(blobs).toEqual([ + { etag: 'etag3', key: 'key3' }, + { etag: 'etag4', key: 'key4' }, + ]) + expect(mockStore.fulfilled).toBeTruthy() + }) + }) +}) diff --git a/src/main.test.ts b/src/main.test.ts index 450f817..97af08c 100644 --- a/src/main.test.ts +++ b/src/main.test.ts @@ -131,7 +131,7 @@ describe('get', () => { }) expect(async () => await blobs.get(key)).rejects.toThrowError( - 'get operation has failed: API returned a 401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() }) @@ -157,7 +157,7 @@ describe('get', () => { }) await expect(async () => await blobs.get(key)).rejects.toThrowError( - 'get operation has failed: store returned a 
401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() @@ -233,7 +233,7 @@ describe('get', () => { }) await expect(async () => await blobs.get(key)).rejects.toThrowError( - 'get operation has failed: store returned a 401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() @@ -592,7 +592,7 @@ describe('set', () => { }) expect(async () => await blobs.set(key, 'value')).rejects.toThrowError( - 'put operation has failed: API returned a 401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() }) @@ -718,7 +718,7 @@ describe('set', () => { }) await expect(async () => await blobs.set(key, value)).rejects.toThrowError( - 'put operation has failed: store returned a 401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() @@ -929,7 +929,7 @@ describe('delete', () => { }) expect(async () => await blobs.delete(key)).rejects.toThrowError( - 'delete operation has failed: API returned a 401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() }) @@ -974,7 +974,7 @@ describe('delete', () => { }) await expect(async () => await blobs.delete(key)).rejects.toThrowError( - 'delete operation has failed: store returned a 401 response', + `Netlify Blobs has generated an internal error: 401 response`, ) expect(mockStore.fulfilled).toBeTruthy() @@ -1023,7 +1023,7 @@ describe('Deploy scope', () => { .get({ headers: { authorization: `Bearer ${apiToken}` }, response: new Response(JSON.stringify({ url: signedURL })), - url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy:${deployID}`, + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy%3A${deployID}`, }) .get({ response: new Response(value), @@ -1032,7 +1032,7 
@@ describe('Deploy scope', () => { .get({ headers: { authorization: `Bearer ${apiToken}` }, response: new Response(JSON.stringify({ url: signedURL })), - url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy:${deployID}`, + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy%3A${deployID}`, }) .get({ response: new Response(value), @@ -1093,7 +1093,7 @@ describe('Deploy scope', () => { .get({ headers: { authorization: `Bearer ${apiToken}` }, response: new Response(JSON.stringify({ url: signedURL })), - url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy:${deployID}`, + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy%3A${deployID}`, }) .get({ response: new Response(value), @@ -1102,7 +1102,7 @@ describe('Deploy scope', () => { .get({ headers: { authorization: `Bearer ${apiToken}` }, response: new Response(JSON.stringify({ url: signedURL })), - url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy:${deployID}`, + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs/${key}?context=deploy%3A${deployID}`, }) .get({ response: new Response(value), diff --git a/src/store.ts b/src/store.ts index b3286e6..a60dc27 100644 --- a/src/store.ts +++ b/src/store.ts @@ -1,6 +1,8 @@ +import { ListResponse, ListResponseBlob } from './backend/list.ts' import { Client } from './client.ts' import { decodeMetadata, Metadata } from './metadata.ts' import { BlobInput, HTTPMethod } from './types.ts' +import { BlobsInternalError } from './util.ts' interface BaseStoreOptions { client: Client @@ -26,6 +28,21 @@ interface GetWithMetadataResult { metadata: Metadata } +interface ListResult { + blobs: ListResultBlob[] +} + +interface ListResultBlob { + etag: string + key: string +} + +interface ListOptions { + cursor?: string + paginate?: boolean + prefix?: string +} + interface SetOptions { /** * Arbitrary metadata object to associate with an entry. 
Must be seralizable @@ -55,7 +72,11 @@ export class Store { } async delete(key: string) { - await this.client.makeRequest({ key, method: HTTPMethod.DELETE, storeName: this.name }) + const res = await this.client.makeRequest({ key, method: HTTPMethod.DELETE, storeName: this.name }) + + if (res.status !== 200 && res.status !== 404) { + throw new BlobsInternalError(res.status) + } } async get(key: string): Promise @@ -72,8 +93,12 @@ export class Store { const { type } = options ?? {} const res = await this.client.makeRequest({ key, method: HTTPMethod.GET, storeName: this.name }) - if (res === null) { - return res + if (res.status === 404) { + return null + } + + if (res.status !== 200) { + throw new BlobsInternalError(res.status) } if (type === undefined || type === 'text') { @@ -96,7 +121,7 @@ export class Store { return res.body } - throw new Error(`Invalid 'type' property: ${type}. Expected: arrayBuffer, blob, json, stream, or text.`) + throw new BlobsInternalError(res.status) } async getWithMetadata( @@ -145,6 +170,15 @@ export class Store { const { etag: requestETag, type } = options ?? {} const headers = requestETag ? { 'if-none-match': requestETag } : undefined const res = await this.client.makeRequest({ headers, key, method: HTTPMethod.GET, storeName: this.name }) + + if (res.status === 404) { + return null + } + + if (res.status !== 200 && res.status !== 304) { + throw new BlobsInternalError(res.status) + } + const responseETag = res?.headers.get('etag') ?? undefined let metadata: Metadata = {} @@ -157,10 +191,6 @@ export class Store { ) } - if (res === null) { - return null - } - const result: GetWithMetadataResult = { etag: responseETag, fresh: false, @@ -194,16 +224,36 @@ export class Store { throw new Error(`Invalid 'type' property: ${type}. Expected: arrayBuffer, blob, json, stream, or text.`) } + async list(options: ListOptions = {}): Promise { + const cursor = options.paginate === false ? 
options.cursor : undefined + const maxPages = options.paginate === false ? 1 : Number.POSITIVE_INFINITY + const res = await this.listAndPaginate({ + currentPage: 1, + maxPages, + nextCursor: cursor, + prefix: options.prefix, + }) + const blobs = res.blobs?.map(Store.formatListResult).filter(Boolean) as ListResultBlob[] + + return { + blobs, + } + } + async set(key: string, data: BlobInput, { metadata }: SetOptions = {}) { Store.validateKey(key) - await this.client.makeRequest({ + const res = await this.client.makeRequest({ body: data, key, metadata, method: HTTPMethod.PUT, storeName: this.name, }) + + if (res.status !== 200) { + throw new BlobsInternalError(res.status) + } } async setJSON(key: string, data: unknown, { metadata }: SetOptions = {}) { @@ -214,7 +264,7 @@ export class Store { 'content-type': 'application/json', } - await this.client.makeRequest({ + const res = await this.client.makeRequest({ body: payload, headers, key, @@ -222,9 +272,24 @@ export class Store { method: HTTPMethod.PUT, storeName: this.name, }) + + if (res.status !== 200) { + throw new BlobsInternalError(res.status) + } } - static validateKey(key: string) { + private static formatListResult(result: ListResponseBlob): ListResultBlob | null { + if (!result.key) { + return null + } + + return { + etag: result.etag, + key: result.key, + } + } + + private static validateKey(key: string) { if (key.startsWith('/') || !/^[\w%!.*'()/-]{1,600}$/.test(key)) { throw new Error( "Keys can only contain letters, numbers, percentage signs (%), exclamation marks (!), dots (.), asterisks (*), single quotes ('), parentheses (()), dashes (-) and underscores (_) up to a maximum of 600 characters. 
Keys can also contain forward slashes (/), but must not start with one.", @@ -232,7 +297,7 @@ export class Store { } } - static validateDeployID(deployID: string) { + private static validateDeployID(deployID: string) { // We could be stricter here and require a length of 24 characters, but the // CLI currently uses a deploy of `0` when running Netlify Dev, since there // is no actual deploy at that point. Let's go with a more loose validation @@ -242,7 +307,7 @@ export class Store { } } - static validateStoreName(name: string) { + private static validateStoreName(name: string) { if (name.startsWith('deploy:')) { throw new Error('Store name cannot start with the string `deploy:`, which is a reserved namespace.') } @@ -253,4 +318,52 @@ export class Store { ) } } + + private async listAndPaginate(options: { + accumulator?: ListResponse + currentPage: number + maxPages: number + nextCursor?: string + prefix?: string + }): Promise { + const { accumulator = { blobs: [] }, currentPage, maxPages, nextCursor, prefix } = options + + if (currentPage > maxPages || (currentPage > 1 && !nextCursor)) { + return accumulator + } + + const parameters: Record = {} + + if (nextCursor) { + parameters.cursor = nextCursor + } + + if (prefix) { + parameters.prefix = prefix + } + + const res = await this.client.makeRequest({ + method: HTTPMethod.GET, + parameters, + storeName: this.name, + }) + + if (res.status !== 200) { + throw new BlobsInternalError(res.status) + } + + try { + const listResponse = (await res.json()) as ListResponse + const { blobs = [], next_cursor: nextCursor } = listResponse + + return this.listAndPaginate({ + accumulator: { ...listResponse, blobs: [...(accumulator.blobs || []), ...blobs] }, + currentPage: currentPage + 1, + maxPages, + nextCursor, + }) + } catch (error: unknown) { + throw new Error(`'list()' has returned an internal error: ${error}`) + } + } } diff --git a/src/util.ts b/src/util.ts index 6fc2585..8394d0a 100644 --- a/src/util.ts +++ b/src/util.ts @@ 
-1,3 +1,11 @@ +export class BlobsInternalError extends Error { + constructor(statusCode: number) { + super(`Netlify Blobs has generated an internal error: ${statusCode} response`) + + this.name = 'BlobsInternalError' + } +} + export const isNodeError = (error: unknown): error is NodeJS.ErrnoException => error instanceof Error export type Logger = (...message: unknown[]) => void From cb16e0fd28e7ade545261cb95f24ecd58545affd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Tue, 24 Oct 2023 17:01:18 +0100 Subject: [PATCH 2/7] chore: update test --- src/server.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server.test.ts b/src/server.test.ts index 62e59c0..9765a27 100644 --- a/src/server.test.ts +++ b/src/server.test.ts @@ -105,7 +105,7 @@ describe('Local server', () => { }) await expect(async () => await blobs.get(key)).rejects.toThrowError( - 'get operation has failed: store returned a 403 response', + 'Netlify Blobs has generated an internal error: 403 response', ) await server.stop() From 4d2f3f9ff7dd9d5db7d0ebd76308243c72d5510f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Wed, 25 Oct 2023 15:07:34 +0100 Subject: [PATCH 3/7] refactor: use `directories` parameter --- README.md | 54 +++++++++ src/backend/list.ts | 1 + src/list.test.ts | 270 +++++++++++++++++++++++++++++++++++++++++++- src/store.ts | 48 ++++++-- 4 files changed, 361 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index a79187c..9bfda78 100644 --- a/README.md +++ b/README.md @@ -284,6 +284,60 @@ const { blobs } = await store.list({ prefix: 'some' }) console.log(blobs) ``` +Optionally, you can choose to group blobs together under a common prefix and then browse them hierarchically when +listing a store. + +To do this, you must use the `/` character in your keys to separate keys into multiple levels. 
+ +Take the following list of keys as an example: + +``` +cats/garfield.jpg +cats/tom.jpg +mice/jerry.jpg +mice/mickey.jpg +pink-panther.jpg +``` + +By default, calling `store.list()` will return all five keys. + +```javascript +const { blobs } = await store.list() + +// [ +// { etag: "etag1", key: "cats/garfield.jpg" }, +// { etag: "etag2", key: "cats/tom.jpg" }, +// { etag: "etag3", key: "mice/jerry.jpg" }, +// { etag: "etag4", key: "mice/mickey.jpg" }, +// { etag: "etag5", key: "pink-panther.jpg" }, +// ] +console.log(blobs) +``` + +But if you want to list entries hierarchically, use the `directories` parameter. + +```javascript +const { blobs, directories } = await store.list({ directories: true }) + +// [ { etag: "etag5", key: "pink-panther.jpg" } ] +console.log(blobs) + +// [ "cats", "mice" ] +console.log(directories) +``` + +To drill down into a directory and get a list of its items, you can use the directory name as the `prefix` value. + +```javascript +const { blobs, directories } = await store.list({ prefix: 'mice/' }) + +// [ { etag: "etag3", key: "mice/jerry.jpg" }, { etag: "etag4", key: "mice/mickey.jpg" } ] +console.log(blobs) + +// [ ] +console.log(directories) +``` + ## Contributing Contributions are welcome! 
If you encounter any issues or have suggestions for improvements, please open an issue or diff --git a/src/backend/list.ts b/src/backend/list.ts index 06d4713..17c14e6 100644 --- a/src/backend/list.ts +++ b/src/backend/list.ts @@ -1,5 +1,6 @@ export interface ListResponse { blobs?: ListResponseBlob[] + directories?: string[] next_cursor?: string } diff --git a/src/list.test.ts b/src/list.test.ts index 0c0725a..bee82c7 100644 --- a/src/list.test.ts +++ b/src/list.test.ts @@ -34,7 +34,7 @@ const edgeURL = 'https://cloudfront.url' describe('list', () => { describe('With API credentials', () => { - test('Lists entries and handles pagination', async () => { + test('Lists blobs and handles pagination by default', async () => { const mockStore = new MockFetch() .get({ headers: { authorization: `Bearer ${apiToken}` }, @@ -54,6 +54,7 @@ describe('list', () => { last_modified: '2023-07-18T12:59:06Z', }, ], + directories: [], next_cursor: 'cursor_1', }), ), @@ -77,6 +78,7 @@ describe('list', () => { last_modified: '2023-07-18T12:59:06Z', }, ], + directories: [], next_cursor: 'cursor_2', }), ), @@ -94,6 +96,7 @@ describe('list', () => { last_modified: '2023-07-18T12:59:06Z', }, ], + directories: [], }), ), url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?cursor=cursor_2&context=${storeName}`, @@ -116,6 +119,120 @@ describe('list', () => { { etag: 'etag4', key: 'key4' }, { etag: 'etag5', key: 'key5' }, ]) + + expect(mockStore.fulfilled).toBeTruthy() + }) + + test('Accepts a `directories` parameter', async () => { + const mockStore = new MockFetch() + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag1', + key: 'key1', + size: 1, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag2', + key: 'key2', + size: 2, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: ['dir1'], + next_cursor: 'cursor_1', + }), + ), + url: 
`https://api.netlify.com/api/v1/sites/${siteID}/blobs?directories=true&context=${storeName}`, + }) + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag3', + key: 'key3', + size: 3, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag4', + key: 'key4', + size: 4, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: ['dir2'], + next_cursor: 'cursor_2', + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?cursor=cursor_1&directories=true&context=${storeName}`, + }) + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag5', + key: 'key5', + size: 5, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: ['dir3'], + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?cursor=cursor_2&directories=true&context=${storeName}`, + }) + .get({ + headers: { authorization: `Bearer ${apiToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag6', + key: 'key6', + size: 6, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: [], + }), + ), + url: `https://api.netlify.com/api/v1/sites/${siteID}/blobs?prefix=dir2%2F&directories=true&context=${storeName}`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + name: 'mystore', + token: apiToken, + siteID, + }) + + const root = await store.list({ directories: true }) + + expect(root.blobs).toEqual([ + { etag: 'etag1', key: 'key1' }, + { etag: 'etag2', key: 'key2' }, + { etag: 'etag3', key: 'key3' }, + { etag: 'etag4', key: 'key4' }, + { etag: 'etag5', key: 'key5' }, + ]) + + expect(root.directories).toEqual(['dir1', 'dir2', 'dir3']) + + const directory = await store.list({ directories: true, prefix: `dir2/` }) + + expect(directory.blobs).toEqual([{ etag: 'etag6', key: 'key6' }]) + expect(directory.directories).toEqual([]) + 
expect(mockStore.fulfilled).toBeTruthy() }) @@ -209,7 +326,7 @@ describe('list', () => { }) describe('With edge credentials', () => { - test('Lists entries and handles pagination', async () => { + test('Lists blobs and handles pagination by default', async () => { const mockStore = new MockFetch() .get({ headers: { authorization: `Bearer ${edgeToken}` }, @@ -229,6 +346,7 @@ describe('list', () => { last_modified: '2023-07-18T12:59:06Z', }, ], + directories: ['dir1'], next_cursor: 'cursor_1', }), ), @@ -252,6 +370,7 @@ describe('list', () => { last_modified: '2023-07-18T12:59:06Z', }, ], + directories: ['dir2'], next_cursor: 'cursor_2', }), ), @@ -269,10 +388,28 @@ describe('list', () => { last_modified: '2023-07-18T12:59:06Z', }, ], + directories: ['dir3'], }), ), url: `${edgeURL}/${siteID}/${storeName}?cursor=cursor_2`, }) + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag6', + key: 'key6', + size: 6, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: [], + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?prefix=dir2%2F`, + }) globalThis.fetch = mockStore.fetch @@ -283,15 +420,140 @@ describe('list', () => { siteID, }) - const { blobs } = await store.list() + const root = await store.list() - expect(blobs).toEqual([ + expect(root.blobs).toEqual([ { etag: 'etag1', key: 'key1' }, { etag: 'etag2', key: 'key2' }, { etag: 'etag3', key: 'key3' }, { etag: 'etag4', key: 'key4' }, { etag: 'etag5', key: 'key5' }, ]) + + // @ts-expect-error `directories` is not part of the return type + expect(root.directories).toBe(undefined) + + const directory = await store.list({ prefix: 'dir2/' }) + + expect(directory.blobs).toEqual([{ etag: 'etag6', key: 'key6' }]) + + // @ts-expect-error `directories` is not part of the return type + expect(directory.directories).toBe(undefined) + + expect(mockStore.fulfilled).toBeTruthy() + }) + + test('Accepts a `directories` parameter', async () => 
{ + const mockStore = new MockFetch() + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag1', + key: 'key1', + size: 1, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag2', + key: 'key2', + size: 2, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: ['dir1'], + next_cursor: 'cursor_1', + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?directories=true`, + }) + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag3', + key: 'key3', + size: 3, + last_modified: '2023-07-18T12:59:06Z', + }, + { + etag: 'etag4', + key: 'key4', + size: 4, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: ['dir2'], + next_cursor: 'cursor_2', + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?cursor=cursor_1&directories=true`, + }) + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag5', + key: 'key5', + size: 5, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: ['dir3'], + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?cursor=cursor_2&directories=true`, + }) + .get({ + headers: { authorization: `Bearer ${edgeToken}` }, + response: new Response( + JSON.stringify({ + blobs: [ + { + etag: 'etag6', + key: 'key6', + size: 6, + last_modified: '2023-07-18T12:59:06Z', + }, + ], + directories: [], + }), + ), + url: `${edgeURL}/${siteID}/${storeName}?prefix=dir2%2F&directories=true`, + }) + + globalThis.fetch = mockStore.fetch + + const store = getStore({ + edgeURL, + name: storeName, + token: edgeToken, + siteID, + }) + + const root = await store.list({ directories: true }) + + expect(root.blobs).toEqual([ + { etag: 'etag1', key: 'key1' }, + { etag: 'etag2', key: 'key2' }, + { etag: 'etag3', key: 'key3' }, + { etag: 'etag4', key: 'key4' }, + { etag: 'etag5', key: 'key5' }, + ]) + + 
expect(root.directories).toEqual(['dir1', 'dir2', 'dir3']) + + const directory = await store.list({ directories: true, prefix: 'dir2/' }) + + expect(directory.blobs).toEqual([{ etag: 'etag6', key: 'key6' }]) + expect(directory.directories).toEqual([]) + expect(mockStore.fulfilled).toBeTruthy() }) diff --git a/src/store.ts b/src/store.ts index a60dc27..50ac421 100644 --- a/src/store.ts +++ b/src/store.ts @@ -32,6 +32,10 @@ interface ListResult { blobs: ListResultBlob[] } +interface ListResultWithDirectories extends ListResult { + directories: string[] +} + interface ListResultBlob { etag: string key: string @@ -39,6 +43,7 @@ interface ListResultBlob { interface ListOptions { cursor?: string + directories?: boolean paginate?: boolean prefix?: string } @@ -224,16 +229,26 @@ export class Store { throw new Error(`Invalid 'type' property: ${type}. Expected: arrayBuffer, blob, json, stream, or text.`) } - async list(options: ListOptions = {}): Promise { + async list(options: ListOptions & { directories: true }): Promise + async list(options?: ListOptions & { directories?: false }): Promise + async list(options: ListOptions = {}): Promise { const cursor = options.paginate === false ? options.cursor : undefined const maxPages = options.paginate === false ? 
1 : Number.POSITIVE_INFINITY const res = await this.listAndPaginate({ currentPage: 1, + directories: options.directories, maxPages, nextCursor: cursor, prefix: options.prefix, }) - const blobs = res.blobs?.map(Store.formatListResult).filter(Boolean) as ListResultBlob[] + const blobs = res.blobs?.map(Store.formatListResultBlob).filter(Boolean) as ListResultBlob[] + + if (options?.directories) { + return { + blobs, + directories: res.directories?.filter(Boolean) as string[], + } + } return { blobs, @@ -278,7 +293,7 @@ export class Store { } } - private static formatListResult(result: ListResponseBlob): ListResultBlob | null { + private static formatListResultBlob(result: ListResponseBlob): ListResultBlob | null { if (!result.key) { return null } @@ -321,12 +336,20 @@ export class Store { private async listAndPaginate(options: { accumulator?: ListResponse + directories?: boolean currentPage: number maxPages: number nextCursor?: string prefix?: string }): Promise { - const { accumulator = { blobs: [] }, currentPage, maxPages, nextCursor, prefix } = options + const { + accumulator = { blobs: [], directories: [] }, + currentPage, + directories, + maxPages, + nextCursor, + prefix, + } = options if (currentPage > maxPages || (currentPage > 1 && !nextCursor)) { return accumulator @@ -342,6 +365,10 @@ export class Store { parameters.prefix = prefix } + if (directories) { + parameters.directories = 'true' + } + const res = await this.client.makeRequest({ method: HTTPMethod.GET, parameters, @@ -353,14 +380,19 @@ export class Store { } try { - const listResponse = (await res.json()) as ListResponse - const { blobs = [], next_cursor: nextCursor } = listResponse + const current = (await res.json()) as ListResponse + const newAccumulator = { + ...current, + blobs: [...(accumulator.blobs || []), ...(current.blobs || [])], + directories: [...(accumulator.directories || []), ...(current.directories || [])], + } return this.listAndPaginate({ - accumulator: { ...listResponse, blobs: 
[...(accumulator.blobs || []), ...blobs] }, + accumulator: newAccumulator, currentPage: currentPage + 1, + directories, maxPages, - nextCursor, + nextCursor: current.next_cursor, }) } catch (error: unknown) { throw new Error(`'list()' has returned an internal error: ${error}`) From dad2a712f3968a8d19895edb9b1c1bf22d6dbb23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Wed, 25 Oct 2023 16:31:58 +0100 Subject: [PATCH 4/7] chore: update README --- README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 9bfda78..714029b 100644 --- a/README.md +++ b/README.md @@ -285,9 +285,7 @@ console.log(blobs) ``` Optionally, you can choose to group blobs together under a common prefix and then browse them hierarchically when -listing a store. - -To do this, you must use the `/` character in your keys to separate keys into multiple levels. +listing a store. To do this, use the `/` character in your keys to group them into multiple levels. Take the following list of keys as an example: @@ -309,7 +307,7 @@ const { blobs } = await store.list() // { etag: "etag2", key: "cats/tom.jpg" }, // { etag: "etag3", key: "mice/jerry.jpg" }, // { etag: "etag4", key: "mice/mickey.jpg" }, -// { etag: "etag5", key: "pink-panther.jg" }, +// { etag: "etag5", key: "pink-panther.jpg" }, // ] console.log(blobs) ``` @@ -329,7 +327,7 @@ console.log(directories) To drill down into a directory and get a list of its items, you can use the directory name as the `prefix` value. 
```javascript -const { blobs, directories } = await store.list({ prefix: 'mice/' }) +const { blobs, directories } = await store.list({ directories: true, prefix: 'mice/' }) // [ { etag: "etag3", key: "mice/jerry.jpg" }, { etag: "etag4", key: "mice/mickey.jpg" } ] console.log(blobs) From 3c662f24ffc1cd6f8c78c71f7fc7b44c8afe408a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Thu, 26 Oct 2023 09:24:55 +0100 Subject: [PATCH 5/7] chore: update README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 714029b..a48aa4f 100644 --- a/README.md +++ b/README.md @@ -263,7 +263,7 @@ Deletes an object with the given key, if one exists. await store.delete('my-key') ``` -### `list(options?: { cursor?: string, paginate?: boolean. prefix?: string }): Promise<{ blobs: BlobResult[] }>` +### `list(options?: { cursor?: string, paginate?: boolean. prefix?: string }): Promise<{ blobs: BlobResult[], directories: string[] }>` Returns a list of blobs in a given store. 
From 59416420d23f88cb0b1ffc2b22a9657769d3d8c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Thu, 26 Oct 2023 12:35:53 +0100 Subject: [PATCH 6/7] feat: support `list()` in local server --- src/metadata.ts | 12 +-- src/server.test.ts | 251 +++++++++++++++++++++++++++++++-------------- src/server.ts | 179 ++++++++++++++++++++++++++++---- src/store.ts | 4 +- 4 files changed, 337 insertions(+), 109 deletions(-) diff --git a/src/metadata.ts b/src/metadata.ts index 5ba1ad1..288a70d 100644 --- a/src/metadata.ts +++ b/src/metadata.ts @@ -22,18 +22,12 @@ export const encodeMetadata = (metadata?: Metadata) => { return payload } -export const decodeMetadata = (headers?: Headers): Metadata => { - if (!headers) { +export const decodeMetadata = (header: string | null): Metadata => { + if (!header || !header.startsWith(BASE64_PREFIX)) { return {} } - const metadataHeader = headers.get(METADATA_HEADER_INTERNAL) - - if (!metadataHeader || !metadataHeader.startsWith(BASE64_PREFIX)) { - return {} - } - - const encodedData = metadataHeader.slice(BASE64_PREFIX.length) + const encodedData = header.slice(BASE64_PREFIX.length) const decodedData = Buffer.from(encodedData, 'base64').toString() const metadata = JSON.parse(decodedData) diff --git a/src/server.test.ts b/src/server.test.ts index 9765a27..f8d3259 100644 --- a/src/server.test.ts +++ b/src/server.test.ts @@ -3,7 +3,7 @@ import { env, version as nodeVersion } from 'node:process' import semver from 'semver' import tmp from 'tmp-promise' -import { describe, test, expect, beforeAll, afterEach } from 'vitest' +import { test, expect, beforeAll, afterEach } from 'vitest' import { getStore } from './main.js' import { BlobsServer } from './server.js' @@ -31,84 +31,183 @@ const siteID = '9a003659-aaaa-0000-aaaa-63d3720d8621' const key = '54321' const token = 'my-very-secret-token' -describe('Local server', () => { - test('Reads and writes from the file system', async () => { - const directory = await tmp.dir() - 
const server = new BlobsServer({ - directory: directory.path, - token, - }) - const { port } = await server.start() - const blobs = getStore({ - edgeURL: `http://localhost:${port}`, - name: 'mystore', - token, - siteID, - }) - - await blobs.set(key, 'value 1') - expect(await blobs.get(key)).toBe('value 1') - - await blobs.set(key, 'value 2') - expect(await blobs.get(key)).toBe('value 2') - - await blobs.delete(key) - expect(await blobs.get(key)).toBe(null) - - await server.stop() - await fs.rm(directory.path, { force: true, recursive: true }) +test('Reads and writes from the file system', async () => { + const directory = await tmp.dir() + const server = new BlobsServer({ + directory: directory.path, + token, + }) + const { port } = await server.start() + const blobs = getStore({ + edgeURL: `http://localhost:${port}`, + name: 'mystore', + token, + siteID, + }) + const metadata = { + features: { + blobs: true, + functions: true, + }, + name: 'Netlify', + } + + await blobs.set(key, 'value 1') + expect(await blobs.get(key)).toBe('value 1') + + await blobs.set(key, 'value 2', { metadata }) + expect(await blobs.get(key)).toBe('value 2') + + const entry = await blobs.getWithMetadata(key) + expect(entry.metadata).toEqual(metadata) + + await blobs.delete(key) + expect(await blobs.get(key)).toBe(null) + + await server.stop() + await fs.rm(directory.path, { force: true, recursive: true }) +}) + +test('Separates keys from different stores', async () => { + const directory = await tmp.dir() + const server = new BlobsServer({ + directory: directory.path, + token, + }) + const { port } = await server.start() + + const store1 = getStore({ + edgeURL: `http://localhost:${port}`, + name: 'mystore1', + token, + siteID, + }) + const store2 = getStore({ + edgeURL: `http://localhost:${port}`, + name: 'mystore2', + token, + siteID, }) - test('Separates keys from different stores', async () => { - const directory = await tmp.dir() - const server = new BlobsServer({ - directory: 
directory.path, - token, - }) - const { port } = await server.start() - - const store1 = getStore({ - edgeURL: `http://localhost:${port}`, - name: 'mystore1', - token, - siteID, - }) - const store2 = getStore({ - edgeURL: `http://localhost:${port}`, - name: 'mystore2', - token, - siteID, - }) - - await store1.set(key, 'value 1 for store 1') - await store2.set(key, 'value 1 for store 2') - - expect(await store1.get(key)).toBe('value 1 for store 1') - expect(await store2.get(key)).toBe('value 1 for store 2') - - await server.stop() - await fs.rm(directory.path, { force: true, recursive: true }) + await store1.set(key, 'value 1 for store 1') + await store2.set(key, 'value 1 for store 2') + + expect(await store1.get(key)).toBe('value 1 for store 1') + expect(await store2.get(key)).toBe('value 1 for store 2') + + await server.stop() + await fs.rm(directory.path, { force: true, recursive: true }) +}) + +test('If a token is set, rejects any requests with an invalid `authorization` header', async () => { + const directory = await tmp.dir() + const server = new BlobsServer({ + directory: directory.path, + token, }) + const { port } = await server.start() + const blobs = getStore({ + edgeURL: `http://localhost:${port}`, + name: 'mystore', + token: 'another token', + siteID, + }) + + await expect(async () => await blobs.get(key)).rejects.toThrowError( + 'Netlify Blobs has generated an internal error: 403 response', + ) - test('If a token is set, rejects any requests with an invalid `authorization` header', async () => { - const directory = await tmp.dir() - const server = new BlobsServer({ - directory: directory.path, - token, - }) - const { port } = await server.start() - const blobs = getStore({ - edgeURL: `http://localhost:${port}`, - name: 'mystore', - token: 'another token', - siteID, - }) - - await expect(async () => await blobs.get(key)).rejects.toThrowError( - 'Netlify Blobs has generated an internal error: 403 response', - ) - - await server.stop() - await 
fs.rm(directory.path, { force: true, recursive: true }) + await server.stop() + await fs.rm(directory.path, { force: true, recursive: true }) +}) + +test('Lists entries', async () => { + const directory = await tmp.dir() + const server = new BlobsServer({ + directory: directory.path, + token, + }) + const { port } = await server.start() + const blobs = getStore({ + edgeURL: `http://localhost:${port}`, + name: 'mystore', + token, + siteID, }) + const songs: Record = { + 'coldplay/parachutes/shiver': "I'll always be waiting for you", + 'coldplay/parachutes/spies': 'And the spies came out of the water', + 'coldplay/parachutes/trouble': 'And I:I never meant to cause you trouble', + 'coldplay/a-rush-of-blood-to-the-head/politik': 'Give me heart and give me soul', + 'coldplay/a-rush-of-blood-to-the-head/in-my-place': 'How long must you wait for it?', + 'coldplay/a-rush-of-blood-to-the-head/the-scientist': 'Questions of science, science and progress', + 'phoenix/united/too-young': "Oh rainfalls and hard times coming they won't leave me tonight", + 'phoenix/united/party-time': 'Summertime is gone', + 'phoenix/ti-amo/j-boy': 'Something in the middle of the side of the store', + 'phoenix/ti-amo/fleur-de-lys': 'No rest till I get to you, no rest till I get to you', + } + + for (const title in songs) { + await blobs.set(title, songs[title]) + } + + const allSongs = await blobs.list() + + for (const title in songs) { + const match = allSongs.blobs.find((blob) => blob.key === title) + + expect(match).toBeTruthy() + } + + const coldplaySongs = await blobs.list({ prefix: 'coldplay/' }) + + for (const title in songs) { + if (!title.startsWith('coldplay/')) { + continue + } + + const match = coldplaySongs.blobs.find((blob) => blob.key === title) + + expect(match).toBeTruthy() + } + + const parachutesSongs = await blobs.list({ prefix: 'coldplay/parachutes/' }) + + for (const title in songs) { + if (!title.startsWith('coldplay/parachutes/')) { + continue + } + + const match = 
parachutesSongs.blobs.find((blob) => blob.key === title) + + expect(match).toBeTruthy() + } + + const fooFightersSongs = await blobs.list({ prefix: 'foo-fighters/' }) + + expect(fooFightersSongs.blobs).toEqual([]) + + const artists = await blobs.list({ directories: true }) + + expect(artists.blobs).toEqual([]) + expect(artists.directories).toEqual(['coldplay', 'phoenix']) + + const coldplayAlbums = await blobs.list({ directories: true, prefix: 'coldplay/' }) + + expect(coldplayAlbums.blobs).toEqual([]) + expect(coldplayAlbums.directories).toEqual(['coldplay/a-rush-of-blood-to-the-head', 'coldplay/parachutes']) + + const parachutesSongs2 = await blobs.list({ directories: true, prefix: 'coldplay/parachutes/' }) + + for (const title in songs) { + if (!title.startsWith('coldplay/parachutes/')) { + continue + } + + const match = parachutesSongs2.blobs.find((blob) => blob.key === title) + + expect(match).toBeTruthy() + } + + expect(parachutesSongs2.directories).toEqual([]) }) diff --git a/src/server.ts b/src/server.ts index 9dc5d94..1f80fad 100644 --- a/src/server.ts +++ b/src/server.ts @@ -1,8 +1,10 @@ import { createReadStream, createWriteStream, promises as fs } from 'node:fs' import http from 'node:http' import { tmpdir } from 'node:os' -import { basename, dirname, join, resolve } from 'node:path' +import { dirname, join, relative, resolve } from 'node:path' +import { ListResponse } from './backend/list.ts' +import { decodeMetadata, encodeMetadata, METADATA_HEADER_EXTERNAL, METADATA_HEADER_INTERNAL } from './metadata.ts' import { isNodeError, Logger } from './util.ts' interface BlobsServerOptions { @@ -35,6 +37,7 @@ interface BlobsServerOptions { } export class BlobsServer { + private address: string private debug: boolean private directory: string private logger: Logger @@ -43,6 +46,7 @@ export class BlobsServer { private token?: string constructor({ debug, directory, logger, port, token }: BlobsServerOptions) { + this.address = '' this.debug = debug === true 
this.directory = directory this.logger = logger ?? console.log @@ -59,9 +63,10 @@ export class BlobsServer { } async delete(req: http.IncomingMessage, res: http.ServerResponse) { - const { dataPath } = this.getFilePathFromURL(req.url) + const url = new URL(req.url ?? '', this.address) + const { dataPath, key } = this.getLocalPaths(url) - if (!dataPath) { + if (!dataPath || !key) { return this.sendResponse(req, res, 400) } @@ -78,13 +83,37 @@ export class BlobsServer { return this.sendResponse(req, res, 200) } - get(req: http.IncomingMessage, res: http.ServerResponse) { - const { dataPath } = this.getFilePathFromURL(req.url) + async get(req: http.IncomingMessage, res: http.ServerResponse) { + const url = new URL(req.url ?? '', this.address) + const { dataPath, key, metadataPath, rootPath } = this.getLocalPaths(url) - if (!dataPath) { + if (!dataPath || !metadataPath) { return this.sendResponse(req, res, 400) } + // If there is no key in the URL, it means a `list` operation. + if (!key) { + return this.list({ dataPath, metadataPath, rootPath, req, res, url }) + } + + const headers: Record = {} + + try { + const rawData = await fs.readFile(metadataPath, 'utf8') + const metadata = JSON.parse(rawData) + const encodedMetadata = encodeMetadata(metadata) + + if (encodedMetadata) { + headers[METADATA_HEADER_INTERNAL] = encodedMetadata + } + } catch (error) { + this.logDebug('Could not read metadata file:', error) + } + + for (const name in headers) { + res.setHeader(name, headers[name]) + } + const stream = createReadStream(dataPath) stream.on('error', (error: NodeJS.ErrnoException) => { @@ -94,17 +123,49 @@ export class BlobsServer { return this.sendResponse(req, res, 500) }) - stream.on('finish', () => this.sendResponse(req, res, 200)) stream.pipe(res) } + async list(options: { + dataPath: string + metadataPath: string + rootPath: string + req: http.IncomingMessage + res: http.ServerResponse + url: URL + }) { + const { dataPath, rootPath, req, res, url } = options + const 
directories = url.searchParams.get('directories') === 'true' + const prefix = url.searchParams.get('prefix') ?? '' + const result: ListResponse = { + blobs: [], + directories: [], + } + + try { + await BlobsServer.walk({ directories, path: dataPath, prefix, rootPath, result }) + } catch (error) { + this.logDebug('Could not perform list:', error) + + return this.sendResponse(req, res, 500) + } + + res.setHeader('content-type', 'application/json') + + return this.sendResponse(req, res, 200, JSON.stringify(result)) + } + async put(req: http.IncomingMessage, res: http.ServerResponse) { - const { dataPath } = this.getFilePathFromURL(req.url) + const url = new URL(req.url ?? '', this.address) + const { dataPath, key, metadataPath } = this.getLocalPaths(url) - if (!dataPath) { + if (!dataPath || !key || !metadataPath) { return this.sendResponse(req, res, 400) } + const metadataHeader = req.headers[METADATA_HEADER_EXTERNAL] + const metadata = decodeMetadata(Array.isArray(metadataHeader) ? metadataHeader[0] : metadataHeader ?? null) + try { // We can't have multiple requests writing to the same file, which could // lead to corrupted data. Ideally we'd have a mechanism where the last @@ -112,18 +173,25 @@ export class BlobsServer { // now, we address this by writing data to a temporary file and then // moving it to the right path after the write has succeeded. 
const tempDirectory = await fs.mkdtemp(join(tmpdir(), 'netlify-blobs')) - const tempPath = join(tempDirectory, basename(dataPath)) + const relativeDataPath = relative(this.directory, dataPath) + const tempDataPath = join(tempDirectory, relativeDataPath) + + await fs.mkdir(dirname(tempDataPath), { recursive: true }) await new Promise((resolve, reject) => { - req.pipe(createWriteStream(tempPath)) + req.pipe(createWriteStream(tempDataPath)) req.on('end', resolve) req.on('error', reject) }) await fs.mkdir(dirname(dataPath), { recursive: true }) - await fs.rename(tempPath, dataPath) + await fs.rename(tempDataPath, dataPath) await fs.rm(tempDirectory, { force: true, recursive: true }) + + await fs.mkdir(dirname(metadataPath), { recursive: true }) + await fs.writeFile(metadataPath, JSON.stringify(metadata)) } catch (error) { + console.error(error) this.logDebug('Error when writing data:', error) return this.sendResponse(req, res, 500) @@ -133,23 +201,25 @@ export class BlobsServer { } /** - * Returns the path to the local file associated with a given combination of - * site ID, store name, and object, which are extracted from a URL path. + * Parses the URL and returns the filesystem paths where entries and metadata + * should be stored. 
*/ - getFilePathFromURL(urlPath?: string) { - if (!urlPath) { + getLocalPaths(url?: URL) { + if (!url) { return {} } - const [, siteID, storeName, key] = urlPath.split('/') + const [, siteID, storeName, ...key] = url.pathname.split('/') - if (!siteID || !storeName || !key) { + if (!siteID || !storeName) { return {} } - const dataPath = resolve(this.directory, 'entries', siteID, storeName, key) + const rootPath = resolve(this.directory, 'entries', siteID, storeName) + const dataPath = resolve(rootPath, ...key) + const metadataPath = resolve(this.directory, 'metadata', siteID, storeName, ...key) - return { dataPath } + return { dataPath, key: key.join('/'), metadataPath, rootPath } } handleRequest(req: http.IncomingMessage, res: http.ServerResponse) { @@ -172,11 +242,11 @@ export class BlobsServer { } } - sendResponse(req: http.IncomingMessage, res: http.ServerResponse, status: number) { + sendResponse(req: http.IncomingMessage, res: http.ServerResponse, status: number, body?: string) { this.logDebug(`${req.method} ${req.url}: ${status}`) res.writeHead(status) - res.end() + res.end(body) } async start(): Promise<{ address: string; family: string; port: number }> { @@ -194,6 +264,8 @@ export class BlobsServer { return reject(new Error('Server cannot be started on a pipe or Unix socket')) } + this.address = `http://localhost:${address.port}` + resolve(address) }) }) @@ -229,4 +301,67 @@ export class BlobsServer { return parts[1] === this.token } + + /** + * Traverses a path and collects both blobs and directories into a `result` + * object, taking into account the `directories` and `prefix` parameters. 
+ */ + private static async walk(options: { + directories: boolean + path: string + prefix: string + result: ListResponse + rootPath: string + }) { + const { directories, path, prefix, result, rootPath } = options + const entries = await fs.readdir(path) + + for (const entry of entries) { + const entryPath = join(path, entry) + const stat = await fs.stat(entryPath) + const key = relative(rootPath, entryPath) + + // To match the key against the prefix, we start by creating a "mask", + // which consists of the subset of the key up to the length of the + // prefix. + const mask = key.slice(0, prefix.length) + + // There is a match if the mask matches the prefix. + const isMatch = prefix.startsWith(mask) + + if (!isMatch) { + continue + } + + // If the entry is a file, add it to the `blobs` bucket. + if (!stat.isDirectory()) { + // We don't support conditional requests in the local server, so we + // generate a random ETag for each entry. + const etag = Math.random().toString().slice(2) + + result.blobs?.push({ + etag, + key, + last_modified: stat.mtime.toISOString(), + size: stat.size, + }) + + continue + } + + // The entry is a directory. We push it to the `directories` bucket only + // if the `directories` parameter is enabled and we're at the same level + // as the prefix. For example, if the prefix is `animals/cats/` and the + // key we're processing is `animals`, we don't want to push it to the + // `directories` bucket. We want to traverse it. + if (directories && key.startsWith(prefix)) { + result.directories?.push(key) + + continue + } + + // Call this method recursively with the directory as the starting point. 
+ await BlobsServer.walk({ directories, path: entryPath, prefix, rootPath, result }) + } + } } diff --git a/src/store.ts b/src/store.ts index 50ac421..5b8a809 100644 --- a/src/store.ts +++ b/src/store.ts @@ -1,6 +1,6 @@ import { ListResponse, ListResponseBlob } from './backend/list.ts' import { Client } from './client.ts' -import { decodeMetadata, Metadata } from './metadata.ts' +import { decodeMetadata, Metadata, METADATA_HEADER_INTERNAL } from './metadata.ts' import { BlobInput, HTTPMethod } from './types.ts' import { BlobsInternalError } from './util.ts' @@ -189,7 +189,7 @@ export class Store { let metadata: Metadata = {} try { - metadata = decodeMetadata(res?.headers) + metadata = decodeMetadata(res?.headers.get(METADATA_HEADER_INTERNAL)) } catch { throw new Error( 'An internal error occurred while trying to retrieve the metadata for an entry. Please try updating to the latest version of the Netlify Blobs client.', From 3e511d384354768937331f378b50c60f9e612923 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduardo=20Bou=C3=A7as?= Date: Thu, 26 Oct 2023 16:32:28 +0100 Subject: [PATCH 7/7] fix: use cross-OS paths --- src/server.test.ts | 22 +++++++++++++--------- src/server.ts | 13 +++++++++---- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/src/server.test.ts b/src/server.test.ts index f8d3259..4ac564c 100644 --- a/src/server.test.ts +++ b/src/server.test.ts @@ -28,7 +28,6 @@ afterEach(() => { }) const siteID = '9a003659-aaaa-0000-aaaa-63d3720d8621' -const key = '54321' const token = 'my-very-secret-token' test('Reads and writes from the file system', async () => { @@ -52,17 +51,21 @@ test('Reads and writes from the file system', async () => { name: 'Netlify', } - await blobs.set(key, 'value 1') - expect(await blobs.get(key)).toBe('value 1') + await blobs.set('simple-key', 'value 1') + expect(await blobs.get('simple-key')).toBe('value 1') - await blobs.set(key, 'value 2', { metadata }) - expect(await blobs.get(key)).toBe('value 2') + await 
blobs.set('simple-key', 'value 2', { metadata }) + expect(await blobs.get('simple-key')).toBe('value 2') - const entry = await blobs.getWithMetadata(key) + await blobs.set('parent/child', 'value 3') + expect(await blobs.get('parent/child')).toBe('value 3') + expect(await blobs.get('parent')).toBe(null) + + const entry = await blobs.getWithMetadata('simple-key') expect(entry.metadata).toEqual(metadata) - await blobs.delete(key) - expect(await blobs.get(key)).toBe(null) + await blobs.delete('simple-key') + expect(await blobs.get('simple-key')).toBe(null) await server.stop() await fs.rm(directory.path, { force: true, recursive: true }) @@ -88,6 +91,7 @@ test('Separates keys from different stores', async () => { token, siteID, }) + const key = 'my-key' await store1.set(key, 'value 1 for store 1') await store2.set(key, 'value 1 for store 2') @@ -113,7 +117,7 @@ test('If a token is set, rejects any requests with an invalid `authorization` he siteID, }) - await expect(async () => await blobs.get(key)).rejects.toThrowError( + await expect(async () => await blobs.get('some-key')).rejects.toThrowError( 'Netlify Blobs has generated an internal error: 403 response', ) diff --git a/src/server.ts b/src/server.ts index 1f80fad..73f6df5 100644 --- a/src/server.ts +++ b/src/server.ts @@ -1,7 +1,7 @@ import { createReadStream, createWriteStream, promises as fs } from 'node:fs' import http from 'node:http' import { tmpdir } from 'node:os' -import { dirname, join, relative, resolve } from 'node:path' +import { dirname, join, relative, resolve, sep } from 'node:path' import { ListResponse } from './backend/list.ts' import { decodeMetadata, encodeMetadata, METADATA_HEADER_EXTERNAL, METADATA_HEADER_INTERNAL } from './metadata.ts' @@ -117,7 +117,7 @@ export class BlobsServer { const stream = createReadStream(dataPath) stream.on('error', (error: NodeJS.ErrnoException) => { - if (error.code === 'ENOENT') { + if (error.code === 'EISDIR' || error.code === 'ENOENT') { return 
this.sendResponse(req, res, 404) } @@ -191,7 +191,6 @@ export class BlobsServer { await fs.mkdir(dirname(metadataPath), { recursive: true }) await fs.writeFile(metadataPath, JSON.stringify(metadata)) } catch (error) { - console.error(error) this.logDebug('Error when writing data:', error) return this.sendResponse(req, res, 500) @@ -319,7 +318,13 @@ export class BlobsServer { for (const entry of entries) { const entryPath = join(path, entry) const stat = await fs.stat(entryPath) - const key = relative(rootPath, entryPath) + + let key = relative(rootPath, entryPath) + + // Normalize keys to use `/` as delimiter regardless of OS. + if (sep !== '/') { + key = key.split(sep).join('/') + } // To match the key against the prefix, we start by creating a "mask", // which consists of the subset of the key up to the length of the