From 0e88ef5031bd72f4f2cb0d288d0275b9186ae7b2 Mon Sep 17 00:00:00 2001 From: Harminder virk Date: Wed, 31 Jul 2019 09:50:52 +0530 Subject: [PATCH] refactor: rewrite of multipart file handling Now one can selectively stream files and still make bodyparser validate the files for them. --- adonis-typings/bodyparser.ts | 78 +++-- adonis-typings/index.ts | 12 +- config/index.ts | 4 +- ..._bodyparserprovider_.bodyparserprovider.md | 8 +- ..._bodyparser_index_.bodyparsermiddleware.md | 10 +- .../_src_formfields_index_.formfields.md | 4 +- docs/classes/_src_multipart_file_.file.md | 84 +++-- .../_src_multipart_index_.multipart.md | 89 ++--- ..._src_multipart_parthandler_.parthandler.md | 110 +++++++ docs/globals.md | 11 +- docs/modules/_config_index_.md | 4 +- .../modules/_providers_bodyparserprovider_.md | 4 +- docs/modules/_src_bindings_request_.md | 25 ++ docs/modules/_src_bodyparser_index_.md | 4 +- docs/modules/_src_formfields_index_.md | 4 +- docs/modules/_src_multipart_file_.md | 4 +- docs/modules/_src_multipart_index_.md | 4 +- docs/modules/_src_multipart_parthandler_.md | 11 + .../_src_multipart_processmultipart_.md | 29 -- docs/modules/_src_multipart_streamfile_.md | 4 +- docs/modules/_src_utils_.md | 72 ++++ docs/modules/_test_helpers_index_.md | 70 ++++ japaFile.js | 1 + package.json | 14 +- providers/BodyParserProvider.ts | 30 +- src/Bindings/Request.ts | 84 +++++ src/BodyParser/index.ts | 40 ++- src/Multipart/File.ts | 120 +------ src/Multipart/PartHandler.ts | 213 ++++++++++++ src/Multipart/index.ts | 258 +++++++++------ src/Multipart/processMultipart.ts | 93 ------ src/Multipart/streamFile.ts | 2 +- src/utils.ts | 116 +++++++ test-helpers/index.ts | 22 ++ test/body-parser.spec.ts | 252 ++++++++++++-- test/file.spec.ts | 180 ---------- test/multipart.spec.ts | 310 +++++++++++++----- 37 files changed, 1542 insertions(+), 838 deletions(-) create mode 100644 docs/classes/_src_multipart_parthandler_.parthandler.md create mode 100644 
docs/modules/_src_bindings_request_.md create mode 100644 docs/modules/_src_multipart_parthandler_.md delete mode 100644 docs/modules/_src_multipart_processmultipart_.md create mode 100644 docs/modules/_src_utils_.md create mode 100644 docs/modules/_test_helpers_index_.md create mode 100644 src/Bindings/Request.ts create mode 100644 src/Multipart/PartHandler.ts delete mode 100644 src/Multipart/processMultipart.ts create mode 100644 src/utils.ts create mode 100644 test-helpers/index.ts delete mode 100644 test/file.spec.ts diff --git a/adonis-typings/bodyparser.ts b/adonis-typings/bodyparser.ts index 9e7f43f..ea97bfb 100644 --- a/adonis-typings/bodyparser.ts +++ b/adonis-typings/bodyparser.ts @@ -12,8 +12,8 @@ declare module '@ioc:Adonis/Addons/BodyParser' { import { FileTypeResult } from 'file-type' /** - * Readable stream along with some extra - * data + * Readable stream along with some extra data. This is what + * is passed to `onFile` handlers. */ export type MultipartStream = Readable & { headers: any, @@ -23,7 +23,8 @@ declare module '@ioc:Adonis/Addons/BodyParser' { } /** - * File validation options + * The options that can be used to validate a given + * file */ export type FileValidationOptions = { size: string | number, @@ -32,14 +33,12 @@ declare module '@ioc:Adonis/Addons/BodyParser' { } /** - * Stream part handler + * The callback handler for a given file part */ - export type PartHandler = (part: MultipartStream) => Promise - - /** - * Field handler - */ - export type FieldHandler = (key: string, value: string) => void + export type PartHandlerContract = ( + part: MultipartStream, + reportChunk: (chunk: Buffer) => void, + ) => Promise<({ filePath?: string, tmpPath?: string } & { [key: string]: any }) | void> /** * Qs module config @@ -72,28 +71,28 @@ declare module '@ioc:Adonis/Addons/BodyParser' { /** * Body parser config for parsing JSON requests */ - type BodyParserJSONConfig = BodyParserBaseConfig & { + export type BodyParserJSONConfigContract = 
BodyParserBaseConfig & { strict: boolean, } /** * Parser config for parsing form data */ - type BodyParserFormConfig = BodyParserBaseConfig & { + export type BodyParserFormConfigContract = BodyParserBaseConfig & { queryString: QueryStringConfig, } /** * Parser config for parsing raw body (untouched) */ - type BodyParserRawConfig = BodyParserBaseConfig & { + export type BodyParserRawConfigContract = BodyParserBaseConfig & { queryString: QueryStringConfig, } /** * Parser config for parsing multipart requests */ - type BodyParserMultipartConfig = BodyParserBaseConfig & { + export type BodyParserMultipartConfigContract = BodyParserBaseConfig & { autoProcess: boolean, maxFields: number, processManually: string[], @@ -103,24 +102,27 @@ declare module '@ioc:Adonis/Addons/BodyParser' { /** * Body parser config for all different types */ - export type BodyParserConfig = { + export type BodyParserConfigContract = { whitelistedMethods: string[], - json: BodyParserJSONConfig, - form: BodyParserFormConfig, - raw: BodyParserRawConfig, - multipart: BodyParserMultipartConfig, + json: BodyParserJSONConfigContract, + form: BodyParserFormConfigContract, + raw: BodyParserRawConfigContract, + multipart: BodyParserMultipartConfigContract, } /** - * Multipart class contract, since it's exposed on the + * Multipart class contract, since it is exposed on the * request object, we need the interface to extend * typings */ export interface MultipartContract { consumed: boolean, - onFile (name: string, callback: PartHandler): this, - onField (key: string, value: any): this, - process (): Promise, + onFile ( + name: string, + options: Partial, + callback: PartHandlerContract, + ): this, + process (config?: Partial<{ limit: string | number, maxFields: number }>): Promise, } /** @@ -130,7 +132,7 @@ declare module '@ioc:Adonis/Addons/BodyParser' { fieldName: string, clientName: string, message: string, - type: 'size' | 'extname', + type: 'size' | 'extname' | 'fatal', } /** @@ -138,30 +140,36 @@ 
declare module '@ioc:Adonis/Addons/BodyParser' { */ export type FileInputNode = { fieldName: string, - fileName: string, - tmpPath: string, + clientName: string, bytes: number, headers: { [key: string]: string, }, - fileType?: FileTypeResult, + filePath?: string, + tmpPath?: string, + meta: any, + fileType: { + ext: string, + type: string, + subtype: string, + }, } /** - * Multipart file interface, used to loose coupling + * Multipart file interface */ export interface MultipartFileContract { - isValid: boolean, - clientName: string, - fileName?: string, fieldName: string, - tmpPath: string, + clientName: string, + tmpPath?: string, + filePath?: string, size: number, - type?: string, - subtype?: string, + type: string, + subtype: string, + isValid: boolean, status: 'pending' | 'moved' | 'error', extname: string, - setValidationOptions (options: Partial): this, + validated: boolean, errors: FileUploadError[], } } diff --git a/adonis-typings/index.ts b/adonis-typings/index.ts index 9a1ecc2..ada5ef9 100644 --- a/adonis-typings/index.ts +++ b/adonis-typings/index.ts @@ -14,9 +14,19 @@ import { MultipartContract, } from '@ioc:Adonis/Addons/BodyParser' +/** + * Extending the `request` interface on the core module + */ declare module '@ioc:Adonis/Core/Request' { interface RequestContract { - file (key: string, options?: Partial): MultipartFileContract, + file ( + key: string, + options?: Partial, + ): MultipartFileContract | null, + files ( + key: string, + options?: Partial, + ): MultipartFileContract[], multipart: MultipartContract, } } diff --git a/config/index.ts b/config/index.ts index a183d19..f30a8c7 100644 --- a/config/index.ts +++ b/config/index.ts @@ -8,13 +8,13 @@ */ import * as uuid from 'uuid/v1' -import { BodyParserConfig } from '@ioc:Adonis/Addons/BodyParser' +import { BodyParserConfigContract } from '@ioc:Adonis/Addons/BodyParser' /** * Default config to be used. 
It will be deep merged * with the user config */ -export const config: BodyParserConfig = { +export const config: BodyParserConfigContract = { /* |-------------------------------------------------------------------------- | White listed methods diff --git a/docs/classes/_providers_bodyparserprovider_.bodyparserprovider.md b/docs/classes/_providers_bodyparserprovider_.bodyparserprovider.md index 98f0b80..8570f19 100644 --- a/docs/classes/_providers_bodyparserprovider_.bodyparserprovider.md +++ b/docs/classes/_providers_bodyparserprovider_.bodyparserprovider.md @@ -8,17 +8,17 @@ * **BodyParserProvider** -### Index +## Index -#### Constructors +### Constructors * [constructor](_providers_bodyparserprovider_.bodyparserprovider.md#constructor) -#### Properties +### Properties * [$container](_providers_bodyparserprovider_.bodyparserprovider.md#protected-$container) -#### Methods +### Methods * [boot](_providers_bodyparserprovider_.bodyparserprovider.md#boot) * [register](_providers_bodyparserprovider_.bodyparserprovider.md#register) diff --git a/docs/classes/_src_bodyparser_index_.bodyparsermiddleware.md b/docs/classes/_src_bodyparser_index_.bodyparsermiddleware.md index a90402a..71fd059 100644 --- a/docs/classes/_src_bodyparser_index_.bodyparsermiddleware.md +++ b/docs/classes/_src_bodyparser_index_.bodyparsermiddleware.md @@ -11,13 +11,13 @@ request body to be read later in the request lifecycle. * **BodyParserMiddleware** -### Index +## Index -#### Constructors +### Constructors * [constructor](_src_bodyparser_index_.bodyparsermiddleware.md#constructor) -#### Methods +### Methods * [handle](_src_bodyparser_index_.bodyparsermiddleware.md#handle) @@ -25,13 +25,13 @@ request body to be read later in the request lifecycle. 
### constructor -\+ **new BodyParserMiddleware**(`_config`: `BodyParserConfig`): *[BodyParserMiddleware](_src_bodyparser_index_.bodyparsermiddleware.md)* +\+ **new BodyParserMiddleware**(`_config`: `BodyParserConfigContract`): *[BodyParserMiddleware](_src_bodyparser_index_.bodyparsermiddleware.md)* **Parameters:** Name | Type | ------ | ------ | -`_config` | `BodyParserConfig` | +`_config` | `BodyParserConfigContract` | **Returns:** *[BodyParserMiddleware](_src_bodyparser_index_.bodyparsermiddleware.md)* diff --git a/docs/classes/_src_formfields_index_.formfields.md b/docs/classes/_src_formfields_index_.formfields.md index 1767589..652eeab 100644 --- a/docs/classes/_src_formfields_index_.formfields.md +++ b/docs/classes/_src_formfields_index_.formfields.md @@ -11,9 +11,9 @@ array gracefully * **FormFields** -### Index +## Index -#### Methods +### Methods * [add](_src_formfields_index_.formfields.md#add) * [get](_src_formfields_index_.formfields.md#get) diff --git a/docs/classes/_src_multipart_file_.file.md b/docs/classes/_src_multipart_file_.file.md index d0b3db6..608d6a5 100644 --- a/docs/classes/_src_multipart_file_.file.md +++ b/docs/classes/_src_multipart_file_.file.md @@ -4,8 +4,8 @@ # Class: File -File class exposes a friendly API to validate or save uploaded -files. +The file holds the meta/data for an uploaded file, along with +an errors occurred during the upload process. ## Hierarchy @@ -15,33 +15,31 @@ files. 
* `MultipartFileContract` -### Index +## Index -#### Constructors +### Constructors * [constructor](_src_multipart_file_.file.md#constructor) -#### Properties +### Properties * [clientName](_src_multipart_file_.file.md#clientname) * [errors](_src_multipart_file_.file.md#errors) * [extname](_src_multipart_file_.file.md#extname) * [fieldName](_src_multipart_file_.file.md#fieldname) -* [fileName](_src_multipart_file_.file.md#filename) +* [filePath](_src_multipart_file_.file.md#optional-filepath) +* [meta](_src_multipart_file_.file.md#meta) * [size](_src_multipart_file_.file.md#size) * [subtype](_src_multipart_file_.file.md#subtype) -* [tmpPath](_src_multipart_file_.file.md#tmppath) +* [tmpPath](_src_multipart_file_.file.md#optional-tmppath) * [type](_src_multipart_file_.file.md#type) +* [validated](_src_multipart_file_.file.md#validated) -#### Accessors +### Accessors * [isValid](_src_multipart_file_.file.md#isvalid) * [status](_src_multipart_file_.file.md#status) -#### Methods - -* [setValidationOptions](_src_multipart_file_.file.md#setvalidationoptions) - ## Constructors ### constructor @@ -60,7 +58,7 @@ Name | Type | ### clientName -• **clientName**: *string* = this._data.fileName +• **clientName**: *string* = this._data.clientName Client name is the file name on the user client @@ -76,13 +74,9 @@ ___ ### extname -• **extname**: *string* = this._data.fileType - ? this._data.fileType.ext - : extname(this.clientName).replace(/^\./, '') +• **extname**: *string* = this._data.fileType.ext -The extname for the file. We pull the file extension from the file -name when `fileType` is undefined. Check [processMultipart](../modules/_src_multipart_processmultipart_.md#processmultipart) -method to known how fileType value is computed. +The extname for the file. ___ @@ -94,14 +88,22 @@ Field name is the name of the field ___ -### fileName +### `Optional` filePath -• **fileName**: *string* +• **filePath**? 
: *undefined | string* = this._data.filePath Filename is only set after the move operation ___ +### meta + +• **meta**: *string* = this._data.meta + +The file meta data + +___ + ### size • **size**: *number* = this._data.bytes @@ -112,24 +114,33 @@ ___ ### subtype -• **subtype**: *string* +• **subtype**: *string* = this._data.fileType.subtype ___ -### tmpPath +### `Optional` tmpPath -• **tmpPath**: *string* = this._data.tmpPath +• **tmpPath**? : *undefined | string* = this._data.tmpPath -Path to the tmp folder +Tmp path, only exists when file is uploaded using the +classic mode. ___ ### type -• **type**: *string* +• **type**: *string* = this._data.fileType.type Type and subtype are extracted from the `content-type` -header +header or from the file magic number + +___ + +### validated + +• **validated**: *boolean* = false + +Whether or not this file has been validated ## Accessors @@ -150,21 +161,4 @@ ___ Current status of the file -**Returns:** *"pending" | "moved" | "error"* - -## Methods - -### setValidationOptions - -▸ **setValidationOptions**(`options`: `Partial`): *this* - -Set validation options to be used for -validating the file - -**Parameters:** - -Name | Type | ------- | ------ | -`options` | `Partial` | - -**Returns:** *this* \ No newline at end of file +**Returns:** *"pending" | "moved" | "error"* \ No newline at end of file diff --git a/docs/classes/_src_multipart_index_.multipart.md b/docs/classes/_src_multipart_index_.multipart.md index 8fa7416..f24d195 100644 --- a/docs/classes/_src_multipart_index_.multipart.md +++ b/docs/classes/_src_multipart_index_.multipart.md @@ -8,25 +8,6 @@ Multipart class offers a low level API to interact the incoming HTTP request data as a stream. This makes it super easy to write files to s3 without saving them to the disk first. 
-### Usage - -```js -const multipart = new Multipart(options) - -multipart.onFile('profile', async (stream) => { - stream.pipe(fs.createWriteStream('./profile.jpg')) -}) - -multipart.onField('*', async (key, value) => { -}) - -try { - await multipart.process() -} catch (error) { - // all errors are sent to the process method -} -``` - ## Hierarchy * **Multipart** @@ -35,19 +16,18 @@ try { * `MultipartContract` -### Index +## Index -#### Constructors +### Constructors * [constructor](_src_multipart_index_.multipart.md#constructor) -#### Properties +### Properties * [consumed](_src_multipart_index_.multipart.md#consumed) -#### Methods +### Methods -* [onField](_src_multipart_index_.multipart.md#onfield) * [onFile](_src_multipart_index_.multipart.md#onfile) * [process](_src_multipart_index_.multipart.md#process) @@ -55,14 +35,14 @@ try { ### constructor -\+ **new Multipart**(`_request`: `IncomingMessage`, `_config`: object): *[Multipart](_src_multipart_index_.multipart.md)* +\+ **new Multipart**(`_request`: `RequestContract`, `_config`: undefined | object): *[Multipart](_src_multipart_index_.multipart.md)* **Parameters:** -Name | Type | ------- | ------ | -`_request` | `IncomingMessage` | -`_config` | object | +Name | Type | Default | +------ | ------ | ------ | +`_request` | `RequestContract` | - | +`_config` | undefined \| object | {} | **Returns:** *[Multipart](_src_multipart_index_.multipart.md)* @@ -78,49 +58,19 @@ boolean must be checked first ## Methods -### onField - -▸ **onField**(`name`: string, `handler`: `FieldHandler`): *this* - -Get notified on a given field or all fields. An exception inside -the callback will abort the request body parsing and raises -and exception. 
- -**`example`** -``` -multipart.onField('username', (key, value) => { -}) - -multipart.onField('*', (key, value) => { -}) -``` - -**Parameters:** - -Name | Type | ------- | ------ | -`name` | string | -`handler` | `FieldHandler` | - -**Returns:** *this* - -___ - ### onFile -▸ **onFile**(`name`: string, `handler`: `PartHandler`): *this* +▸ **onFile**(`name`: string, `options`: object, `handler`: `PartHandlerContract`): *this* Attach handler for a given file. To handle all files, you -can attach a wildcard handler. Also only can handler -can be defined, since processing a stream at multiple -locations is not possible. +can attach a wildcard handler. **`example`** -``` -multipart.onFile('package', async (stream) => { +```ts +multipart.onFile('package', {}, async (stream) => { }) -multipart.onFile('*', async (stream) => { +multipart.onFile('*', {}, async (stream) => { }) ``` @@ -129,7 +79,8 @@ multipart.onFile('*', async (stream) => { Name | Type | ------ | ------ | `name` | string | -`handler` | `PartHandler` | +`options` | object | +`handler` | `PartHandlerContract` | **Returns:** *this* @@ -137,9 +88,15 @@ ___ ### process -▸ **process**(): *`Promise`* +▸ **process**(`config?`: undefined | object): *`Promise`* Process the request by going all the file and field streams. 
+**Parameters:** + +Name | Type | +------ | ------ | +`config?` | undefined \| object | + **Returns:** *`Promise`* \ No newline at end of file diff --git a/docs/classes/_src_multipart_parthandler_.parthandler.md b/docs/classes/_src_multipart_parthandler_.parthandler.md new file mode 100644 index 0000000..7da3e6a --- /dev/null +++ b/docs/classes/_src_multipart_parthandler_.parthandler.md @@ -0,0 +1,110 @@ +> **[@adonisjs/bodyparser](../README.md)** + +[Globals](../globals.md) / ["src/Multipart/PartHandler"](../modules/_src_multipart_parthandler_.md) / [PartHandler](_src_multipart_parthandler_.parthandler.md) / + +# Class: PartHandler + +Part handler handles the progress of a stream and also internally validates +it's size and extension. + +This class offloads the task of validating a file stream, regardless of how +the stream is consumed. For example: + +In classic scanerio, we will process the file stream and write files to the +tmp directory and in more advanced cases, the end user can handle the +stream by themselves and report each chunk to this class. 
+ +## Hierarchy + +* **PartHandler** + +## Index + +### Constructors + +* [constructor](_src_multipart_parthandler_.parthandler.md#constructor) + +### Methods + +* [getFile](_src_multipart_parthandler_.parthandler.md#getfile) +* [reportError](_src_multipart_parthandler_.parthandler.md#reporterror) +* [reportProgress](_src_multipart_parthandler_.parthandler.md#reportprogress) +* [reportSuccess](_src_multipart_parthandler_.parthandler.md#reportsuccess) + +## Constructors + +### constructor + +\+ **new PartHandler**(`_part`: `MultipartStream`, `_options`: `Partial`): *[PartHandler](_src_multipart_parthandler_.parthandler.md)* + +**Parameters:** + +Name | Type | +------ | ------ | +`_part` | `MultipartStream` | +`_options` | `Partial` | + +**Returns:** *[PartHandler](_src_multipart_parthandler_.parthandler.md)* + +## Methods + +### getFile + +▸ **getFile**(): *[File](_src_multipart_file_.file.md) | null* + +Returns the file instance only when the progress of +the file has been reported atleast once. + +**Returns:** *[File](_src_multipart_file_.file.md) | null* + +___ + +### reportError + +▸ **reportError**(`error`: any): *void* + +Report errors encountered while processing the stream. These can be errors +apart from the one reported by this class. For example: The `s3` failure +due to some bad credentails. + +**Parameters:** + +Name | Type | +------ | ------ | +`error` | any | + +**Returns:** *void* + +___ + +### reportProgress + +▸ **reportProgress**(`line`: `Buffer`, `bufferLength`: number): *void* + +Handles the file upload progress by validating the file size and +extension. + +**Parameters:** + +Name | Type | +------ | ------ | +`line` | `Buffer` | +`bufferLength` | number | + +**Returns:** *void* + +___ + +### reportSuccess + +▸ **reportSuccess**(`data?`: object & object): *void* + +Report success data about the file. 
+ +**Parameters:** + +Name | Type | +------ | ------ | +`data?` | object & object | + +**Returns:** *void* \ No newline at end of file diff --git a/docs/globals.md b/docs/globals.md index d4fe128..ad7ecac 100644 --- a/docs/globals.md +++ b/docs/globals.md @@ -4,17 +4,20 @@ # @adonisjs/bodyparser -### Index +## Index -#### External modules +### External modules * ["adonis-typings/bodyparser"](modules/_adonis_typings_bodyparser_.md) * ["adonis-typings/index"](modules/_adonis_typings_index_.md) * ["config/index"](modules/_config_index_.md) * ["providers/BodyParserProvider"](modules/_providers_bodyparserprovider_.md) +* ["src/Bindings/Request"](modules/_src_bindings_request_.md) * ["src/BodyParser/index"](modules/_src_bodyparser_index_.md) * ["src/FormFields/index"](modules/_src_formfields_index_.md) * ["src/Multipart/File"](modules/_src_multipart_file_.md) +* ["src/Multipart/PartHandler"](modules/_src_multipart_parthandler_.md) * ["src/Multipart/index"](modules/_src_multipart_index_.md) -* ["src/Multipart/processMultipart"](modules/_src_multipart_processmultipart_.md) -* ["src/Multipart/streamFile"](modules/_src_multipart_streamfile_.md) \ No newline at end of file +* ["src/Multipart/streamFile"](modules/_src_multipart_streamfile_.md) +* ["src/utils"](modules/_src_utils_.md) +* ["test-helpers/index"](modules/_test_helpers_index_.md) \ No newline at end of file diff --git a/docs/modules/_config_index_.md b/docs/modules/_config_index_.md index f73a99f..61cba2c 100644 --- a/docs/modules/_config_index_.md +++ b/docs/modules/_config_index_.md @@ -4,9 +4,9 @@ # External module: "config/index" -### Index +## Index -#### Object literals +### Object literals * [config](_config_index_.md#const-config) diff --git a/docs/modules/_providers_bodyparserprovider_.md b/docs/modules/_providers_bodyparserprovider_.md index 582e389..b964535 100644 --- a/docs/modules/_providers_bodyparserprovider_.md +++ b/docs/modules/_providers_bodyparserprovider_.md @@ -4,8 +4,8 @@ # External module: 
"providers/BodyParserProvider" -### Index +## Index -#### Classes +### Classes * [BodyParserProvider](../classes/_providers_bodyparserprovider_.bodyparserprovider.md) \ No newline at end of file diff --git a/docs/modules/_src_bindings_request_.md b/docs/modules/_src_bindings_request_.md new file mode 100644 index 0000000..9f7f563 --- /dev/null +++ b/docs/modules/_src_bindings_request_.md @@ -0,0 +1,25 @@ +> **[@adonisjs/bodyparser](../README.md)** + +[Globals](../globals.md) / ["src/Bindings/Request"](_src_bindings_request_.md) / + +# External module: "src/Bindings/Request" + +## Index + +### Functions + +* [extendRequest](_src_bindings_request_.md#extendrequest) + +## Functions + +### extendRequest + +▸ **extendRequest**(`Request`: `RequestConstructorContract`): *void* + +**Parameters:** + +Name | Type | +------ | ------ | +`Request` | `RequestConstructorContract` | + +**Returns:** *void* \ No newline at end of file diff --git a/docs/modules/_src_bodyparser_index_.md b/docs/modules/_src_bodyparser_index_.md index b045e27..b31bae3 100644 --- a/docs/modules/_src_bodyparser_index_.md +++ b/docs/modules/_src_bodyparser_index_.md @@ -4,8 +4,8 @@ # External module: "src/BodyParser/index" -### Index +## Index -#### Classes +### Classes * [BodyParserMiddleware](../classes/_src_bodyparser_index_.bodyparsermiddleware.md) \ No newline at end of file diff --git a/docs/modules/_src_formfields_index_.md b/docs/modules/_src_formfields_index_.md index cf2b3bb..3ad76fe 100644 --- a/docs/modules/_src_formfields_index_.md +++ b/docs/modules/_src_formfields_index_.md @@ -4,8 +4,8 @@ # External module: "src/FormFields/index" -### Index +## Index -#### Classes +### Classes * [FormFields](../classes/_src_formfields_index_.formfields.md) \ No newline at end of file diff --git a/docs/modules/_src_multipart_file_.md b/docs/modules/_src_multipart_file_.md index e37f760..2f8acb6 100644 --- a/docs/modules/_src_multipart_file_.md +++ b/docs/modules/_src_multipart_file_.md @@ -4,8 +4,8 @@ # 
External module: "src/Multipart/File" -### Index +## Index -#### Classes +### Classes * [File](../classes/_src_multipart_file_.file.md) \ No newline at end of file diff --git a/docs/modules/_src_multipart_index_.md b/docs/modules/_src_multipart_index_.md index 1c1f701..c8e68c8 100644 --- a/docs/modules/_src_multipart_index_.md +++ b/docs/modules/_src_multipart_index_.md @@ -4,8 +4,8 @@ # External module: "src/Multipart/index" -### Index +## Index -#### Classes +### Classes * [Multipart](../classes/_src_multipart_index_.multipart.md) \ No newline at end of file diff --git a/docs/modules/_src_multipart_parthandler_.md b/docs/modules/_src_multipart_parthandler_.md new file mode 100644 index 0000000..569507b --- /dev/null +++ b/docs/modules/_src_multipart_parthandler_.md @@ -0,0 +1,11 @@ +> **[@adonisjs/bodyparser](../README.md)** + +[Globals](../globals.md) / ["src/Multipart/PartHandler"](_src_multipart_parthandler_.md) / + +# External module: "src/Multipart/PartHandler" + +## Index + +### Classes + +* [PartHandler](../classes/_src_multipart_parthandler_.parthandler.md) \ No newline at end of file diff --git a/docs/modules/_src_multipart_processmultipart_.md b/docs/modules/_src_multipart_processmultipart_.md deleted file mode 100644 index ad39e8f..0000000 --- a/docs/modules/_src_multipart_processmultipart_.md +++ /dev/null @@ -1,29 +0,0 @@ -> **[@adonisjs/bodyparser](../README.md)** - -[Globals](../globals.md) / ["src/Multipart/processMultipart"](_src_multipart_processmultipart_.md) / - -# External module: "src/Multipart/processMultipart" - -### Index - -#### Functions - -* [processMultipart](_src_multipart_processmultipart_.md#processmultipart) - -## Functions - -### processMultipart - -▸ **processMultipart**(`multipart`: [Multipart](../classes/_src_multipart_index_.multipart.md), `config`: `BodyParserMultipartConfig`): *`Promise`* - -Processes the incoming multipart stream by moving files to the -tmp directory and return `files` and `fields` data map. 
- -**Parameters:** - -Name | Type | ------- | ------ | -`multipart` | [Multipart](../classes/_src_multipart_index_.multipart.md) | -`config` | `BodyParserMultipartConfig` | - -**Returns:** *`Promise`* \ No newline at end of file diff --git a/docs/modules/_src_multipart_streamfile_.md b/docs/modules/_src_multipart_streamfile_.md index d888a32..0e47252 100644 --- a/docs/modules/_src_multipart_streamfile_.md +++ b/docs/modules/_src_multipart_streamfile_.md @@ -4,9 +4,9 @@ # External module: "src/Multipart/streamFile" -### Index +## Index -#### Functions +### Functions * [streamFile](_src_multipart_streamfile_.md#streamfile) diff --git a/docs/modules/_src_utils_.md b/docs/modules/_src_utils_.md new file mode 100644 index 0000000..aa16655 --- /dev/null +++ b/docs/modules/_src_utils_.md @@ -0,0 +1,72 @@ +> **[@adonisjs/bodyparser](../README.md)** + +[Globals](../globals.md) / ["src/utils"](_src_utils_.md) / + +# External module: "src/utils" + +## Index + +### Functions + +* [getFileType](_src_utils_.md#getfiletype) +* [validateExtension](_src_utils_.md#validateextension) +* [validateSize](_src_utils_.md#validatesize) + +## Functions + +### getFileType + +▸ **getFileType**(`fileContents`: `Buffer`, `clientName`: string, `headers`: object, `force`: boolean): *null | object* + +Returns the file `type`, `subtype` and `extension`. + +**Parameters:** + +Name | Type | Default | +------ | ------ | ------ | +`fileContents` | `Buffer` | - | +`clientName` | string | - | +`headers` | object | - | +`force` | boolean | false | + +**Returns:** *null | object* + +___ + +### validateExtension + +▸ **validateExtension**(`fieldName`: string, `clientName`: string, `extname`: string, `allowedExtensions?`: string[]): *`FileUploadError` | null* + +Returns an error when file extension isn't one of the allowed file +extensions. 
+ +**Parameters:** + +Name | Type | +------ | ------ | +`fieldName` | string | +`clientName` | string | +`extname` | string | +`allowedExtensions?` | string[] | + +**Returns:** *`FileUploadError` | null* + +___ + +### validateSize + +▸ **validateSize**(`fieldName`: string, `clientName`: string, `actualBytes`: number, `expectedBytes?`: string | number): *`FileUploadError` | null* + +Returns an error when file size is over the expected +bytes. + +**Parameters:** + +Name | Type | +------ | ------ | +`fieldName` | string | +`clientName` | string | +`actualBytes` | number | +`expectedBytes?` | string \| number | + +**Returns:** *`FileUploadError` | null* \ No newline at end of file diff --git a/docs/modules/_test_helpers_index_.md b/docs/modules/_test_helpers_index_.md new file mode 100644 index 0000000..a5f11d2 --- /dev/null +++ b/docs/modules/_test_helpers_index_.md @@ -0,0 +1,70 @@ +> **[@adonisjs/bodyparser](../README.md)** + +[Globals](../globals.md) / ["test-helpers/index"](_test_helpers_index_.md) / + +# External module: "test-helpers/index" + +## Index + +### Variables + +* [packageFilePath](_test_helpers_index_.md#const-packagefilepath) +* [packageFileSize](_test_helpers_index_.md#const-packagefilesize) + +### Functions + +* [sleep](_test_helpers_index_.md#const-sleep) + +### Object literals + +* [requestConfig](_test_helpers_index_.md#const-requestconfig) + +## Variables + +### `Const` packageFilePath + +• **packageFilePath**: *string* = join(__dirname, '../package.json') + +___ + +### `Const` packageFileSize + +• **packageFileSize**: *number* = Buffer.from(contents, 'utf-8').length + 1 + +## Functions + +### `Const` sleep + +▸ **sleep**(`time`: number): *`Promise`* + +**Parameters:** + +Name | Type | +------ | ------ | +`time` | number | + +**Returns:** *`Promise`* + +## Object literals + +### `Const` requestConfig + +### ▪ **requestConfig**: *object* + +### allowMethodSpoofing + +• **allowMethodSpoofing**: *boolean* = false + +### generateRequestId + +• 
**generateRequestId**: *boolean* = false + +### subdomainOffset + +• **subdomainOffset**: *number* = 2 + +### trustProxy + +▸ **trustProxy**(): *boolean* + +**Returns:** *boolean* \ No newline at end of file diff --git a/japaFile.js b/japaFile.js index 46b457c..6dc1d51 100644 --- a/japaFile.js +++ b/japaFile.js @@ -1,3 +1,4 @@ +process.env.TS_NODE_FILES = 'true' require('ts-node/register') const { configure } = require('japa') diff --git a/package.json b/package.json index ee28b55..a882d74 100644 --- a/package.json +++ b/package.json @@ -22,19 +22,25 @@ "version": "npm run build" }, "devDependencies": { - "@adonisjs/core": "^2.0.6", + "@adonisjs/core": "^2.0.10", "@adonisjs/dev-utils": "^1.4.0", + "@adonisjs/fold": "^4.0.9", "@adonisjs/mrm-preset": "^2.0.3", - "@poppinss/logger": "^1.1.1", + "@poppinss/http-server": "^1.2.0", + "@poppinss/logger": "^1.1.2", + "@poppinss/request": "^1.0.13", + "@types/fs-extra": "^8.0.0", "@types/node": "^12.6.8", + "@types/supertest": "^2.0.8", "commitizen": "^4.0.3", "cz-conventional-changelog": "^3.0.2", "del-cli": "^2.0.0", "doctoc": "^1.4.0", - "husky": "^3.0.1", + "husky": "^3.0.2", "japa": "^2.0.10", "mrm": "^1.2.2", "np": "^5.0.3", + "supertest": "^4.0.2", "ts-node": "^8.3.0", "tslint": "^5.18.0", "tslint-eslint-rules": "^5.4.0", @@ -71,8 +77,6 @@ "anyBranch": false }, "dependencies": { - "@poppinss/http-server": "^1.1.8", - "@poppinss/request": "^1.0.10", "@poppinss/utils": "^1.0.4", "bytes": "^3.1.0", "co-body": "git+https://github.com/thetutlage/co-body.git", diff --git a/providers/BodyParserProvider.ts b/providers/BodyParserProvider.ts index 55bffb8..0a80ff7 100644 --- a/providers/BodyParserProvider.ts +++ b/providers/BodyParserProvider.ts @@ -7,10 +7,8 @@ * file that was distributed with this source code. 
*/ -/// - import { BodyParserMiddleware } from '../src/BodyParser/index' -import { FileValidationOptions, MultipartFileContract } from '@ioc:Adonis/Addons/BodyParser' +import extendRequest from '../src/Bindings/Request' export default class BodyParserProvider { constructor (protected $container: any) { @@ -21,8 +19,8 @@ export default class BodyParserProvider { */ public register () { this.$container.bind('Adonis/Addons/BodyParserMiddleware', () => { - const config = this.$container.use('Adonis/Core/Config') - return new BodyParserMiddleware(config) + const Config = this.$container.use('Adonis/Core/Config') + return new BodyParserMiddleware(Config.get('bodyparser', {})) }) } @@ -30,26 +28,6 @@ export default class BodyParserProvider { * Adding the `file` macro to add support for reading request files. */ public boot () { - const Request = this.$container.use('Adonis/Core/Config') - - /** - * Adding `file` macro to the request class. - */ - Request.macro('file', function getFile (key: string, options?: Partial) { - const file = this._files[key] - if (!file) { - return null - } - - if (options) { - if (file instanceof Array) { - (file as MultipartFileContract[]).forEach((one) => one.setValidationOptions(options)) - } else { - (file as MultipartFileContract).setValidationOptions(options) - } - } - - return file - }) + extendRequest(this.$container.use('Adonis/Core/Request')) } } diff --git a/src/Bindings/Request.ts b/src/Bindings/Request.ts new file mode 100644 index 0000000..198e007 --- /dev/null +++ b/src/Bindings/Request.ts @@ -0,0 +1,84 @@ +/* + * @adonisjs/bodyparser + * + * (c) Harminder Virk + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+*/ + +/// + +import { get } from 'lodash' +import { RequestConstructorContract } from '@ioc:Adonis/Core/Request' +import { FileValidationOptions, MultipartFileContract } from '@ioc:Adonis/Addons/BodyParser' +import { validateExtension, validateSize } from '../utils' + +/** + * Validating a given file. + */ +function validateFile (file: MultipartFileContract, options?: Partial) { + if (file.validated) { + return + } + + file.validated = true + + if (!options) { + return + } + + const sizeError = validateSize(file.fieldName, file.clientName, file.size, options.size) + if (sizeError) { + file.errors.push(sizeError) + } + + const extError = validateExtension(file.fieldName, file.clientName, file.extname, options.extnames) + if (extError) { + file.errors.push(extError) + } +} + +/** + * Validates and returns a file for a given key + */ +function getFile ( + files: { [key: string]: MultipartFileContract | MultipartFileContract[] }, + key: string, + getOne: boolean, + options?: Partial, +) { + const file = get(files, key) + + /** + * Return null when there is no file + */ + if (!file) { + return null + } + + if (Array.isArray(file) && getOne) { + validateFile(file[0], options) + return file[0] + } else if (Array.isArray(file)) { + file.forEach((one) => validateFile(one, options)) + return file + } else { + validateFile(file, options) + return file + } +} + +export default function extendRequest (Request: RequestConstructorContract) { + Request.macro('file', function file (key: string, options?: Partial) { + return getFile(this._files, key, true, options) + }) + + Request.macro('files', function files (key: string, options?: Partial) { + const files = getFile(this._files, key, false, options) + if (!files) { + return [] + } + return Array.isArray(files) ? 
files : [files] + }) +} diff --git a/src/BodyParser/index.ts b/src/BodyParser/index.ts index 23309c7..40e658e 100644 --- a/src/BodyParser/index.ts +++ b/src/BodyParser/index.ts @@ -9,27 +9,30 @@ /// +import { join } from 'path' +import { tmpdir } from 'os' import * as coBody from 'co-body' import { Exception } from '@poppinss/utils' -import { RequestContract } from '@poppinss/request' -import { HttpContextContract } from '@poppinss/http-server' -import { BodyParserConfig } from '@ioc:Adonis/Addons/BodyParser' + +import { BodyParserConfigContract } from '@ioc:Adonis/Addons/BodyParser' +import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext' +import { RequestContract } from '@ioc:Adonis/Core/Request' import { Multipart } from '../Multipart' -import { processMultipart } from '../Multipart/processMultipart' +import { streamFile } from '../Multipart/streamFile' /** * BodyParser middleware parses the incoming request body and set it as * request body to be read later in the request lifecycle. 
*/ export class BodyParserMiddleware { - constructor (private _config: BodyParserConfig) { + constructor (private _config: BodyParserConfigContract) { } /** * Returns config for a given type */ - private _getConfigFor (type: K): BodyParserConfig[K] { + private _getConfigFor (type: K): BodyParserConfigContract[K] { const config = this._config[type] config['returnRawBody'] = true return config @@ -75,10 +78,15 @@ export class BodyParserMiddleware { { request, route }: HttpContextContract, next: () => Promise, ): Promise { + /** + * Initiating the `_files` private property as an object + */ + request['_files'] = {} + /** * Only process for whitelisted nodes */ - if (this._config.whitelistedMethods.indexOf(request.method()) === -1) { + if (!this._config.whitelistedMethods.includes(request.method())) { return next() } @@ -99,8 +107,9 @@ export class BodyParserMiddleware { const multipartConfig = this._getConfigFor('multipart') if (this._isType(request, multipartConfig.types)) { - request['multipart'] = new Multipart(request.request, { + request.multipart = new Multipart(request, { maxFields: multipartConfig.maxFields, + limit: multipartConfig.limit, }) /** @@ -111,9 +120,18 @@ export class BodyParserMiddleware { return next() } - const { files, fields } = await processMultipart(request['multipart'], multipartConfig) - request.setInitialBody(fields) - request['_files'] = files + /** + * Make sure we are not running any validations on the uploaded files. They are + * deferred for the end user when they will access file using `request.file` + * method. 
+ */ + request.multipart.onFile('*', { deferValidations: true }, async (part, reporter) => { + const tmpPath = join(tmpdir(), multipartConfig.tmpFileName()) + await streamFile(part, tmpPath, reporter) + return { tmpPath } + }) + + await request.multipart.process() return next() } diff --git a/src/Multipart/File.ts b/src/Multipart/File.ts index bd61ab9..5ca9e76 100644 --- a/src/Multipart/File.ts +++ b/src/Multipart/File.ts @@ -9,20 +9,15 @@ /// -import { extname } from 'path' -import * as bytes from 'bytes' -import * as mediaTyper from 'media-typer' - import { MultipartFileContract, - FileValidationOptions, FileUploadError, FileInputNode, } from '@ioc:Adonis/Addons/BodyParser' /** - * File class exposes a friendly API to validate or save uploaded - * files. + * The file holds the meta/data for an uploaded file, along with + * an errors occurred during the upload process. */ export class File implements MultipartFileContract { /** @@ -33,7 +28,7 @@ export class File implements MultipartFileContract { /** * Client name is the file name on the user client */ - public clientName = this._data.fileName + public clientName = this._data.clientName /** * File size in bytes @@ -41,18 +36,9 @@ export class File implements MultipartFileContract { public size = this._data.bytes /** - * Path to the tmp folder - */ - public tmpPath = this._data.tmpPath - - /** - * The extname for the file. We pull the file extension from the file - * name when `fileType` is undefined. Check [[processMultipart]] - * method to known how fileType value is computed. + * The extname for the file. */ - public extname = this._data.fileType - ? 
this._data.fileType.ext - : extname(this.clientName).replace(/^\./, '') + public extname = this._data.fileType.ext /** * Upload errors @@ -61,81 +47,33 @@ export class File implements MultipartFileContract { /** * Type and subtype are extracted from the `content-type` - * header + * header or from the file magic number */ - public type: string - public subtype: string + public type: string = this._data.fileType.type + public subtype: string = this._data.fileType.subtype /** * Filename is only set after the move operation */ - public fileName: string + public filePath?: string = this._data.filePath /** - * Validation options for the file + * Tmp path, only exists when file is uploaded using the + * classic mode. */ - private _validationOptions: Partial = {} - - constructor (private _data: FileInputNode) { - this._parseContentType() - } + public tmpPath?: string = this._data.tmpPath /** - * Parses the content type header to extract the type - * and subtype + * The file meta data */ - private _parseContentType () { - try { - const parsed = mediaTyper.parse( - this._data.fileType ? this._data.fileType.mime : this._data.headers['content-type'], - ) - this.type = parsed.type - this.subtype = parsed.subtype - } catch (error) { - } - } + public meta: string = this._data.meta /** - * Validates the file size and updates the errors array + * Whether or not this file has been validated */ - private _validateSize () { - let expectedBytes = this._validationOptions.size - if (expectedBytes === undefined) { - return - } - - expectedBytes = typeof (expectedBytes) === 'string' ? bytes(expectedBytes) : expectedBytes + public validated: boolean = false - if (this.size > expectedBytes!) 
{ - this.errors.push({ - fieldName: this.fieldName, - clientName: this.clientName, - message: `File size should be less than ${bytes(this.size)}`, - type: 'size', - }) - } - } - - /** - * Validates file extension and updates the errors array - */ - private _validateExtName () { - const extnames = this._validationOptions.extnames - if (extnames === undefined || !Array.isArray(extnames) || extnames.length === 0) { - return - } - - if (extnames.indexOf(this.extname) === -1) { - const verb = extnames.length === 1 ? 'is' : 'are' - const message = `Invalid file extension ${this.extname}. Only ${extnames.join(', ')} ${verb} allowed` - - this.errors.push({ - fieldName: this.fieldName, - clientName: this.clientName, - message: message, - type: 'extname', - }) - } + constructor (private _data: FileInputNode) { } /** @@ -150,28 +88,6 @@ export class File implements MultipartFileContract { * Current status of the file */ public get status (): 'pending' | 'moved' | 'error' { - return this.errors.length ? 'error' : (this.fileName ? 'moved' : 'pending') - } - - /** - * Set validation options to be used for - * validating the file - */ - public setValidationOptions (options: Partial): this { - this._validationOptions = options - - /** - * Reset errors, maybe errors are not relevant after the - * new options - */ - this.errors = [] - - /** - * Re-run validations - */ - this._validateSize() - this._validateExtName() - - return this + return this.errors.length ? 'error' : (this.filePath ? 'moved' : 'pending') } } diff --git a/src/Multipart/PartHandler.ts b/src/Multipart/PartHandler.ts new file mode 100644 index 0000000..c7c6743 --- /dev/null +++ b/src/Multipart/PartHandler.ts @@ -0,0 +1,213 @@ +/* +* @adonisjs/bodyparser +* +* (c) Harminder Virk +* +* For the full copyright and license information, please view the LICENSE +* file that was distributed with this source code. 
+*/
+
+/// 
+
+import { Exception } from '@poppinss/utils'
+import { validateExtension, validateSize, getFileType } from '../utils'
+
+import {
+  MultipartStream,
+  FileValidationOptions,
+  FileUploadError,
+} from '@ioc:Adonis/Addons/BodyParser'
+
+import { File } from './File'
+
+/**
+ * Part handler handles the progress of a stream and also internally validates
+ * its size and extension.
+ *
+ * This class offloads the task of validating a file stream, regardless of how
+ * the stream is consumed. For example:
+ *
+ * In the classic scenario, we will process the file stream and write files to the
+ * tmp directory and in more advanced cases, the end user can handle the
+ * stream by themselves and report each chunk to this class.
+ */
+export class PartHandler {
+  private _buff: Buffer
+  private _bufferLength: number = 0
+  private _fileType: ReturnType<typeof getFileType>
+
+  private _fieldName = this._part.name
+  private _clientName = this._part.filename
+  private _headers = this._part.headers
+
+  /**
+   * Collected errors
+   */
+  private _errors: FileUploadError[] = []
+
+  /**
+   * The data that we want to forward to the file after successfully
+   * handling its upload
+   */
+  private _postProcessFileData: any = {}
+
+  constructor (
+    private _part: MultipartStream,
+    private _options: Partial<FileValidationOptions>,
+  ) {}
+
+  /**
+   * Validates the file size when validations are not deferred.
+   */
+  private _validateSize () {
+    if (this._options.deferValidations) {
+      return
+    }
+
+    const error = validateSize(
+      this._fieldName,
+      this._clientName,
+      this._bufferLength,
+      this._options.size,
+    )
+
+    if (error) {
+      this._errors.push(error)
+    }
+  }
+
+  /**
+   * Validates the file extension when validation is not
+   * deferred and file type has been detected.
+   */
+  private _validateExtension () {
+    if (this._options.deferValidations || !this._fileType) {
+      return
+    }
+
+    const error = validateExtension(
+      this._fieldName,
+      this._clientName,
+      this._fileType.ext,
+      this._options.extnames,
+    )
+
+    if (error) {
+      this._errors.push(error)
+    }
+  }
+
+  /**
+   * Returns the file instance only when the progress of
+   * the file has been reported at least once.
+   */
+  public getFile (): File | null {
+    if (!this._buff) {
+      return null
+    }
+
+    /**
+     * If we failed to pull the file type earlier, then let's make
+     * another attempt.
+     */
+    if (!this._fileType) {
+      this._fileType = getFileType(this._buff, this._clientName, this._headers, true)
+      this._validateExtension()
+    }
+
+    const { filePath, tmpPath, ...meta } = this._postProcessFileData
+
+    /**
+     * Create a new file instance
+     */
+    const file = new File({
+      clientName: this._part.filename,
+      fieldName: this._part.name,
+      bytes: this._bufferLength,
+      headers: this._part.headers,
+      fileType: this._fileType!,
+      filePath: filePath,
+      tmpPath: tmpPath,
+      meta: meta,
+    })
+
+    /**
+     * Set file errors, if we have encountered any
+     */
+    if (this._errors.length) {
+      file.errors = this._errors
+    }
+
+    /**
+     * Mark file as being already validated, when the validations have
+     * not been deferred
+     */
+    if (!this._options.deferValidations) {
+      file.validated = true
+    }
+
+    return file
+  }
+
+  /**
+   * Handles the file upload progress by validating the file size and
+   * extension.
+   */
+  public reportProgress (line: Buffer, bufferLength: number) {
+    this._buff = this._buff ? Buffer.concat([this._buff, line]) : line
+    this._bufferLength = this._bufferLength + bufferLength
+
+    /**
+     * Attempt to validate the file size with every chunk of line
+     */
+    this._validateSize()
+
+    /**
+     * Attempt to find the file type unless we are able to figure it out
+     */
+    if (!this._fileType) {
+      this._fileType = getFileType(this._buff, this._clientName, this._headers)
+      this._validateExtension()
+    }
+
+    /**
+     * We need to emit the error, to short-circuit the writable stream. There will be
+     * more than one error only when `deferValidations=false` and both the size and
+     * ext checks failed.
+     */
+    if (this._errors.length) {
+      this._part.emit(
+        'error',
+        new Exception('stream validation failed', 413, 'E_STREAM_VALIDATION_FAILURE'),
+      )
+    }
+  }
+
+  /**
+   * Report errors encountered while processing the stream. These can be errors
+   * apart from the one reported by this class. For example: The `s3` failure
+   * due to some bad credentials.
+   */
+  public reportError (error: any) {
+    /**
+     * Ignore self errors
+     */
+    if (error.code && error.code === 'E_STREAM_VALIDATION_FAILURE') {
+      return
+    }
+
+    this._errors.push({
+      fieldName: this._fieldName,
+      clientName: this._clientName,
+      type: 'fatal',
+      message: error.message,
+    })
+  }
+
+  /**
+   * Report success data about the file.
+ */ + public reportSuccess (data?: { filePath?: string, tmpPath?: string } & { [key: string]: any }) { + this._postProcessFileData = data || {} + } +} diff --git a/src/Multipart/index.ts b/src/Multipart/index.ts index 77ea869..988ee5a 100644 --- a/src/Multipart/index.ts +++ b/src/Multipart/index.ts @@ -9,49 +9,46 @@ /// -import { IncomingMessage } from 'http' +import * as bytes from 'bytes' import * as multiparty from 'multiparty' import { Exception } from '@poppinss/utils' +import { RequestContract } from '@ioc:Adonis/Core/Request' import { MultipartContract, - PartHandler, + PartHandlerContract, MultipartStream, - FieldHandler, } from '@ioc:Adonis/Addons/BodyParser' +import { FormFields } from '../FormFields' +import { PartHandler } from './PartHandler' + /** * Multipart class offers a low level API to interact the incoming * HTTP request data as a stream. This makes it super easy to * write files to s3 without saving them to the disk first. - * - * ### Usage - * - * ```js - * const multipart = new Multipart(options) - * - * multipart.onFile('profile', async (stream) => { - * stream.pipe(fs.createWriteStream('./profile.jpg')) - * }) - * - * multipart.onField('*', async (key, value) => { - * }) - * - * try { - * await multipart.process() - * } catch (error) { - * // all errors are sent to the process method - * } - * ``` */ export class Multipart implements MultipartContract { + /** + * The registered handlers to handle the file uploads + */ private _handlers: { - files: { [key: string]: PartHandler }, - fields: { [key: string]: FieldHandler }, - } = { - files: {}, - fields: {}, - } + [key: string]: { + handler: PartHandlerContract, + options: Parameters[1], + }, + } = {} + + /** + * Collected fields from the multipart stream + */ + private _fields = new FormFields() + + /** + * Collected files from the multipart stream. Files are only collected + * when there is an attached listener for a given file. 
+ */ + private _files = new FormFields() /** * We track the finishing of `this.onFile` async handlers @@ -61,11 +58,21 @@ export class Multipart implements MultipartContract { private _pendingHandlers = 0 /** - * A boolean to know, if there are any handlers defined - * to the read the request body. Otherwise avoid reading - * the body + * Tracking the total size of files, so that we can enforce + * upper limit. + */ + private _filesTotalSize = 0 + + /** + * The reference to underlying multiparty form */ - private _gotHandlers = false + private _form + + /** + * Total size limit of the multipart stream. If it goes beyond + * this limit, then an exception will be raised. + */ + private _upperLimit?: number /** * Consumed is set to true when `process` is called. Calling @@ -74,15 +81,17 @@ export class Multipart implements MultipartContract { */ public consumed = false - constructor (private _request: IncomingMessage, private _config: { maxFields: number }) { - } + constructor ( + private _request: RequestContract, + private _config: Parameters[0] = {}, + ) {} /** * Returns a boolean telling whether all streams have been * consumed along with all handlers execution */ - private _isClosed (form: any): boolean { - return form.flushing <= 0 && this._pendingHandlers <= 0 + private _isClosed (): boolean { + return this._form.flushing <= 0 && this._pendingHandlers <= 0 } /** @@ -113,67 +122,120 @@ export class Multipart implements MultipartContract { /** * Skip, if their is no handler to consume the part. 
*/ - const handler = this._handlers.files[name] || this._handlers.files['*'] + const handler = this._handlers[name] || this._handlers['*'] if (!handler) { part.resume() return } this._pendingHandlers++ - await handler(part) + + const partHandler = new PartHandler(part, handler.options) + + try { + const response = await handler.handler(part, (line) => { + const lineLength = line.length + + /** + * If there is an upper limit for all the files, then we need to track + * the total processed bytes and shortcircuit in case of too much + * data + */ + if (this._upperLimit) { + this._filesTotalSize += lineLength + if (this._filesTotalSize > this._upperLimit) { + const error = new Exception('request entity too large', 413, 'E_REQUEST_ENTITY_TOO_LARGE') + + /** + * Shortcircuit current part + */ + part.emit('error', error) + + /** + * Shortcircuit the entire stream + */ + this._form.emit('error', error) + } + } + + partHandler.reportProgress(line, lineLength) + }) + + /** + * Reporting success to the partHandler, which ends up on the + * file instance + */ + if (response) { + partHandler.reportSuccess(response) + } + } catch (error) { + partHandler.reportError(error) + } + + /** + * Pull the file from the `partHandler`. The file can also be `null` when + * the part consumer doesn't report progress + */ + const file = partHandler.getFile() + if (file) { + this._files.add(file.fieldName, file) + } + this._pendingHandlers-- } /** - * Passes field key value pair to the pre-defined handler + * Record the fields inside multipart contract */ private _handleField (key: string, value: string) { - const handler = this._handlers.fields[key] || this._handlers.fields['*'] - if (!handler) { - return + if (key) { + this._fields.add(key, value) } + } + + /** + * Processes the user config and computes the `upperLimit` value from + * it. 
+ */ + private _processConfig (config?: Parameters[0]) { + this._config = Object.assign(this._config, config) - handler(key, value) + /** + * Getting bytes from the `config.limit` option, which can + * also be a string + */ + this._upperLimit = typeof (this._config!.limit) === 'string' + ? bytes(this._config!.limit) + : this._config!.limit } /** - * Attach handler for a given file. To handle all files, you - * can attach a wildcard handler. Also only can handler - * can be defined, since processing a stream at multiple - * locations is not possible. - * - * @example - * ``` - * multipart.onFile('package', async (stream) => { - * }) - * - * multipart.onFile('*', async (stream) => { - * }) - * ``` + * Set files and fields on the request class */ - public onFile (name: string, handler: PartHandler): this { - this._gotHandlers = true - this._handlers.files[name] = handler - return this + private _close () { + this._request['_files'] = this._files.get() + this._request.setInitialBody(this._fields.get()) } /** - * Get notified on a given field or all fields. An exception inside - * the callback will abort the request body parsing and raises - * and exception. + * Attach handler for a given file. To handle all files, you + * can attach a wildcard handler. * * @example - * ``` - * multipart.onField('username', (key, value) => { + * ```ts + * multipart.onFile('package', {}, async (stream) => { * }) * - * multipart.onField('*', (key, value) => { + * multipart.onFile('*', {}, async (stream) => { * }) * ``` */ - public onField (name: string, handler: FieldHandler): this { - this._gotHandlers = true - this._handlers.fields[name] = handler + public onFile ( + name: string, + options: Parameters[1], + handler: PartHandlerContract, + ): this { + this._handlers[name] = { handler, options } return this } @@ -181,10 +243,14 @@ export class Multipart implements MultipartContract { * Process the request by going all the file and field * streams. 
*/ - public process (): Promise { + public process (config?: Parameters[0]): Promise { return new Promise((resolve, reject) => { if (this.consumed) { - reject(new Exception('multipart stream has already been consumed', 500, 'E_RUNTIME_EXCEPTION')) + reject(new Exception( + 'multipart stream has already been consumed', + 500, + 'E_RUNTIME_EXCEPTION', + )) return } @@ -193,23 +259,15 @@ export class Multipart implements MultipartContract { * to the `process` method */ this.consumed = true + this._processConfig(config) - /** - * Do not get into the process of parsing the request, when - * no one is listening for the fields or files. - */ - if (!this._gotHandlers) { - resolve() - return - } - - const form = new multiparty.Form(this._config) + this._form = new multiparty.Form({ maxFields: this._config!.maxFields }) /** * Raise error when form encounters an * error */ - form.on('error', (error: Error) => { + this._form.on('error', (error: Error) => { if (error.message === 'maxFields 1 exceeded.') { reject(new Exception('Max fields limit exceeded', 413, 'E_REQUEST_ENTITY_TOO_LARGE')) } else { @@ -222,31 +280,28 @@ export class Multipart implements MultipartContract { * promise when all parts are consumed and processed * by their handlers */ - form.on('part', async (part: MultipartStream) => { - try { - await this._handlePart(part) - - /** - * When a stream finishes before the handler, the close `event` - * will not resolve the current Promise. So in that case, we - * check and resolve from here - */ - if (this._isClosed(form)) { - resolve() - } - } catch (error) { - form.emit('error', error) + this._form.on('part', async (part: MultipartStream) => { + await this._handlePart(part) + + /** + * When a stream finishes before the handler, the close `event` + * will not resolve the current Promise. 
So in that case, we + * check and resolve from here + */ + if (this._isClosed()) { + this._close() + resolve() } }) /** * Listen for fields */ - form.on('field', (key: string, value: any) => { + this._form.on('field', (key: string, value: any) => { try { this._handleField(key, value) } catch (error) { - form.emit('error', error) + this._form.emit('error', error) } }) @@ -254,13 +309,14 @@ export class Multipart implements MultipartContract { * Resolve promise on close, when all internal * file handlers are done processing files */ - form.on('close', () => { - if (this._isClosed(form)) { + this._form.on('close', () => { + if (this._isClosed()) { + this._close() resolve() } }) - form.parse(this._request) + this._form.parse(this._request.request) }) } } diff --git a/src/Multipart/processMultipart.ts b/src/Multipart/processMultipart.ts deleted file mode 100644 index 965fc7d..0000000 --- a/src/Multipart/processMultipart.ts +++ /dev/null @@ -1,93 +0,0 @@ -/* -* @adonisjs/bodyparser -* -* (c) Harminder Virk -* -* For the full copyright and license information, please view the LICENSE -* file that was distributed with this source code. -*/ - -/// - -import { join } from 'path' -import { homedir } from 'os' -import * as fileType from 'file-type' -import { Exception } from '@poppinss/utils' - -import { File } from './File' -import { Multipart } from './index' -import { streamFile } from './streamFile' -import { FormFields } from '../FormFields' -import { BodyParserMultipartConfig } from '@ioc:Adonis/Addons/BodyParser' - -/** - * Processes the incoming multipart stream by moving files to the - * tmp directory and return `files` and `fields` data map. 
- */ -export async function processMultipart (multipart: Multipart, config: BodyParserMultipartConfig) { - let totalBytes = 0 - const fields = new FormFields() - const files = new FormFields() - - /** - * Reading all fields data - */ - multipart.onField('*', (key, value) => { - if (key) { - fields.add(key, value) - } - }) - - /** - * Reading all files data - */ - multipart.onFile('*', async (part) => { - const tmpPath = join(homedir(), config.tmpFileName()) - let buff = Buffer.from('') - - /** - * Stream the file to tmpPath, but also keep an - * eye on total bytes - */ - await streamFile(part, tmpPath, (line) => { - buff = Buffer.concat([buff, line]) - totalBytes += buff.length - - /** - * Ensure request data isn't getting over the defined limit. Otherwise, - * we need to raise an exception - */ - if (totalBytes > config.limit) { - part.emit( - 'error', - new Exception('request entity too large', 413, 'E_REQUEST_ENTITY_TOO_LARGE'), - ) - } - }) - - /** - * Creating [[File]] instance for interacting with the - * files at later stage - */ - const file = new File({ - fileName: part.filename, - fieldName: part.name, - tmpPath: tmpPath, - bytes: buff.length, - headers: part.headers, - fileType: fileType(buff), - }) - - files.add(file.fieldName, file) - }) - - /** - * Start reading data from the stream - */ - await multipart.process() - - return { - fields: fields.get(), - files: files.get(), - } -} diff --git a/src/Multipart/streamFile.ts b/src/Multipart/streamFile.ts index b7b5a3b..7ae19d5 100644 --- a/src/Multipart/streamFile.ts +++ b/src/Multipart/streamFile.ts @@ -7,9 +7,9 @@ * file that was distributed with this source code. 
*/ +import { Readable } from 'stream' import * as eos from 'end-of-stream' import { open, close, createWriteStream, unlink } from 'fs-extra' -import { Readable } from 'stream' /** * Writes readable stream to the given location by properly cleaning up readable diff --git a/src/utils.ts b/src/utils.ts new file mode 100644 index 0000000..fe21578 --- /dev/null +++ b/src/utils.ts @@ -0,0 +1,116 @@ +/* +* @adonisjs/bodyparser +* +* (c) Harminder Virk +* +* For the full copyright and license information, please view the LICENSE +* file that was distributed with this source code. +*/ + +/// + +import * as bytes from 'bytes' +import { extname } from 'path' +import * as fileType from 'file-type' +import * as mediaTyper from 'media-typer' +import { FileUploadError } from '@ioc:Adonis/Addons/BodyParser' + +/** + * Returns an error when file size is over the expected + * bytes. + */ +export function validateSize ( + fieldName: string, + clientName: string, + actualBytes: number, + expectedBytes?: string | number, +): FileUploadError | null { + if (expectedBytes === undefined) { + return null + } + + expectedBytes = typeof (expectedBytes) === 'string' + ? bytes(expectedBytes) + : expectedBytes + + if (actualBytes > expectedBytes!) { + return { + fieldName, + clientName, + message: `File size should be less than ${bytes(expectedBytes)}`, + type: 'size', + } + } + + return null +} + +/** + * Returns an error when file extension isn't one of the allowed file + * extensions. + */ +export function validateExtension ( + fieldName: string, + clientName: string, + extname: string, + allowedExtensions?: string[], +): FileUploadError | null { + if (!Array.isArray(allowedExtensions) || allowedExtensions.length === 0) { + return null + } + + if (allowedExtensions.includes(extname)) { + return null + } + + const suffix = allowedExtensions.length === 1 ? 
'is' : 'are' + const message = [ + `Invalid file extension ${extname}.`, + `Only ${allowedExtensions.join(', ')} ${suffix} allowed`, + ].join(' ') + + return { + fieldName, + clientName, + message: message, + type: 'extname', + } +} + +/** + * Returns the file `type`, `subtype` and `extension`. + */ +export function getFileType ( + fileContents: Buffer, + clientName: string, + headers: { [key: string]: string }, + force: boolean = false, +): null | { ext: string, subtype: string, type: string } { + /** + * Attempt to detect file type from it's content + */ + const magicType = fileType(fileContents) + if (magicType) { + return Object.assign(mediaTyper.parse(magicType.mime), { + ext: magicType.ext, + }) + } + + /** + * If we are unable to pull the file magicType and the current + * bytes of the content is under the minimumBytes required, + * then we should return `null` and force the consumer + * to re-call this method after new content + */ + if (fileContents.length < fileType.minimumBytes && !force) { + return null + } + + /** + * Otherwise fallback to file extension from it's client name + * and pull type/subtype from the headers content type. + */ + return Object.assign(mediaTyper.parse(headers['content-type']), { + ext: extname(clientName).replace(/^\./, ''), + }) +} diff --git a/test-helpers/index.ts b/test-helpers/index.ts new file mode 100644 index 0000000..aeb4cc0 --- /dev/null +++ b/test-helpers/index.ts @@ -0,0 +1,22 @@ +/* +* @adonisjs/bodyparser +* +* (c) Harminder Virk +* +* For the full copyright and license information, please view the LICENSE +* file that was distributed with this source code. 
+*/ + +import { join } from 'path' + +const contents = JSON.stringify(require('../package.json'), null, 2) + +export const packageFilePath = join(__dirname, '../package.json') +export const packageFileSize = Buffer.from(contents, 'utf-8').length + 1 +export const sleep = (time: number) => new Promise((resolve) => setTimeout(resolve, time)) +export const requestConfig = { + allowMethodSpoofing: false, + trustProxy: () => true, + subdomainOffset: 2, + generateRequestId: false, +} diff --git a/test/body-parser.spec.ts b/test/body-parser.spec.ts index ee91903..8a7808a 100644 --- a/test/body-parser.spec.ts +++ b/test/body-parser.spec.ts @@ -7,6 +7,9 @@ * file that was distributed with this source code. */ +/// +/// + import { join } from 'path' import { homedir } from 'os' import * as test from 'japa' @@ -14,16 +17,23 @@ import { merge } from 'lodash' import { createServer } from 'http' import { pathExists } from 'fs-extra' import * as supertest from 'supertest' -import { HttpContext } from '@poppinss/http-server' -import { BodyParserMiddleware } from '../src/BodyParser' -import { Multipart } from '../src/Multipart' +import { Request } from '@poppinss/request' +import { HttpContext as BaseHttpContext } from '@poppinss/http-server' +import { HttpContextConstructorContract } from '@ioc:Adonis/Core/HttpContext' + import { config } from '../config/index' +import { Multipart } from '../src/Multipart' +import { BodyParserMiddleware } from '../src/BodyParser' +import extendRequest from '../src/Bindings/Request' + +import { packageFilePath, packageFileSize } from '../test-helpers' +extendRequest(Request) -const PACKAGE_FILE_PATH = join(__dirname, '../package.json') -const PACKAGE_FILE_SIZE = Buffer.from( - JSON.stringify(require('../package.json'), null, 2), - 'utf-8', -).length + 1 +/** + * The shape of `AdonisJs HTTP context` is bit different from `@poppinss/http-server`. So + * we need to cast the types here for TS to work. 
+ */ +const HttpContext = BaseHttpContext as unknown as HttpContextConstructorContract test.group('BodyParser Middleware | generic', () => { test('do not parse get requests', async (assert) => { @@ -303,16 +313,18 @@ test.group('BodyParser Middleware | multipart', () => { res.end(JSON.stringify({ tmpPath: ctx.request['_files'].package.tmpPath, size: ctx.request['_files'].package.size, + validated: ctx.request['_files'].package.validated, })) }) }) const { body } = await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) assert.isAbove(body.size, 0) assert.exists(body.tmpPath) + assert.isFalse(body.validated) }) test('handle request with files and fields', async (assert) => { @@ -324,6 +336,7 @@ test.group('BodyParser Middleware | multipart', () => { res.writeHead(200, { 'content-type': 'application/json' }) res.end(JSON.stringify({ size: ctx.request['_files'].package.size, + validated: ctx.request['_files'].package.validated, username: ctx.request.input('username'), })) }) @@ -331,11 +344,12 @@ test.group('BodyParser Middleware | multipart', () => { const { body } = await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) .field('username', 'virk') assert.isAbove(body.size, 0) assert.equal(body.username, 'virk') + assert.isFalse(body.validated) }) test('handle request array of files', async (assert) => { @@ -353,8 +367,8 @@ test.group('BodyParser Middleware | multipart', () => { const { body } = await supertest(server) .post('/') - .attach('package[]', PACKAGE_FILE_PATH) - .attach('package[]', PACKAGE_FILE_PATH) + .attach('package[]', packageFilePath) + .attach('package[]', packageFilePath) assert.deepEqual(body, { multiple: true }) }) @@ -371,7 +385,7 @@ test.group('BodyParser Middleware | multipart', () => { return `${index++}.tmp` }, types: ['multipart/form-data'], - limit: (PACKAGE_FILE_SIZE * 2) - 10, + limit: (packageFileSize * 2) - 10, }, })) @@ 
-385,8 +399,8 @@ test.group('BodyParser Middleware | multipart', () => { const { text } = await supertest(server) .post('/') - .attach('package[]', PACKAGE_FILE_PATH) - .attach('package[]', PACKAGE_FILE_PATH) + .attach('package[]', packageFilePath) + .attach('package[]', packageFilePath) assert.equal(text, 'E_REQUEST_ENTITY_TOO_LARGE: request entity too large') @@ -410,7 +424,7 @@ test.group('BodyParser Middleware | multipart', () => { const { body } = await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) .field('', 'virk') assert.deepEqual(body, {}) @@ -429,7 +443,7 @@ test.group('BodyParser Middleware | multipart', () => { const { text } = await supertest(server) .post('/') - .attach('', PACKAGE_FILE_PATH) + .attach('', packageFilePath) assert.deepEqual(text, '0') }) @@ -446,7 +460,7 @@ test.group('BodyParser Middleware | multipart', () => { })) await middleware.handle(ctx, async () => { - assert.isUndefined(ctx.request['_files']) + assert.deepEqual(ctx.request['_files'], {}) assert.instanceOf(ctx.request['multipart'], Multipart) res.end() }) @@ -454,7 +468,7 @@ test.group('BodyParser Middleware | multipart', () => { await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) .field('username', 'virk') }) @@ -471,7 +485,7 @@ test.group('BodyParser Middleware | multipart', () => { })) await middleware.handle(ctx, async () => { - assert.isUndefined(ctx.request['_files']) + assert.deepEqual(ctx.request['_files'], {}) assert.instanceOf(ctx.request['multipart'], Multipart) res.end() }) @@ -479,7 +493,7 @@ test.group('BodyParser Middleware | multipart', () => { await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) .field('username', 'virk') }) @@ -496,7 +510,7 @@ test.group('BodyParser Middleware | multipart', () => { })) await middleware.handle(ctx, async () => { - 
assert.isUndefined(ctx.request['_files']) + assert.deepEqual(ctx.request['_files'], {}) assert.instanceOf(ctx.request['multipart'], Multipart) res.end() }) @@ -504,7 +518,7 @@ test.group('BodyParser Middleware | multipart', () => { await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) .field('username', 'virk') }) @@ -519,7 +533,6 @@ test.group('BodyParser Middleware | multipart', () => { type: ctx.request['_files'].avatar.type, subtype: ctx.request['_files'].avatar.subtype, extname: ctx.request['_files'].avatar.extname, - fileType: ctx.request['_files'].avatar._data.fileType, })) }) }) @@ -534,10 +547,193 @@ test.group('BodyParser Middleware | multipart', () => { type: 'image', subtype: 'png', extname: 'png', - fileType: { - ext: 'png', - mime: 'image/png', - }, }) }) + + test('validate file when access via request.file method', async (assert) => { + const server = createServer(async (req, res) => { + const ctx = HttpContext.create('/', {}, req, res) + const middleware = new BodyParserMiddleware(config) + + await middleware.handle(ctx, async () => { + res.writeHead(200, { 'content-type': 'application/json' }) + const pkgFile = ctx.request.file('package', { size: 10 })! 
+ + res.end(JSON.stringify({ + tmpPath: pkgFile.tmpPath!, + size: pkgFile.size, + validated: pkgFile.validated, + isValid: pkgFile.isValid, + errors: pkgFile.errors, + })) + }) + }) + + const { body } = await supertest(server) + .post('/') + .attach('package', packageFilePath) + + assert.equal(body.size, packageFileSize) + assert.exists(body.tmpPath) + assert.isTrue(body.validated) + assert.isFalse(body.isValid) + assert.deepEqual(body.errors, [{ + fieldName: 'package', + clientName: 'package.json', + message: 'File size should be less than 10B', + type: 'size', + }]) + }) + + test('validate array of files when access via request.file method', async (assert) => { + const server = createServer(async (req, res) => { + const ctx = HttpContext.create('/', {}, req, res) + const middleware = new BodyParserMiddleware(config) + + await middleware.handle(ctx, async () => { + res.writeHead(200, { 'content-type': 'application/json' }) + const pkgFiles = ctx.request.files('package', { size: 10 }).map((pkgFile) => { + return { + tmpPath: pkgFile.tmpPath!, + size: pkgFile.size, + validated: pkgFile.validated, + isValid: pkgFile.isValid, + errors: pkgFile.errors, + } + }) + + res.end(JSON.stringify(pkgFiles)) + }) + }) + + const { body } = await supertest(server) + .post('/') + .attach('package[0]', packageFilePath) + .attach('package[1]', packageFilePath) + + assert.lengthOf(body, 2) + assert.equal(body[0].size, packageFileSize) + assert.equal(body[1].size, packageFileSize) + + assert.exists(body[0].tmpPath) + assert.exists(body[1].tmpPath) + + assert.isTrue(body[0].validated) + assert.isTrue(body[1].validated) + + assert.isFalse(body[0].isValid) + assert.isFalse(body[1].isValid) + + assert.deepEqual(body[0].errors, [{ + fieldName: 'package[0]', + clientName: 'package.json', + message: 'File size should be less than 10B', + type: 'size', + }]) + + assert.deepEqual(body[1].errors, [{ + fieldName: 'package[1]', + clientName: 'package.json', + message: 'File size should be less 
than 10B', + type: 'size', + }]) + }) + + test('pull first file even when source is an array', async (assert) => { + const server = createServer(async (req, res) => { + const ctx = HttpContext.create('/', {}, req, res) + const middleware = new BodyParserMiddleware(config) + + await middleware.handle(ctx, async () => { + res.writeHead(200, { 'content-type': 'application/json' }) + const pkgFile = ctx.request.file('package', { size: 10 })! + + res.end(JSON.stringify({ + tmpPath: pkgFile.tmpPath!, + size: pkgFile.size, + validated: pkgFile.validated, + isValid: pkgFile.isValid, + errors: pkgFile.errors, + })) + }) + }) + + const { body } = await supertest(server) + .post('/') + .attach('package[0]', packageFilePath) + .attach('package[1]', packageFilePath) + + assert.equal(body.size, packageFileSize) + assert.exists(body.tmpPath) + assert.isTrue(body.validated) + assert.isFalse(body.isValid) + assert.deepEqual(body.errors, [{ + fieldName: 'package[0]', + clientName: 'package.json', + message: 'File size should be less than 10B', + type: 'size', + }]) + }) + + test('return null when file doesn\'t exists', async (assert) => { + const server = createServer(async (req, res) => { + const ctx = HttpContext.create('/', {}, req, res) + const middleware = new BodyParserMiddleware(config) + + await middleware.handle(ctx, async () => { + res.writeHead(200, { 'content-type': 'application/json' }) + const pkgFile = ctx.request.file('package', { size: 10 }) + res.end(JSON.stringify(pkgFile)) + }) + }) + + const { body } = await supertest(server).post('/') + assert.isNull(body) + }) + + test('return empty array file doesn\'t exists', async (assert) => { + const server = createServer(async (req, res) => { + const ctx = HttpContext.create('/', {}, req, res) + const middleware = new BodyParserMiddleware(config) + + await middleware.handle(ctx, async () => { + res.writeHead(200, { 'content-type': 'application/json' }) + const pkgFile = ctx.request.files('package', { size: 10 }) + 
res.end(JSON.stringify(pkgFile)) + }) + }) + + const { body } = await supertest(server).post('/') + assert.deepEqual(body, []) + }) + + test('get file from nested object', async (assert) => { + const server = createServer(async (req, res) => { + const ctx = HttpContext.create('/', {}, req, res) + const middleware = new BodyParserMiddleware(config) + + await middleware.handle(ctx, async () => { + res.writeHead(200, { 'content-type': 'application/json' }) + const pkgFile = ctx.request.file('user.package')! + + res.end(JSON.stringify({ + tmpPath: pkgFile.tmpPath!, + size: pkgFile.size, + validated: pkgFile.validated, + isValid: pkgFile.isValid, + errors: pkgFile.errors, + })) + }) + }) + + const { body } = await supertest(server) + .post('/') + .attach('user.package', packageFilePath) + + assert.equal(body.size, packageFileSize) + assert.exists(body.tmpPath) + assert.isTrue(body.validated) + assert.isTrue(body.isValid) + assert.deepEqual(body.errors, []) + }) }) diff --git a/test/file.spec.ts b/test/file.spec.ts deleted file mode 100644 index aaa4cd2..0000000 --- a/test/file.spec.ts +++ /dev/null @@ -1,180 +0,0 @@ -/* -* @adonisjs/bodyparser -* -* (c) Harminder Virk -* -* For the full copyright and license information, please view the LICENSE -* file that was distributed with this source code. 
-*/ - -import * as test from 'japa' -import { join } from 'path' -import { createServer } from 'http' -import * as supertest from 'supertest' - -import { Multipart } from '../src/Multipart' -import { File } from '../src/Multipart/File' - -const PACKAGE_FILE_PATH = join(__dirname, '../package.json') - -test.group('File', () => { - test('use part data to set file metadata', async (assert) => { - const server = createServer(async (req, res) => { - let file: File | null = null - - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (p) => { - file = new File({ - fileName: p.filename, - fieldName: p.name, - headers: p.headers, - bytes: 100, - tmpPath: 'fake.json', - }) - - p.resume() - }) - - await multipart.process() - - res.writeHead(200, { 'content-type': 'application/json' }) - res.end(JSON.stringify({ - fieldName: file!.fieldName, - clientName: file!.clientName, - size: file!.size, - tmpPath: file!.tmpPath, - extname: file!.extname, - status: file!.status, - type: file!.type, - subtype: file!.subtype, - })) - }) - - const { body } = await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) - assert.deepEqual(body, { - clientName: 'package.json', - fieldName: 'package', - size: 100, - tmpPath: 'fake.json', - extname: 'json', - status: 'pending', - subtype: 'json', - type: 'application', - }) - }) - - test('return isValid to true, when there are no validations in place', async (assert) => { - const server = createServer(async (req, res) => { - let file: File | null = null - - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (p) => { - file = new File({ - fileName: p.filename, - fieldName: p.name, - headers: p.headers, - bytes: 100, - tmpPath: 'fake.json', - }) - - p.resume() - }) - - await multipart.process() - - res.writeHead(200, { 'content-type': 'application/json' }) - res.end(JSON.stringify({ isValid: file!.isValid, status: file!.status })) - }) - - const { body } 
= await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) - assert.deepEqual(body, { isValid: true, status: 'pending' }) - }) - - test('validate ext with validation options are set', async (assert) => { - const server = createServer(async (req, res) => { - let file: File | null = null - - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (p) => { - file = new File({ - fileName: p.filename, - fieldName: p.name, - headers: p.headers, - bytes: 100, - tmpPath: 'fake.json', - }) - - file.setValidationOptions({ - extnames: ['jpg'], - }) - - p.resume() - }) - - await multipart.process() - - res.writeHead(200, { 'content-type': 'application/json' }) - res.end(JSON.stringify({ - isValid: file!.isValid, - errors: file!.errors, - status: file!.status, - })) - }) - - const { body } = await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) - assert.deepEqual(body, { - isValid: false, - errors: [{ - fieldName: 'package', - clientName: 'package.json', - message: 'Invalid file extension json. 
Only jpg is allowed', - type: 'extname', - }], - status: 'error', - }) - }) - - test('validate size with validation options are set', async (assert) => { - const server = createServer(async (req, res) => { - let file: File | null = null - - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (p) => { - file = new File({ - fileName: p.filename, - fieldName: p.name, - headers: p.headers, - bytes: 100, - tmpPath: 'fake.json', - }) - - file.setValidationOptions({ - size: 1, - }) - - p.resume() - }) - - await multipart.process() - - res.writeHead(200, { 'content-type': 'application/json' }) - res.end(JSON.stringify({ - isValid: file!.isValid, - errors: file!.errors, - status: file!.status, - })) - }) - - const { body } = await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) - assert.deepEqual(body, { - isValid: false, - errors: [{ - fieldName: 'package', - clientName: 'package.json', - message: 'File size should be less than 100B', - type: 'size', - }], - status: 'error', - }) - }) -}) diff --git a/test/multipart.spec.ts b/test/multipart.spec.ts index 007efc6..946edf2 100644 --- a/test/multipart.spec.ts +++ b/test/multipart.spec.ts @@ -9,72 +9,91 @@ /// +import { join } from 'path' import * as test from 'japa' -import { createServer } from 'http' +import { createServer, IncomingMessage, ServerResponse } from 'http' import * as supertest from 'supertest' -import { join } from 'path' +import { Exception } from '@poppinss/utils' +import { Request as BaseRequest } from '@poppinss/request' import { pathExists, remove, createWriteStream } from 'fs-extra' +import { RequestContract, RequestConfigContract } from '@ioc:Adonis/Core/Request' import { Multipart } from '../src/Multipart' -import { MultipartStream } from '@ioc:Adonis/Addons/BodyParser' +import { File } from '../src/Multipart/File' +import { sleep, requestConfig, packageFilePath, packageFileSize } from '../test-helpers' -function sleep (time) { - return new 
Promise((resolve) => setTimeout(resolve, time)) +const Request = BaseRequest as unknown as { + new (req: IncomingMessage, res: ServerResponse, config: RequestConfigContract): RequestContract, } -const PACKAGE_FILE_PATH = join(__dirname, '../package.json') - test.group('Multipart', () => { test('process file by attaching handler on field name', async (assert) => { - let part: null | MultipartStream = null + let files: null | { [key: string]: File } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (p) => { - part = p - part.resume() + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('package', {}, async (part, reporter) => { + part.on('data', (line) => { + reporter(line) + }) }) + await multipart.process() + files = request['_files'] res.end() }) - await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) - - assert.equal(part!.name, 'package') - assert.equal(part!.filename, 'package.json') - assert.isTrue(part!['_readableState'].ended) + await supertest(server).post('/').attach('package', packageFilePath) + assert.property(files, 'package') + assert.isTrue(files!.package.isValid) + assert.equal(files!.package.size, packageFileSize) }) - test('error inside onFile handler should propogate to main process', async (assert) => { - let part: null | MultipartStream = null + test('error inside onFile handler should propogate to file errors', async (assert) => { + let files: null | { [key: string]: File } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async () => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('package', {}, async (part, reporter) => { + 
part.on('data', (line) => { + reporter(line) + }) throw new Error('Cannot process') }) - try { - await multipart.process() - res.end() - } catch (error) { - res.writeHead(500) - res.end(error.message) - } + await multipart.process() + files = request['_files'] || null + res.end() }) - const { text } = await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) - - assert.isNull(part) - assert.equal(text, 'Cannot process') + await supertest(server).post('/').attach('package', packageFilePath) + assert.property(files, 'package') + assert.isFalse(files!.package.isValid) + assert.deepEqual(files!.package.errors, [{ + fieldName: 'package', + clientName: 'package.json', + message: 'Cannot process', + type: 'fatal', + }]) }) test('wait for promise to return even when part has been streamed', async (assert) => { + let files: null | { [key: string]: File } = null const stack: string[] = [] const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (part) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('package', {}, async (part, reporter) => { + part.on('data', (line) => { + reporter(line) + }) + stack.push('before') part.resume() await sleep(100) @@ -82,41 +101,61 @@ test.group('Multipart', () => { }) await multipart.process() + files = request['_files'] stack.push('ended') res.end() }) - await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) + await supertest(server).post('/').attach('package', packageFilePath) assert.deepEqual(stack, ['before', 'after', 'ended']) + assert.property(files, 'package') + assert.isTrue(files!.package.isValid) + assert.equal(files!.package.size, packageFileSize) }) test('work fine when stream is piped to a destination', async (assert) => { const SAMPLE_FILE_PATH = join(__dirname, './sample.json') + let files: null | { [key: 
string]: File } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) - multipart.onFile('package', async (part) => { + multipart.onFile('package', {}, async (part, reporter) => { + part.on('data', (line) => { + reporter(line) + }) part.pipe(createWriteStream(SAMPLE_FILE_PATH)) }) + await multipart.process() + files = request['_files'] const hasFile = await pathExists(SAMPLE_FILE_PATH) res.end(String(hasFile)) }) - const { text } = await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) + const { text } = await supertest(server).post('/').attach('package', packageFilePath) + assert.property(files, 'package') + assert.isTrue(files!.package.isValid) + assert.equal(files!.package.size, packageFileSize) assert.equal(text, 'true') + await remove(SAMPLE_FILE_PATH) }) test('work fine with array of files', async (assert) => { const stack: string[] = [] + let files: null | { [key: string]: File } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (part) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('package', {}, async (part, reporter) => { + part.on('data', reporter) + stack.push('before') part.resume() await sleep(100) @@ -124,20 +163,30 @@ test.group('Multipart', () => { }) await multipart.process() + files = request['_files'] stack.push('ended') res.end() }) - await supertest(server).post('/').attach('package[]', PACKAGE_FILE_PATH) + await supertest(server).post('/').attach('package[]', packageFilePath) + assert.deepEqual(stack, ['before', 'after', 'ended']) + assert.property(files, 'package') + assert.isTrue(files!.package[0].isValid) + 
assert.equal(files!.package[0].size, packageFileSize) }) test('work fine with indexed array of files', async (assert) => { const stack: string[] = [] + let files: null | { [key: string]: File } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('package', async (part) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('package', {}, async (part, reporter) => { + part.on('data', reporter) + stack.push('before') part.resume() await sleep(100) @@ -145,20 +194,29 @@ test.group('Multipart', () => { }) await multipart.process() + files = request['_files'] stack.push('ended') res.end() }) - await supertest(server).post('/').attach('package[0]', PACKAGE_FILE_PATH) + await supertest(server).post('/').attach('package[0]', packageFilePath) assert.deepEqual(stack, ['before', 'after', 'ended']) + assert.property(files, 'package') + assert.isTrue(files!.package[0].isValid) + assert.equal(files!.package[0].size, packageFileSize) }) test('pass file to wildcard handler when defined', async (assert) => { const stack: string[] = [] + let files: null | { [key: string]: File } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('*', async (part) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('*', {}, async (part, reporter) => { + part.on('data', reporter) + stack.push('before') part.resume() await sleep(100) @@ -166,52 +224,59 @@ test.group('Multipart', () => { }) await multipart.process() + files = request['_files'] stack.push('ended') res.end() }) - await supertest(server).post('/').attach('package', PACKAGE_FILE_PATH) + await supertest(server).post('/').attach('package', packageFilePath) 
assert.deepEqual(stack, ['before', 'after', 'ended']) + assert.property(files, 'package') + assert.isTrue(files!.package.isValid) + assert.equal(files!.package.size, packageFileSize) }) - test('get fields from the fields handler', async (assert) => { + test('collect fields automatically', async (assert) => { const stack: string[] = [] - assert.plan(3) + let files: null | { [key: string]: File } = null + let fields: null | { [key: string]: any } = null const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onFile('*', async (part) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('*', {}, async (part, reporter) => { + part.on('data', reporter) stack.push('file') part.resume() }) - multipart.onField('name', (key, value) => { - assert.equal(key, 'name') - assert.equal(value, 'virk') - stack.push('field') - }) - await multipart.process() + files = request['_files'] + fields = request.all() stack.push('ended') res.end() }) await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) + .attach('package', packageFilePath) .field('name', 'virk') - assert.deepEqual(stack, ['file', 'field', 'ended']) + assert.deepEqual(stack, ['file', 'ended']) + assert.property(files, 'package') + assert.isTrue(files!.package.isValid) + assert.equal(files!.package.size, packageFileSize) + assert.deepEqual(fields, { name: 'virk' }) }) - test('pass errors from field handler to upstream', async (assert) => { + test('raise error when process is invoked multiple times', async (assert) => { const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) - multipart.onField('name', () => { - throw new Error('bad name') - }) + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) try { + await 
multipart.process() await multipart.process() res.end() } catch (error) { @@ -222,17 +287,17 @@ test.group('Multipart', () => { const { text } = await supertest(server) .post('/') - .attach('package', PACKAGE_FILE_PATH) .field('name', 'virk') - assert.equal(text, 'bad name') + assert.equal(text, 'E_RUNTIME_EXCEPTION: multipart stream has already been consumed') }) - test('raise error when process is invoked multipart times', async (assert) => { + test('raise error when maxFields are crossed', async (assert) => { const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1000 }) + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1, limit: 4000 }) + try { - await multipart.process() await multipart.process() res.end() } catch (error) { @@ -244,30 +309,107 @@ test.group('Multipart', () => { const { text } = await supertest(server) .post('/') .field('name', 'virk') + .field('age', '22') - assert.equal(text, 'E_RUNTIME_EXCEPTION: multipart stream has already been consumed') + assert.equal(text, 'E_REQUEST_ENTITY_TOO_LARGE: Max fields limit exceeded') }) - test('raise error when maxFields are crossed', async (assert) => { + test('report size validation errors', async (assert) => { + let files: null | { [key: string]: File } = null + assert.plan(4) + const server = createServer(async (req, res) => { - const multipart = new Multipart(req, { maxFields: 1 }) - multipart.onField('*', async () => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('*', { + size: 10, + }, async (part, reporter) => { + part.on('error', (error: Exception) => { + assert.equal(error.code, 'E_STREAM_VALIDATION_FAILURE') + }) + part.on('data', reporter) }) - try { - await multipart.process() - res.end() - } catch (error) { - res.writeHead(500) - res.end(error.message) - } + await 
multipart.process() + files = request['_files'] || null + res.end() }) - const { text } = await supertest(server) + await supertest(server) .post('/') - .field('name', 'virk') - .field('age', '22') + .attach('package', packageFilePath) + + assert.property(files, 'package') + assert.isFalse(files!.package.isValid) + assert.deepEqual(files!.package.errors, [{ + type: 'size', + clientName: 'package.json', + fieldName: 'package', + message: 'File size should be less than 10B', + }]) + }) - assert.equal(text, 'E_REQUEST_ENTITY_TOO_LARGE: Max fields limit exceeded') + test('report extension validation errors', async (assert) => { + let files: null | { [key: string]: File } = null + + const server = createServer(async (req, res) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('*', { + extnames: ['jpg'], + }, async (part, reporter) => { + part.on('error', () => {}) + part.on('data', reporter) + }) + + await multipart.process() + files = request['_files'] || null + res.end() + }) + + await supertest(server) + .post('/') + .attach('package', packageFilePath) + + assert.property(files, 'package') + assert.isFalse(files!.package.isValid) + assert.deepEqual(files!.package.errors, [{ + type: 'extname', + clientName: 'package.json', + fieldName: 'package', + message: 'Invalid file extension json. 
Only jpg is allowed', + }]) + }) + + test('do not run validations when deferValidations is set to true', async (assert) => { + let files: null | { [key: string]: File } = null + + const server = createServer(async (req, res) => { + const request = new Request(req, res, requestConfig) + const multipart = new Multipart(request, { maxFields: 1000, limit: 4000 }) + + multipart.onFile('*', { + size: 10, + deferValidations: true, + }, async (part, reporter) => { + part.on('data', reporter) + }) + + await multipart.process() + files = request['_files'] || null + res.end() + }) + + await supertest(server) + .post('/') + .attach('package', packageFilePath) + + assert.property(files, 'package') + assert.isTrue(files!.package.isValid) + assert.isFalse(files!.package.validated) + assert.equal(files!.package.extname, 'json') + assert.deepEqual(files!.package.errors, []) }) })