diff --git a/lib/browser/managed_upload.js b/lib/browser/managed_upload.js
index 311b58bd8..304c6d289 100644
--- a/lib/browser/managed_upload.js
+++ b/lib/browser/managed_upload.js
@@ -17,12 +17,18 @@ const proto = exports;
  * @param {String} name
  * @param {String|File} file
  * @param {Object} options
- *        {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64
- *        {String} options.callback.url the OSS sends a callback request to this URL
- *        {String} options.callback.host The host header value for initiating callback requests
- *        {String} options.callback.body The value of the request body when a callback is initiated
- *        {String} options.callback.contentType The Content-Type of the callback requests initiatiated
- *        {Object} options.callback.customValue Custom parameters are a map of key-values, e.g:
+ *        {Object} options.callback
+ *          The callback parameter is composed of a JSON string encoded in Base64
+ *        {String} options.callback.url
+ *          the OSS sends a callback request to this URL
+ *        {String} options.callback.host
+ *          The host header value for initiating callback requests
+ *        {String} options.callback.body
+ *          The value of the request body when a callback is initiated
+ *        {String} options.callback.contentType
+ *          The Content-Type of the callback requests initiated
+ *        {Object} options.callback.customValue
+ *          Custom parameters are a map of key-values, e.g:
  *          customValue = {
  *            key1: 'value1',
  *            key2: 'value2'
@@ -107,7 +113,7 @@ proto._resumeMultipart = function* _resumeMultipart(checkpoint, options) {
   const partOffs = this._divideParts(fileSize, partSize);
   const numParts = partOffs.length;

-  const uploadPartJob = function* (self, partNo) {
+  const uploadPartJob = function* uploadPartJob(self, partNo) {
     if (!self.isCancel()) {
       try {
         const pi = partOffs[partNo - 1];
@@ -117,16 +123,19 @@ proto._resumeMultipart = function* _resumeMultipart(checkpoint, options) {
         };

         const result = yield self._uploadPart(name, uploadId, partNo, data);
-        doneParts.push({
-          number: partNo,
-          etag: result.res.headers.etag,
-        });
-        checkpoint.doneParts = doneParts;
-
-        if (!self.isCancel() && options && options.progress) {
-          yield options.progress(doneParts.length / numParts, checkpoint, result.res);
+        if (!self.isCancel()) {
+          doneParts.push({
+            number: partNo,
+            etag: result.res.headers.etag,
+          });
+          checkpoint.doneParts = doneParts;
+
+          if (options && options.progress) {
+            yield options.progress(doneParts.length / numParts, checkpoint, result.res);
+          }
         }
       } catch (err) {
+        self.cancel();
         err.partNum = partNo;
         throw err;
       }
@@ -156,24 +165,25 @@ proto._resumeMultipart = function* _resumeMultipart(checkpoint, options) {
     // start uploads jobs
     const errors = yield this._thunkPool(jobs, parallel);
-    if (this.isCancel()) {
-      jobs = null;
-      throw this._makeCancelEvent();
-    }
-
     // check errors after all jobs are completed
     if (errors && errors.length > 0) {
+      this.resetCancelFlag();
       const err = errors[0];
       err.message = `Failed to upload some parts with error: ${err.toString()} part_num: ${err.partNum}`;
       throw err;
     }
+
+    if (this.isCancel()) {
+      jobs = null;
+      throw this._makeCancelEvent();
+    }
   }

   return yield this.completeMultipartUpload(name, uploadId, doneParts, options);
 };

-is.file = function (file) {
-  return typeof (File) !== 'undefined' && file instanceof File;
+is.file = function file(obj) {
+  return typeof (File) !== 'undefined' && obj instanceof File;
 };

 /**
@@ -241,7 +251,7 @@ WebFileReadStream.prototype._read = function _read(size) {
   size = size || defaultReadSize;
   const that = this;

-  this.reader.onload = function (e) {
+  this.reader.onload = function onload(e) {
     that.fileBuffer = new Buffer(new Uint8Array(e.target.result));
     that.file = null;
     that.readFileAndPush(size);
@@ -270,7 +280,7 @@ proto._createStream = function _createStream(file, start, end) {

 proto._getPartSize = function _getPartSize(fileSize, partSize) {
   const maxNumParts = 10 * 1000;
-  const defaultPartSize = 1 * 1024 * 1024;
+  const defaultPartSize = 1024 * 1024;

   if (!partSize) {
     return defaultPartSize;
@@ -300,10 +310,9 @@ proto._divideParts = function _divideParts(fileSize, partSize) {
 };

 // cancel is not error , so create an object
-proto._makeCancelEvent = function () {
-  const cancelEvent = {
+proto._makeCancelEvent = function _makeCancelEvent() {
+  return {
     status: 0,
     name: 'cancel',
   };
-  return cancelEvent;
 };
diff --git a/lib/common/thunkpool.js b/lib/common/thunkpool.js
index 0a6f7e5c1..4c5c2e6c8 100644
--- a/lib/common/thunkpool.js
+++ b/lib/common/thunkpool.js
@@ -39,7 +39,7 @@ proto._thunkPool = function thunkPool(thunks, parallel) {
       if (endQueueSum === concurrency) {
         queue.fns = [];
         queue.buffer = [];
-        resolve();
+        resolve(_errs);
       }
     }

diff --git a/test/browser/browser.test.js b/test/browser/browser.test.js
index f20b94fc3..e169bc184 100644
--- a/test/browser/browser.test.js
+++ b/test/browser/browser.test.js
@@ -786,10 +786,14 @@ describe('browser', () => {
       const name = `${prefix}multipart/upload-file-exception`;

       const stubUploadPart = sinon.stub(this.store, '_uploadPart');
-      stubUploadPart.throws('TestUploadPartException');
+      const testUploadPartException = new Error();
+      testUploadPartException.name = 'TestUploadPartException';
+      testUploadPartException.status = 403;
+      stubUploadPart.throws(testUploadPartException);

       let errorMsg = '';
       let partNumz = 0;
+      let errStatus = 0;
       try {
         yield this.store.multipartUpload(name, file, {
           progress() {
@@ -797,16 +801,19 @@ describe('browser', () => {
               done();
             };
           },
+          partSize: 100 * 1024,
         });
       } catch (err) {
         errorMsg = err.message;
         partNumz = err.partNum;
+        errStatus = err.status;
       }
       assert.equal(
         errorMsg,
         'Failed to upload some parts with error: TestUploadPartException part_num: 1',
       );
       assert.equal(partNumz, 1);
+      assert.equal(errStatus, 403);

       this.store._uploadPart.restore();
     });
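
For context, a caller-side sketch of the behavior this patch produces: a failing part now cancels the remaining jobs, _thunkPool resolves with the collected errors so the first error (with partNum and status attached) is rethrown, and a cancelled upload throws the plain { status: 0, name: 'cancel' } object from _makeCancelEvent rather than an Error. This sketch is not part of the patch; it assumes an ali-oss browser client exposed as OSS, placeholder credentials, bucket, and callback URL, and uses the thunk-style progress callback shown in the test above.

const store = new OSS({
  region: '<region>',                     // placeholder
  accessKeyId: '<access-key-id>',         // placeholder
  accessKeySecret: '<access-key-secret>', // placeholder
  bucket: '<bucket>',                     // placeholder
});

function upload(name, file) {
  return store.multipartUpload(name, file, {
    parallel: 4,
    partSize: 100 * 1024,
    // thunk-style progress callback, as in the test above
    progress(percentage, checkpoint) {
      return function (done) {
        console.log(`uploaded ${Math.round(percentage * 100)}%`, checkpoint);
        done();
      };
    },
    // options.callback as documented in the JSDoc hunk above
    callback: {
      url: 'https://example.com/oss-callback', // placeholder endpoint
      host: 'example.com',
      body: 'object=${object}&size=${size}&key1=${x:key1}',
      contentType: 'application/x-www-form-urlencoded',
      customValue: { key1: 'value1', key2: 'value2' },
    },
  }).then((result) => {
    console.log('upload complete', result);
  }).catch((err) => {
    if (err.name === 'cancel' && err.status === 0) {
      // _makeCancelEvent() throws this plain object, not an Error
      console.log('upload cancelled');
    } else {
      // part failures carry the failing part number and, after this
      // patch, the status of the underlying error (403 in the test)
      console.error('upload failed', err.partNum, err.status, err.message);
    }
  });
}

// A cancel button (or similar) can abort the in-flight upload:
// store.cancel();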