Feat: Improve node file upload #459
Conversation
Looks good in the initial review. Would love to see this completed with resumable upload; I see a TODO and also a function for resumable upload. Also, please provide a generated SDK to make review easier once completed.
@lohanidamodar Implemented both TODOs. This is an example of the generated code:

```js
// inputFile.js
const { Readable } = require('stream');
const fs = require('fs');
class InputFile {
    stream; // Content of the file as a readable stream
    size;   // Total size of the file content in bytes
    name;   // File name

    static fromPath = (filePath, name) => {
        const stream = fs.createReadStream(filePath);
        const { size } = fs.statSync(filePath);
        return new InputFile(stream, name, size);
    };

    static fromBuffer = (buffer, name) => {
        // Stream the buffer directly; converting it to a string would corrupt binary data
        const stream = Readable.from(buffer);
        const size = Buffer.byteLength(buffer);
        return new InputFile(stream, name, size);
    };

    // blob.arrayBuffer() returns a Promise, so this factory is async
    static fromBlob = async (blob, name) => {
        const buffer = Buffer.from(await blob.arrayBuffer());
        const stream = Readable.from(buffer);
        const size = Buffer.byteLength(buffer);
        return new InputFile(stream, name, size);
    };

    static fromStream = (stream, name, size) => {
        return new InputFile(stream, name, size);
    };

    static fromPlainText = (content, name) => {
        const buffer = Buffer.from(content, 'utf-8');
        const stream = Readable.from(buffer);
        const size = Buffer.byteLength(buffer);
        return new InputFile(stream, name, size);
    };

    constructor(stream, name, size) {
        this.stream = stream;
        this.name = name;
        this.size = size;
    }
}
module.exports = InputFile;
```
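For context, a minimal sketch of how these factories would be consumed (the file names and contents below are placeholders for illustration, not part of the PR):

```js
const InputFile = require('./inputFile');

// Hypothetical inputs, for illustration only
const fromDisk = InputFile.fromPath('./photo.jpg', 'photo.jpg');
const fromText = InputFile.fromPlainText('hello world', 'hello.txt');
const fromBytes = InputFile.fromBuffer(Buffer.from([0xde, 0xad, 0xbe, 0xef]), 'bytes.bin');

// fromBlob is async because blob.arrayBuffer() returns a Promise
// const fromBlob = await InputFile.fromBlob(someBlob, 'blob.bin');

console.log(fromText.name, fromText.size); // hello.txt 11
```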
And the generated `createFile` method in the storage service:

```js
// createFile method (storage)
// (excerpt; the service file also imports Stream from 'stream' and AppwriteException)
/**
 * Create File
 *
 * Create a new file. Before using this route, you should create a new bucket
 * resource using either a [server
 * integration](/docs/server/database#storageCreateBucket) API or directly
 * from your Appwrite console.
 *
 * Larger files should be uploaded using multiple requests with the
 * [content-range](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Range)
 * header to send a partial request with a maximum supported chunk of `5MB`.
 * The `content-range` header values should always be in bytes.
 *
 * When the first request is sent, the server will return the **File** object,
 * and each subsequent part request must include the file's **id** in the
 * `x-appwrite-id` header so the server knows the partial upload belongs to
 * the existing file and not to a new one.
 *
 * If you're creating a new file using one of the Appwrite SDKs, all the
 * chunking logic will be managed by the SDK internally.
 *
 * @param {string} bucketId
 * @param {string} fileId
 * @param {InputFile} file
 * @param {string[]} read
 * @param {string[]} write
 * @param {function} onProgress
 * @throws {AppwriteException}
 * @returns {Promise}
 */
async createFile(bucketId, fileId, file, read, write, onProgress = () => {}) {
    if (typeof bucketId === 'undefined') {
        throw new AppwriteException('Missing required parameter: "bucketId"');
    }

    if (typeof fileId === 'undefined') {
        throw new AppwriteException('Missing required parameter: "fileId"');
    }

    if (typeof file === 'undefined') {
        throw new AppwriteException('Missing required parameter: "file"');
    }

    let path = '/storage/buckets/{bucketId}/files'.replace('{bucketId}', bucketId);
    let payload = {};

    if (typeof fileId !== 'undefined') {
        payload['fileId'] = fileId;
    }
    if (typeof file !== 'undefined') {
        payload['file'] = file;
    }
    if (typeof read !== 'undefined') {
        payload['read'] = read;
    }
    if (typeof write !== 'undefined') {
        payload['write'] = write;
    }

    const size = file.size;
    const headers = {
        'content-type': 'multipart/form-data',
    };

    let id = undefined;
    let response = undefined;
    let chunksUploaded = 0;

    if (fileId !== 'unique()') {
        try {
            // Resume: ask the server how many chunks it already has for this file
            response = await this.client.call('get', path + '/' + fileId, headers);
            chunksUploaded = response.chunksUploaded;
        } catch (e) {
            // File does not exist yet; start the upload from the first chunk
        }
    }
    let currentChunk = Buffer.from('');
    let currentChunkSize = 0;
    let currentChunkStart = 0;

    const selfClient = this.client;

    async function uploadChunk(lastUpload = false) {
        // Skip chunks the server already has (resumed upload), but still
        // advance the offset so subsequent chunks line up correctly
        if (chunksUploaded - 1 >= currentChunkStart / selfClient.CHUNK_SIZE) {
            currentChunkStart += selfClient.CHUNK_SIZE;
            return;
        }

        const start = currentChunkStart;
        const end = Math.min(start + selfClient.CHUNK_SIZE - 1, size - 1);

        // A file that fits into a single chunk is sent without a content-range header
        if (!lastUpload || currentChunkStart !== 0) {
            headers['content-range'] = 'bytes ' + start + '-' + end + '/' + size;
        }

        if (id) {
            headers['x-appwrite-id'] = id;
        }

        const stream = Stream.Readable.from(currentChunk);
        payload['file'] = { type: 'file', file: stream, filename: file.name };

        response = await selfClient.call('post', path, headers, payload);

        if (!id) {
            id = response['$id'];
        }

        if (onProgress !== null) {
            onProgress({
                $id: response['$id'],
                progress: Math.min(start + selfClient.CHUNK_SIZE, size) / size * 100,
                sizeUploaded: end + 1,
                chunksTotal: response['chunksTotal'],
                chunksUploaded: response['chunksUploaded']
            });
        }

        currentChunkStart += selfClient.CHUNK_SIZE;
    }
    return await new Promise((resolve, reject) => {
        const writeStream = new Stream.Writable();
        writeStream._write = async (mainChunk, encoding, next) => {
            try {
                // Segment the incoming chunk into chunks of at most CHUNK_SIZE (5MB)
                const mainChunkSize = Buffer.byteLength(mainChunk);
                const chunksCount = Math.ceil(mainChunkSize / selfClient.CHUNK_SIZE);
                const chunks = [];

                for (let i = 0; i < chunksCount; i++) {
                    const chunk = mainChunk.slice(i * selfClient.CHUNK_SIZE, (i + 1) * selfClient.CHUNK_SIZE);
                    chunks.push(chunk);
                }

                for (const chunk of chunks) {
                    const chunkSize = Buffer.byteLength(chunk);

                    if (chunkSize + currentChunkSize === selfClient.CHUNK_SIZE) {
                        // Upload chunk
                        currentChunk = Buffer.concat([currentChunk, chunk]);
                        await uploadChunk();
                        currentChunk = Buffer.from('');
                        currentChunkSize = 0;
                    } else if (chunkSize + currentChunkSize > selfClient.CHUNK_SIZE) {
                        // Upload chunk, put the rest into the next chunk
                        const bytesToUpload = selfClient.CHUNK_SIZE - currentChunkSize;
                        const newChunkSection = chunk.slice(0, bytesToUpload);
                        currentChunk = Buffer.concat([currentChunk, newChunkSection]);
                        currentChunkSize = Buffer.byteLength(currentChunk);
                        await uploadChunk();
                        currentChunk = chunk.slice(bytesToUpload);
                        currentChunkSize = chunkSize - bytesToUpload;
                    } else {
                        // Append into the current chunk
                        currentChunk = Buffer.concat([currentChunk, chunk]);
                        currentChunkSize = chunkSize + currentChunkSize;
                    }
                }

                next();
            } catch (err) {
                // Propagate upload errors through the stream instead of
                // leaving them as unhandled promise rejections
                next(err);
            }
        };

        writeStream.on('finish', async () => {
            if (currentChunkSize > 0) {
                await uploadChunk(true);
            }
            resolve(response);
        });

        writeStream.on('error', (err) => {
            reject(err);
        });

        file.stream.pipe(writeStream);
    });
}
```
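To make the chunking protocol from the doc comment concrete, here is a sketch of the header sequence for a hypothetical 12 MB file (12,582,912 bytes) uploaded in 5 MB (5,242,880 byte) chunks; the sizes and IDs are illustrative, not taken from the PR:

```
POST /v1/storage/buckets/{bucketId}/files
content-range: bytes 0-5242879/12582912
  -> server returns the File object, including its $id

POST /v1/storage/buckets/{bucketId}/files
x-appwrite-id: <the $id from the first response>
content-range: bytes 5242880-10485759/12582912

POST /v1/storage/buckets/{bucketId}/files
x-appwrite-id: <the $id from the first response>
content-range: bytes 10485760-12582911/12582912
  -> final chunk completes the upload
```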
LGTM
@Meldiron let's resolve the conflict and get this merged today
The following snippet shows what will now be possible in Node:
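A minimal sketch, assuming the standard node-appwrite client setup; the endpoint, project, key, bucket ID, and file name below are placeholders:

```js
const sdk = require('node-appwrite');
const InputFile = require('./inputFile');

const client = new sdk.Client()
    .setEndpoint('https://[HOSTNAME_OR_IP]/v1') // placeholder endpoint
    .setProject('[PROJECT_ID]')                 // placeholder project ID
    .setKey('[API_KEY]');                       // placeholder API key

const storage = new sdk.Storage(client);

(async () => {
    // Chunking and resuming are handled inside createFile
    const file = await storage.createFile(
        '[BUCKET_ID]',
        'unique()',
        InputFile.fromPath('./large-video.mp4', 'large-video.mp4'),
        ['role:all'], // read permissions
        [],           // write permissions
        (progress) => console.log(`${progress.progress.toFixed(1)}% uploaded`)
    );
    console.log('Appwrite Storage:', file);
})();
```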
Output:
Appwrite Storage: