diff --git a/package.json b/package.json
index 01a1bad1b6..857e05ef28 100644
--- a/package.json
+++ b/package.json
@@ -107,7 +107,7 @@
     "ipfs-repo": "^0.29.0",
     "ipfs-unixfs": "~0.1.16",
     "ipfs-unixfs-exporter": "^0.38.0",
-    "ipfs-unixfs-importer": "^0.40.0",
+    "ipfs-unixfs-importer": "ipfs/js-ipfs-unixfs-importer#concurrent-file-import",
     "ipfs-utils": "~0.4.0",
     "ipld": "~0.25.0",
     "ipld-bitcoin": "~0.3.0",
diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js
index 3bfe6f9866..26ca6816db 100644
--- a/src/cli/commands/add.js
+++ b/src/cli/commands/add.js
@@ -49,10 +49,20 @@
       default: false,
       describe: 'Only chunk and hash, do not write'
     },
+    'block-write-concurrency': {
+      type: 'integer',
+      default: 10,
+      describe: 'After a file has been chunked, this controls how many chunks to hash and add to the block store concurrently'
+    },
     chunker: {
       default: 'size-262144',
       describe: 'Chunking algorithm to use, formatted like [size-{size}, rabin, rabin-{avg}, rabin-{min}-{avg}-{max}]'
     },
+    'file-import-concurrency': {
+      type: 'integer',
+      default: 50,
+      describe: 'How many files to import at once'
+    },
     'enable-sharding-experiment': {
       type: 'boolean',
       default: false
@@ -124,7 +134,10 @@
       wrapWithDirectory: argv.wrapWithDirectory,
       pin: argv.pin,
       chunker: argv.chunker,
-      preload: argv.preload
+      preload: argv.preload,
+      nonatomic: argv.nonatomic,
+      fileImportConcurrency: argv.fileImportConcurrency,
+      blockWriteConcurrency: argv.blockWriteConcurrency
     }
 
     if (options.enableShardingExperiment && argv.isDaemonOn()) {
diff --git a/src/core/components/files-regular/add-async-iterator.js b/src/core/components/files-regular/add-async-iterator.js
index e138a1cd66..562d35c039 100644
--- a/src/core/components/files-regular/add-async-iterator.js
+++ b/src/core/components/files-regular/add-async-iterator.js
@@ -22,8 +22,7 @@
         : Infinity
     }, options, {
       strategy: 'balanced',
-      chunker: chunkerOptions.chunker,
-      chunkerOptions: chunkerOptions.chunkerOptions
+      ...chunkerOptions
     })
 
     // CID v0 is for multihashes encoded with sha2-256
diff --git a/src/core/components/files-regular/utils.js b/src/core/components/files-regular/utils.js
index 876d0b0d48..8cd05f45d4 100644
--- a/src/core/components/files-regular/utils.js
+++ b/src/core/components/files-regular/utils.js
@@ -45,14 +45,12 @@
     }
     return {
       chunker: 'fixed',
-      chunkerOptions: {
-        maxChunkSize: size
-      }
+      maxChunkSize: size
     }
   } else if (chunker.startsWith('rabin')) {
     return {
       chunker: 'rabin',
-      chunkerOptions: parseRabinString(chunker)
+      ...parseRabinString(chunker)
     }
   } else {
     throw new Error(`Unrecognized chunker option: ${chunker}`)
diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js
index bc963ce3d1..f449328f7e 100644
--- a/src/http/api/resources/files-regular.js
+++ b/src/http/api/resources/files-regular.js
@@ -159,6 +159,8 @@
         'only-hash': Joi.boolean(),
         pin: Joi.boolean().default(true),
         'wrap-with-directory': Joi.boolean(),
+        'file-import-concurrency': Joi.number().integer().min(0).default(50),
+        'block-write-concurrency': Joi.number().integer().min(0).default(10),
         chunker: Joi.string(),
         trickle: Joi.boolean(),
         preload: Joi.boolean().default(true)
@@ -218,7 +220,9 @@
           pin: request.query.pin,
          chunker: request.query.chunker,
           trickle: request.query.trickle,
-          preload: request.query.preload
+          preload: request.query.preload,
+          fileImportConcurrency: request.query.fileImportConcurrency,
+          blockWriteConcurrency: request.query.blockWriteConcurrency
         })
       },
       async function (source) {
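
For reference, a minimal usage sketch (not part of the patch) of the two new options as they surface in the core API, assuming a js-ipfs build from this branch; the file paths and loop are illustrative only, and the equivalent CLI flags are `--file-import-concurrency` and `--block-write-concurrency`.

```js
// Sketch only: assumes a js-ipfs build that includes this change.
const IPFS = require('ipfs')

async function main () {
  const ipfs = await IPFS.create()

  // A batch of small files to exercise the concurrent import path (illustrative data).
  const files = []
  for (let i = 0; i < 100; i++) {
    files.push({
      path: `dir/file-${i}.txt`,
      content: Buffer.from(`file ${i}`)
    })
  }

  const results = await ipfs.add(files, {
    fileImportConcurrency: 50, // files imported in parallel (CLI: --file-import-concurrency)
    blockWriteConcurrency: 10 // chunks hashed and written per file in parallel (CLI: --block-write-concurrency)
  })

  console.log(results.map(r => `${r.path} ${r.hash}`).join('\n'))

  await ipfs.stop()
}

main()
```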