diff --git a/package.json b/package.json
index 92be97c..fa78ae4 100644
--- a/package.json
+++ b/package.json
@@ -4,7 +4,7 @@
   "description": "eip-4844 blobs upload sdk",
   "main": "dist/index.cjs.js",
   "module": "dist/index.esm.js",
-  "types": "types/index.d.ts",
+  "types": "index.d.ts",
   "exports": {
     ".": {
       "require": "./dist/index.cjs.js",
@@ -15,13 +15,15 @@
     }
   },
   "scripts": {
-    "build": "rollup -c"
+    "build": "rollup -c",
+    "prepublishOnly": "npm run build"
   },
   "dependencies": {
     "async-mutex": "^0.5.0",
     "dotenv": "^16.4.5",
     "ethers": "^6.13.1",
-    "kzg-wasm": "^0.4.0"
+    "kzg-wasm": "^0.4.0",
+    "workerpool": "^9.1.3"
   },
   "repository": {
     "type": "git",
diff --git a/rollup.config.mjs b/rollup.config.mjs
index 5c838eb..e14dd3b 100644
--- a/rollup.config.mjs
+++ b/rollup.config.mjs
@@ -17,17 +17,27 @@ export default [
             }
         ],
         plugins: [commonjs(), resolve()],
-        external: ["ethers", "kzg-wasm"]
+        external: ["ethers", "kzg-wasm", "workerpool"]
     },
 
     {
         input: 'src/node/file.js',
         output: {
             file: 'dist/file.cjs.js',
             format: 'cjs',
-            sourcemap: true
+            sourcemap: true,
         },
         plugins: [commonjs(), resolve()],
         external: ["ethers"]
     },
+    {
+        input: 'src/worker/worker.js',
+        output: {
+            file: 'dist/worker.cjs.js',
+            format: 'cjs',
+            sourcemap: true,
+        },
+        plugins: [commonjs(), resolve()],
+        external: ["kzg-wasm", "workerpool"]
+    },
 ];
diff --git a/src/flatdirectory.js b/src/flatdirectory.js
index 6c94b93..698859f 100644
--- a/src/flatdirectory.js
+++ b/src/flatdirectory.js
@@ -11,11 +11,14 @@ import {
 } from './param';
 import {
     BlobUploader, encodeBlobs,
-    getChainId, getFileChunk,
+    getChainId, getFileChunk, getHash,
     isBuffer, isFile,
-    stringToHex
+    stringToHex, isNodejs
 } from "./utils";
 
+import workerpool from 'workerpool';
+const pool = workerpool.pool(__dirname + '/worker.cjs.js');
+
 const REMOVE_FAIL = -1;
 const REMOVE_NORMAL = 0;
 const REMOVE_SUCCESS = 1;
@@ -132,7 +135,7 @@ export class FlatDirectory {
         const provider = new ethers.JsonRpcProvider(this.#ethStorageRpc);
         const contract = new ethers.Contract(this.#contractAddr, FlatDirectoryAbi, provider);
         try {
-            const blobCount = await contract.countChunks(hexName);
+            const blobCount = await this.#countChunks(contract, hexName);
             for (let i = 0; i < blobCount; i++) {
                 const result = await contract.readChunk(hexName, i);
                 const chunk = ethers.getBytes(result[0]);
@@ -184,17 +187,14 @@
 
     // private method
     async #estimateCostByBlob(request) {
+        const {key, content, gasIncPct = 0} = request;
+
         if (!this.#isSupportBlob) {
             throw new Error(`FlatDirectory: The contract does not support blob upload!`);
         }
 
-        const {key, content, gasIncPct} = request;
-        let blobLength = 0;
-        if (isFile(content)) {
-            blobLength = Math.ceil(content.size / DEFAULT_BLOB_DATA_SIZE);
-        } else if (isBuffer(content)) {
-            blobLength = Math.ceil(content.length / DEFAULT_BLOB_DATA_SIZE);
-        } else {
+        const blobLength = this.#getBlobLength(content);
+        if (blobLength === -1) {
             throw new Error(`FlatDirectory: Invalid upload content!`);
         }
 
@@ -210,31 +210,25 @@
         let gasLimit = 0;
         const [cost, oldChunkLength, maxFeePerBlobGas, gasFeeData] = await Promise.all([
             fileContract.upfrontPayment(),
-            fileContract.countChunks(hexName),
+            this.#countChunks(fileContract, hexName),
             this.#blobUploader.getBlobGasPrice(),
             this.#blobUploader.getGasPrice(),
         ]);
 
         // send
         for (let i = 0; i < blobLength; i += MAX_BLOB_COUNT) {
-            const data = isBuffer(content) ? Buffer.from(content).subarray(i * DEFAULT_BLOB_DATA_SIZE, (i + MAX_BLOB_COUNT) * DEFAULT_BLOB_DATA_SIZE) :
-                await getFileChunk(content, content.size, i * DEFAULT_BLOB_DATA_SIZE, (i + MAX_BLOB_COUNT) * DEFAULT_BLOB_DATA_SIZE);
-            const blobArr = encodeBlobs(data);
-            const chunkIdArr = [];
-            const chunkSizeArr = [];
-            const blobHashArr = [];
-            const blobHashRequestArr = [];
-            for (let j = 0; j < blobArr.length; j++) {
-                chunkIdArr.push(i + j);
-                chunkSizeArr.push(DEFAULT_BLOB_DATA_SIZE);
-
-                blobHashArr.push(this.#blobUploader.getBlobHash(blobArr[j]));
-                blobHashRequestArr.push(fileContract.getChunkHash(hexName, i + j));
-            }
-
+            const {
+                blobArr,
+                chunkIdArr,
+                chunkSizeArr,
+                blobHashRequestArr
+            } = await this.#getBlobInfo(fileContract, content, hexName, blobLength, i);
+
+            let blobHashArr;
             // check change
             if (chunkIdArr[0] < oldChunkLength) {
-                const isChange = await this.#checkChange(fileContract, blobHashArr, blobHashRequestArr);
+                blobHashArr = await this.#getBlobHashes(blobArr);
+                const isChange = await this.#checkChange(blobHashArr, blobHashRequestArr);
                 if (!isChange) {
                     continue;
                 }
@@ -246,22 +240,15 @@
             totalStorageCost += value;
             // gas cost
             if (gasLimit === 0) {
+                blobHashArr = blobHashArr ? blobHashArr : await this.#getBlobHashes(blobArr);
                 gasLimit = await fileContract.writeChunks.estimateGas(hexName, chunkIdArr, chunkSizeArr, {
                     value: value,
                     blobVersionedHashes: blobHashArr
                 });
             }
-            if (gasIncPct) {
-                const gasPrice = (gasFeeData.maxFeePerGas + gasFeeData.maxPriorityFeePerGas) * BigInt(100 + gasIncPct) / BigInt(100);
-                const gasCost = gasPrice * gasLimit;
-                const blobGasPrice = maxFeePerBlobGas * BigInt(100 + gasIncPct) / BigInt(100);
-                const blobGasCost = blobGasPrice * BigInt(BLOB_SIZE);
-                totalGasCost += gasCost + blobGasCost;
-            } else {
-                const gasCost = (gasFeeData.maxFeePerGas + gasFeeData.maxPriorityFeePerGas) * gasLimit;
-                const blobGasCost = maxFeePerBlobGas * BigInt(BLOB_SIZE);
-                totalGasCost += gasCost + blobGasCost;
-            }
+            const gasCost = (gasFeeData.maxFeePerGas + gasFeeData.maxPriorityFeePerGas) * BigInt(100 + gasIncPct) / BigInt(100) * gasLimit;
+            const blobGasCost = maxFeePerBlobGas * BigInt(100 + gasIncPct) / BigInt(100) * BigInt(BLOB_SIZE);
+            totalGasCost += gasCost + blobGasCost;
         }
 
         return {
@@ -271,41 +258,10 @@
     }
 
     async #estimateCostByCallData(request) {
-        const {key, content, gasIncPct} = request;
+        const {key, content, gasIncPct = 0} = request;
 
-        let chunkDataSize = 0;
-        let chunkLength = 1;
-        if (isFile(content)) {
-            chunkDataSize = content.size;
-            if (GALILEO_CHAIN_ID === this.#chainId) {
-                if (content.size > 475 * 1024) {
-                    // Data need to be sliced if file > 475K
-                    chunkDataSize = 475 * 1024;
-                    chunkLength = Math.ceil(content.size / (475 * 1024));
-                }
-            } else {
-                if (content.size > 24 * 1024 - 326) {
-                    // Data need to be sliced if file > 24K
-                    chunkDataSize = 24 * 1024 - 326;
-                    chunkLength = Math.ceil(content.size / (24 * 1024 - 326));
-                }
-            }
-        } else if (isBuffer(content)) {
-            chunkDataSize = content.length;
-            if (GALILEO_CHAIN_ID === this.#chainId) {
-                if (content.length > 475 * 1024) {
-                    // Data need to be sliced if file > 475K
-                    chunkDataSize = 475 * 1024;
-                    chunkLength = Math.ceil(content.length / (475 * 1024));
-                }
-            } else {
-                if (content.length > 24 * 1024 - 326) {
-                    // Data need to be sliced if file > 24K
-                    chunkDataSize = 24 * 1024 - 326;
-                    chunkLength = Math.ceil(content.length / (24 * 1024 - 326));
-                }
-            }
-        } else {
+        const {chunkDataSize, chunkLength} = this.#getChunkLength(content);
+        if (chunkDataSize === -1) {
             throw new Error(`FlatDirectory: Invalid upload content!`);
         }
 
@@ -317,7 +273,7 @@
         }
 
         const [oldChunkLength, gasFeeData] = await Promise.all([
-            fileContract.countChunks(hexName),
+            this.#countChunks(fileContract, hexName),
             this.#blobUploader.getGasPrice(),
         ]);
 
@@ -350,12 +306,8 @@
                 });
             }
             totalStorageCost += cost;
-            if (gasIncPct) {
-                totalGasCost += (gasFeeData.maxFeePerGas + gasFeeData.maxPriorityFeePerGas)
-                    * BigInt(100 + gasIncPct) / BigInt(100) * gasLimit;
-            } else {
-                totalGasCost += (gasFeeData.maxFeePerGas + gasFeeData.maxPriorityFeePerGas) * gasLimit;
-            }
+            totalGasCost += (gasFeeData.maxFeePerGas + gasFeeData.maxPriorityFeePerGas)
+                * BigInt(100 + gasIncPct) / BigInt(100) * gasLimit;
         }
 
         return {
@@ -376,6 +328,13 @@
             return;
         }
 
+        const blobLength = this.#getBlobLength(content);
+        if (blobLength === -1) {
+            callback.onFail(new Error(`FlatDirectory: Invalid upload content!`));
+            callback.onFinish(totalUploadChunks, totalUploadSize, totalStorageCost);
+            return;
+        }
+
         const hexName = stringToHex(key);
         const fileContract = new ethers.Contract(this.#contractAddr, FlatDirectoryAbi, this.#wallet);
         const fileMod = await fileContract.getStorageMode(hexName);
@@ -385,20 +344,11 @@
             return;
         }
 
-        let blobLength;
-        if (isFile(content)) {
-            blobLength = Math.ceil(content.size / DEFAULT_BLOB_DATA_SIZE);
-        } else if (isBuffer(content)) {
-            blobLength = Math.ceil(content.length / DEFAULT_BLOB_DATA_SIZE);
-        } else {
-            callback.onFail(new Error(`FlatDirectory: Invalid upload content!`));
-            callback.onFinish(totalUploadChunks, totalUploadSize, totalStorageCost);
-            return;
-        }
+
         // check old data
         const [cost, oldBlobLength] = await Promise.all([
             fileContract.upfrontPayment(),
-            fileContract.countChunks(hexName),
+            this.#countChunks(fileContract, hexName)
         ]);
         const clearState = await this.#clearOldFile(hexName, blobLength, oldBlobLength);
         if (clearState === REMOVE_FAIL) {
@@ -409,29 +359,19 @@
 
         // send
         for (let i = 0; i < blobLength; i += MAX_BLOB_COUNT) {
-            const data = isBuffer(content) ? Buffer.from(content).subarray(i * DEFAULT_BLOB_DATA_SIZE, (i + MAX_BLOB_COUNT) * DEFAULT_BLOB_DATA_SIZE) :
-                await getFileChunk(content, content.size, i * DEFAULT_BLOB_DATA_SIZE, (i + MAX_BLOB_COUNT) * DEFAULT_BLOB_DATA_SIZE);
-            const blobArr = encodeBlobs(data);
-            const chunkIdArr = [];
-            const chunkSizeArr = [];
-            const blobHashArr = [];
-            const blobHashRequestArr = [];
-            for (let j = 0; j < blobArr.length; j++) {
-                chunkIdArr.push(i + j);
-                if (i + j === blobLength - 1) {
-                    const size = isBuffer(content) ? content.length : content.size;
-                    chunkSizeArr.push(size - DEFAULT_BLOB_DATA_SIZE * (blobLength - 1));
-                } else {
-                    chunkSizeArr.push(DEFAULT_BLOB_DATA_SIZE);
-                }
-                blobHashArr.push(this.#blobUploader.getBlobHash(blobArr[j]));
-                blobHashRequestArr.push(fileContract.getChunkHash(hexName, i + j));
-            }
+            const {
+                blobArr,
+                chunkIdArr,
+                chunkSizeArr,
+                blobHashRequestArr
+            } = await this.#getBlobInfo(fileContract, content, hexName, blobLength, i);
+            const blobCommitmentArr = await this.#getBlobCommitments(blobArr);
 
             // check change
             if (clearState === REMOVE_NORMAL) {
                 try {
-                    const isChange = await this.#checkChange(fileContract, blobHashArr, blobHashRequestArr);
+                    const blobHashArr = this.#getHashes(blobCommitmentArr);
+                    const isChange = await this.#checkChange(blobHashArr, blobHashRequestArr);
                     if (!isChange) {
                         callback.onProgress(chunkIdArr[chunkIdArr.length - 1], blobLength, false);
                         continue;
@@ -444,7 +384,7 @@
 
             // upload
             try {
-                const status = await this.#uploadBlob(fileContract, key, hexName, blobArr, chunkIdArr, chunkSizeArr, cost, gasIncPct);
+                const status = await this.#uploadBlob(fileContract, key, hexName, blobArr, blobCommitmentArr, chunkIdArr, chunkSizeArr, cost, gasIncPct);
                 if (!status) {
                     callback.onFail(new Error("FlatDirectory: Sending transaction failed."));
                     break;
@@ -472,6 +412,13 @@
         let totalStorageCost = 0n;
         const {key, content, callback, gasIncPct} = request;
 
+        const {chunkDataSize, chunkLength} = this.#getChunkLength(content);
+        if (chunkDataSize === -1) {
+            callback.onFail(new Error(`FlatDirectory: Invalid upload content!`));
+            callback.onFinish(totalUploadChunks, totalUploadSize, totalStorageCost);
+            return;
+        }
+
         const hexName = stringToHex(key);
         const fileContract = new ethers.Contract(this.#contractAddr, FlatDirectoryAbi, this.#wallet);
         const fileMod = await fileContract.getStorageMode(hexName);
@@ -481,46 +428,8 @@
             return;
         }
 
-        let chunkDataSize;
-        let chunkLength = 1;
-        if (isFile(content)) {
-            chunkDataSize = content.size;
-            if (GALILEO_CHAIN_ID === this.#chainId) {
-                if (content.size > 475 * 1024) {
-                    // Data need to be sliced if file > 475K
-                    chunkDataSize = 475 * 1024;
-                    chunkLength = Math.ceil(content.size / (475 * 1024));
-                }
-            } else {
-                if (content.size > 24 * 1024 - 326) {
-                    // Data need to be sliced if file > 24K
-                    chunkDataSize = 24 * 1024 - 326;
-                    chunkLength = Math.ceil(content.size / (24 * 1024 - 326));
-                }
-            }
-        } else if (isBuffer(content)) {
-            chunkDataSize = content.length;
-            if (GALILEO_CHAIN_ID === this.#chainId) {
-                if (content.length > 475 * 1024) {
-                    // Data need to be sliced if file > 475K
-                    chunkDataSize = 475 * 1024;
-                    chunkLength = Math.ceil(content.length / (475 * 1024));
-                }
-            } else {
-                if (content.length > 24 * 1024 - 326) {
-                    // Data need to be sliced if file > 24K
-                    chunkDataSize = 24 * 1024 - 326;
-                    chunkLength = Math.ceil(content.length / (24 * 1024 - 326));
-                }
-            }
-        } else {
-            callback.onFail(new Error(`FlatDirectory: Invalid upload content!`));
-            callback.onFinish(totalUploadChunks, totalUploadSize, totalStorageCost);
-            return;
-        }
-
         // check old data
-        const oldChunkLength = await fileContract.countChunks(hexName);
+        const oldChunkLength = await this.#countChunks(fileContract, hexName);
         const clearState = await this.#clearOldFile(hexName, chunkLength, oldChunkLength);
         if (clearState === REMOVE_FAIL) {
             callback.onFail(new Error(`FlatDirectory: Failed to delete old data!`));
@@ -531,6 +440,7 @@
         for (let i = 0; i < chunkLength; i++) {
             const chunk = isBuffer(content) ? Buffer.from(content).subarray(i * chunkDataSize, (i + 1) * chunkDataSize) :
                 await getFileChunk(content, content.size, i * chunkDataSize, (i + 1) * chunkDataSize);
+
             // check is change
             if (clearState === REMOVE_NORMAL) {
                 const localHash = ethers.keccak256(chunk);
@@ -546,7 +456,6 @@
                 }
             }
 
-            // upload
             // upload
             try {
                 const status = await this.#uploadCallData(fileContract, key, hexName, i, chunk, gasIncPct);
@@ -586,19 +495,17 @@
         }
     }
 
-    async #checkChange(fileContract, blobHashArr, blobHashRequestArr) {
-        let hasChange = false;
+    async #checkChange(blobHashArr, blobHashRequestArr) {
        const dataHashArr = await Promise.all(blobHashRequestArr);
         for (let i = 0; i < blobHashArr.length; i++) {
             if (blobHashArr[i] !== dataHashArr[i]) {
-                hasChange = true;
-                break;
+                return true;
             }
         }
-        return hasChange;
+        return false;
     }
 
-    async #uploadBlob(fileContract, key, hexName, blobArr, chunkIdArr, chunkSizeArr, cost, gasIncPct) {
+    async #uploadBlob(fileContract, key, hexName, blobArr, blobCommitmentArr, chunkIdArr, chunkSizeArr, cost, gasIncPct) {
         // create tx
         const value = cost * BigInt(blobArr.length);
         const tx = await fileContract.writeChunks.populateTransaction(hexName, chunkIdArr, chunkSizeArr, {
@@ -615,8 +522,8 @@
             tx.maxFeePerBlobGas = blobGas * BigInt(100 + gasIncPct) / BigInt(100);
         }
         // send
-        const txResponse = await this.#blobUploader.sendTxLock(tx, blobArr);
-        console.log(`FlatDirectory: The ${chunkIdArr} chunks hash is ${txResponse.hash}`, key);
+        const txResponse = await this.#blobUploader.sendTxLock(tx, blobArr, blobCommitmentArr);
+        console.log(`FlatDirectory: The ${chunkIdArr} chunks hash is ${txResponse.hash}`, "", key);
         const txReceipt = await txResponse.wait();
         return txReceipt && txReceipt.status;
     }
@@ -637,8 +544,107 @@
 
         // send
         const txResponse = await this.#blobUploader.sendTxLock(tx);
-        console.log(`FlatDirectory: The ${chunkId} chunk hash is ${txResponse.hash}`, key);
+        console.log(`FlatDirectory: The ${chunkId} chunk hash is ${txResponse.hash}`, "", key);
         const txReceipt = await txResponse.wait();
         return txReceipt && txReceipt.status;
     }
+
+    async #countChunks(fileContract, hexName) {
+        const count = await fileContract.countChunks(hexName);
+        // BigInt to number
+        return Number(count);
+    }
+
+    async #getBlobCommitments(blobArr) {
+        // In Node.js, offload KZG commitment computation to the worker pool; in the browser, compute in-process.
+        const promises = isNodejs()
+            ? blobArr.map(blob => pool.exec('getCommitment', [blob]))
+            : blobArr.map(blob => this.#blobUploader.getCommitment(blob));
+        return await Promise.all(promises);
+    }
+
+    #getHashes(blobCommitmentArr) {
+        return blobCommitmentArr.map(commitment => getHash(commitment));
+    }
+
+    async #getBlobHashes(blobArr) {
+        const commitments = await this.#getBlobCommitments(blobArr);
+        return this.#getHashes(commitments);
+    }
+
+    #getBlobLength(content) {
+        let blobLength = -1;
+        if (isFile(content)) {
+            blobLength = Math.ceil(content.size / DEFAULT_BLOB_DATA_SIZE);
+        } else if (isBuffer(content)) {
+            blobLength = Math.ceil(content.length / DEFAULT_BLOB_DATA_SIZE);
+        }
+        return blobLength;
+    }
+
+    async #getBlobInfo(fileContract, content, hexName, blobLength, index) {
+        const data = isBuffer(content)
+            ? Buffer.from(content).subarray(index * DEFAULT_BLOB_DATA_SIZE, (index + MAX_BLOB_COUNT) * DEFAULT_BLOB_DATA_SIZE) :
+            await getFileChunk(content, content.size, index * DEFAULT_BLOB_DATA_SIZE, (index + MAX_BLOB_COUNT) * DEFAULT_BLOB_DATA_SIZE);
+        const blobArr = encodeBlobs(data);
+        const chunkIdArr = [];
+        const chunkSizeArr = [];
+        const blobHashRequestArr = [];
+        for (let j = 0; j < blobArr.length; j++) {
+            chunkIdArr.push(index + j);
+            if (index + j === blobLength - 1) {
+                const size = isBuffer(content) ? content.length : content.size;
+                chunkSizeArr.push(size - DEFAULT_BLOB_DATA_SIZE * (blobLength - 1));
+            } else {
+                chunkSizeArr.push(DEFAULT_BLOB_DATA_SIZE);
+            }
+            blobHashRequestArr.push(fileContract.getChunkHash(hexName, index + j));
+        }
+        return {
+            blobArr,
+            chunkIdArr,
+            chunkSizeArr,
+            blobHashRequestArr
+        }
+    }
+
+    #getChunkLength(content) {
+        let chunkDataSize = -1;
+        let chunkLength = 1;
+        if (isFile(content)) {
+            chunkDataSize = content.size;
+            if (GALILEO_CHAIN_ID === this.#chainId) {
+                if (content.size > 475 * 1024) {
+                    // Data needs to be sliced if file > 475K
+                    chunkDataSize = 475 * 1024;
+                    chunkLength = Math.ceil(content.size / (475 * 1024));
+                }
+            } else {
+                if (content.size > 24 * 1024 - 326) {
+                    // Data needs to be sliced if file > 24K
+                    chunkDataSize = 24 * 1024 - 326;
+                    chunkLength = Math.ceil(content.size / (24 * 1024 - 326));
+                }
+            }
+        } else if (isBuffer(content)) {
+            chunkDataSize = content.length;
+            if (GALILEO_CHAIN_ID === this.#chainId) {
+                if (content.length > 475 * 1024) {
+                    // Data needs to be sliced if file > 475K
+                    chunkDataSize = 475 * 1024;
+                    chunkLength = Math.ceil(content.length / (475 * 1024));
+                }
+            } else {
+                if (content.length > 24 * 1024 - 326) {
+                    // Data needs to be sliced if file > 24K
+                    chunkDataSize = 24 * 1024 - 326;
+                    chunkLength = Math.ceil(content.length / (24 * 1024 - 326));
+                }
+            }
+        }
+        return {
+            chunkDataSize,
+            chunkLength
+        }
+    }
 }
diff --git a/src/utils/uploader.js b/src/utils/uploader.js
index cb740af..e0caac8 100644
--- a/src/utils/uploader.js
+++ b/src/utils/uploader.js
@@ -1,18 +1,7 @@
 import {ethers} from "ethers";
 import {loadKZG} from 'kzg-wasm';
 import {Mutex} from 'async-mutex';
-
-function computeVersionedHash(commitment, blobCommitmentVersion) {
-    const computedVersionedHash = new Uint8Array(32);
-    computedVersionedHash.set([blobCommitmentVersion], 0);
-    const hash = ethers.getBytes(ethers.sha256(commitment));
-    computedVersionedHash.set(hash.subarray(1), 1);
-    return computedVersionedHash;
-}
-
-function commitmentsToVersionedHashes(commitment) {
-    return computeVersionedHash(commitment, 0x01);
-}
+import {getHash, commitmentsToVersionedHashes} from "./util";
 
 // blob gas price
 const MIN_BLOB_GASPRICE = 1n;
@@ -80,7 +69,7 @@
         return null;
     }
 
-    async sendTx(tx, blobs) {
+    async sendTx(tx, blobs = null, commitments = null) {
         if (!blobs) {
             return await this.#wallet.sendTransaction(tx);
         }
@@ -95,7 +84,7 @@
         const versionedHashes = [];
         for (let i = 0; i < blobs.length; i++) {
             const blob = blobs[i];
-            const commitment = kzg.blobToKzgCommitment(blob);
+            const commitment = (commitments && commitments.length > i) ? commitments[i] : kzg.blobToKzgCommitment(blob);
             const proof = kzg.computeBlobKzgProof(blob, commitment);
             ethersBlobs.push({
                 data: blob,
@@ -115,21 +104,21 @@
         return await this.#wallet.sendTransaction(tx);
     }
 
-    async sendTxLock(tx, blobs) {
+    async sendTxLock(tx, blobs = null, commitments = null) {
         const release = await this.#mutex.acquire();
         try {
-            return await this.sendTx(tx, blobs);
+            return await this.sendTx(tx, blobs, commitments);
         } finally {
             release();
         }
     }
 
+    getCommitment(blob) {
+        return this.#kzg.blobToKzgCommitment(blob);
+    }
+
     getBlobHash(blob) {
-        const kzg = this.#kzg;
-        const commit = kzg.blobToKzgCommitment(blob);
-        const localHash = commitmentsToVersionedHashes(commit);
-        const hash = new Uint8Array(32);
-        hash.set(localHash.subarray(0, 32 - 8));
-        return ethers.hexlify(hash);
+        const commit = this.getCommitment(blob);
+        return getHash(commit);
     }
 }
diff --git a/src/utils/util.js b/src/utils/util.js
index d457c00..b2ae7cc 100644
--- a/src/utils/util.js
+++ b/src/utils/util.js
@@ -32,3 +32,22 @@ export function isFile(content) {
 export function isNodejs() {
     return typeof process !== 'undefined' && !!process.versions && !!process.versions.node;
 }
+
+function computeVersionedHash(commitment, blobCommitmentVersion) {
+    const computedVersionedHash = new Uint8Array(32);
+    computedVersionedHash.set([blobCommitmentVersion], 0);
+    const hash = ethers.getBytes(ethers.sha256(commitment));
+    computedVersionedHash.set(hash.subarray(1), 1);
+    return computedVersionedHash;
+}
+
+export function commitmentsToVersionedHashes(commitment) {
+    return computeVersionedHash(commitment, 0x01);
+}
+
+export function getHash(commit) {
+    const localHash = commitmentsToVersionedHashes(commit);
+    const hash = new Uint8Array(32);
+    hash.set(localHash.subarray(0, 32 - 8));
+    return ethers.hexlify(hash);
+}
diff --git a/src/worker/worker.js b/src/worker/worker.js
new file mode 100644
index 0000000..c3f3309
--- /dev/null
+++ b/src/worker/worker.js
@@ -0,0 +1,20 @@
+import workerpool from 'workerpool';
+import {loadKZG} from 'kzg-wasm';
+
+let kzgInstance = null;
+
+async function initializeKzg() {
+    if (!kzgInstance) {
+        kzgInstance = await loadKZG();
+    }
+    return kzgInstance;
+}
+
+async function getCommitment(blob) {
+    const kzg = await initializeKzg();
+    return kzg.blobToKzgCommitment(blob);
+}
+
+workerpool.worker({
+    getCommitment: getCommitment,
+});
diff --git a/yarn.lock b/yarn.lock
index 4af0a41..94093a1 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -583,6 +583,11 @@ undici-types@~5.26.4:
   resolved "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617"
   integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==
 
+workerpool@^9.1.3:
+  version "9.1.3"
+  resolved "https://registry.npmjs.org/workerpool/-/workerpool-9.1.3.tgz#34b81f50f777a0e549c6dfaa0926575735e3f4b4"
+  integrity sha512-LhUrk4tbxJRDQmRrrFWA9EnboXI79fe0ZNTy3u8m+dqPN1EkVSIsQYAB8OF/fkyhG8Rtup+c/bzj/+bzbG8fqg==
+
 wrappy@1:
   version "1.0.2"
   resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
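
Note on the worker-pool pattern this patch introduces: in Node.js, `FlatDirectory.#getBlobCommitments` dispatches `getCommitment` calls to a workerpool backed by the bundled `dist/worker.cjs.js`, and the resulting commitments are threaded through to `BlobUploader.sendTx(tx, blobs, commitments)` so the KZG commitment is not recomputed when the blob transaction is assembled. A minimal standalone sketch of the same fan-out follows; the `demo.cjs` wrapper, the `commitmentsFor` helper, and the worker path are illustrative assumptions, not part of the SDK:

    // demo.cjs: illustrative sketch only.
    // Assumes `npm run build` has produced dist/worker.cjs.js as configured
    // in rollup.config.mjs above.
    const workerpool = require('workerpool');

    // The worker script registers a 'getCommitment' method (see src/worker/worker.js).
    const pool = workerpool.pool(__dirname + '/dist/worker.cjs.js');

    async function commitmentsFor(blobs) {
        // One pool.exec per blob: each call serializes the blob to a worker
        // thread, so commitments are computed in parallel off the main thread.
        return Promise.all(blobs.map(blob => pool.exec('getCommitment', [blob])));
    }

    module.exports = {commitmentsFor};

Because the pool is created once per process, repeated uploads reuse the same worker threads and the KZG trusted setup is loaded lazily in each worker the first time it is needed.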