diff --git a/__mocks__/typedDataExample.json b/__mocks__/typedData/baseExample.json similarity index 100% rename from __mocks__/typedDataExample.json rename to __mocks__/typedData/baseExample.json diff --git a/__mocks__/typedData/example_baseTypes.json b/__mocks__/typedData/example_baseTypes.json new file mode 100644 index 000000000..78b154305 --- /dev/null +++ b/__mocks__/typedData/example_baseTypes.json @@ -0,0 +1,39 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" }, + { "name": "revision", "type": "shortstring" } + ], + "Example": [ + { "name": "n0", "type": "felt" }, + { "name": "n1", "type": "bool" }, + { "name": "n2", "type": "string" }, + { "name": "n3", "type": "selector" }, + { "name": "n4", "type": "u128" }, + { "name": "n5", "type": "ContractAddress" }, + { "name": "n6", "type": "ClassHash" }, + { "name": "n7", "type": "timestamp" }, + { "name": "n8", "type": "shortstring" } + ] + }, + "primaryType": "Example", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1", + "revision": "1" + }, + "message": { + "n0": "0x3e8", + "n1": true, + "n2": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.", + "n3": "transfer", + "n4": "0x3e8", + "n5": "0x3e8", + "n6": "0x3e8", + "n7": 1000, + "n8": "transfer" + } +} diff --git a/__mocks__/typedData/example_enum.json b/__mocks__/typedData/example_enum.json new file mode 100644 index 000000000..c10ae9904 --- /dev/null +++ b/__mocks__/typedData/example_enum.json @@ -0,0 +1,28 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" }, + { "name": "revision", "type": "shortstring" } + ], + "Example": [{ "name": "someEnum", "type": "enum", "contains": "MyEnum" }], + "MyEnum": [ + { "name": "Variant 1", "type": "()" }, + { "name": "Variant 2", "type": "(u128,u128*)" }, + { "name": "Variant 3", "type": "(u128)" } + ] + }, + "primaryType": "Example", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1", + "revision": "1" + }, + "message": { + "someEnum": { + "Variant 2": [2, [0, 1]] + } + } +} diff --git a/__mocks__/typedData/example_presetTypes.json b/__mocks__/typedData/example_presetTypes.json new file mode 100644 index 000000000..f2cc9d7bc --- /dev/null +++ b/__mocks__/typedData/example_presetTypes.json @@ -0,0 +1,37 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" }, + { "name": "revision", "type": "shortstring" } + ], + "Example": [ + { "name": "n0", "type": "TokenAmount" }, + { "name": "n1", "type": "NftId" } + ] + }, + "primaryType": "Example", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1", + "revision": "1" + }, + "message": { + "n0": { + "token_address": "0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "amount": { + "low": "0x3e8", + "high": "0x0" + } + }, + "n1": { + "collection_address": "0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "token_id": { + "low": "0x3e8", + "high": "0x0" + } + } + } +} diff --git a/__mocks__/typedDataStructArrayExample.json b/__mocks__/typedData/mail_StructArray.json similarity index 100% rename from __mocks__/typedDataStructArrayExample.json 
rename to __mocks__/typedData/mail_StructArray.json diff --git a/__mocks__/typedDataSessionExample.json b/__mocks__/typedData/session_MerkleTree.json similarity index 100% rename from __mocks__/typedDataSessionExample.json rename to __mocks__/typedData/session_MerkleTree.json diff --git a/__tests__/account.test.ts b/__tests__/account.test.ts index f972001fa..4926fd382 100644 --- a/__tests__/account.test.ts +++ b/__tests__/account.test.ts @@ -1,4 +1,4 @@ -import typedDataExample from '../__mocks__/typedDataExample.json'; +import typedDataExample from '../__mocks__/typedData/baseExample.json'; import { Account, Contract, diff --git a/__tests__/utils/merkle.test.ts b/__tests__/utils/merkle.test.ts index 192178da7..b1053ef39 100644 --- a/__tests__/utils/merkle.test.ts +++ b/__tests__/utils/merkle.test.ts @@ -1,253 +1,282 @@ -import { pedersen } from '@scure/starknet'; - +import { computePedersenHash, computePoseidonHash } from '../../src/utils/hash'; import { MerkleTree, proofMerklePath } from '../../src/utils/merkle'; +import { BigNumberish } from '../../src/utils/num'; + +type RawHashMethod = (a: BigNumberish, b: BigNumberish) => string; -describe('MerkleTree class', () => { +describe.each([ + { + name: 'Pedersen', + hashMethod: computePedersenHash, + manualHashMethod: ((a, b) => MerkleTree.hash(a, b, computePedersenHash)) as RawHashMethod, + }, + { + name: 'Poseidon', + hashMethod: computePoseidonHash, + manualHashMethod: ((a, b) => MerkleTree.hash(a, b, computePoseidonHash)) as RawHashMethod, + }, +])('MerkleTree class with $name hash', ({ hashMethod, manualHashMethod }) => { describe('calculate hashes', () => { - test('should generate hash with sorted arguments', async () => { + test(`should generate hash with sorted arguments`, async () => { let leaves = ['0x12', '0xa']; // 18, 10 - let merkleHash = MerkleTree.hash(leaves[0], leaves[1]); - let rawHash = pedersen(leaves[1], leaves[0]); + let merkleHash = MerkleTree.hash(leaves[0], leaves[1], hashMethod); + let rawHash = manualHashMethod(leaves[1], leaves[0]); expect(merkleHash).toBe(rawHash); leaves = ['0x5bb9440e27889a364bcb678b1f679ecd1347acdedcbf36e83494f857cc58026', '0x3']; - merkleHash = MerkleTree.hash(leaves[0], leaves[1]); - rawHash = pedersen(leaves[1], leaves[0]); + merkleHash = MerkleTree.hash(leaves[0], leaves[1], hashMethod); + rawHash = manualHashMethod(leaves[1], leaves[0]); expect(merkleHash).toBe(rawHash); }); }); + describe('generate roots', () => { test('should generate valid root for 1 elements', async () => { const leaves = ['0x1']; const tree = new MerkleTree(leaves); const manualMerkle = leaves[0]; - expect(tree.root).toBe(manualMerkle); }); + test('should generate valid root for 2 elements', async () => { const leaves = ['0x1', '0x2']; - const tree = new MerkleTree(leaves); - - const manualMerkle = MerkleTree.hash(leaves[0], leaves[1]); + const tree = new MerkleTree(leaves, hashMethod); + const manualMerkle = manualHashMethod(leaves[0], leaves[1]); expect(tree.root).toBe(manualMerkle); }); + test('should generate valid root for 4 elements', async () => { const leaves = ['0x1', '0x2', '0x3', '0x4']; - const tree = new MerkleTree(leaves); + const tree = new MerkleTree(leaves, hashMethod); - const manualMerkle = MerkleTree.hash( - MerkleTree.hash(leaves[0], leaves[1]), - MerkleTree.hash(leaves[2], leaves[3]) + const manualMerkle = manualHashMethod( + manualHashMethod(leaves[0], leaves[1]), + manualHashMethod(leaves[2], leaves[3]) ); - expect(tree.root).toBe(manualMerkle); }); + test('should generate valid root for 6 elements', async () => { const
leaves = ['0x1', '0x2', '0x3', '0x4', '0x5', '0x6']; - const tree = new MerkleTree(leaves); + const tree = new MerkleTree(leaves, hashMethod); - const manualMerkle = MerkleTree.hash( - MerkleTree.hash( - MerkleTree.hash(leaves[0], leaves[1]), - MerkleTree.hash(leaves[2], leaves[3]) + const manualMerkle = manualHashMethod( + manualHashMethod( + manualHashMethod(leaves[0], leaves[1]), + manualHashMethod(leaves[2], leaves[3]) ), - MerkleTree.hash(MerkleTree.hash(leaves[4], leaves[5]), '0x0') + manualHashMethod(manualHashMethod(leaves[4], leaves[5]), '0x0') ); - expect(tree.root).toBe(manualMerkle); }); + test('should generate valid root for 7 elements', async () => { const leaves = ['0x1', '0x2', '0x3', '0x4', '0x5', '0x6', '0x7']; - const tree = new MerkleTree(leaves); + const tree = new MerkleTree(leaves, hashMethod); - const manualMerkle = MerkleTree.hash( - MerkleTree.hash( - MerkleTree.hash(leaves[0], leaves[1]), - MerkleTree.hash(leaves[2], leaves[3]) + const manualMerkle = manualHashMethod( + manualHashMethod( + manualHashMethod(leaves[0], leaves[1]), + manualHashMethod(leaves[2], leaves[3]) ), - MerkleTree.hash(MerkleTree.hash(leaves[4], leaves[5]), MerkleTree.hash(leaves[6], '0x0')) + manualHashMethod(manualHashMethod(leaves[4], leaves[5]), manualHashMethod(leaves[6], '0x0')) ); - expect(tree.root).toBe(manualMerkle); }); }); + describe('generate proofs', () => { let tree: MerkleTree; + beforeAll(() => { const leaves = ['0x1', '0x2', '0x3', '0x4', '0x5', '0x6', '0x7']; - tree = new MerkleTree(leaves); + tree = new MerkleTree(leaves, hashMethod); }); + test('should return proof path for valid child', async () => { const proof = tree.getProof('0x3'); - const manualProof = [ '0x4', - MerkleTree.hash('0x1', '0x2'), - MerkleTree.hash(MerkleTree.hash('0x5', '0x6'), MerkleTree.hash('0x7', '0x0')), + manualHashMethod('0x1', '0x2'), + manualHashMethod(manualHashMethod('0x5', '0x6'), manualHashMethod('0x7', '0x0')), ]; - expect(proof).toEqual(manualProof); }); + test('should return proof path for valid child', async () => { const proof = tree.getProof('0x7'); - const manualProof = [ '0x0', // proofs should always be as long as the tree is deep - MerkleTree.hash('0x5', '0x6'), - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), + manualHashMethod('0x5', '0x6'), + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), ]; - expect(proof).toEqual(manualProof); }); + test('should return proof path for valid child', async () => { const proof = tree.getProof('0x5'); - const manualProof = [ '0x6', - MerkleTree.hash('0x7', '0x0'), // tree should be padded with 0x0 so that all proofs are equals in size - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), + manualHashMethod('0x7', '0x0'), // tree should be padded with 0x0 so that all proofs are equal in size + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), ]; - expect(proof).toEqual(manualProof); }); + test('should throw for invalid child', () => { expect(() => tree.getProof('0x8')).toThrow('leaf not found'); }); }); + describe('verify proofs', () => { let tree: MerkleTree; + beforeAll(() => { const leaves = ['0x1', '0x2', '0x3', '0x4', '0x5', '0x6', '0x7']; - tree = new MerkleTree(leaves); + tree = new MerkleTree(leaves, hashMethod); }); test('should return true for valid manual proof', async () => { const manualProof = [ '0x0', // tree should be padded with 0x0 so that all proofs are equals in size - MerkleTree.hash('0x5', '0x6'), - 
MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), + manualHashMethod('0x5', '0x6'), + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), ]; const leaf = '0x7'; const { root } = tree; - - expect(proofMerklePath(root, leaf, manualProof)).toBe(true); + expect(proofMerklePath(root, leaf, manualProof, hashMethod)).toBe(true); }); + test('should return true for valid manual proof', async () => { const manualProof = [ '0x6', - MerkleTree.hash('0x7', '0x0'), // tree should be padded with 0x0 so that all proofs are equals in size - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), + manualHashMethod('0x7', '0x0'), // tree should be padded with 0x0 so that all proofs are equal in size + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), ]; const leaf = '0x5'; const { root } = tree; - - expect(proofMerklePath(root, leaf, manualProof)).toBe(true); + expect(proofMerklePath(root, leaf, manualProof, hashMethod)).toBe(true); }); + test('should return true for valid proof', async () => { const proof = tree.getProof('0x3'); const leaf = '0x3'; const { root } = tree; - - expect(proofMerklePath(root, leaf, proof)).toBe(true); + expect(proofMerklePath(root, leaf, proof, hashMethod)).toBe(true); }); + test('should return false for invalid proof (root)', async () => { const proof = tree.getProof('0x3'); const leaf = '0x3'; const root = '0x4'; - - expect(proofMerklePath(root, leaf, proof)).toBe(false); + expect(proofMerklePath(root, leaf, proof, hashMethod)).toBe(false); }); + test('should return false for invalid proof (proof[0])', async () => { const proof = tree.getProof('0x3'); const leaf = '0x3'; const { root } = tree; proof[0] = '0x7'; - expect(proofMerklePath(root, leaf, proof)).toBe(false); + expect(proofMerklePath(root, leaf, proof, hashMethod)).toBe(false); }); + test('should return false for invalid proof (proof[1])', async () => { const proof = tree.getProof('0x3'); const leaf = '0x3'; const { root } = tree; proof[1] = '0x4'; - expect(proofMerklePath(root, leaf, proof)).toBe(false); + expect(proofMerklePath(root, leaf, proof, hashMethod)).toBe(false); }); + test('should return false for invalid proof (proof[2])', async () => { const proof = tree.getProof('0x3'); const leaf = '0x3'; const { root } = tree; proof[2] = '0x4'; - expect(proofMerklePath(root, leaf, proof)).toBe(false); + expect(proofMerklePath(root, leaf, proof, hashMethod)).toBe(false); }); }); + describe('verify 2-deep tree with empty data on the right', () => { let tree: MerkleTree; + beforeAll(() => { const leaves = ['0x1', '0x2', '0x3']; - tree = new MerkleTree(leaves); + tree = new MerkleTree(leaves, hashMethod); }); + test('should return 1-length proof in a 2-length tree', async () => { const proof = tree.getProof('0x3'); - const manualProof = ['0x0', MerkleTree.hash('0x1', '0x2')]; + const manualProof = ['0x0', manualHashMethod('0x1', '0x2')]; expect(proof).toEqual(manualProof); }); + test('should check the previous proof works fine', async () => { - const manualMerkle = MerkleTree.hash( - MerkleTree.hash('0x3', '0x0'), - MerkleTree.hash('0x1', '0x2') + const manualMerkle = manualHashMethod( + manualHashMethod('0x3', '0x0'), + manualHashMethod('0x1', '0x2') ); expect(tree.root).toBe(manualMerkle); }); }); + describe('verify 3-deep tree with empty data on the right', () => { let tree: MerkleTree; + beforeAll(() => { const leaves = ['0x1', '0x2', '0x3', '0x4', '0x5', '0x6']; - tree = new MerkleTree(leaves); + tree = 
new MerkleTree(leaves, hashMethod); }); + test('should return 2-length proof with the 2nd place skipped', async () => { const proof = tree.getProof('0x5'); const manualProof = [ '0x6', '0x0', - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), ]; expect(proof).toEqual(manualProof); }); + test('should check the previous proof works fine', async () => { - const manualMerkle = MerkleTree.hash( - MerkleTree.hash(MerkleTree.hash('0x5', '0x6'), '0x0'), - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')) + const manualMerkle = manualHashMethod( + manualHashMethod(manualHashMethod('0x5', '0x6'), '0x0'), + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')) ); expect(tree.root).toBe(manualMerkle); }); }); + describe('verify 4-deep tree with empty data on the right', () => { let tree: MerkleTree; + beforeAll(() => { const leaves = ['0x1', '0x2', '0x3', '0x4', '0x5', '0x6', '0x7', '0x8', '0x9']; - tree = new MerkleTree(leaves); + tree = new MerkleTree(leaves, hashMethod); }); + test('should return 2-length proof with the 2nd place skipped', async () => { const proof = tree.getProof('0x9'); const manualProof = [ '0x0', '0x0', '0x0', - MerkleTree.hash( - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), - MerkleTree.hash(MerkleTree.hash('0x5', '0x6'), MerkleTree.hash('0x7', '0x8')) + manualHashMethod( + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), + manualHashMethod(manualHashMethod('0x5', '0x6'), manualHashMethod('0x7', '0x8')) ), ]; expect(proof).toEqual(manualProof); }); + test('should check the previous proof works fine', async () => { - const manualMerkle = MerkleTree.hash( - MerkleTree.hash(MerkleTree.hash(MerkleTree.hash('0x9', '0x0'), '0x0'), '0x0'), - MerkleTree.hash( - MerkleTree.hash(MerkleTree.hash('0x1', '0x2'), MerkleTree.hash('0x3', '0x4')), - MerkleTree.hash(MerkleTree.hash('0x5', '0x6'), MerkleTree.hash('0x7', '0x8')) + const manualMerkle = manualHashMethod( + manualHashMethod(manualHashMethod(manualHashMethod('0x9', '0x0'), '0x0'), '0x0'), + manualHashMethod( + manualHashMethod(manualHashMethod('0x1', '0x2'), manualHashMethod('0x3', '0x4')), + manualHashMethod(manualHashMethod('0x5', '0x6'), manualHashMethod('0x7', '0x8')) ) ); expect(tree.root).toBe(manualMerkle); diff --git a/__tests__/utils/typedData.test.ts b/__tests__/utils/typedData.test.ts index e6b975bfa..9ede00d77 100644 --- a/__tests__/utils/typedData.test.ts +++ b/__tests__/utils/typedData.test.ts @@ -1,10 +1,16 @@ -import typedDataExample from '../../__mocks__/typedDataExample.json'; -import typedDataSessionExample from '../../__mocks__/typedDataSessionExample.json'; -import typedDataStructArrayExample from '../../__mocks__/typedDataStructArrayExample.json'; +import * as starkCurve from '@scure/starknet'; + +import typedDataExample from '../../__mocks__/typedData/baseExample.json'; +import exampleBaseTypes from '../../__mocks__/typedData/example_baseTypes.json'; +import exampleEnum from '../../__mocks__/typedData/example_enum.json'; +import examplePresetTypes from '../../__mocks__/typedData/example_presetTypes.json'; +import typedDataStructArrayExample from '../../__mocks__/typedData/mail_StructArray.json'; +import typedDataSessionExample from '../../__mocks__/typedData/session_MerkleTree.json'; import { BigNumberish, StarkNetDomain, num } from '../../src'; import { getSelectorFromName } 
from '../../src/utils/hash'; import { MerkleTree } from '../../src/utils/merkle'; import { + TypedDataRevision, encodeType, encodeValue, getMessageHash, @@ -12,43 +18,81 @@ import { getTypeHash, } from '../../src/utils/typedData'; +const exampleAddress = '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826'; + describe('typedData', () => { test('should get right type encoding', () => { - const typeEncoding = encodeType(typedDataExample.types, 'Mail'); - expect(typeEncoding).toMatchInlineSnapshot( + let encoded: string; + encoded = encodeType(typedDataExample.types, 'Mail'); + expect(encoded).toMatchInlineSnapshot( `"Mail(from:Person,to:Person,contents:felt)Person(name:felt,wallet:felt)"` ); - const typeEncodingStructArr = encodeType(typedDataStructArrayExample.types, 'Mail'); - expect(typeEncodingStructArr).toMatchInlineSnapshot( + encoded = encodeType(typedDataStructArrayExample.types, 'Mail'); + expect(encoded).toMatchInlineSnapshot( `"Mail(from:Person,to:Person,posts_len:felt,posts:Post*)Person(name:felt,wallet:felt)Post(title:felt,content:felt)"` ); + encoded = encodeType(typedDataExample.types, 'Mail', TypedDataRevision.Active); + expect(encoded).toMatchInlineSnapshot( + `"\\"Mail\\"(\\"from\\":\\"Person\\",\\"to\\":\\"Person\\",\\"contents\\":\\"felt\\")\\"Person\\"(\\"name\\":\\"felt\\",\\"wallet\\":\\"felt\\")"` + ); + encoded = encodeType(typedDataStructArrayExample.types, 'Mail', TypedDataRevision.Active); + expect(encoded).toMatchInlineSnapshot( + `"\\"Mail\\"(\\"from\\":\\"Person\\",\\"to\\":\\"Person\\",\\"posts_len\\":\\"felt\\",\\"posts\\":\\"Post*\\")\\"Person\\"(\\"name\\":\\"felt\\",\\"wallet\\":\\"felt\\")\\"Post\\"(\\"title\\":\\"felt\\",\\"content\\":\\"felt\\")"` + ); + encoded = encodeType(exampleBaseTypes.types, 'Example', TypedDataRevision.Active); + expect(encoded).toMatchInlineSnapshot( + `"\\"Example\\"(\\"n0\\":\\"felt\\",\\"n1\\":\\"bool\\",\\"n2\\":\\"string\\",\\"n3\\":\\"selector\\",\\"n4\\":\\"u128\\",\\"n5\\":\\"ContractAddress\\",\\"n6\\":\\"ClassHash\\",\\"n7\\":\\"timestamp\\",\\"n8\\":\\"shortstring\\")"` + ); + encoded = encodeType(examplePresetTypes.types, 'Example', TypedDataRevision.Active); + expect(encoded).toMatchInlineSnapshot( + `"\\"Example\\"(\\"n0\\":\\"TokenAmount\\",\\"n1\\":\\"NftId\\")"` + ); + encoded = encodeType(exampleEnum.types, 'Example', TypedDataRevision.Active); + expect(encoded).toMatchInlineSnapshot( + `"\\"Example\\"(\\"someEnum\\":\\"MyEnum\\")\\"MyEnum\\"(\\"Variant 1\\":(),\\"Variant 2\\":(\\"u128\\",\\"u128*\\"),\\"Variant 3\\":(\\"u128\\"))"` + ); }); test('should get right type hash', () => { - const typeHashDomain = getTypeHash(typedDataExample.types, 'StarkNetDomain'); - expect(typeHashDomain).toMatchInlineSnapshot( + let typeHash: string; + typeHash = getTypeHash(typedDataExample.types, 'StarkNetDomain'); + expect(typeHash).toMatchInlineSnapshot( `"0x1bfc207425a47a5dfa1a50a4f5241203f50624ca5fdf5e18755765416b8e288"` ); - const typeHashPerson = getTypeHash(typedDataExample.types, 'Person'); - expect(typeHashPerson).toMatchInlineSnapshot( + typeHash = getTypeHash(typedDataExample.types, 'Person'); + expect(typeHash).toMatchInlineSnapshot( `"0x2896dbe4b96a67110f454c01e5336edc5bbc3635537efd690f122f4809cc855"` ); - const typeHashMail = getTypeHash(typedDataExample.types, 'Mail'); - expect(typeHashMail).toMatchInlineSnapshot( + typeHash = getTypeHash(typedDataExample.types, 'Mail'); + expect(typeHash).toMatchInlineSnapshot( `"0x13d89452df9512bf750f539ba3001b945576243288137ddb6c788457d4b2f79"` ); - const typeHashPost = 
getTypeHash(typedDataStructArrayExample.types, 'Post'); - expect(typeHashPost).toMatchInlineSnapshot( + typeHash = getTypeHash(typedDataStructArrayExample.types, 'Post'); + expect(typeHash).toMatchInlineSnapshot( `"0x1d71e69bf476486b43cdcfaf5a85c00bb2d954c042b281040e513080388356d"` ); - const typeHashMailWithStructArray = getTypeHash(typedDataStructArrayExample.types, 'Mail'); - expect(typeHashMailWithStructArray).toMatchInlineSnapshot( + typeHash = getTypeHash(typedDataStructArrayExample.types, 'Mail'); + expect(typeHash).toMatchInlineSnapshot( `"0x873b878e35e258fc99e3085d5aaad3a81a0c821f189c08b30def2cde55ff27"` ); - const selectorTypeHash = getTypeHash({}, 'selector'); - expect(selectorTypeHash).toMatchInlineSnapshot( + typeHash = getTypeHash({}, 'selector'); + expect(typeHash).toMatchInlineSnapshot( `"0x1d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"` ); + typeHash = getTypeHash(exampleBaseTypes.types, 'Example', TypedDataRevision.Active); + expect(typeHash).toMatchInlineSnapshot( + `"0x2e5b7e12ca4388c49b4ceb305d853b8f7bf5f36525fea5e4255346b80153249"` + ); + typeHash = getTypeHash(examplePresetTypes.types, 'Example', TypedDataRevision.Active); + expect(typeHash).toMatchInlineSnapshot( + `"0x155de33c6a0cc7f2b8926afc7a71fc2ac31ffc26726aee5da0570c5d517a763"` + ); + typeHash = getTypeHash(exampleEnum.types, 'Example', TypedDataRevision.Active); + expect(typeHash).toMatchInlineSnapshot( + `"0x380a54d417fb58913b904675d94a8a62e2abc3467f4b5439de0fd65fafdd1a8"` + ); }); test('should transform type selector', () => { @@ -132,16 +176,25 @@ describe('typedData', () => { ); }); + test('should get right hash for StarknetDomain', () => { + const hash = getStructHash( + exampleBaseTypes.types, + 'StarknetDomain', + exampleBaseTypes.domain as StarkNetDomain, + TypedDataRevision.Active + ); + expect(hash).toMatchInlineSnapshot( + `"0x555f72e550b308e50c1a4f8611483a174026c982a9893a05c185eeb85399657"` + ); + }); + test('should get right hash for entire message', () => { - const hash = getMessageHash(typedDataExample, '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826'); + const hash = getMessageHash(typedDataExample, exampleAddress); expect(hash).toMatchInlineSnapshot( `"0x6fcff244f63e38b9d88b9e3378d44757710d1b244282b435cb472053c8d78d0"` ); - const hashStructArr = getMessageHash( - typedDataStructArrayExample, - '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826' - ); + const hashStructArr = getMessageHash(typedDataStructArrayExample, exampleAddress); expect(hashStructArr).toMatchInlineSnapshot( `"0x5914ed2764eca2e6a41eb037feefd3d2e33d9af6225a9e7fe31ac943ff712c"` ); @@ -187,7 +240,7 @@ describe('typedData', () => { message: { from: { name: 'Cow', - wallet: '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826', + wallet: exampleAddress, }, to: { name: 'Bob', @@ -200,22 +253,43 @@ describe('typedData', () => { }; test('should transform strings correctly', () => { - const hash = getMessageHash( - typedDataStringExample, - '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826' - ); + const hash = getMessageHash(typedDataStringExample, exampleAddress); expect(hash).toMatchInlineSnapshot( `"0x70338fb11b8f70b68b261de8a322bcb004bd85e88ac47d9147982c7f5ac66fd"` ); }); test('should transform session message correctly', () => { - const hash = getMessageHash( - typedDataSessionExample, - '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826' - ); + const hash = getMessageHash(typedDataSessionExample, exampleAddress); expect(hash).toMatchInlineSnapshot( `"0x751fb7d98545f7649d0d0eadc80d770fcd88d8cfaa55590b284f4e1b701ef0a"` ); }); + + test('should 
hash messages with revision 1 types', () => { + // necessary to spy on the dependency since function spies (hash.computePedersenHash; hash.computePoseidonHash) won't work + const spyPedersen = jest.spyOn(starkCurve, 'pedersen'); + const spyPoseidon = jest.spyOn(starkCurve, 'poseidonHashMany'); + + let messageHash: string; + messageHash = getMessageHash(exampleBaseTypes, exampleAddress); + expect(messageHash).toMatchInlineSnapshot( + `"0x790d9fa99cf9ad91c515aaff9465fcb1c87784d9cfb27271ed193675cd06f9c"` + ); + + messageHash = getMessageHash(examplePresetTypes, exampleAddress); + expect(messageHash).toMatchInlineSnapshot( + `"0x26e7b8cedfa63cdbed14e7e51b60ee53ac82bdf26724eb1e3f0710cb8987522"` + ); + + messageHash = getMessageHash(exampleEnum, exampleAddress); + expect(messageHash).toMatchInlineSnapshot( + `"0x3df10475ad5a8f49db4345a04a5b09164d2e24b09f6e1e236bc1ccd87627cc"` + ); + + expect(spyPedersen).not.toHaveBeenCalled(); + expect(spyPoseidon).toHaveBeenCalled(); + spyPedersen.mockRestore(); + spyPoseidon.mockRestore(); + }); }); diff --git a/src/types/typedData.ts b/src/types/typedData.ts index d626b931c..84c108830 100644 --- a/src/types/typedData.ts +++ b/src/types/typedData.ts @@ -1,3 +1,16 @@ +// TODO: adjust starknet casing in v6 + +export enum TypedDataRevision { + Active = '1', + Legacy = '0', +} + +export type StarkNetEnumType = { + name: string; + type: 'enum'; + contains: string; +}; + export type StarkNetMerkleType = { name: string; type: 'merkletree'; @@ -15,6 +28,7 @@ export type StarkNetType = name: string; type: string; } + | StarkNetEnumType | StarkNetMerkleType; /** @@ -24,6 +38,7 @@ export interface StarkNetDomain extends Record<string, unknown> { name?: string; version?: string; chainId?: string | number; + revision?: string; } /** diff --git a/src/utils/hash.ts b/src/utils/hash.ts index 3c8268d2c..cbeac3dc0 100644 --- a/src/utils/hash.ts +++ b/src/utils/hash.ts @@ -29,7 +29,7 @@ import { felt } from './calldata/cairo'; import { starkCurve } from './ec'; import { addHexPrefix, utf8ToArray } from './encode'; import { parse, stringify } from './json'; -import { toBigInt, toHex } from './num'; +import { toHex } from './num'; import { getSelectorFromName } from './selector'; import { encodeShortString } from './shortString'; @@ -50,16 +50,30 @@ export function getVersionsByType(versionType?: 'fee' | 'transaction') { : { v1: transactionVersion, v2: transactionVersion_2 }; } +export function computePedersenHash(a: BigNumberish, b: BigNumberish): string { + return starkCurve.pedersen(BigInt(a), BigInt(b)); +} + +export function computePoseidonHash(a: BigNumberish, b: BigNumberish): string { + return toHex(starkCurve.poseidonHash(BigInt(a), BigInt(b))); +} + /** * Compute pedersen hash from data * @returns format: hex-string - pedersen hash */ export function computeHashOnElements(data: BigNumberish[]): string { return [...data, data.length] - .reduce((x: BigNumberish, y: BigNumberish) => starkCurve.pedersen(toBigInt(x), toBigInt(y)), 0) + .reduce((x: BigNumberish, y: BigNumberish) => starkCurve.pedersen(BigInt(x), BigInt(y)), 0) .toString(); } +export const computePedersenHashOnElements = computeHashOnElements; + +export function computePoseidonHashOnElements(data: BigNumberish[]): string { + return toHex(starkCurve.poseidonHashMany(data.map((x) => BigInt(x)))); +} + /** * Calculate transaction pedersen hash for common properties * diff --git a/src/utils/merkle.ts b/src/utils/merkle.ts index 779d57e67..71c850dee 100644 --- a/src/utils/merkle.ts +++ b/src/utils/merkle.ts @@ -1,5 +1,5 @@ -import { starkCurve } from './ec'; 
-import { toBigInt } from './num'; +import { BigNumberish } from '../types'; +import { computePedersenHash } from './hash'; export class MerkleTree { public leaves: string[]; @@ -8,7 +8,13 @@ export class MerkleTree { public root: string; - constructor(leafHashes: string[]) { + public hashMethod: (a: BigNumberish, b: BigNumberish) => string; + + constructor( + leafHashes: string[], + hashMethod: (a: BigNumberish, b: BigNumberish) => string = computePedersenHash + ) { + this.hashMethod = hashMethod; this.leaves = leafHashes; this.root = this.build(leafHashes); } @@ -28,21 +34,25 @@ export class MerkleTree { const newLeaves: string[] = []; for (let i = 0; i < leaves.length; i += 2) { if (i + 1 === leaves.length) { - newLeaves.push(MerkleTree.hash(leaves[i], '0x0')); + newLeaves.push(MerkleTree.hash(leaves[i], '0x0', this.hashMethod)); } else { - newLeaves.push(MerkleTree.hash(leaves[i], leaves[i + 1])); + newLeaves.push(MerkleTree.hash(leaves[i], leaves[i + 1], this.hashMethod)); } } return this.build(newLeaves); } /** - * Create pedersen hash from a and b + * Create hash from ordered a and b, Pedersen hash default * @returns format: hex-string */ - static hash(a: string, b: string) { - const [aSorted, bSorted] = [toBigInt(a), toBigInt(b)].sort((x, y) => (x >= y ? 1 : -1)); - return starkCurve.pedersen(aSorted, bSorted); + static hash( + a: BigNumberish, + b: BigNumberish, + hashMethod: (a: BigNumberish, b: BigNumberish) => string = computePedersenHash + ) { + const [aSorted, bSorted] = [BigInt(a), BigInt(b)].sort((x, y) => (x >= y ? 1 : -1)); + return hashMethod(aSorted, bSorted); } /** @@ -69,7 +79,7 @@ export class MerkleTree { : this.branches.findIndex((b) => b.length === branch.length); const nextBranch = this.branches[currentBranchLevelIndex + 1] ?? [this.root]; return this.getProof( - MerkleTree.hash(isLeft ? leaf : neededBranch, isLeft ? neededBranch : leaf), + MerkleTree.hash(isLeft ? leaf : neededBranch, isLeft ? 
neededBranch : leaf, this.hashMethod), nextBranch, newHashPath ); @@ -81,11 +91,17 @@ * @param root hex-string * @param leaf hex-string * @param path hex-string array + * @param hashMethod hash method override, Pedersen default */ -export function proofMerklePath(root: string, leaf: string, path: string[]): boolean { +export function proofMerklePath( + root: string, + leaf: string, + path: string[], + hashMethod: (a: BigNumberish, b: BigNumberish) => string = computePedersenHash +): boolean { if (path.length === 0) { return root === leaf; } const [next, ...rest] = path; - return proofMerklePath(root, MerkleTree.hash(leaf, next), rest); + return proofMerklePath(root, MerkleTree.hash(leaf, next, hashMethod), rest, hashMethod); } diff --git a/src/utils/typedData.ts b/src/utils/typedData.ts index 1d581b5e3..3dfb99a5f 100644 --- a/src/utils/typedData.ts +++ b/src/utils/typedData.ts @@ -1,12 +1,97 @@ -import { BigNumberish, StarkNetMerkleType, StarkNetType, TypedData } from '../types'; -import { computeHashOnElements, getSelectorFromName } from './hash'; +/* eslint-disable no-param-reassign */ +import { + BigNumberish, + TypedDataRevision as Revision, + StarkNetEnumType, + StarkNetMerkleType, + StarkNetType, + TypedData, +} from '../types'; +import { + computePedersenHash, + computePedersenHashOnElements, + computePoseidonHash, + computePoseidonHashOnElements, + getSelectorFromName, +} from './hash'; import { MerkleTree } from './merkle'; import { isHex, toHex } from './num'; -import { encodeShortString } from './shortString'; +import { encodeShortString, splitLongString } from './shortString'; /** @deprecated prefer importing from 'types' over 'typedData' */ export * from '../types/typedData'; +interface Context { + parent?: string; + key?: string; +} + +interface Configuration { + domain: string; + hashMethod: (data: BigNumberish[]) => string; + hashMerkleMethod: (a: BigNumberish, b: BigNumberish) => string; + escapeTypeString: (s: string) => string; + presetTypes: TypedData['types']; +} + +const presetTypes: TypedData['types'] = { + u256: JSON.parse('[{ "name": "low", "type": "u128" }, { "name": "high", "type": "u128" }]'), + TokenAmount: JSON.parse( + '[{ "name": "token_address", "type": "ContractAddress" }, { "name": "amount", "type": "u256" }]' + ), + NftId: JSON.parse( + '[{ "name": "collection_address", "type": "ContractAddress" }, { "name": "token_id", "type": "u256" }]' + ), +}; + +const revisionConfiguration: Record<Revision, Configuration> = { + [Revision.Active]: { + domain: 'StarknetDomain', + hashMethod: computePoseidonHashOnElements, + hashMerkleMethod: computePoseidonHash, + escapeTypeString: (s) => `"${s}"`, + presetTypes, + }, + [Revision.Legacy]: { + domain: 'StarkNetDomain', + hashMethod: computePedersenHashOnElements, + hashMerkleMethod: computePedersenHash, + escapeTypeString: (s) => s, + presetTypes: {}, + }, +}; + +// TODO: replace with utils byteArrayFromString from PR#891 once it is available +export function byteArrayFromString(targetString: string) { + const shortStrings: string[] = splitLongString(targetString); + const remainder: string = shortStrings[shortStrings.length - 1]; + const shortStringsEncoded: BigNumberish[] = shortStrings.map(encodeShortString); + + const [pendingWord, pendingWordLength] = + remainder === undefined || remainder.length === 31 + ? ['0x00', 0] + : [shortStringsEncoded.pop()!, remainder.length]; + + return { + data: shortStringsEncoded.length === 0 ? 
[] : shortStringsEncoded, + pending_word: pendingWord, + pending_word_len: pendingWordLength, + }; +} + +function identifyRevision({ types, domain }: TypedData) { + if (revisionConfiguration[Revision.Active].domain in types && domain.revision === Revision.Active) + return Revision.Active; + + if ( + revisionConfiguration[Revision.Legacy].domain in types && + (domain.revision ?? Revision.Legacy) === Revision.Legacy + ) + return Revision.Legacy; + + return undefined; +} + function getHex(value: BigNumberish): string { try { return toHex(value); @@ -20,18 +105,13 @@ /** * Validates that `data` matches the EIP-712 JSON schema. - * - * @param {any} data - * @return {boolean} */ -const validateTypedData = (data: unknown): data is TypedData => { +function validateTypedData(data: unknown): data is TypedData { const typedData = data as TypedData; - - // Validate that the data matches the EIP-712 JSON schema - const valid = Boolean(typedData.types && typedData.primaryType && typedData.message); - - return valid; -}; + return Boolean( + typedData.message && typedData.primaryType && typedData.types && identifyRevision(typedData) + ); +} export function prepareSelector(selector: string): string { return isHex(selector) ? selector : getSelectorFromName(selector); @@ -41,52 +121,48 @@ export function isMerkleTreeType(type: StarkNetType): type is StarkNetMerkleType { return type.type === 'merkletree'; } -interface Context { - parent?: string; - key?: string; -} - /** * Get the dependencies of a struct type. If a struct has the same dependency multiple times, it's only included once * in the resulting array. - * - * @param {TypedData} typedData - * @param {string} type - * @param {string[]} [dependencies] - * @return {string[]} */ -export const getDependencies = ( +export function getDependencies( types: TypedData['types'], type: string, - dependencies: string[] = [] -): string[] => { + dependencies: string[] = [], + contains: string = '', + revision: Revision = Revision.Legacy +): string[] { // Include pointers (struct arrays) if (type[type.length - 1] === '*') { - // eslint-disable-next-line no-param-reassign type = type.slice(0, -1); + } else if (revision === Revision.Active) { + // enum base + if (type === 'enum') { + type = contains; + } + // enum element types + else if (type.match(/^\(.*\)$/)) { + type = type.slice(1, -1); + } } - if (dependencies.includes(type)) { - return dependencies; - } - - if (!types[type]) { + if (dependencies.includes(type) || !types[type]) { return dependencies; } return [ type, - ...types[type].reduce<string[]>( + ...(types[type] as StarkNetEnumType[]).reduce<string[]>( (previous, t) => [ ...previous, - ...getDependencies(types, t.type, previous).filter( + ...getDependencies(types, t.type, previous, t.contains, revision).filter( (dependency) => !previous.includes(dependency) ), ], [] ), ]; -}; +} function getMerkleTreeType(types: TypedData['types'], ctx: Context) { if (ctx.parent && ctx.key) { @@ -107,130 +183,215 @@ /** * Encode a type to a string. All dependent types are alphabetically sorted. 
*/ -export const encodeType = (types: TypedData['types'], type: string): string => { - const [primary, ...dependencies] = getDependencies(types, type); +export function encodeType( + types: TypedData['types'], + type: string, + revision: Revision = Revision.Legacy +): string { + const [primary, ...dependencies] = getDependencies(types, type, undefined, undefined, revision); const newTypes = !primary ? [] : [primary, ...dependencies.sort()]; + const esc = revisionConfiguration[revision].escapeTypeString; + return newTypes .map((dependency) => { - return `${dependency}(${types[dependency].map((t) => `${t.name}:${t.type}`)})`; + const dependencyElements = types[dependency].map((t) => { + const targetType = + t.type === 'enum' && revision === Revision.Active + ? (t as StarkNetEnumType).contains + : t.type; + // parentheses handling for enum variant types + const typeString = targetType.match(/^\(.*\)$/) + ? `(${targetType + .slice(1, -1) + .split(',') + .map((e) => (e ? esc(e) : e)) + .join(',')})` + : esc(targetType); + return `${esc(t.name)}:${typeString}`; + }); + return `${esc(dependency)}(${dependencyElements})`; }) .join(''); -}; +} /** * Get a type string as hash. */ -export const getTypeHash = (types: TypedData['types'], type: string): string => { - return getSelectorFromName(encodeType(types, type)); -}; +export function getTypeHash( + types: TypedData['types'], + type: string, + revision: Revision = Revision.Legacy +): string { + return getSelectorFromName(encodeType(types, type, revision)); +} /** * Encodes a single value to an ABI serialisable string, number or Buffer. Returns the data as tuple, which consists of * an array of ABI compatible types, and an array of corresponding values. */ -export const encodeValue = ( +export function encodeValue( types: TypedData['types'], type: string, data: unknown, - ctx: Context = {} -): [string, string] => { + ctx: Context = {}, + revision: Revision = Revision.Legacy +): [string, string] { if (types[type]) { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - return [type, getStructHash(types, type, data as Record<string, unknown>)]; + return [type, getStructHash(types, type, data as TypedData['message'], revision)]; } - if ( - Object.keys(types) - .map((x) => `${x}*`) - .includes(type) - ) { - const structHashes: string[] = (data as unknown[]).map((struct) => { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - return getStructHash(types, type.slice(0, -1), struct as Record<string, unknown>); - }); - return [type, computeHashOnElements(structHashes)]; + if (revisionConfiguration[revision].presetTypes[type]) { + return [ + type, + getStructHash( + revisionConfiguration[revision].presetTypes, + type, + data as TypedData['message'], + revision + ), + ]; } - if (type === 'merkletree') { - const merkleTreeType = getMerkleTreeType(types, ctx); - const structHashes: string[] = (data as unknown[]).map((struct) => { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - return encodeValue(types, merkleTreeType, struct as Record<string, unknown>)[1]; - }); - const { root } = new MerkleTree(structHashes as string[]); - return ['felt', root]; + if (type.endsWith('*')) { + const hashes: string[] = (data as Array<unknown>).map( + (entry) => encodeValue(types, type.slice(0, -1), entry, undefined, revision)[1] + ); + return [type, revisionConfiguration[revision].hashMethod(hashes)]; } - if (type === 'felt*') { - return ['felt*', computeHashOnElements(data as string[])]; - } + switch (type) { + case 'enum': { + if (revision === Revision.Active) { + const
[variantKey, variantData] = Object.entries(data as TypedData['message'])[0]; - if (type === 'selector') { - return ['felt', prepareSelector(data as string)]; - } + const parentType = types[ctx.parent as string].find((t) => t.name === ctx.key) as StarkNetEnumType; + const enumType = types[parentType.contains]; + const variantType = enumType.find((t) => t.name === variantKey) as StarkNetType; + const variantIndex = enumType.indexOf(variantType); - return [type, getHex(data as string)]; -}; + const encodedSubtypes = variantType.type + .slice(1, -1) + .split(',') + .map((subtype, index) => { + if (!subtype) return subtype; + const subtypeData = (variantData as unknown[])[index]; + return encodeValue(types, subtype, subtypeData, undefined, revision)[1]; + }); + return [ + type, + revisionConfiguration[revision].hashMethod([variantIndex, ...encodedSubtypes]), + ]; + } // else fall through to default + return [type, getHex(data as string)]; + } + case 'merkletree': { + const merkleTreeType = getMerkleTreeType(types, ctx); + const structHashes: string[] = (data as Array<unknown>).map((struct) => { + return encodeValue(types, merkleTreeType, struct, undefined, revision)[1]; + }); + const { root } = new MerkleTree( + structHashes as string[], + revisionConfiguration[revision].hashMerkleMethod + ); + return ['felt', root]; + } + case 'selector': { + return ['felt', prepareSelector(data as string)]; + } + case 'string': { + if (revision === Revision.Active) { + const byteArray = byteArrayFromString(data as string); + const elements = [ + byteArray.data.length, + ...byteArray.data, + byteArray.pending_word, + byteArray.pending_word_len, + ]; + return [type, revisionConfiguration[revision].hashMethod(elements)]; + } // else fall through to default + return [type, getHex(data as string)]; + } + case 'felt': + case 'bool': + case 'u128': + case 'i128': + case 'ContractAddress': + case 'ClassHash': + case 'timestamp': + case 'shortstring': + return [type, getHex(data as string)]; + default: { + if (revision === Revision.Active) { + throw new Error(`Unsupported type: ${type}`); + } + return [type, getHex(data as string)]; + } + } +} /** * Encode the data to an ABI encoded Buffer. The data should be a key -> value object with all the required values. * All dependent types are automatically encoded. */ -export const encodeData = <T extends TypedData>( +export function encodeData<T extends TypedData>( types: T['types'], type: string, - data: T['message'] -) => { - const [returnTypes, values] = types[type].reduce<[string[], string[]]>( + data: T['message'], + revision: Revision = Revision.Legacy +) { + const targetType = types[type] ?? revisionConfiguration[revision].presetTypes[type]; + const [returnTypes, values] = targetType.reduce<[string[], string[]]>( ([ts, vs], field) => { - if (data[field.name] === undefined || data[field.name] === null) { + if (data[field.name] === undefined || (data[field.name] === null && field.type !== 'enum')) { throw new Error(`Cannot encode data: missing data for '${field.name}'`); } const value = data[field.name]; - const [t, encodedValue] = encodeValue(types, field.type, value, { - parent: type, - key: field.name, - }); + const ctx = { parent: type, key: field.name }; + const [t, encodedValue] = encodeValue(types, field.type, value, ctx, revision); return [ [...ts, t], [...vs, encodedValue], ]; }, - [['felt'], [getTypeHash(types, type)]] + [['felt'], [getTypeHash(types, type, revision)]] ); return [returnTypes, values]; -}; +} /** * Get encoded data as a hash. The data should be a key -> value object with all the required values. 
* All dependent types are automatically encoded. */ -export const getStructHash = <T extends TypedData>( +export function getStructHash<T extends TypedData>( types: T['types'], type: string, - data: T['message'] -) => { - return computeHashOnElements(encodeData(types, type, data)[1]); -}; + data: T['message'], + revision: Revision = Revision.Legacy +) { + return revisionConfiguration[revision].hashMethod(encodeData(types, type, data, revision)[1]); +} /** - * Get the EIP-191 encoded message to sign, from the typedData object. + * Get the SNIP-12 encoded message to sign, from the typedData object. */ -export const getMessageHash = (typedData: TypedData, account: BigNumberish): string => { +export function getMessageHash(typedData: TypedData, account: BigNumberish): string { if (!validateTypedData(typedData)) { throw new Error('Typed data does not match JSON schema'); } + const revision = identifyRevision(typedData) as Revision; + const { domain, hashMethod } = revisionConfiguration[revision]; + const message = [ encodeShortString('StarkNet Message'), - getStructHash(typedData.types, 'StarkNetDomain', typedData.domain), + getStructHash(typedData.types, domain, typedData.domain, revision), account, - getStructHash(typedData.types, typedData.primaryType, typedData.message), + getStructHash(typedData.types, typedData.primaryType, typedData.message, revision), ]; - return computeHashOnElements(message); -}; + return hashMethod(message); +} diff --git a/tsconfig.json b/tsconfig.json index dd414c37f..f67bd6a42 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,9 +11,9 @@ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ /* Language and Environment */ - "target": "ES2020" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + "target": "ES2022" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, "lib": [ - "ES2020", + "ES2022", "dom" ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, // "jsx": "preserve", /* Specify what JSX code is generated. */
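
Usage sketch for the parameterized Merkle API above (hypothetical consumer code; in-repo import paths shown, adjust to the package entry point):

import { computePoseidonHash } from './src/utils/hash';
import { MerkleTree, proofMerklePath } from './src/utils/merkle';

// Build a Poseidon-backed tree; omitting the second argument keeps the Pedersen default.
const tree = new MerkleTree(['0x1', '0x2', '0x3', '0x4'], computePoseidonHash);

// A proof must be verified with the same hash method the tree was built with.
const proof = tree.getProof('0x3');
console.log(proofMerklePath(tree.root, '0x3', proof, computePoseidonHash)); // true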
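The two "-OnElements" helpers added to hash.ts both satisfy the Configuration['hashMethod'] shape, (data: BigNumberish[]) => string; a minimal sketch of the asymmetry between them, which mirrors their respective hash conventions:

import { computePedersenHashOnElements, computePoseidonHashOnElements } from './src/utils/hash';

const elements = ['0x1', '0x2', '0x3'];

// Pedersen: chains pairwise from 0 and appends the element count (alias of computeHashOnElements).
const legacyHash = computePedersenHashOnElements(elements);

// Poseidon: hashes the whole array in one call via poseidonHashMany; no length is appended.
const activeHash = computePoseidonHashOnElements(elements);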
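End-to-end revision 1 sketch: the typed data below is a trimmed variant of __mocks__/typedData/example_baseTypes.json, so the resulting hash is illustrative rather than a snapshot value:

import { getMessageHash } from './src/utils/typedData';

const typedData = {
  types: {
    StarknetDomain: [
      { name: 'name', type: 'shortstring' },
      { name: 'version', type: 'shortstring' },
      { name: 'chainId', type: 'shortstring' },
      { name: 'revision', type: 'shortstring' },
    ],
    Example: [{ name: 'n0', type: 'felt' }],
  },
  primaryType: 'Example',
  // identifyRevision: a 'StarknetDomain' type plus revision '1' selects Revision.Active (Poseidon);
  // a 'StarkNetDomain' type with revision '0' or absent selects Revision.Legacy (Pedersen).
  domain: { name: 'StarkNet Mail', version: '1', chainId: '1', revision: '1' },
  message: { n0: '0x3e8' },
};

const messageHash = getMessageHash(typedData, '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826');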
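The revision 1 'string' case hashes the Cairo ByteArray form produced by byteArrayFromString; the expected shape for a short input is sketched below (values follow from splitLongString/encodeShortString, assuming the empty-data case serializes as an empty array):

import { byteArrayFromString } from './src/utils/typedData';

// 'hello' fits inside a single pending word (< 31 characters), so data stays empty.
const byteArray = byteArrayFromString('hello');
// { data: [], pending_word: '0x68656c6c6f', pending_word_len: 5 }

// encodeValue then hashes [data.length, ...data, pending_word, pending_word_len].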