diff --git a/btcec/go.mod b/btcec/go.mod index 8c7cc1d63c..452399cfc6 100644 --- a/btcec/go.mod +++ b/btcec/go.mod @@ -6,6 +6,11 @@ require ( github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 github.com/davecgh/go-spew v1.1.1 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 + github.com/stretchr/testify v1.8.0 ) -require github.com/decred/dcrd/crypto/blake256 v1.0.0 // indirect +require ( + github.com/decred/dcrd/crypto/blake256 v1.0.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/btcec/go.sum b/btcec/go.sum index 0004a783bb..73b8f2b5ac 100644 --- a/btcec/go.sum +++ b/btcec/go.sum @@ -1,8 +1,21 @@ github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U= github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/btcec/schnorr/musig2/context.go b/btcec/schnorr/musig2/context.go index 19cef34fa9..0dfc6f38af 100644 --- a/btcec/schnorr/musig2/context.go +++ b/btcec/schnorr/musig2/context.go @@ -206,12 +206,7 @@ func NewContext(signingKey *btcec.PrivateKey, shouldSort bool, option(opts) } - pubKey, err := schnorr.ParsePubKey( - schnorr.SerializePubKey(signingKey.PubKey()), - ) - if err != nil { - return nil, err - } + pubKey := signingKey.PubKey() ctx := &Context{ signingKey: signingKey, @@ -243,7 +238,10 @@ func NewContext(signingKey *btcec.PrivateKey, shouldSort bool, // the nonce now to pass in to the session once all the callers // are known. 
if opts.earlyNonce { - ctx.sessionNonce, err = GenNonces() + var err error + ctx.sessionNonce, err = GenNonces( + WithNonceSecretKeyAux(signingKey), + ) if err != nil { return nil, err } diff --git a/btcec/schnorr/musig2/data/key_agg_vectors.json b/btcec/schnorr/musig2/data/key_agg_vectors.json new file mode 100644 index 0000000000..b2e623de60 --- /dev/null +++ b/btcec/schnorr/musig2/data/key_agg_vectors.json @@ -0,0 +1,88 @@ +{ + "pubkeys": [ + "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", + "03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", + "023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66", + "020000000000000000000000000000000000000000000000000000000000000005", + "02FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30", + "04F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", + "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9" + ], + "tweaks": [ + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", + "252E4BD67410A76CDF933D30EAA1608214037F1B105A013ECCD3C5C184A6110B" + ], + "valid_test_cases": [ + { + "key_indices": [0, 1, 2], + "expected": "90539EEDE565F5D054F32CC0C220126889ED1E5D193BAF15AEF344FE59D4610C" + }, + { + "key_indices": [2, 1, 0], + "expected": "6204DE8B083426DC6EAF9502D27024D53FC826BF7D2012148A0575435DF54B2B" + }, + { + "key_indices": [0, 0, 0], + "expected": "B436E3BAD62B8CD409969A224731C193D051162D8C5AE8B109306127DA3AA935" + }, + { + "key_indices": [0, 0, 1, 1], + "expected": "69BC22BFA5D106306E48A20679DE1D7389386124D07571D0D872686028C26A3E" + } + ], + "error_test_cases": [ + { + "key_indices": [0, 3], + "tweak_indices": [], + "is_xonly": [], + "error": { + "type": "invalid_contribution", + "signer": 1, + "contrib": "pubkey" + }, + "comment": "Invalid public key" + }, + { + "key_indices": [0, 4], + "tweak_indices": [], + "is_xonly": [], + "error": { + "type": "invalid_contribution", + "signer": 1, + "contrib": "pubkey" + }, + "comment": "Public key exceeds field size" + }, + { + "key_indices": [5, 0], + "tweak_indices": [], + "is_xonly": [], + "error": { + "type": "invalid_contribution", + "signer": 0, + "contrib": "pubkey" + }, + "comment": "First byte of public key is not 2 or 3" + }, + { + "key_indices": [0, 1], + "tweak_indices": [0], + "is_xonly": [true], + "error": { + "type": "value", + "message": "The tweak must be less than n." + }, + "comment": "Tweak is out of range" + }, + { + "key_indices": [6], + "tweak_indices": [1], + "is_xonly": [false], + "error": { + "type": "value", + "message": "The result of tweaking cannot be infinity." 
+ }, + "comment": "Intermediate tweaking result is point at infinity" + } + ] +} diff --git a/btcec/schnorr/musig2/data/key_sort_vectors.json b/btcec/schnorr/musig2/data/key_sort_vectors.json new file mode 100644 index 0000000000..022f3417fa --- /dev/null +++ b/btcec/schnorr/musig2/data/key_sort_vectors.json @@ -0,0 +1,16 @@ +{ + "pubkeys": [ + "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8", + "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", + "03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", + "023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66", + "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8" + ], + "sorted_pubkeys": [ + "023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66", + "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8", + "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8", + "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", + "03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659" + ] +} diff --git a/btcec/schnorr/musig2/data/nonce_agg_vectors.json b/btcec/schnorr/musig2/data/nonce_agg_vectors.json new file mode 100644 index 0000000000..597246dd72 --- /dev/null +++ b/btcec/schnorr/musig2/data/nonce_agg_vectors.json @@ -0,0 +1,54 @@ +{ + "pnonces": [ + "020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E66603BA47FBC1834437B3212E89A84D8425E7BF12E0245D98262268EBDCB385D50641", + "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833", + "020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E6660279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", + "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60379BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", + "04FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833", + "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B831", + "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A602FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30" + ], + "valid_test_cases": [ + { + "pnonce_indices": [0, 1], + "expected": "035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B024725377345BDE0E9C33AF3C43C0A29A9249F2F2956FA8CFEB55C8573D0262DC8" + }, + { + "pnonce_indices": [2, 3], + "expected": "035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B000000000000000000000000000000000000000000000000000000000000000000", + "comment": "Sum of second points encoded in the nonces is point at infinity which is serialized as 33 zero bytes" + } + ], + "error_test_cases": [ + { + "pnonce_indices": [0, 4], + "error": { + "type": "invalid_contribution", + "signer": 1, + "contrib": "pubnonce" + }, + "comment": "Public nonce from signer 1 is invalid due wrong tag, 0x04, in the first half", + "btcec_err": "invalid public key: unsupported format: 4" + }, + { + "pnonce_indices": [5, 1], + "error": { + "type": "invalid_contribution", + "signer": 0, + "contrib": "pubnonce" + }, + "comment": "Public nonce from signer 0 is invalid because the second half does not correspond to an X coordinate", + "btcec_err": "invalid public key: x coordinate 48c264cdd57d3c24d79990b0f865674eb62a0f9018277a95011b41bfc193b831 is not on the secp256k1 curve" + }, + { + 
"pnonce_indices": [6, 1], + "error": { + "type": "invalid_contribution", + "signer": 0, + "contrib": "pubnonce" + }, + "comment": "Public nonce from signer 0 is invalid because second half exceeds field size", + "btcec_err": "invalid public key: x >= field prime" + } + ] +} diff --git a/btcec/schnorr/musig2/data/nonce_gen_vectors.json b/btcec/schnorr/musig2/data/nonce_gen_vectors.json new file mode 100644 index 0000000000..9727cfe93b --- /dev/null +++ b/btcec/schnorr/musig2/data/nonce_gen_vectors.json @@ -0,0 +1,36 @@ +{ + "test_cases": [ + { + "rand_": "0000000000000000000000000000000000000000000000000000000000000000", + "sk": "0202020202020202020202020202020202020202020202020202020202020202", + "aggpk": "0707070707070707070707070707070707070707070707070707070707070707", + "msg": "0101010101010101010101010101010101010101010101010101010101010101", + "extra_in": "0808080808080808080808080808080808080808080808080808080808080808", + "expected": "BC6C683EBBCC39DCB3C29B3D010D2AAA7C86CFB562FC41ED9A460EE061013E75FB4AD2F0B816713269800D018803906D5481E00A940EAB4F4AC49B4A372EB0F4" + }, + { + "rand_": "0000000000000000000000000000000000000000000000000000000000000000", + "sk": "0202020202020202020202020202020202020202020202020202020202020202", + "aggpk": "0707070707070707070707070707070707070707070707070707070707070707", + "msg": "", + "extra_in": "0808080808080808080808080808080808080808080808080808080808080808", + "expected": "AAC4BFD707F4953B4063851D7E4AAD5C59D5D0BFB0E71012788A85698B5ACF8F11834D5051928424BA501C8CD064F3F942F8D4A07D8A2ED79F153E4ABD9EBBE9" + }, + { + "rand_": "0000000000000000000000000000000000000000000000000000000000000000", + "sk": "0202020202020202020202020202020202020202020202020202020202020202", + "aggpk": "0707070707070707070707070707070707070707070707070707070707070707", + "msg": "2626262626262626262626262626262626262626262626262626262626262626262626262626", + "extra_in": "0808080808080808080808080808080808080808080808080808080808080808", + "expected": "DF54500DD2B503DBA3753C48A9D6B67E6C11EC4325EDD1DC256C7F75D6A85DBECA6D9857A6F3F292FB3B50DBCBF69FADB67B1CDDB0EA6EB693F6455C4C9088E1" + }, + { + "rand_": "0000000000000000000000000000000000000000000000000000000000000000", + "sk": null, + "aggpk": null, + "msg": null, + "extra_in": null, + "expected": "7B3B5A002356471AF0E961DE2549C121BD0D48ABCEEDC6E034BDDF86AD3E0A187ECEE674CEF7364B0BC4BEEFB8B66CAD89F98DE2F8C5A5EAD5D1D1E4BD7D04CD" + } + ] +} diff --git a/btcec/schnorr/musig2/data/sig_agg_vectors.json b/btcec/schnorr/musig2/data/sig_agg_vectors.json new file mode 100644 index 0000000000..7ae9444fd5 --- /dev/null +++ b/btcec/schnorr/musig2/data/sig_agg_vectors.json @@ -0,0 +1,86 @@ +{ + "pubkeys": [ + "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9", + "02D2DC6F5DF7C56ACF38C7FA0AE7A759AE30E19B37359DFDE015872324C7EF6E05", + "03C7FB101D97FF930ACD0C6760852EF64E69083DE0B06AC6335724754BB4B0522C", + "02352433B21E7E05D3B452B81CAE566E06D2E003ECE16D1074AABA4289E0E3D581" + ], + "pnonces": [ + "0300A32F8548F59C533F55DB9754E3C0BA3C2544F085649FDCE42B8BD3F244C2CA0384449BED61004E8863452A38534E91875516C3CC543122CE2BE1F31845025588", + "03F66B072A869BC2A57D776D487151D707E82B4F1B885066A589858C1BF3871DB603ED391C9658AB6031A96ACBD5E2D9FEC465EFDC8C0D0B765C9B9F3579D520FB6F", + "03A5791CA078E278126EF457C25B5C835F7282C0A47BDBF464BA35C3769427D5CD034D40350F8A5590985E38AAEFC3C695DF671C2E5498E2B60C082C546E06ECAF78", + 
"020DE6382B8C0550E8174D5263B981224EBCFEF7706588B6936177FEB68E639B8C02BA5F18DDB3487AD087F63CEF7D7818AC8ECA3D6B736113FF36FB25D113F514F6", + "031883080513BB69B31367F9A7B5F4E81246C627060A7414B7F137FA8459F261990345445505F158EDCFDF0D4BF26E04E018C143BF76B5D457AE57DF06CA41371DF0", + "0300028E83123E7FAB1E1F230547CE8B96CC23F13197312972DE72AACBA98EF9870274C2D8566E9E021AA7E2DDDA01B52AE670E0742418F147610528B65ACDB4D0B3" + ], + "tweaks": [ + "B511DA492182A91B0FFB9A98020D55F260AE86D7ECBD0399C7383D59A5F2AF7C", + "A815FE049EE3C5AAB66310477FBC8BCCCAC2F3395F59F921C364ACD78A2F48DC", + "75448A87274B056468B977BE06EB1E9F657577B7320B0A3376EA51FD420D18A8" + ], + "psigs": [ + "7918521F42E5727FE2E82D802876E0C8844336FDA1B58C82696A55B0188C8B3D", + "599044037AE15C4A99FB94F022B48E7AB215BF703954EC0B83D0E06230476001", + "F05BE3CA783AD1FAF68C5059B43F859BFD4EBB0242459DF2C6BF013F4217F7E7", + "BF85B2A751066466C24A5E7FA6C90DBAADAC2DF1F0BB48546AE239E340437CEB", + "142076B034A7401123EFB07E2317DF819B86B3FFA17180DDD093997D018270D0", + "B7A0C7F5B325B7993925E56B60F53EF8198169F31E1AF7E62BBEF1C5DCD1BA22", + "C717ECA32C148CE8EB8882CD9656DF9C64929DCAE9AF798E381B1E888DDF0F8F", + "5988823E78488D8005311E16E5EA67AF70514CB44F5A5CD51FFA262BEEAA21CE", + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141" + ], + "msg": "599C67EA410D005B9DA90817CF03ED3B1C868E4DA4EDF00A5880B0082C237869", + "valid_test_cases": [ + { + "aggnonce": "02BC34CDF6FA1298D7B6A126812FAD0739005BC44E45C21276EEFE41AAF841C86F03F3562AED52243BB99F43D1677DB59F0FEFB961633997F7AC924B78FBD0B0334F", + "nonce_indices": [0, 1], + "key_indices": [0, 1], + "tweak_indices": [], + "is_xonly": [], + "psig_indices": [0, 1], + "expected": "CA3C28729659E50F829F55DC5DB1DE88A05D1702B4165B85F95B627FC57733F8D2A89622BDC6CECA7CE3C2704B2B6F433658F66DDB0A788DED3B361248D3EB3E" + }, + { + "aggnonce": "035538518B8043CF4EACD0E701A80657B741C0E6445EC1D6C6177964D22C642971030CFE657EC882F4E08E751B883A78AC1491B30FC86CB57AF2DFF012C2BE6DF1F2", + "nonce_indices": [0, 2], + "key_indices": [0, 2], + "tweak_indices": [], + "is_xonly": [], + "psig_indices": [2, 3], + "expected": "3997A11DFF76349532CF25E761365EA1D4F24B62EB23A12A9DAABD5976C3DB9FAFE19671C9413661B8D6AED95B089357F04C0C0D83B8460B71CEDC95B2253391" + }, + { + "aggnonce": "024366775E6FFBEBBB954225936BAED71A3884C7933B18225088D19E7AF12D8D5D028D79A520B347B793FFE897A7EB79A4366A3FDCDC652C243FAC3976B3D6DF8AB2", + "nonce_indices": [0, 3], + "key_indices": [0, 2], + "tweak_indices": [0], + "is_xonly": [false], + "psig_indices": [4, 5], + "expected": "5AF759C2839B7FEE59D31DAB800F82FC21258457773A3B1F69F5228C80CAD4317EA39AD756601030E4D4051B7C9A25AB4DE7CB39BED26E0A03A1B2ED5B747F7F" + }, + { + "aggnonce": "03B25098C6D0B72DC5717314AF26C126609B4776AA468553DD4354EE20B216B227027D242E9203499173A74E286C1F796F2711E171EE937706BBEA2F4DB10C4E6809", + "nonce_indices": [0, 4], + "key_indices": [0, 3], + "tweak_indices": [0, 1, 2], + "is_xonly": [true, false, true], + "psig_indices": [6, 7], + "expected": "B495A478F91D6E10BF08A156E46D9E62B4C5399C1AEDDA1A9D306F06AFB8A52F2C078FD6B50DDBC33BFFE583C3C1E3D0D5E52891E190101C70D2278BCA943457" + } + ], + "error_test_cases": [ + { + "aggnonce": "03B25098C6D0B72DC5717314AF26C126609B4776AA468553DD4354EE20B216B227027D242E9203499173A74E286C1F796F2711E171EE937706BBEA2F4DB10C4E6809", + "nonce_indices": [0, 4], + "key_indices": [0, 3], + "tweak_indices": [0, 1, 2], + "is_xonly": [true, false, true], + "psig_indices": [7, 8], + "error": { + "type": "invalid_contribution", + "signer": 1 + }, + "comment": "Partial signature is invalid 
because it exceeds group size" + } + ] +} diff --git a/btcec/schnorr/musig2/data/sign_verify_vectors.json b/btcec/schnorr/musig2/data/sign_verify_vectors.json new file mode 100644 index 0000000000..e2499c7fb6 --- /dev/null +++ b/btcec/schnorr/musig2/data/sign_verify_vectors.json @@ -0,0 +1,183 @@ +{ + "sk": "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671", + "pubkeys": [ + "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9", + "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", + "02DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA661", + "020000000000000000000000000000000000000000000000000000000000000007" + ], + "secnonces": [ + "508B81A611F100A6B2B6B29656590898AF488BCF2E1F55CF22E5CFB84421FE61FA27FD49B1D50085B481285E1CA205D55C82CC1B31FF5CD54A489829355901F7", + "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ], + "pnonces": [ + "0337C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480", + "0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", + "032DE2662628C90B03F5E720284EB52FF7D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1CA6BEB2090C93D930321071AD40B2F44E599046", + "0237C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0387BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480", + "020000000000000000000000000000000000000000000000000000000000000009" + ], + "aggnonces": [ + "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9", + "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "048465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9", + "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61020000000000000000000000000000000000000000000000000000000000000009", + "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD6102FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30" + ], + "msgs": [ + "F95466D086770E689964664219266FE5ED215C92AE20BAB5C9D79ADDDDF3C0CF", + "", + "2626262626262626262626262626262626262626262626262626262626262626262626262626" + ], + "valid_test_cases": [ + { + "key_indices": [0, 1, 2], + "nonce_indices": [0, 1, 2], + "aggnonce_index": 0, + "msg_index": 0, + "signer_index": 0, + "expected": "012ABBCB52B3016AC03AD82395A1A415C48B93DEF78718E62A7A90052FE224FB" + }, + { + "key_indices": [1, 0, 2], + "nonce_indices": [1, 0, 2], + "aggnonce_index": 0, + "msg_index": 0, + "signer_index": 1, + "expected": "9FF2F7AAA856150CC8819254218D3ADEEB0535269051897724F9DB3789513A52" + }, + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "aggnonce_index": 0, + "msg_index": 0, + "signer_index": 2, + "expected": "FA23C359F6FAC4E7796BB93BC9F0532A95468C539BA20FF86D7C76ED92227900" + }, + { + "key_indices": [0, 1], + "nonce_indices": [0, 3], + "aggnonce_index": 1, + "msg_index": 0, + "signer_index": 0, + "expected": "AE386064B26105404798F75DE2EB9AF5EDA5387B064B83D049CB7C5E08879531", + "comment": "Both halves of aggregate nonce correspond to point at infinity" + } + ], + "sign_error_test_cases": [ + { + "key_indices": [1, 0, 3], + "aggnonce_index": 0, + "msg_index": 0, + 
"secnonce_index": 0, + "error": { + "type": "invalid_contribution", + "signer": 2, + "contrib": "pubkey" + }, + "comment": "Signer 2 provided an invalid public key" + }, + { + "key_indices": [1, 2, 0], + "aggnonce_index": 2, + "msg_index": 0, + "secnonce_index": 0, + "error": { + "type": "invalid_contribution", + "signer": null, + "contrib": "aggnonce" + }, + "comment": "Aggregate nonce is invalid due wrong tag, 0x04, in the first half" + }, + { + "key_indices": [1, 2, 0], + "aggnonce_index": 3, + "msg_index": 0, + "secnonce_index": 0, + "error": { + "type": "invalid_contribution", + "signer": null, + "contrib": "aggnonce" + }, + "comment": "Aggregate nonce is invalid because the second half does not correspond to an X coordinate" + }, + { + "key_indices": [1, 2, 0], + "aggnonce_index": 4, + "msg_index": 0, + "secnonce_index": 0, + "error": { + "type": "invalid_contribution", + "signer": null, + "contrib": "aggnonce" + }, + "comment": "Aggregate nonce is invalid because second half exceeds field size" + }, + { + "key_indices": [0, 1, 2], + "aggnonce_index": 0, + "msg_index": 0, + "signer_index": 0, + "secnonce_index": 1, + "error": { + "type": "value", + "message": "first secnonce value is out of range." + }, + "comment": "Secnonce is invalid which may indicate nonce reuse" + } + ], + "verify_fail_test_cases": [ + { + "sig": "97AC833ADCB1AFA42EBF9E0725616F3C9A0D5B614F6FE283CEAAA37A8FFAF406", + "key_indices": [0, 1, 2], + "nonce_indices": [0, 1, 2], + "msg_index": 0, + "signer_index": 0, + "comment": "Wrong signature (which is equal to the negation of valid signature)" + }, + { + "sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B", + "key_indices": [0, 1, 2], + "nonce_indices": [0, 1, 2], + "msg_index": 0, + "signer_index": 1, + "comment": "Wrong signer" + }, + { + "sig": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", + "key_indices": [0, 1, 2], + "nonce_indices": [0, 1, 2], + "msg_index": 0, + "signer_index": 0, + "comment": "Signature exceeds group size" + } + ], + "verify_error_test_cases": [ + { + "sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B", + "key_indices": [0, 1, 2], + "nonce_indices": [4, 1, 2], + "msg_index": 0, + "signer_index": 0, + "error": { + "type": "invalid_contribution", + "signer": 0, + "contrib": "pubnonce" + }, + "comment": "Invalid pubnonce" + }, + { + "sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B", + "key_indices": [3, 1, 2], + "nonce_indices": [0, 1, 2], + "msg_index": 0, + "signer_index": 0, + "error": { + "type": "invalid_contribution", + "signer": 0, + "contrib": "pubkey" + }, + "comment": "Invalid pubkey" + } + ] +} diff --git a/btcec/schnorr/musig2/data/tweak_vectors.json b/btcec/schnorr/musig2/data/tweak_vectors.json new file mode 100644 index 0000000000..01ccb8b1b3 --- /dev/null +++ b/btcec/schnorr/musig2/data/tweak_vectors.json @@ -0,0 +1,84 @@ +{ + "sk": "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671", + "pubkeys": [ + "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9", + "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", + "02DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659" + ], + "secnonce": "508B81A611F100A6B2B6B29656590898AF488BCF2E1F55CF22E5CFB84421FE61FA27FD49B1D50085B481285E1CA205D55C82CC1B31FF5CD54A489829355901F7", + "pnonces": [ + "0337C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480", + 
"0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", + "032DE2662628C90B03F5E720284EB52FF7D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1CA6BEB2090C93D930321071AD40B2F44E599046" + ], + "aggnonce": "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9", + "tweaks": [ + "E8F791FF9225A2AF0102AFFF4A9A723D9612A682A25EBE79802B263CDFCD83BB", + "AE2EA797CC0FE72AC5B97B97F3C6957D7E4199A167A58EB08BCAFFDA70AC0455", + "F52ECBC565B3D8BEA2DFD5B75A4F457E54369809322E4120831626F290FA87E0", + "1969AD73CC177FA0B4FCED6DF1F7BF9907E665FDE9BA196A74FED0A3CF5AEF9D", + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141" + ], + "msg": "F95466D086770E689964664219266FE5ED215C92AE20BAB5C9D79ADDDDF3C0CF", + "valid_test_cases": [ + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "tweak_indices": [0], + "is_xonly": [true], + "signer_index": 2, + "expected": "E28A5C66E61E178C2BA19DB77B6CF9F7E2F0F56C17918CD13135E60CC848FE91", + "comment": "A single x-only tweak" + }, + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "tweak_indices": [0], + "is_xonly": [false], + "signer_index": 2, + "expected": "38B0767798252F21BF5702C48028B095428320F73A4B14DB1E25DE58543D2D2D", + "comment": "A single plain tweak" + }, + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "tweak_indices": [0, 1], + "is_xonly": [false, true], + "signer_index": 2, + "expected": "408A0A21C4A0F5DACAF9646AD6EB6FECD7F7A11F03ED1F48DFFF2185BC2C2408", + "comment": "A plain tweak followed by an x-only tweak" + }, + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "tweak_indices": [0, 1, 2, 3], + "is_xonly": [false, false, true, true], + "signer_index": 2, + "expected": "45ABD206E61E3DF2EC9E264A6FEC8292141A633C28586388235541F9ADE75435", + "comment": "Four tweaks: plain, plain, x-only, x-only." + }, + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "tweak_indices": [0, 1, 2, 3], + "is_xonly": [true, false, true, false], + "signer_index": 2, + "expected": "B255FDCAC27B40C7CE7848E2D3B7BF5EA0ED756DA81565AC804CCCA3E1D5D239", + "comment": "Four tweaks: x-only, plain, x-only, plain. If an implementation prohibits applying plain tweaks after x-only tweaks, it can skip this test vector or return an error." + } + ], + "error_test_cases": [ + { + "key_indices": [1, 2, 0], + "nonce_indices": [1, 2, 0], + "tweak_indices": [4], + "is_xonly": [false], + "signer_index": 2, + "error": { + "type": "value", + "message": "The tweak must be less than n." + }, + "comment": "Tweak is invalid because it exceeds group size" + } + ] +} diff --git a/btcec/schnorr/musig2/keys.go b/btcec/schnorr/musig2/keys.go index 8c86c624fb..4ee63be2eb 100644 --- a/btcec/schnorr/musig2/keys.go +++ b/btcec/schnorr/musig2/keys.go @@ -29,7 +29,7 @@ var ( // ErrTweakedKeyOverflows is returned if a tweaking key is larger than // 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141. - ErrTweakedKeyOverflows = fmt.Errorf("tweaked key is to large") + ErrTweakedKeyOverflows = fmt.Errorf("tweaked key is too large") ) // sortableKeys defines a type of slice of public keys that implements the sort @@ -40,8 +40,8 @@ type sortableKeys []*btcec.PublicKey // with index j. func (s sortableKeys) Less(i, j int) bool { // TODO(roasbeef): more efficient way to compare... 
- keyIBytes := schnorr.SerializePubKey(s[i]) - keyJBytes := schnorr.SerializePubKey(s[j]) + keyIBytes := s[i].SerializeCompressed() + keyJBytes := s[j].SerializeCompressed() return bytes.Compare(keyIBytes, keyJBytes) == -1 } @@ -56,9 +56,9 @@ func (s sortableKeys) Len() int { return len(s) } -// sortKeys takes a set of schnorr public keys and returns a new slice that is -// a copy of the keys sorted in lexicographical order bytes on the x-only -// pubkey serialization. +// sortKeys takes a set of public keys and returns a new slice that is a copy +// of the keys sorted in lexicographical order bytes on the x-only pubkey +// serialization. func sortKeys(keys []*btcec.PublicKey) []*btcec.PublicKey { keySet := sortableKeys(keys) if sort.IsSorted(keySet) { @@ -72,7 +72,7 @@ func sortKeys(keys []*btcec.PublicKey) []*btcec.PublicKey { // keyHashFingerprint computes the tagged hash of the series of (sorted) public // keys passed as input. This is used to compute the aggregation coefficient // for each key. The final computation is: -// * H(tag=KeyAgg list, pk1 || pk2..) +// - H(tag=KeyAgg list, pk1 || pk2..) func keyHashFingerprint(keys []*btcec.PublicKey, sort bool) []byte { if sort { keys = sortKeys(keys) @@ -80,28 +80,25 @@ func keyHashFingerprint(keys []*btcec.PublicKey, sort bool) []byte { // We'll create a single buffer and slice into that so the bytes buffer // doesn't continually need to grow the underlying buffer. - keyAggBuf := make([]byte, 32*len(keys)) + keyAggBuf := make([]byte, 33*len(keys)) keyBytes := bytes.NewBuffer(keyAggBuf[0:0]) for _, key := range keys { - keyBytes.Write(schnorr.SerializePubKey(key)) + keyBytes.Write(key.SerializeCompressed()) } h := chainhash.TaggedHash(KeyAggTagList, keyBytes.Bytes()) return h[:] } -// keyBytesEqual returns true if two keys are the same from the PoV of BIP -// 340's 32-byte x-only public keys. +// keyBytesEqual returns true if two keys are the same based on the compressed +// serialization of each key. func keyBytesEqual(a, b *btcec.PublicKey) bool { - return bytes.Equal( - schnorr.SerializePubKey(a), - schnorr.SerializePubKey(b), - ) + return bytes.Equal(a.SerializeCompressed(), b.SerializeCompressed()) } // aggregationCoefficient computes the key aggregation coefficient for the // specified target key. 
The coefficient is computed as: -// * H(tag=KeyAgg coefficient, keyHashFingerprint(pks) || pk) +// - H(tag=KeyAgg coefficient, keyHashFingerprint(pks) || pk) func aggregationCoefficient(keySet []*btcec.PublicKey, targetKey *btcec.PublicKey, keysHash []byte, secondKeyIdx int) *btcec.ModNScalar { @@ -116,9 +113,9 @@ func aggregationCoefficient(keySet []*btcec.PublicKey, // Otherwise, we'll compute the full finger print hash for this given // key and then use that to compute the coefficient tagged hash: // * H(tag=KeyAgg coefficient, keyHashFingerprint(pks, pk) || pk) - var coefficientBytes [64]byte + var coefficientBytes [65]byte copy(coefficientBytes[:], keysHash[:]) - copy(coefficientBytes[32:], schnorr.SerializePubKey(targetKey)) + copy(coefficientBytes[32:], targetKey.SerializeCompressed()) muHash := chainhash.TaggedHash(KeyAggTagCoeff, coefficientBytes[:]) diff --git a/btcec/schnorr/musig2/keys_test.go b/btcec/schnorr/musig2/keys_test.go new file mode 100644 index 0000000000..9eb16b537f --- /dev/null +++ b/btcec/schnorr/musig2/keys_test.go @@ -0,0 +1,394 @@ +// Copyright 2013-2022 The btcsuite developers + +package musig2 + +import ( + "encoding/hex" + "encoding/json" + "fmt" + "os" + "path" + "strings" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/btcsuite/btcd/btcec/v2/schnorr" + secp "github.com/decred/dcrd/dcrec/secp256k1/v4" + "github.com/stretchr/testify/require" +) + +const ( + keySortTestVectorFileName = "key_sort_vectors.json" + + keyAggTestVectorFileName = "key_agg_vectors.json" + + keyTweakTestVectorFileName = "tweak_vectors.json" +) + +type keySortTestVector struct { + PubKeys []string `json:"pubkeys"` + + SortedKeys []string `json:"sorted_pubkeys"` +} + +// TestMusig2KeySort tests that keys are properly sorted according to the +// musig2 test vectors. 
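Note on the keys.go hunk above: swapping schnorr.SerializePubKey for SerializeCompressed is what drives the buffer-size changes (32 to 33 bytes per key for the key-list hash, 64 to 65 bytes for the coefficient preimage). The following is a minimal sketch of those two tagged hashes after the change, written as a standalone illustration rather than part of this diff; it reuses the exported KeyAggTagList/KeyAggTagCoeff tags from this package and assumes the key slice is already sorted.

package sketch

import (
	"github.com/btcsuite/btcd/btcec/v2"
	"github.com/btcsuite/btcd/btcec/v2/schnorr/musig2"
	"github.com/btcsuite/btcd/chaincfg/chainhash"
)

// keyListHash mirrors keyHashFingerprint: H(tag=KeyAgg list, pk1 || pk2 || ...),
// where every key now contributes its 33-byte compressed encoding.
func keyListHash(sortedKeys []*btcec.PublicKey) []byte {
	buf := make([]byte, 0, 33*len(sortedKeys))
	for _, key := range sortedKeys {
		buf = append(buf, key.SerializeCompressed()...)
	}

	h := chainhash.TaggedHash(musig2.KeyAggTagList, buf)
	return h[:]
}

// coefficientHash mirrors the hash inside aggregationCoefficient:
// H(tag=KeyAgg coefficient, keyListHash(keys) || pk), a 65-byte preimage.
func coefficientHash(keysHash []byte, targetKey *btcec.PublicKey) []byte {
	var preimage [65]byte
	copy(preimage[:32], keysHash)
	copy(preimage[32:], targetKey.SerializeCompressed())

	h := chainhash.TaggedHash(musig2.KeyAggTagCoeff, preimage[:])
	return h[:]
}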
+func TestMusig2KeySort(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, keySortTestVectorFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCase keySortTestVector + require.NoError(t, json.Unmarshal(testVectorBytes, &testCase)) + + keys := make([]*btcec.PublicKey, len(testCase.PubKeys)) + for i, keyStr := range testCase.PubKeys { + pubKey, err := btcec.ParsePubKey(mustParseHex(keyStr)) + require.NoError(t, err) + + keys[i] = pubKey + } + + sortedKeys := sortKeys(keys) + + expectedKeys := make([]*btcec.PublicKey, len(testCase.PubKeys)) + for i, keyStr := range testCase.SortedKeys { + pubKey, err := btcec.ParsePubKey(mustParseHex(keyStr)) + require.NoError(t, err) + + expectedKeys[i] = pubKey + } + + require.Equal(t, sortedKeys, expectedKeys) +} + +type keyAggValidTest struct { + Indices []int `json:"key_indices"` + Expected string `json:"expected"` +} + +type keyAggError struct { + Type string `json:"type"` + Signer int `json:"signer"` + Contring string `json:"contrib"` +} + +type keyAggInvalidTest struct { + Indices []int `json:"key_indices"` + + TweakIndices []int `json:"tweak_indices"` + + IsXOnly []bool `json:"is_xonly"` + + Comment string `json:"comment"` +} + +type keyAggTestVectors struct { + PubKeys []string `json:"pubkeys"` + + Tweaks []string `json:"tweaks"` + + ValidCases []keyAggValidTest `json:"valid_test_cases"` + + InvalidCases []keyAggInvalidTest `json:"error_test_cases"` +} + +func keysFromIndices(t *testing.T, indices []int, + pubKeys []string) ([]*btcec.PublicKey, error) { + + t.Helper() + + inputKeys := make([]*btcec.PublicKey, len(indices)) + for i, keyIdx := range indices { + var err error + inputKeys[i], err = btcec.ParsePubKey( + mustParseHex(pubKeys[keyIdx]), + ) + if err != nil { + return nil, err + } + } + + return inputKeys, nil +} + +func tweaksFromIndices(t *testing.T, indices []int, + tweaks []string, isXonly []bool) []KeyTweakDesc { + + t.Helper() + + testTweaks := make([]KeyTweakDesc, len(indices)) + for i, idx := range indices { + var rawTweak [32]byte + copy(rawTweak[:], mustParseHex(tweaks[idx])) + + testTweaks[i] = KeyTweakDesc{ + Tweak: rawTweak, + IsXOnly: isXonly[i], + } + } + + return testTweaks +} + +// TestMuSig2KeyAggTestVectors tests that this implementation of musig2 key +// aggregation lines up with the secp256k1-zkp test vectors. +func TestMuSig2KeyAggTestVectors(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, keyAggTestVectorFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCases keyAggTestVectors + require.NoError(t, json.Unmarshal(testVectorBytes, &testCases)) + + tweaks := make([][]byte, len(testCases.Tweaks)) + for i := range testCases.Tweaks { + tweaks[i] = mustParseHex(testCases.Tweaks[i]) + } + + for i, testCase := range testCases.ValidCases { + testCase := testCase + + // Assemble the set of keys we'll pass in based on their key + // index. We don't use sorting to ensure we send the keys in + // the exact same order as the test vectors do. 
+ inputKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + require.NoError(t, err) + + t.Run(fmt.Sprintf("test_case=%v", i), func(t *testing.T) { + uniqueKeyIndex := secondUniqueKeyIndex(inputKeys, false) + opts := []KeyAggOption{WithUniqueKeyIndex(uniqueKeyIndex)} + + combinedKey, _, _, err := AggregateKeys( + inputKeys, false, opts..., + ) + require.NoError(t, err) + + require.Equal( + t, schnorr.SerializePubKey(combinedKey.FinalKey), + mustParseHex(testCase.Expected), + ) + }) + } + + for _, testCase := range testCases.InvalidCases { + testCase := testCase + + testName := fmt.Sprintf("invalid_%v", + strings.ToLower(testCase.Comment)) + t.Run(testName, func(t *testing.T) { + // For each test, we'll extract the set of input keys + // as well as the tweaks since this set of cases also + // exercises error cases related to the set of tweaks. + inputKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + + // In this set of test cases, we should only get this + // for the very first vector. + if err != nil { + switch testCase.Comment { + case "Invalid public key": + require.ErrorIs( + t, err, + secp.ErrPubKeyNotOnCurve, + ) + + case "Public key exceeds field size": + require.ErrorIs( + t, err, secp.ErrPubKeyXTooBig, + ) + + case "First byte of public key is not 2 or 3": + require.ErrorIs( + t, err, + secp.ErrPubKeyInvalidFormat, + ) + + default: + t.Fatalf("uncaught err: %v", err) + } + + return + } + + var tweaks []KeyTweakDesc + if len(testCase.TweakIndices) != 0 { + tweaks = tweaksFromIndices( + t, testCase.TweakIndices, testCases.Tweaks, + testCase.IsXOnly, + ) + } + + uniqueKeyIndex := secondUniqueKeyIndex(inputKeys, false) + opts := []KeyAggOption{ + WithUniqueKeyIndex(uniqueKeyIndex), + } + + if len(tweaks) != 0 { + opts = append(opts, WithKeyTweaks(tweaks...)) + } + + _, _, _, err = AggregateKeys( + inputKeys, false, opts..., + ) + require.Error(t, err) + + switch testCase.Comment { + case "Tweak is out of range": + require.ErrorIs(t, err, ErrTweakedKeyOverflows) + + case "Intermediate tweaking result is point at infinity": + + require.ErrorIs(t, err, ErrTweakedKeyIsInfinity) + + default: + t.Fatalf("uncaught err: %v", err) + } + }) + } +} + +type keyTweakInvalidTest struct { + Indices []int `json:"key_indices"` + + NonceIndices []int `json:"nonce_indices"` + + TweakIndices []int `json:"tweak_indices"` + + IsXOnly []bool `json:"is_only"` + + SignerIndex int `json:"signer_index"` + + Comment string `json:"comment"` +} + +type keyTweakValidTest struct { + Indices []int `json:"key_indices"` + + NonceIndices []int `json:"nonce_indices"` + + TweakIndices []int `json:"tweak_indices"` + + IsXOnly []bool `json:"is_xonly"` + + SignerIndex int `json:"signer_index"` + + Expected string `json:"expected"` + + Comment string `json:"comment"` +} + +type keyTweakVector struct { + PrivKey string `json:"sk"` + + PubKeys []string `json:"pubkeys"` + + PrivNonce string `json:"secnonce"` + + PubNonces []string `json:"pnonces"` + + AggNnoce string `json:"aggnonce"` + + Tweaks []string `json:"tweaks"` + + Msg string `json:"msg"` + + ValidCases []keyTweakValidTest `json:"valid_test_cases"` + + InvalidCases []keyTweakInvalidTest `json:"error_test_cases"` +} + +func pubNoncesFromIndices(t *testing.T, nonceIndices []int, pubNonces []string) [][PubNonceSize]byte { + + nonces := make([][PubNonceSize]byte, len(nonceIndices)) + + for i, idx := range nonceIndices { + var pubNonce [PubNonceSize]byte + copy(pubNonce[:], mustParseHex(pubNonces[idx])) + + nonces[i] = 
pubNonce + } + + return nonces +} + +// TestMuSig2TweakTestVectors tests that we properly handle the various edge +// cases related to tweaking public keys. +func TestMuSig2TweakTestVectors(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, keyTweakTestVectorFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCases keyTweakVector + require.NoError(t, json.Unmarshal(testVectorBytes, &testCases)) + + privKey, _ := btcec.PrivKeyFromBytes(mustParseHex(testCases.PrivKey)) + + var msg [32]byte + copy(msg[:], mustParseHex(testCases.Msg)) + + var secNonce [SecNonceSize]byte + copy(secNonce[:], mustParseHex(testCases.PrivNonce)) + + for _, testCase := range testCases.ValidCases { + testCase := testCase + + testName := fmt.Sprintf("valid_%v", + strings.ToLower(testCase.Comment)) + t.Run(testName, func(t *testing.T) { + pubKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + require.NoError(t, err) + + var tweaks []KeyTweakDesc + if len(testCase.TweakIndices) != 0 { + tweaks = tweaksFromIndices( + t, testCase.TweakIndices, + testCases.Tweaks, testCase.IsXOnly, + ) + } + + pubNonces := pubNoncesFromIndices( + t, testCase.NonceIndices, testCases.PubNonces, + ) + + combinedNonce, err := AggregateNonces(pubNonces) + require.NoError(t, err) + + var opts []SignOption + if len(tweaks) != 0 { + opts = append(opts, WithTweaks(tweaks...)) + } + + partialSig, err := Sign( + secNonce, privKey, combinedNonce, pubKeys, + msg, opts..., + ) + + var partialSigBytes [32]byte + partialSig.S.PutBytesUnchecked(partialSigBytes[:]) + + require.Equal( + t, hex.EncodeToString(partialSigBytes[:]), + hex.EncodeToString(mustParseHex(testCase.Expected)), + ) + + }) + } +} diff --git a/btcec/schnorr/musig2/musig2_test.go b/btcec/schnorr/musig2/musig2_test.go index e58a6d4328..6842e911d9 100644 --- a/btcec/schnorr/musig2/musig2_test.go +++ b/btcec/schnorr/musig2/musig2_test.go @@ -3,268 +3,20 @@ package musig2 import ( - "bytes" "crypto/sha256" "encoding/hex" - "encoding/json" "errors" - "flag" "fmt" - "io/ioutil" "sync" "testing" "github.com/btcsuite/btcd/btcec/v2" - "github.com/btcsuite/btcd/btcec/v2/schnorr" - "github.com/btcsuite/btcd/chaincfg/chainhash" - "github.com/decred/dcrd/dcrec/secp256k1/v4" ) -var ( - key1Bytes, _ = hex.DecodeString("F9308A019258C31049344F85F89D5229B53" + - "1C845836F99B08601F113BCE036F9") - key2Bytes, _ = hex.DecodeString("DFF1D77F2A671C5F36183726DB2341BE58F" + - "EAE1DA2DECED843240F7B502BA659") - key3Bytes, _ = hex.DecodeString("3590A94E768F8E1815C2F24B4D80A8E3149" + - "316C3518CE7B7AD338368D038CA66") - - invalidPk1, _ = hex.DecodeString("00000000000000000000000000000000" + - "00000000000000000000000000000005") - invalidPk2, _ = hex.DecodeString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" + - "FFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30") - invalidTweak, _ = hex.DecodeString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE" + - "BAAEDCE6AF48A03BBFD25E8CD0364141") - - testKeys = [][]byte{key1Bytes, key2Bytes, key3Bytes, invalidPk1, - invalidPk2} - - keyCombo1, _ = hex.DecodeString("E5830140512195D74C8307E39637CBE5FB73" + - "0EBEAB80EC514CF88A877CEEEE0B") - keyCombo2, _ = hex.DecodeString("D70CD69A2647F7390973DF48CBFA2CCC407B" + - "8B2D60B08C5F1641185C7998A290") - keyCombo3, _ = hex.DecodeString("81A8B093912C9E481408D09776CEFB48AEB8" + - "B65481B6BAAFB3C5810106717BEB") - keyCombo4, _ = hex.DecodeString("2EB18851887E7BDC5E830E89B19DDBC28078" + - "F1FA88AAD0AD01CA06FE4F80210B") -) - -// getInfinityTweak returns a tweak that, 
when tweaking the Generator, triggers -// the ErrTweakedKeyIsInfinity error. -func getInfinityTweak() KeyTweakDesc { - generator := btcec.Generator() - - keySet := []*btcec.PublicKey{generator} - - keysHash := keyHashFingerprint(keySet, true) - uniqueKeyIndex := secondUniqueKeyIndex(keySet, true) - - n := &btcec.ModNScalar{} - - n.SetByteSlice(invalidTweak) - - coeff := aggregationCoefficient( - keySet, generator, keysHash, uniqueKeyIndex, - ).Negate().Add(n) - - return KeyTweakDesc{ - Tweak: coeff.Bytes(), - IsXOnly: false, - } -} - const ( - keyAggTestVectorName = "key_agg_vectors.json" - - nonceAggTestVectorName = "nonce_agg_vectors.json" - - signTestVectorName = "sign_vectors.json" + testVectorBaseDir = "data" ) -var dumpJson = flag.Bool("dumpjson", false, "if true, a JSON version of the "+ - "test vectors will be written to the cwd") - -type jsonKeyAggTestCase struct { - Keys []string `json:"keys"` - Tweaks []jsonTweak `json:"tweaks"` - ExpectedKey string `json:"expected_key"` - ExpectedError string `json:"expected_error"` -} - -// TestMuSig2KeyAggTestVectors tests that this implementation of musig2 key -// aggregation lines up with the secp256k1-zkp test vectors. -func TestMuSig2KeyAggTestVectors(t *testing.T) { - t.Parallel() - - var jsonCases []jsonKeyAggTestCase - - testCases := []struct { - keyOrder []int - explicitKeys []*btcec.PublicKey - tweaks []KeyTweakDesc - expectedKey []byte - expectedError error - }{ - // Keys in backwards lexicographical order. - { - keyOrder: []int{0, 1, 2}, - expectedKey: keyCombo1, - }, - - // Keys in sorted order. - { - keyOrder: []int{2, 1, 0}, - expectedKey: keyCombo2, - }, - - // Only the first key. - { - keyOrder: []int{0, 0, 0}, - expectedKey: keyCombo3, - }, - - // Duplicate the first key and second keys. - { - keyOrder: []int{0, 0, 1, 1}, - expectedKey: keyCombo4, - }, - - // Invalid public key. - { - keyOrder: []int{0, 3}, - expectedError: secp256k1.ErrPubKeyNotOnCurve, - }, - - // Public key exceeds field size. - { - keyOrder: []int{0, 4}, - expectedError: secp256k1.ErrPubKeyXTooBig, - }, - - // Tweak is out of range. - { - keyOrder: []int{0, 1}, - tweaks: []KeyTweakDesc{ - KeyTweakDesc{ - Tweak: to32ByteSlice(invalidTweak), - IsXOnly: true, - }, - }, - expectedError: ErrTweakedKeyOverflows, - }, - - // Intermediate tweaking result is point at infinity. 
- { - explicitKeys: []*secp256k1.PublicKey{btcec.Generator()}, - tweaks: []KeyTweakDesc{ - getInfinityTweak(), - }, - expectedError: ErrTweakedKeyIsInfinity, - }, - } - for i, testCase := range testCases { - testName := fmt.Sprintf("%v", testCase.keyOrder) - t.Run(testName, func(t *testing.T) { - var ( - keys []*btcec.PublicKey - strKeys []string - strTweaks []jsonTweak - jsonError string - ) - for _, keyIndex := range testCase.keyOrder { - keyBytes := testKeys[keyIndex] - pub, err := schnorr.ParsePubKey(keyBytes) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - return - case err != nil: - t.Fatalf("unable to parse pubkeys: %v", err) - } - - keys = append(keys, pub) - strKeys = append(strKeys, hex.EncodeToString(keyBytes)) - } - - for _, explicitKey := range testCase.explicitKeys { - keys = append(keys, explicitKey) - strKeys = append( - strKeys, - hex.EncodeToString( - explicitKey.SerializeCompressed(), - )) - } - - for _, tweak := range testCase.tweaks { - strTweaks = append( - strTweaks, - jsonTweak{ - Tweak: hex.EncodeToString( - tweak.Tweak[:], - ), - XOnly: tweak.IsXOnly, - }) - } - - if testCase.expectedError != nil { - jsonError = testCase.expectedError.Error() - } - - jsonCases = append( - jsonCases, - jsonKeyAggTestCase{ - Keys: strKeys, - Tweaks: strTweaks, - ExpectedKey: hex.EncodeToString( - testCase.expectedKey), - ExpectedError: jsonError, - }) - - uniqueKeyIndex := secondUniqueKeyIndex(keys, false) - opts := []KeyAggOption{WithUniqueKeyIndex(uniqueKeyIndex)} - if len(testCase.tweaks) > 0 { - opts = append(opts, WithKeyTweaks(testCase.tweaks...)) - } - - combinedKey, _, _, err := AggregateKeys( - keys, false, opts..., - ) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - return - - case err != nil: - t.Fatalf("case #%v, got error %v", i, err) - } - - combinedKeyBytes := schnorr.SerializePubKey(combinedKey.FinalKey) - if !bytes.Equal(combinedKeyBytes, testCase.expectedKey) { - t.Fatalf("case: #%v, invalid aggregation: "+ - "expected %x, got %x", i, testCase.expectedKey, - combinedKeyBytes) - } - }) - } - - if *dumpJson { - jsonBytes, err := json.Marshal(jsonCases) - if err != nil { - t.Fatalf("unable to encode json: %v", err) - } - - var formattedJson bytes.Buffer - json.Indent(&formattedJson, jsonBytes, "", "\t") - err = ioutil.WriteFile( - keyAggTestVectorName, formattedJson.Bytes(), 0644, - ) - if err != nil { - t.Fatalf("unable to write file: %v", err) - } - } -} - func mustParseHex(str string) []byte { b, err := hex.DecodeString(str) if err != nil { @@ -274,664 +26,6 @@ func mustParseHex(str string) []byte { return b } -func parseKey(xHex string) *btcec.PublicKey { - xB, err := hex.DecodeString(xHex) - if err != nil { - panic(err) - } - - var x, y btcec.FieldVal - x.SetByteSlice(xB) - if !btcec.DecompressY(&x, false, &y) { - panic("x not on curve") - } - y.Normalize() - - return btcec.NewPublicKey(&x, &y) -} - -var ( - signSetPrivKey, _ = btcec.PrivKeyFromBytes( - mustParseHex("7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DF" + - "E8D76D7F2D1007671"), - ) - signSetPubKey = schnorr.SerializePubKey(signSetPrivKey.PubKey()) - - signTestMsg = mustParseHex("F95466D086770E689964664219266FE5ED215C92A" + - "E20BAB5C9D79ADDDDF3C0CF") - - signSetKey2 = mustParseHex("F9308A019258C31049344F85F89D5229B531C8458" + - "36F99B08601F113BCE036F9") - - signSetKey3 = mustParseHex("DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA" + - "2DECED843240F7B502BA659") - - invalidSetKey1 = 
mustParseHex("00000000000000000000000000000000" + - "00000000000000000000000000000007") - - signExpected1 = mustParseHex("68537CC5234E505BD14061F8DA9E90C220A1818" + - "55FD8BDB7F127BB12403B4D3B") - signExpected2 = mustParseHex("2DF67BFFF18E3DE797E13C6475C963048138DAE" + - "C5CB20A357CECA7C8424295EA") - signExpected3 = mustParseHex("0D5B651E6DE34A29A12DE7A8B4183B4AE6A7F7F" + - "BE15CDCAFA4A3D1BCAABC7517") - - signExpected4 = mustParseHex("8D5E0407FB4756EEBCD86264C32D792EE36EEB6" + - "9E952BBB30B8E41BEBC4D22FA") - - signSetKeys = [][]byte{signSetPubKey, signSetKey2, signSetKey3, invalidPk1} - - aggregatedNonce = toPubNonceSlice(mustParseHex("028465FCF0BBDBCF443AA" + - "BCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926" + - "D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9")) - verifyPnonce1 = mustParsePubNonce("0337C87821AFD50A8644D820A8F3E02E49" + - "9C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352A" + - "A9405D1428C15F4B75F04DAE642A95C2548480") - verifyPnonce2 = mustParsePubNonce("0279BE667EF9DCBBAC55A06295CE870B07" + - "029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE" + - "870B07029BFCDB2DCE28D959F2815B16F81798") - verifyPnonce3 = mustParsePubNonce("032DE2662628C90B03F5E720284EB52FF7" + - "D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1C" + - "A6BEB2090C93D930321071AD40B2F44E599046") - verifyPnonce4 = mustParsePubNonce("0237C87821AFD50A8644D820A8F3E02E49" + - "9C931865C2360FB43D0A0D20DAFE07EA0387BF891D2A6DEAEBADC909352A" + - "A9405D1428C15F4B75F04DAE642A95C2548480") - - tweak1 = KeyTweakDesc{ - Tweak: [32]byte{ - 0xE8, 0xF7, 0x91, 0xFF, 0x92, 0x25, 0xA2, 0xAF, - 0x01, 0x02, 0xAF, 0xFF, 0x4A, 0x9A, 0x72, 0x3D, - 0x96, 0x12, 0xA6, 0x82, 0xA2, 0x5E, 0xBE, 0x79, - 0x80, 0x2B, 0x26, 0x3C, 0xDF, 0xCD, 0x83, 0xBB, - }, - } - tweak2 = KeyTweakDesc{ - Tweak: [32]byte{ - 0xae, 0x2e, 0xa7, 0x97, 0xcc, 0xf, 0xe7, 0x2a, - 0xc5, 0xb9, 0x7b, 0x97, 0xf3, 0xc6, 0x95, 0x7d, - 0x7e, 0x41, 0x99, 0xa1, 0x67, 0xa5, 0x8e, 0xb0, - 0x8b, 0xca, 0xff, 0xda, 0x70, 0xac, 0x4, 0x55, - }, - } - tweak3 = KeyTweakDesc{ - Tweak: [32]byte{ - 0xf5, 0x2e, 0xcb, 0xc5, 0x65, 0xb3, 0xd8, 0xbe, - 0xa2, 0xdf, 0xd5, 0xb7, 0x5a, 0x4f, 0x45, 0x7e, - 0x54, 0x36, 0x98, 0x9, 0x32, 0x2e, 0x41, 0x20, - 0x83, 0x16, 0x26, 0xf2, 0x90, 0xfa, 0x87, 0xe0, - }, - } - tweak4 = KeyTweakDesc{ - Tweak: [32]byte{ - 0x19, 0x69, 0xad, 0x73, 0xcc, 0x17, 0x7f, 0xa0, - 0xb4, 0xfc, 0xed, 0x6d, 0xf1, 0xf7, 0xbf, 0x99, - 0x7, 0xe6, 0x65, 0xfd, 0xe9, 0xba, 0x19, 0x6a, - 0x74, 0xfe, 0xd0, 0xa3, 0xcf, 0x5a, 0xef, 0x9d, - }, - } -) - -func formatTweakParity(tweaks []KeyTweakDesc) string { - var s string - for _, tweak := range tweaks { - s += fmt.Sprintf("%v/", tweak.IsXOnly) - } - - // Snip off that last '/'. - s = s[:len(s)-1] - - return s -} - -func genTweakParity(tweak KeyTweakDesc, isXOnly bool) KeyTweakDesc { - tweak.IsXOnly = isXOnly - return tweak -} - -type jsonTweak struct { - Tweak string `json:"tweak"` - XOnly bool `json:"x_only"` -} - -type jsonTweakSignCase struct { - Keys []string `json:"keys"` - Tweaks []jsonTweak `json:"tweaks,omitempty"` - AggNonce string `json:"agg_nonce"` - - ExpectedSig string `json:"expected_sig"` - ExpectedError string `json:"expected_error` -} - -type jsonSignTestCase struct { - SecNonce string `json:"secret_nonce"` - SigningKey string `json:"signing_key"` - Msg string `json:"msg"` - - TestCases []jsonTweakSignCase `json:"test_cases"` -} - -// TestMuSig2SigningTestVectors tests that the musig2 implementation produces -// the same set of signatures. 
-func TestMuSig2SigningTestVectors(t *testing.T) { - t.Parallel() - - var jsonCases jsonSignTestCase - - jsonCases.SigningKey = hex.EncodeToString(signSetPrivKey.Serialize()) - jsonCases.Msg = hex.EncodeToString(signTestMsg) - - var secNonce [SecNonceSize]byte - copy(secNonce[:], mustParseHex("508B81A611F100A6B2B6B29656590898AF488B"+ - "CF2E1F55CF22E5CFB84421FE61")) - copy(secNonce[32:], mustParseHex("FA27FD49B1D50085B481285E1CA205D55C82"+ - "CC1B31FF5CD54A489829355901F7")) - - jsonCases.SecNonce = hex.EncodeToString(secNonce[:]) - - testCases := []struct { - keyOrder []int - aggNonce [66]byte - expectedPartialSig []byte - tweaks []KeyTweakDesc - expectedError error - }{ - // Vector 1 - { - keyOrder: []int{0, 1, 2}, - aggNonce: aggregatedNonce, - expectedPartialSig: signExpected1, - }, - - // Vector 2 - { - keyOrder: []int{1, 0, 2}, - aggNonce: aggregatedNonce, - expectedPartialSig: signExpected2, - }, - - // Vector 3 - { - keyOrder: []int{1, 2, 0}, - aggNonce: aggregatedNonce, - expectedPartialSig: signExpected3, - }, - // Vector 4 Both halves of aggregate nonce correspond to point at infinity - { - keyOrder: []int{0, 1}, - aggNonce: mustNonceAgg([][66]byte{verifyPnonce1, verifyPnonce4}), - expectedPartialSig: signExpected4, - }, - - // Vector 5: Signer 2 provided an invalid public key - { - keyOrder: []int{1, 0, 3}, - aggNonce: aggregatedNonce, - expectedError: secp256k1.ErrPubKeyNotOnCurve, - }, - - // Vector 6: Aggregate nonce is invalid due wrong tag, 0x04, - // in the first half. - { - - keyOrder: []int{1, 2, 0}, - aggNonce: toPubNonceSlice( - mustParseHex("048465FCF0BBDBCF443AABCCE533D42" + - "B4B5A10966AC09A49655E8C42DAAB8FCD610" + - "37496A3CC86926D452CAFCFD55D25972CA16" + - "75D549310DE296BFF42F72EEEA8C9")), - expectedError: secp256k1.ErrPubKeyInvalidFormat, - }, - - // Vector 7: Aggregate nonce is invalid because the second half - // does not correspond to an X coordinate. - { - - keyOrder: []int{1, 2, 0}, - aggNonce: toPubNonceSlice( - mustParseHex("028465FCF0BBDBCF443AABCCE533D42" + - "B4B5A10966AC09A49655E8C42DAAB8FCD610" + - "200000000000000000000000000000000000" + - "00000000000000000000000000009")), - expectedError: secp256k1.ErrPubKeyNotOnCurve, - }, - - // Vector 8: Aggregate nonce is invalid because the second half - // exceeds field size. - { - - keyOrder: []int{1, 2, 0}, - aggNonce: toPubNonceSlice( - mustParseHex("028465FCF0BBDBCF443AABCCE533D42" + - "B4B5A10966AC09A49655E8C42DAAB8FCD610" + - "2FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" + - "FFFFFFFFFFFFFFFFFFFFEFFFFFC30")), - expectedError: secp256k1.ErrPubKeyXTooBig, - }, - - // A single x-only tweak. - { - keyOrder: []int{1, 2, 0}, - aggNonce: aggregatedNonce, - expectedPartialSig: mustParseHex("5e24c7496b565debc3b" + - "9639e6f1304a21597f9603d3ab05b4913641775e1375b"), - tweaks: []KeyTweakDesc{genTweakParity(tweak1, true)}, - }, - - // A single ordinary tweak. - { - keyOrder: []int{1, 2, 0}, - aggNonce: aggregatedNonce, - expectedPartialSig: mustParseHex("78408ddcab4813d1394c" + - "97d493ef1084195c1d4b52e63ecd7bc5991644e44ddd"), - tweaks: []KeyTweakDesc{genTweakParity(tweak1, false)}, - }, - - // An ordinary tweak then an x-only tweak. - { - keyOrder: []int{1, 2, 0}, - aggNonce: aggregatedNonce, - expectedPartialSig: mustParseHex("C3A829A81480E36EC3A" + - "B052964509A94EBF34210403D16B226A6F16EC85B7357"), - tweaks: []KeyTweakDesc{ - genTweakParity(tweak1, false), - genTweakParity(tweak2, true), - }, - }, - - // Four tweaks, in the order: x-only, ordinary, x-only, ordinary. 
- { - keyOrder: []int{1, 2, 0}, - aggNonce: aggregatedNonce, - expectedPartialSig: mustParseHex("8C4473C6A382BD3C4AD" + - "7BE59818DA5ED7CF8CEC4BC21996CFDA08BB4316B8BC7"), - tweaks: []KeyTweakDesc{ - genTweakParity(tweak1, true), - genTweakParity(tweak2, false), - genTweakParity(tweak3, true), - genTweakParity(tweak4, false), - }, - }, - } - - var msg [32]byte - copy(msg[:], signTestMsg) - - for _, testCase := range testCases { - testName := fmt.Sprintf("%v/tweak=%v", testCase.keyOrder, len(testCase.tweaks) != 0) - if len(testCase.tweaks) != 0 { - testName += fmt.Sprintf("/x_only=%v", formatTweakParity(testCase.tweaks)) - } - t.Run(testName, func(t *testing.T) { - keySet := make([]*btcec.PublicKey, 0, len(testCase.keyOrder)) - for _, keyIndex := range testCase.keyOrder { - keyBytes := signSetKeys[keyIndex] - pub, err := schnorr.ParsePubKey(keyBytes) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatalf("unable to parse pubkeys: %v", err) - } - - keySet = append(keySet, pub) - } - - var opts []SignOption - if len(testCase.tweaks) != 0 { - opts = append( - opts, WithTweaks(testCase.tweaks...), - ) - } - - partialSig, err := Sign( - secNonce, signSetPrivKey, testCase.aggNonce, - keySet, msg, opts..., - ) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatalf("unable to generate partial sig: %v", err) - } - - var partialSigBytes [32]byte - partialSig.S.PutBytesUnchecked(partialSigBytes[:]) - - if !bytes.Equal(partialSigBytes[:], testCase.expectedPartialSig) { - t.Fatalf("sigs don't match: expected %x, got %x", - testCase.expectedPartialSig, partialSigBytes, - ) - } - - }) - - if *dumpJson { - var ( - strKeys []string - jsonError string - ) - - for _, keyIndex := range testCase.keyOrder { - keyBytes := signSetKeys[keyIndex] - strKeys = append(strKeys, hex.EncodeToString(keyBytes)) - } - - if testCase.expectedError != nil { - jsonError = testCase.expectedError.Error() - } - - tweakSignCase := jsonTweakSignCase{ - Keys: strKeys, - ExpectedSig: hex.EncodeToString(testCase.expectedPartialSig), - AggNonce: hex.EncodeToString(testCase.aggNonce[:]), - ExpectedError: jsonError, - } - - var jsonTweaks []jsonTweak - for _, tweak := range testCase.tweaks { - jsonTweaks = append( - jsonTweaks, - jsonTweak{ - Tweak: hex.EncodeToString(tweak.Tweak[:]), - XOnly: tweak.IsXOnly, - }) - } - tweakSignCase.Tweaks = jsonTweaks - - jsonCases.TestCases = append(jsonCases.TestCases, tweakSignCase) - } - } - - if *dumpJson { - jsonBytes, err := json.Marshal(jsonCases) - if err != nil { - t.Fatalf("unable to encode json: %v", err) - } - - var formattedJson bytes.Buffer - json.Indent(&formattedJson, jsonBytes, "", "\t") - err = ioutil.WriteFile( - signTestVectorName, formattedJson.Bytes(), 0644, - ) - if err != nil { - t.Fatalf("unable to write file: %v", err) - } - } -} - -func TestMusig2PartialSigVerifyTestVectors(t *testing.T) { - t.Parallel() - - testCases := []struct { - partialSig []byte - nonces [][66]byte - pubnonceIndex int - keyOrder []int - tweaks []KeyTweakDesc - expectedError error - }{ - // A single x-only tweak. - { - keyOrder: []int{1, 2, 0}, - nonces: [][66]byte{ - verifyPnonce2, - verifyPnonce3, - verifyPnonce1, - }, - pubnonceIndex: 2, - partialSig: mustParseHex("5e24c7496b565debc3b9639e" + - "6f1304a21597f9603d3ab05b4913641775e1375b"), - tweaks: []KeyTweakDesc{genTweakParity(tweak1, true)}, - }, - // A single ordinary tweak. 
- { - keyOrder: []int{1, 2, 0}, - nonces: [][66]byte{ - verifyPnonce2, - verifyPnonce3, - verifyPnonce1, - }, - pubnonceIndex: 2, - partialSig: mustParseHex("78408ddcab4813d1394c97d4" + - "93ef1084195c1d4b52e63ecd7bc5991644e44ddd"), - tweaks: []KeyTweakDesc{genTweakParity(tweak1, false)}, - }, - // An ordinary tweak then an x-only tweak. - { - keyOrder: []int{1, 2, 0}, - nonces: [][66]byte{ - verifyPnonce2, - verifyPnonce3, - verifyPnonce1, - }, - pubnonceIndex: 2, - partialSig: mustParseHex("C3A829A81480E36EC3AB0529" + - "64509A94EBF34210403D16B226A6F16EC85B7357"), - tweaks: []KeyTweakDesc{ - genTweakParity(tweak1, false), - genTweakParity(tweak2, true), - }, - }, - - // Four tweaks, in the order: x-only, ordinary, x-only, ordinary. - { - keyOrder: []int{1, 2, 0}, - nonces: [][66]byte{ - verifyPnonce2, - verifyPnonce3, - verifyPnonce1, - }, - pubnonceIndex: 2, - partialSig: mustParseHex("8C4473C6A382BD3C4AD7BE5" + - "9818DA5ED7CF8CEC4BC21996CFDA08BB4316B8BC7"), - tweaks: []KeyTweakDesc{ - genTweakParity(tweak1, true), - genTweakParity(tweak2, false), - genTweakParity(tweak3, true), - genTweakParity(tweak4, false), - }, - }, - // Vector 9. - { - - partialSig: signExpected1, - pubnonceIndex: 0, - keyOrder: []int{0, 1, 2}, - nonces: [][66]byte{ - verifyPnonce1, - verifyPnonce2, - verifyPnonce3, - }, - }, - // Vector 10. - { - - partialSig: signExpected2, - pubnonceIndex: 1, - keyOrder: []int{1, 0, 2}, - nonces: [][66]byte{ - verifyPnonce2, - verifyPnonce1, - verifyPnonce3, - }, - }, - // Vector 11. - { - - partialSig: signExpected3, - pubnonceIndex: 2, - keyOrder: []int{1, 2, 0}, - nonces: [][66]byte{ - verifyPnonce2, - verifyPnonce3, - verifyPnonce1, - }, - }, - // Vector 12: Both halves of aggregate nonce correspond to - // point at infinity. - { - - partialSig: signExpected4, - pubnonceIndex: 0, - keyOrder: []int{0, 1}, - nonces: [][66]byte{ - verifyPnonce1, - verifyPnonce4, - }, - }, - // Vector 13: Wrong signature (which is equal to the negation - // of valid signature expected[0]). - { - - partialSig: mustParseHex("97AC833ADCB1AFA42EBF9E0" + - "725616F3C9A0D5B614F6FE283CEAAA37A8FFAF406"), - pubnonceIndex: 0, - keyOrder: []int{0, 1, 2}, - nonces: [][66]byte{ - verifyPnonce1, - verifyPnonce2, - verifyPnonce3, - }, - expectedError: ErrPartialSigInvalid, - }, - // Vector 12: Wrong signer. - { - - partialSig: signExpected1, - pubnonceIndex: 1, - keyOrder: []int{0, 1, 2}, - nonces: [][66]byte{ - verifyPnonce1, - verifyPnonce2, - verifyPnonce3, - }, - expectedError: ErrPartialSigInvalid, - }, - // Vector 13: Signature exceeds group size. - { - - partialSig: mustParseHex("FFFFFFFFFFFFFFFFFFFFFFFF" + - "FFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141"), - pubnonceIndex: 0, - keyOrder: []int{0, 1, 2}, - nonces: [][66]byte{ - verifyPnonce1, - verifyPnonce2, - verifyPnonce3, - }, - expectedError: ErrPartialSigInvalid, - }, - // Vector 14: Invalid pubnonce. - { - - partialSig: signExpected1, - pubnonceIndex: 0, - keyOrder: []int{0, 1, 2}, - nonces: [][66]byte{ - canParsePubNonce("020000000000000000000000000" + - "000000000000000000000000000000000000009"), - verifyPnonce2, - verifyPnonce3, - }, - expectedError: secp256k1.ErrPubKeyNotOnCurve, - }, - // Vector 15: Invalid public key. 
- { - - partialSig: signExpected1, - pubnonceIndex: 0, - keyOrder: []int{3, 1, 2}, - nonces: [][66]byte{ - verifyPnonce1, - verifyPnonce2, - verifyPnonce3, - }, - expectedError: secp256k1.ErrPubKeyNotOnCurve, - }, - } - - for _, testCase := range testCases { - - // todo find name - testName := fmt.Sprintf("%v/tweak=%v", testCase.pubnonceIndex, testCase.keyOrder) - - t.Run(testName, func(t *testing.T) { - - combinedNonce, err := AggregateNonces(testCase.nonces) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatalf("unable to aggregate nonces %v", err) - } - - keySet := make([]*btcec.PublicKey, 0, len(testCase.keyOrder)) - for _, keyIndex := range testCase.keyOrder { - keyBytes := signSetKeys[keyIndex] - pub, err := schnorr.ParsePubKey(keyBytes) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatalf("unable to parse pubkeys: %v", err) - } - - keySet = append(keySet, pub) - } - - ps := &PartialSignature{} - err = ps.Decode(bytes.NewBuffer(testCase.partialSig)) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatal(err) - } - - var opts []SignOption - if len(testCase.tweaks) != 0 { - opts = append( - opts, WithTweaks(testCase.tweaks...), - ) - } - - err = verifyPartialSig( - ps, - testCase.nonces[testCase.pubnonceIndex], - combinedNonce, - keySet, - signSetKeys[testCase.keyOrder[testCase.pubnonceIndex]], - to32ByteSlice(signTestMsg), - opts..., - ) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatalf("unable to aggregate nonces %v", err) - } - }) - } -} - type signer struct { privKey *btcec.PrivateKey pubKey *btcec.PublicKey @@ -993,12 +87,7 @@ func testMultiPartySign(t *testing.T, taprootTweak []byte, t.Fatalf("unable to gen priv key: %v", err) } - pubKey, err := schnorr.ParsePubKey( - schnorr.SerializePubKey(privKey.PubKey()), - ) - if err != nil { - t.Fatalf("unable to gen key: %v", err) - } + pubKey := privKey.PubKey() signerKeys[i] = privKey signSet[i] = pubKey @@ -1345,210 +434,6 @@ func TestMuSigEarlyNonce(t *testing.T) { } } -// TestMusig2NonceGenTestVectors tests the nonce generation function with -// the testvectors defined in the Musig2 BIP. 
-func TestMusig2NonceGenTestVectors(t *testing.T) { - t.Parallel() - - msg := bytes.Repeat([]byte{0x01}, 32) - sk := bytes.Repeat([]byte{0x02}, 32) - aggpk := bytes.Repeat([]byte{0x07}, 32) - extra_in := bytes.Repeat([]byte{0x08}, 32) - - testCases := []struct { - opts nonceGenOpts - expectedNonce string - }{ - { - opts: nonceGenOpts{ - randReader: &memsetRandReader{i: 0}, - secretKey: sk[:], - combinedKey: aggpk[:], - auxInput: extra_in[:], - msg: msg[:], - }, - expectedNonce: "E8F2E103D86800F19A4E97338D371CB885DB2" + - "F19D08C0BD205BBA9B906C971D0D786A17718AAFAD6D" + - "E025DDDD99DC823E2DFC1AE1DDFE920888AD53FFF423FC4", - }, - { - opts: nonceGenOpts{ - randReader: &memsetRandReader{i: 0}, - secretKey: sk[:], - combinedKey: aggpk[:], - auxInput: extra_in[:], - msg: nil, - }, - expectedNonce: "8A633F5EECBDB690A6BE4921426F41BE78D50" + - "9DC1CE894C1215844C0E4C6DE7ABC9A5BE0A3BF3FE31" + - "2CCB7E4817D2CB17A7CEA8382B73A99A583E323387B3C32", - }, - { - opts: nonceGenOpts{ - randReader: &memsetRandReader{i: 0}, - secretKey: nil, - combinedKey: nil, - auxInput: nil, - msg: nil, - }, - expectedNonce: "7B3B5A002356471AF0E961DE2549C121BD0D4" + - "8ABCEEDC6E034BDDF86AD3E0A187ECEE674CEF7364B0" + - "BC4BEEFB8B66CAD89F98DE2F8C5A5EAD5D1D1E4BD7D04CD", - }, - } - - for _, testCase := range testCases { - nonce, err := GenNonces(withCustomOptions(testCase.opts)) - if err != nil { - t.Fatalf("err gen nonce aux bytes %v", err) - } - - expectedBytes, _ := hex.DecodeString(testCase.expectedNonce) - if !bytes.Equal(nonce.SecNonce[:], expectedBytes) { - - t.Fatalf("nonces don't match: expected %x, got %x", - expectedBytes, nonce.SecNonce[:]) - } - } - -} - -var ( - pNonce1, _ = hex.DecodeString("020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E666" + - "03BA47FBC1834437B3212E89A84D8425E7BF12E0245D98262268EBDCB385D50641") - pNonce2, _ = hex.DecodeString("03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A6" + - "0248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833") - - expectedNonce, _ = hex.DecodeString("035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B" + - "024725377345BDE0E9C33AF3C43C0A29A9249F2F2956FA8CFEB55C8573D0262DC8") - - invalidNonce1, _ = hex.DecodeString("04FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A6" + "0248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833") - invalidNonce2, _ = hex.DecodeString("03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A6" + "0248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B831") - invalidNonce3, _ = hex.DecodeString("03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A6" + "02FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30") -) - -type jsonNonceAggTestCase struct { - Nonces []string `json:"nonces"` - ExpectedNonce string `json:"expected_key"` - ExpectedError string `json:"expected_error"` -} - -func TestMusig2AggregateNoncesTestVectors(t *testing.T) { - t.Parallel() - - var jsonCases []jsonNonceAggTestCase - - testCases := []struct { - nonces [][]byte - expectedNonce []byte - expectedError error - }{ - // Vector 1: Valid. - { - nonces: [][]byte{pNonce1, pNonce2}, - expectedNonce: expectedNonce, - }, - - // Vector 2: Public nonce from signer 1 is invalid due wrong - // tag, 0x04, inthe first half. - { - nonces: [][]byte{pNonce1, invalidNonce1}, - expectedError: secp256k1.ErrPubKeyInvalidFormat, - }, - - // Vector 3: Public nonce from signer 0 is invalid because the - // second half does not correspond to an X coordinate. 
- { - nonces: [][]byte{invalidNonce2, pNonce2}, - expectedError: secp256k1.ErrPubKeyNotOnCurve, - }, - - // Vector 4: Public nonce from signer 0 is invalid because - // second half exceeds field size. - { - nonces: [][]byte{invalidNonce3, pNonce2}, - expectedError: secp256k1.ErrPubKeyXTooBig, - }, - - // Vector 5: Sum of second points encoded in the nonces would - // be point at infinity, therefore set sum to base point G. - { - nonces: [][]byte{ - append( - append([]byte{}, pNonce1[0:33]...), - getGBytes()..., - ), - append( - append([]byte{}, pNonce2[0:33]...), - getNegGBytes()..., - ), - }, - expectedNonce: append( - append([]byte{}, expectedNonce[0:33]...), - getInfinityBytes()..., - ), - }, - } - for i, testCase := range testCases { - testName := fmt.Sprintf("Vector %v", i+1) - t.Run(testName, func(t *testing.T) { - var ( - nonces [][66]byte - strNonces []string - jsonError string - ) - for _, nonce := range testCase.nonces { - nonces = append(nonces, toPubNonceSlice(nonce)) - strNonces = append(strNonces, hex.EncodeToString(nonce)) - } - - if testCase.expectedError != nil { - jsonError = testCase.expectedError.Error() - } - - jsonCases = append(jsonCases, jsonNonceAggTestCase{ - Nonces: strNonces, - ExpectedNonce: hex.EncodeToString(expectedNonce), - ExpectedError: jsonError, - }) - - aggregatedNonce, err := AggregateNonces(nonces) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatalf("aggregating nonce error: %v", err) - } - - if !bytes.Equal(testCase.expectedNonce, aggregatedNonce[:]) { - t.Fatalf("case: #%v, invalid nonce aggregation: "+ - "expected %x, got %x", i, testCase.expectedNonce, - aggregatedNonce) - } - - }) - } - - if *dumpJson { - jsonBytes, err := json.Marshal(jsonCases) - if err != nil { - t.Fatalf("unable to encode json: %v", err) - } - - var formattedJson bytes.Buffer - json.Indent(&formattedJson, jsonBytes, "", "\t") - err = ioutil.WriteFile( - nonceAggTestVectorName, formattedJson.Bytes(), 0644, - ) - if err != nil { - t.Fatalf("unable to write file: %v", err) - } - } -} - type memsetRandReader struct { i int } @@ -1559,339 +444,3 @@ func (mr *memsetRandReader) Read(buf []byte) (n int, err error) { } return len(buf), nil } - -var ( - combineSigKey0 = mustParseHex("487D1B83B41B4CBBD07A111F1BBC7BDC8864CF" + - "EF5DBF96E46E51C68399B0BEF6") - combineSigKey1 = mustParseHex("4795C22501BF534BC478FF619407A7EC9E8D88" + - "83646D69BD43A0728944EA802F") - combineSigKey2 = mustParseHex("0F5BE837F3AB7E7FEFF1FAA44D673C2017206A" + - "E836D2C7893CDE4ACB7D55EDEB") - combineSigKey3 = mustParseHex("0FD453223E444FCA91FB5310990AE8A0C5DAA1" + - "4D2A4C8944E1C0BC80C30DF682") - - combineSigKeys = [][]byte{combineSigKey0, combineSigKey1, - combineSigKey2, combineSigKey3} - - combineSigAggNonce0 = mustParsePubNonce("024FA51009A56F0D6DF737131CE1" + - "FBBD833797AF3B4FE6BF0D68F4D49F68B0947E0248FB3BB9191F0CFF1380" + - "6A3A2F1429C23012654FCE4E41F7EC9169EAA6056B21") - combineSigAggNonce1 = mustParsePubNonce("023B11E63E2460E5E0F1561BB700" + - "FEA95B991DD9CA2CBBE92A3960641FA7469F6702CA4CD38375FE8BEB857C" + - "770807225BFC7D712F42BA896B83FC71138E56409B21") - combineSigAggNonce2 = mustParsePubNonce("03F98BEAA32B8A38FE3797C4E813" + - "DC9CE05ADBE32200035FB37EB0A030B735E9B6030E6118EC98EA2BA7A358" + - "C2E38E7E13E63681EEB683E067061BF7D52DCF08E615") - combineSigAggNonce3 = mustParsePubNonce("026491FBCFD47148043A0F7310E6" + - "2EF898C10F2D0376EE6B232EAAD36F3C2E29E303020CB17D168908E2904D" + - 
"E2EB571CD232CA805A6981D0F86CDBBD2F12BD91F6D0") - - psig0 = mustParseHex("E5C1CBD6E7E89FE9EE30D5F3B6D06B9C218846E4A1DEF4E" + - "E851410D51ABBD850") - psig1 = mustParseHex("9BC470F7F1C9BC848BDF179B0023282FFEF40908E0EF884" + - "59784A4355FC86D0C") - psig2 = mustParseHex("D5D8A09929BA264B2F5DF15ACA1CF2DEFA47C048DF0C323" + - "2E965FFE2F2831B1D") - psig3 = mustParseHex("A915197503C1051EA77DC91F01C3A0E60BFD64473BD536C" + - "B613F9645BD61C843") - psig4 = mustParseHex("99A144D7076A128022134E036B8BDF33811F7EAED9A1E48" + - "549B46D8A63D64DC9") - psig5 = mustParseHex("716A72A0C1E531EBB4555C8E29FD35C796F4F231C3B0391" + - "93D7E8D7AEFBDF5F7") - psig6 = mustParseHex("06B6DD04BC0F1EF740916730AD7DAC794255B1612217197" + - "65BDE9686A26633DC") - psig7 = mustParseHex("BF6D85D4930062726EBC6EBB184AFD68DBB3FED159C5019" + - "89690A62600D6FBAB") - - combineSigExpected0 = mustParseHex("4006D4D069F3B51E968762FF8074153E2" + - "78E5BCD221AABE0743CA001B77E79F581863CCED9B25C6E7A0FED8EB6F39" + - "3CD65CD7306D385DCF85CC6567DAA4E041B") - combineSigExpected1 = mustParseHex("98BCD40DFD94B47A3DA37D7B78EB6CCE8" + - "ABEACA23C3ADE6F4678902410EB35C67EEDBA0E2D7B2B69D6DBBA79CBE09" + - "3C64B9647A96B98C8C28AD3379BDFAEA21F") - combineSigExpected2 = mustParseHex("3741FEDCCDD7508B58DCB9A780FF5D974" + - "52EC8C0448D8C97004EA7175C14F2007A54D1DE356EBA6719278436EF111" + - "DFA8F1B832368371B9B7A25001709039679") - combineSigExpected3 = mustParseHex("F4B3DA3CF0D0F7CF5C1840593BF1A1A41" + - "5DA341619AE848F2210696DC8C7512540962C84EF7F0CEC491065F2D5772" + - "13CF10E8A63D153297361B3B172BE27B61F") - - combineSigTweak0 = mustParseHex32("B511DA492182A91B0FFB9A98020D55F260" + - "AE86D7ECBD0399C7383D59A5F2AF7C") - combineSigTweak1 = mustParseHex32("A815FE049EE3C5AAB66310477FBC8BCCCA" + - "C2F3395F59F921C364ACD78A2F48DC") - combineSigTweak2 = mustParseHex32("75448A87274B056468B977BE06EB1E9F65" + - "7577B7320B0A3376EA51FD420D18A8") - tweak0False = KeyTweakDesc{ - Tweak: combineSigTweak0, - IsXOnly: false, - } - tweak0True = KeyTweakDesc{ - Tweak: combineSigTweak0, - IsXOnly: true, - } - tweak1False = KeyTweakDesc{ - Tweak: combineSigTweak1, - IsXOnly: false, - } - tweak2True = KeyTweakDesc{ - Tweak: combineSigTweak2, - IsXOnly: true, - } - combineSigsMsg = mustParseHex32("599C67EA410D005B9DA90817CF03ED3B1C86" + - "8E4DA4EDF00A5880B0082C237869") -) - -func TestMusig2CombineSigsTestVectors(t *testing.T) { - - testCases := []struct { - partialSigs [][]byte - aggNonce [66]byte - keyOrder []int - expected []byte - tweaks []KeyTweakDesc - expectedError error - }{ - // Vector 1 - { - partialSigs: [][]byte{psig0, psig1}, - aggNonce: combineSigAggNonce0, - keyOrder: []int{0, 1}, - expected: combineSigExpected0, - }, - // Vector 2 - { - partialSigs: [][]byte{psig2, psig3}, - aggNonce: combineSigAggNonce1, - keyOrder: []int{0, 2}, - expected: combineSigExpected1, - }, - // Vector 3 - { - partialSigs: [][]byte{psig4, psig5}, - aggNonce: combineSigAggNonce2, - keyOrder: []int{0, 2}, - expected: combineSigExpected2, - tweaks: []KeyTweakDesc{tweak0False}, - }, - // Vector 4 - { - partialSigs: [][]byte{psig6, psig7}, - aggNonce: combineSigAggNonce3, - keyOrder: []int{0, 3}, - expected: combineSigExpected3, - tweaks: []KeyTweakDesc{ - tweak0True, - tweak1False, - tweak2True, - }, - }, - // Vector 5: Partial signature is invalid because it exceeds group size - { - partialSigs: [][]byte{ - psig7, - mustParseHex("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" + - "EBAAEDCE6AF48A03BBFD25E8CD0364141"), - }, - aggNonce: combineSigAggNonce3, - expectedError: ErrPartialSigInvalid, - }, - } 
- - for _, testCase := range testCases { - var pSigs []*PartialSignature - for _, partialSig := range testCase.partialSigs { - pSig := &PartialSignature{} - err := pSig.Decode(bytes.NewReader(partialSig)) - - switch { - case testCase.expectedError != nil && - errors.Is(err, testCase.expectedError): - - return - case err != nil: - t.Fatal(err) - } - - pSigs = append(pSigs, pSig) - } - - keySet := make([]*btcec.PublicKey, 0, len(testCase.keyOrder)) - for _, keyIndex := range testCase.keyOrder { - keyBytes := combineSigKeys[keyIndex] - pub, err := schnorr.ParsePubKey(keyBytes) - if err != nil { - t.Fatalf("unable to parse pubkeys: %v", err) - } - keySet = append(keySet, pub) - } - - uniqueKeyIndex := secondUniqueKeyIndex(keySet, false) - aggOpts := []KeyAggOption{ - WithUniqueKeyIndex(uniqueKeyIndex), - } - if len(testCase.tweaks) > 0 { - aggOpts = append(aggOpts, WithKeyTweaks(testCase.tweaks...)) - } - - combinedKey, _, _, err := AggregateKeys( - keySet, false, aggOpts..., - ) - if err != nil { - t.Fatal(err) - } - - aggPubkey, err := aggNonceToPubkey( - testCase.aggNonce, combinedKey, combineSigsMsg, - ) - if err != nil { - t.Fatal(err) - } - - var opts []CombineOption - if len(testCase.tweaks) > 0 { - opts = append(opts, WithTweakedCombine( - combineSigsMsg, keySet, testCase.tweaks, false, - )) - } - - sig := CombineSigs(aggPubkey, pSigs, opts...) - expectedSig, err := schnorr.ParseSignature(testCase.expected) - if err != nil { - t.Fatal(err) - } - - if !bytes.Equal(sig.Serialize(), expectedSig.Serialize()) { - t.Fatalf("sigs not expected %x \n got %x", expectedSig.Serialize(), sig.Serialize()) - } - - if !sig.Verify(combineSigsMsg[:], combinedKey.FinalKey) { - t.Fatal("sig not valid for m") - } - } -} - -// aggNonceToPubkey gets a nonce as a public key for the TestMusig2CombineSigsTestVectors -// test. -// TODO(sputn1ck): build into intermediate routine. 
-func aggNonceToPubkey(combinedNonce [66]byte, combinedKey *AggregateKey, - msg [32]byte) (*btcec.PublicKey, error) { - - // b = int_from_bytes(tagged_hash('MuSig/noncecoef', aggnonce + bytes_from_point(Q) + msg)) % n - var ( - nonceMsgBuf bytes.Buffer - nonceBlinder btcec.ModNScalar - ) - nonceMsgBuf.Write(combinedNonce[:]) - nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey.FinalKey)) - nonceMsgBuf.Write(msg[:]) - nonceBlindHash := chainhash.TaggedHash(NonceBlindTag, nonceMsgBuf.Bytes()) - nonceBlinder.SetByteSlice(nonceBlindHash[:]) - - r1, err := btcec.ParsePubKey( - combinedNonce[:btcec.PubKeyBytesLenCompressed], - ) - if err != nil { - return nil, err - } - - r2, err := btcec.ParsePubKey( - combinedNonce[btcec.PubKeyBytesLenCompressed:], - ) - if err != nil { - return nil, err - } - - var nonce, r1J, r2J btcec.JacobianPoint - r1.AsJacobian(&r1J) - r2.AsJacobian(&r2J) - - // With our nonce blinding value, we'll now combine both the public - // nonces, using the blinding factor to tweak the second nonce: - // * R = R_1 + b*R_2 - btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J) - btcec.AddNonConst(&r1J, &r2J, &nonce) - - nonce.ToAffine() - - return btcec.NewPublicKey( - &nonce.X, &nonce.Y, - ), nil - -} - -func mustNonceAgg(nonces [][66]byte) [66]byte { - aggNonce, err := AggregateNonces(nonces) - if err != nil { - panic("can't aggregate nonces") - } - return aggNonce -} - -func memsetLoop(a []byte, v uint8) { - for i := range a { - a[i] = byte(v) - } -} - -func to32ByteSlice(input []byte) [32]byte { - if len(input) != 32 { - panic("input byte slice has invalid length") - } - var output [32]byte - copy(output[:], input) - return output -} - -func toPubNonceSlice(input []byte) [PubNonceSize]byte { - var output [PubNonceSize]byte - copy(output[:], input) - - return output -} - -func getGBytes() []byte { - return btcec.Generator().SerializeCompressed() -} - -func getNegGBytes() []byte { - pk := getGBytes() - pk[0] = 0x3 - - return pk -} - -func getInfinityBytes() []byte { - return make([]byte, 33) -} - -func mustParseHex32(str string) [32]byte { - b, err := hex.DecodeString(str) - if err != nil { - panic(fmt.Errorf("unable to parse hex: %v", err)) - } - if len(b) != 32 { - panic(fmt.Errorf("not a 32 byte slice: %v", err)) - } - - return to32ByteSlice(b) -} - -func mustParsePubNonce(str string) [PubNonceSize]byte { - b, err := hex.DecodeString(str) - if err != nil { - panic(fmt.Errorf("unable to parse hex: %v", err)) - } - if len(b) != PubNonceSize { - panic(fmt.Errorf("not a public nonce: %v", err)) - } - return toPubNonceSlice(b) -} - -func canParsePubNonce(str string) [PubNonceSize]byte { - b, err := hex.DecodeString(str) - if err != nil { - panic(fmt.Errorf("unable to parse hex: %v", err)) - } - return toPubNonceSlice(b) -} diff --git a/btcec/schnorr/musig2/nonces.go b/btcec/schnorr/musig2/nonces.go index 66d2cb9c22..4b2509a754 100644 --- a/btcec/schnorr/musig2/nonces.go +++ b/btcec/schnorr/musig2/nonces.go @@ -184,13 +184,15 @@ func withCustomOptions(customOpts nonceGenOpts) NonceGenOption { o.randReader = customOpts.randReader o.secretKey = customOpts.secretKey o.combinedKey = customOpts.combinedKey - o.msg = customOpts.msg o.auxInput = customOpts.auxInput + o.msg = customOpts.msg } } // lengthWriter is a function closure that allows a caller to control how the // length prefix of a byte slice is written. 
+// +// TODO(roasbeef): use type params once we bump repo version type lengthWriter func(w io.Writer, b []byte) error // uint8Writer is an implementation of lengthWriter that writes the length of @@ -205,6 +207,12 @@ func uint32Writer(w io.Writer, b []byte) error { return binary.Write(w, byteOrder, uint32(len(b))) } +// uint32Writer is an implementation of lengthWriter that writes the length of +// the byte slice using 8 bytes. +func uint64Writer(w io.Writer, b []byte) error { + return binary.Write(w, byteOrder, uint64(len(b))) +} + // writeBytesPrefix is used to write out: len(b) || b, to the passed io.Writer. // The lengthWriter function closure is used to allow the caller to specify the // precise byte packing of the length. @@ -225,10 +233,12 @@ func writeBytesPrefix(w io.Writer, b []byte, lenWriter lengthWriter) error { // genNonceAuxBytes writes out the full byte string used to derive a secret // nonce based on some initial randomness as well as the series of optional // fields. The byte string used for derivation is: -// * tagged_hash("MuSig/nonce", rand || len(aggpk) || aggpk || len(m) -// || m || len(in) || in || i). +// - tagged_hash("MuSig/nonce", rand || len(aggpk) || aggpk || m_prefixed +// || len(in) || in || i). // -// where i is the ith secret nonce being generated. +// where i is the ith secret nonce being generated and m_prefixed is: +// - bytes(1, 0) if the message is blank +// - bytes(1, 1) || bytes(8, len(m)) || m if the message is present. func genNonceAuxBytes(rand []byte, i int, opts *nonceGenOpts) (*chainhash.Hash, error) { @@ -245,10 +255,28 @@ func genNonceAuxBytes(rand []byte, i int, return nil, err } - // Next, we'll write out the length prefixed message. - err = writeBytesPrefix(&w, opts.msg, uint8Writer) - if err != nil { - return nil, err + switch { + // If the message isn't present, then we'll just write out a single + // uint8 of a zero byte: m_prefixed = bytes(1, 0). + case opts.msg == nil: + if _, err := w.Write([]byte{0x00}); err != nil { + return nil, err + } + + // Otherwise, we'll write a single byte of 0x01 with a 1 byte length + // prefix, followed by the message itself with an 8 byte length prefix: + // m_prefixed = bytes(1, 1) || bytes(8, len(m)) || m. + case len(opts.msg) == 0: + fallthrough + default: + if _, err := w.Write([]byte{0x01}); err != nil { + return nil, err + } + + err = writeBytesPrefix(&w, opts.msg, uint64Writer) + if err != nil { + return nil, err + } } // Finally we'll write out the auxiliary input. diff --git a/btcec/schnorr/musig2/nonces_test.go b/btcec/schnorr/musig2/nonces_test.go new file mode 100644 index 0000000000..bdb76dbd35 --- /dev/null +++ b/btcec/schnorr/musig2/nonces_test.go @@ -0,0 +1,164 @@ +// Copyright 2013-2022 The btcsuite developers + +package musig2 + +import ( + "bytes" + "encoding/hex" + "encoding/json" + "fmt" + "os" + "path" + "testing" + + "github.com/stretchr/testify/require" +) + +type nonceGenTestCase struct { + Rand string `json:"rand_"` + Sk string `json:"sk"` + AggPk string `json:"aggpk"` + Msg *string `json:"msg"` + ExtraIn string `json:"extra_in"` + + Expected string `json:"expected"` +} + +type nonceGenTestCases struct { + TestCases []nonceGenTestCase `json:"test_cases"` +} + +const ( + nonceGenTestVectorsFileName = "nonce_gen_vectors.json" + nonceAggTestVectorsFileName = "nonce_agg_vectors.json" +) + +// TestMusig2NonceGenTestVectors tests the nonce generation function with the +// testvectors defined in the Musig2 BIP. 
+func TestMusig2NonceGenTestVectors(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, nonceGenTestVectorsFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCases nonceGenTestCases + require.NoError(t, json.Unmarshal(testVectorBytes, &testCases)) + + for i, testCase := range testCases.TestCases { + testCase := testCase + + customOpts := nonceGenOpts{ + randReader: &memsetRandReader{i: 0}, + secretKey: mustParseHex(testCase.Sk), + combinedKey: mustParseHex(testCase.AggPk), + auxInput: mustParseHex(testCase.ExtraIn), + } + if testCase.Msg != nil { + customOpts.msg = mustParseHex(*testCase.Msg) + } + + t.Run(fmt.Sprintf("test_case=%v", i), func(t *testing.T) { + nonce, err := GenNonces(withCustomOptions(customOpts)) + if err != nil { + t.Fatalf("err gen nonce aux bytes %v", err) + } + + expectedBytes, _ := hex.DecodeString(testCase.Expected) + if !bytes.Equal(nonce.SecNonce[:], expectedBytes) { + + t.Fatalf("nonces don't match: expected %x, got %x", + expectedBytes, nonce.SecNonce[:]) + } + }) + } +} + +type nonceAggError struct { + Type string `json:"type"` + Signer int `json:"signer"` + Contrib string `json:"contrib"` +} + +type nonceAggValidCase struct { + Indices []int `json:"pnonce_indices"` + + Expected string `json:"expected"` + + Comment string `json:"comment"` +} + +type nonceAggInvalidCase struct { + Indices []int `json:"pnonce_indices"` + + Error nonceAggError `json:"error"` + + Comment string `json:"comment"` + + ExpectedErr string `json:"btcec_err"` +} + +type nonceAggTestCases struct { + Nonces []string `json:"pnonces"` + + ValidCases []nonceAggValidCase `json:"valid_test_cases"` + + InvalidCases []nonceAggInvalidCase `json:"error_test_cases"` +} + +// TestMusig2AggregateNoncesTestVectors tests that the musig2 implementation +// passes the nonce aggregration test vectors for musig2 1.0. 
+func TestMusig2AggregateNoncesTestVectors(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, nonceAggTestVectorsFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCases nonceAggTestCases + require.NoError(t, json.Unmarshal(testVectorBytes, &testCases)) + + nonces := make([][PubNonceSize]byte, len(testCases.Nonces)) + for i := range testCases.Nonces { + var nonce [PubNonceSize]byte + copy(nonce[:], mustParseHex(testCases.Nonces[i])) + + nonces[i] = nonce + } + + for i, testCase := range testCases.ValidCases { + testCase := testCase + + var testNonces [][PubNonceSize]byte + for _, idx := range testCase.Indices { + testNonces = append(testNonces, nonces[idx]) + } + + t.Run(fmt.Sprintf("valid_case=%v", i), func(t *testing.T) { + aggregatedNonce, err := AggregateNonces(testNonces) + require.NoError(t, err) + + var expectedNonce [PubNonceSize]byte + copy(expectedNonce[:], mustParseHex(testCase.Expected)) + + require.Equal(t, aggregatedNonce[:], expectedNonce[:]) + }) + } + + for i, testCase := range testCases.InvalidCases { + var testNonces [][PubNonceSize]byte + for _, idx := range testCase.Indices { + testNonces = append(testNonces, nonces[idx]) + } + + t.Run(fmt.Sprintf("invalid_case=%v", i), func(t *testing.T) { + _, err := AggregateNonces(testNonces) + require.True(t, err != nil) + require.Equal(t, testCase.ExpectedErr, err.Error()) + }) + } +} diff --git a/btcec/schnorr/musig2/sign.go b/btcec/schnorr/musig2/sign.go index fce61aa8a8..2028571137 100644 --- a/btcec/schnorr/musig2/sign.go +++ b/btcec/schnorr/musig2/sign.go @@ -186,6 +186,58 @@ func WithBip86SignTweak() SignOption { } } +// computeSigningNonce calculates the final nonce used for signing. This will +// be the R value used in the final signature. +func computeSigningNonce(combinedNonce [PubNonceSize]byte, + combinedKey *btcec.PublicKey, msg [32]byte) ( + *btcec.JacobianPoint, *btcec.ModNScalar, error) { + + // Next we'll compute the value b, that blinds our second public + // nonce: + // * b = h(tag=NonceBlindTag, combinedNonce || combinedKey || m). + var ( + nonceMsgBuf bytes.Buffer + nonceBlinder btcec.ModNScalar + ) + nonceMsgBuf.Write(combinedNonce[:]) + nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey)) + nonceMsgBuf.Write(msg[:]) + nonceBlindHash := chainhash.TaggedHash( + NonceBlindTag, nonceMsgBuf.Bytes(), + ) + nonceBlinder.SetByteSlice(nonceBlindHash[:]) + + // Next, we'll parse the public nonces into R1 and R2. + r1J, err := btcec.ParseJacobian( + combinedNonce[:btcec.PubKeyBytesLenCompressed], + ) + if err != nil { + return nil, nil, err + } + r2J, err := btcec.ParseJacobian( + combinedNonce[btcec.PubKeyBytesLenCompressed:], + ) + if err != nil { + return nil, nil, err + } + + // With our nonce blinding value, we'll now combine both the public + // nonces, using the blinding factor to tweak the second nonce: + // * R = R_1 + b*R_2 + var nonce btcec.JacobianPoint + btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J) + btcec.AddNonConst(&r1J, &r2J, &nonce) + + // If the combined nonce is the point at infinity, we'll use the + // generator point instead. + if nonce == infinityPoint { + G := btcec.Generator() + G.AsJacobian(&nonce) + } + + return &nonce, &nonceBlinder, nil +} + // Sign generates a musig2 partial signature given the passed key set, secret // nonce, public nonce, and private keys. 
This method returns an error if the // generated nonces are either too large, or end up mapping to the point at @@ -230,48 +282,16 @@ func Sign(secNonce [SecNonceSize]byte, privKey *btcec.PrivateKey, return nil, err } - // Next we'll compute the value b, that blinds our second public - // nonce: - // * b = h(tag=NonceBlindTag, combinedNonce || combinedKey || m). - var ( - nonceMsgBuf bytes.Buffer - nonceBlinder btcec.ModNScalar - ) - nonceMsgBuf.Write(combinedNonce[:]) - nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey.FinalKey)) - nonceMsgBuf.Write(msg[:]) - nonceBlindHash := chainhash.TaggedHash( - NonceBlindTag, nonceMsgBuf.Bytes(), - ) - nonceBlinder.SetByteSlice(nonceBlindHash[:]) - - // Next, we'll parse the public nonces into R1 and R2. - r1J, err := btcec.ParseJacobian( - combinedNonce[:btcec.PubKeyBytesLenCompressed], - ) - if err != nil { - return nil, err - } - r2J, err := btcec.ParseJacobian( - combinedNonce[btcec.PubKeyBytesLenCompressed:], + // We'll now combine both the public nonces, using the blinding factor + // to tweak the second nonce: + // * R = R_1 + b*R_2 + nonce, nonceBlinder, err := computeSigningNonce( + combinedNonce, combinedKey.FinalKey, msg, ) if err != nil { return nil, err } - // With our nonce blinding value, we'll now combine both the public - // nonces, using the blinding factor to tweak the second nonce: - // * R = R_1 + b*R_2 - var nonce btcec.JacobianPoint - btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J) - btcec.AddNonConst(&r1J, &r2J, &nonce) - - // If the combined nonce it eh point at infinity, then we'll bail out. - if nonce == infinityPoint { - G := btcec.Generator() - G.AsJacobian(&nonce) - } - // Next we'll parse out our two secret nonces, which we'll be using in // the core signing process below. var k1, k2 btcec.ModNScalar @@ -299,31 +319,22 @@ func Sign(secNonce [SecNonceSize]byte, privKey *btcec.PrivateKey, } pubKey := privKey.PubKey() - pubKeyYIsOdd := func() bool { - pubKeyBytes := pubKey.SerializeCompressed() - return pubKeyBytes[0] == secp.PubKeyFormatCompressedOdd - }() combinedKeyYIsOdd := func() bool { combinedKeyBytes := combinedKey.FinalKey.SerializeCompressed() return combinedKeyBytes[0] == secp.PubKeyFormatCompressedOdd }() - // Next we'll compute our two parity factors for Q the combined public - // key, and P, the public key we're signing with. If the keys are odd, - // then we'll negate them. + // Next we'll compute the two parity factors for Q, the combined key. + // If the key is odd, then we'll negate it. parityCombinedKey := new(btcec.ModNScalar).SetInt(1) - paritySignKey := new(btcec.ModNScalar).SetInt(1) if combinedKeyYIsOdd { parityCombinedKey.Negate() } - if pubKeyYIsOdd { - paritySignKey.Negate() - } // Before we sign below, we'll multiply by our various parity factors // to ensure that the signing key is properly negated (if necessary): - // * d = gv⋅gaccv⋅gp⋅d' - privKeyScalar.Mul(parityCombinedKey).Mul(paritySignKey).Mul(parityAcc) + // * d = g⋅gacc⋅d' + privKeyScalar.Mul(parityCombinedKey).Mul(parityAcc) // Next we'll create the challenge hash that commits to the combined // nonce, combined public key and also the message: @@ -345,7 +356,7 @@ func Sign(secNonce [SecNonceSize]byte, privKey *btcec.PrivateKey, // With mu constructed, we can finally generate our partial signature // as: s = (k1_1 + b*k_2 + e*a*d) mod n. 
s := new(btcec.ModNScalar) - s.Add(&k1).Add(k2.Mul(&nonceBlinder)).Add(e.Mul(a).Mul(&privKeyScalar)) + s.Add(&k1).Add(k2.Mul(nonceBlinder)).Add(e.Mul(a).Mul(&privKeyScalar)) sig := NewPartialSignature(s, nonceKey) @@ -372,7 +383,7 @@ func (p *PartialSignature) Verify(pubNonce [PubNonceSize]byte, combinedNonce [PubNonceSize]byte, keySet []*btcec.PublicKey, signingKey *btcec.PublicKey, msg [32]byte, signOpts ...SignOption) bool { - pubKey := schnorr.SerializePubKey(signingKey) + pubKey := signingKey.SerializeCompressed() return verifyPartialSig( p, pubNonce, combinedNonce, keySet, pubKey, msg, signOpts..., @@ -398,7 +409,6 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte, // Next we'll parse out the two public nonces into something we can // use. // - // Compute the hash of all the keys here as we'll need it do aggregate // the keys and also at the final step of verification. keysHash := keyHashFingerprint(keySet, opts.sortKeys) @@ -458,7 +468,6 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte, // With our nonce blinding value, we'll now combine both the public // nonces, using the blinding factor to tweak the second nonce: // * R = R_1 + b*R_2 - var nonce btcec.JacobianPoint btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J) btcec.AddNonConst(&r1J, &r2J, &nonce) @@ -516,7 +525,7 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte, var e btcec.ModNScalar e.SetByteSlice(challengeBytes[:]) - signingKey, err := schnorr.ParsePubKey(pubKey) + signingKey, err := btcec.ParsePubKey(pubKey) if err != nil { return err } @@ -527,27 +536,24 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte, // If the combined key has an odd y coordinate, then we'll negate // parity factor for the signing key. - paritySignKey := new(btcec.ModNScalar).SetInt(1) + parityCombinedKey := new(btcec.ModNScalar).SetInt(1) combinedKeyBytes := combinedKey.FinalKey.SerializeCompressed() if combinedKeyBytes[0] == secp.PubKeyFormatCompressedOdd { - paritySignKey.Negate() + parityCombinedKey.Negate() } // Next, we'll construct the final parity factor by multiplying the // sign key parity factor with the accumulated parity factor for all // the keys. - finalParityFactor := paritySignKey.Mul(parityAcc) + finalParityFactor := parityCombinedKey.Mul(parityAcc) - // Now we'll multiply the parity factor by our signing key, which'll - // take care of the amount of negation needed. var signKeyJ btcec.JacobianPoint signingKey.AsJacobian(&signKeyJ) - btcec.ScalarMultNonConst(finalParityFactor, &signKeyJ, &signKeyJ) - // In the final set, we'll check that: s*G == R' + e*a*P. + // In the final set, we'll check that: s*G == R' + e*a*g*P. 
var sG, rP btcec.JacobianPoint btcec.ScalarBaseMultNonConst(s, &sG) - btcec.ScalarMultNonConst(e.Mul(a), &signKeyJ, &rP) + btcec.ScalarMultNonConst(e.Mul(a).Mul(finalParityFactor), &signKeyJ, &rP) btcec.AddNonConst(&rP, &pubNonceJ, &rP) sG.ToAffine() diff --git a/btcec/schnorr/musig2/sign_test.go b/btcec/schnorr/musig2/sign_test.go new file mode 100644 index 0000000000..a7f5d79d5d --- /dev/null +++ b/btcec/schnorr/musig2/sign_test.go @@ -0,0 +1,391 @@ +// Copyright 2013-2022 The btcsuite developers + +package musig2 + +import ( + "bytes" + "encoding/hex" + "encoding/json" + "fmt" + "os" + "path" + "strings" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + secp "github.com/decred/dcrd/dcrec/secp256k1/v4" + "github.com/stretchr/testify/require" +) + +const ( + signVerifyTestVectorFileName = "sign_verify_vectors.json" + + sigCombineTestVectorFileName = "sig_agg_vectors.json" +) + +type signVerifyValidCase struct { + Indices []int `json:"key_indices"` + + NonceIndices []int `json:"nonce_indices"` + + AggNonceIndex int `json:"aggnonce_index"` + + MsgIndex int `json:"msg_index"` + + SignerIndex int `json:"signer_index"` + + Expected string `json:"expected"` +} + +type signErrorCase struct { + Indices []int `json:"key_indices"` + + AggNonceIndex int `json:"aggnonce_index"` + + MsgIndex int `json:"msg_index"` + + SecNonceIndex int `json:"secnonce_index"` + + Comment string `json:"comment"` +} + +type verifyFailCase struct { + Sig string `json:"sig"` + + Indices []int `json:"key_indices"` + + NonceIndices []int `json:"nonce_indices"` + + MsgIndex int `json:"msg_index"` + + SignerIndex int `json:"signer_index"` + + Comment string `json:"comment"` +} + +type verifyErrorCase struct { + Sig string `json:"sig"` + + Indices []int `json:"key_indices"` + + NonceIndices []int `json:"nonce_indices"` + + MsgIndex int `json:"msg_index"` + + SignerIndex int `json:"signer_index"` + + Comment string `json:"comment"` +} + +type signVerifyTestVectors struct { + PrivKey string `json:"sk"` + + PubKeys []string `json:"pubkeys"` + + PrivNonces []string `json:"secnonces"` + + PubNonces []string `json:"pnonces"` + + AggNonces []string `json:"aggnonces"` + + Msgs []string `json:"msgs"` + + ValidCases []signVerifyValidCase `json:"valid_test_cases"` + + SignErrorCases []signErrorCase `json:"sign_error_test_cases"` + + VerifyFailCases []verifyFailCase `json:"verify_fail_test_cases"` + + VerifyErrorCases []verifyErrorCase `json:"verify_error_test_cases"` +} + +// TestMusig2SignVerify tests that we pass the musig2 verification tests. 
+func TestMusig2SignVerify(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, signVerifyTestVectorFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCases signVerifyTestVectors + require.NoError(t, json.Unmarshal(testVectorBytes, &testCases)) + + privKey, _ := btcec.PrivKeyFromBytes(mustParseHex(testCases.PrivKey)) + + for i, testCase := range testCases.ValidCases { + testCase := testCase + + testName := fmt.Sprintf("valid_case_%v", i) + t.Run(testName, func(t *testing.T) { + pubKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + require.NoError(t, err) + + pubNonces := pubNoncesFromIndices( + t, testCase.NonceIndices, testCases.PubNonces, + ) + + combinedNonce, err := AggregateNonces(pubNonces) + require.NoError(t, err) + + var msg [32]byte + copy(msg[:], mustParseHex(testCases.Msgs[testCase.MsgIndex])) + + var secNonce [SecNonceSize]byte + copy(secNonce[:], mustParseHex(testCases.PrivNonces[0])) + + partialSig, err := Sign( + secNonce, privKey, combinedNonce, pubKeys, + msg, + ) + + var partialSigBytes [32]byte + partialSig.S.PutBytesUnchecked(partialSigBytes[:]) + + require.Equal( + t, hex.EncodeToString(partialSigBytes[:]), + hex.EncodeToString(mustParseHex(testCase.Expected)), + ) + }) + } + + for _, testCase := range testCases.SignErrorCases { + testCase := testCase + + testName := fmt.Sprintf("invalid_case_%v", + strings.ToLower(testCase.Comment)) + + t.Run(testName, func(t *testing.T) { + pubKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + if err != nil { + require.ErrorIs(t, err, secp.ErrPubKeyNotOnCurve) + return + } + + var aggNonce [PubNonceSize]byte + copy( + aggNonce[:], + mustParseHex( + testCases.AggNonces[testCase.AggNonceIndex], + ), + ) + + var msg [32]byte + copy(msg[:], mustParseHex(testCases.Msgs[testCase.MsgIndex])) + + var secNonce [SecNonceSize]byte + copy( + secNonce[:], + mustParseHex( + testCases.PrivNonces[testCase.SecNonceIndex], + ), + ) + + _, err = Sign( + secNonce, privKey, aggNonce, pubKeys, + msg, + ) + require.Error(t, err) + }) + } + + for _, testCase := range testCases.VerifyFailCases { + testCase := testCase + + testName := fmt.Sprintf("verify_fail_%v", + strings.ToLower(testCase.Comment)) + t.Run(testName, func(t *testing.T) { + pubKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + require.NoError(t, err) + + pubNonces := pubNoncesFromIndices( + t, testCase.NonceIndices, testCases.PubNonces, + ) + + combinedNonce, err := AggregateNonces(pubNonces) + require.NoError(t, err) + + var msg [32]byte + copy( + msg[:], + mustParseHex(testCases.Msgs[testCase.MsgIndex]), + ) + + var secNonce [SecNonceSize]byte + copy(secNonce[:], mustParseHex(testCases.PrivNonces[0])) + + signerNonce := secNonceToPubNonce(secNonce) + + var partialSig PartialSignature + err = partialSig.Decode( + bytes.NewReader(mustParseHex(testCase.Sig)), + ) + if err != nil && strings.Contains(testCase.Comment, "group size") { + require.ErrorIs(t, err, ErrPartialSigInvalid) + } + + err = verifyPartialSig( + &partialSig, signerNonce, combinedNonce, + pubKeys, privKey.PubKey().SerializeCompressed(), + msg, + ) + require.Error(t, err) + }) + } + + for _, testCase := range testCases.VerifyErrorCases { + testCase := testCase + + testName := fmt.Sprintf("verify_error_%v", + strings.ToLower(testCase.Comment)) + t.Run(testName, func(t *testing.T) { + switch testCase.Comment { + case "Invalid pubnonce": + pubNonces := 
pubNoncesFromIndices( + t, testCase.NonceIndices, testCases.PubNonces, + ) + _, err := AggregateNonces(pubNonces) + require.ErrorIs(t, err, secp.ErrPubKeyNotOnCurve) + + case "Invalid pubkey": + _, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + require.ErrorIs(t, err, secp.ErrPubKeyNotOnCurve) + + default: + t.Fatalf("unhandled case: %v", testCase.Comment) + } + }) + } + +} + +type sigCombineValidCase struct { + AggNonce string `json:"aggnonce"` + + NonceIndices []int `json:"nonce_indices"` + + Indices []int `json:"key_indices"` + + TweakIndices []int `json:"tweak_indices"` + + IsXOnly []bool `json:"is_xonly"` + + PSigIndices []int `json:"psig_indices"` + + Expected string `json:"expected"` +} + +type sigCombineTestVectors struct { + PubKeys []string `json:"pubkeys"` + + PubNonces []string `json:"pnonces"` + + Tweaks []string `json:"tweaks"` + + Psigs []string `json:"psigs"` + + Msg string `json:"msg"` + + ValidCases []sigCombineValidCase `json:"valid_test_cases"` +} + +func pSigsFromIndicies(t *testing.T, sigs []string, indices []int) []*PartialSignature { + pSigs := make([]*PartialSignature, len(indices)) + for i, idx := range indices { + var pSig PartialSignature + err := pSig.Decode(bytes.NewReader(mustParseHex(sigs[idx]))) + require.NoError(t, err) + + pSigs[i] = &pSig + } + + return pSigs +} + +// TestMusig2SignCombine tests that we pass the musig2 sig combination tests. +func TestMusig2SignCombine(t *testing.T) { + t.Parallel() + + testVectorPath := path.Join( + testVectorBaseDir, sigCombineTestVectorFileName, + ) + testVectorBytes, err := os.ReadFile(testVectorPath) + require.NoError(t, err) + + var testCases sigCombineTestVectors + require.NoError(t, json.Unmarshal(testVectorBytes, &testCases)) + + var msg [32]byte + copy(msg[:], mustParseHex(testCases.Msg)) + + for i, testCase := range testCases.ValidCases { + testCase := testCase + + testName := fmt.Sprintf("valid_case_%v", i) + t.Run(testName, func(t *testing.T) { + pubKeys, err := keysFromIndices( + t, testCase.Indices, testCases.PubKeys, + ) + require.NoError(t, err) + + pubNonces := pubNoncesFromIndices( + t, testCase.NonceIndices, testCases.PubNonces, + ) + + partialSigs := pSigsFromIndicies( + t, testCases.Psigs, testCase.PSigIndices, + ) + + var ( + combineOpts []CombineOption + keyOpts []KeyAggOption + ) + if len(testCase.TweakIndices) > 0 { + tweaks := tweaksFromIndices( + t, testCase.TweakIndices, + testCases.Tweaks, testCase.IsXOnly, + ) + + combineOpts = append(combineOpts, WithTweakedCombine( + msg, pubKeys, tweaks, false, + )) + + keyOpts = append(keyOpts, WithKeyTweaks(tweaks...)) + } + + combinedKey, _, _, err := AggregateKeys( + pubKeys, false, keyOpts..., + ) + require.NoError(t, err) + + combinedNonce, err := AggregateNonces(pubNonces) + require.NoError(t, err) + + finalNonceJ, _, err := computeSigningNonce( + combinedNonce, combinedKey.FinalKey, msg, + ) + + finalNonceJ.ToAffine() + finalNonce := btcec.NewPublicKey( + &finalNonceJ.X, &finalNonceJ.Y, + ) + + combinedSig := CombineSigs( + finalNonce, partialSigs, combineOpts..., + ) + require.Equal(t, + strings.ToLower(testCase.Expected), + hex.EncodeToString(combinedSig.Serialize()), + ) + }) + } +}
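
Editorial note (not part of the patch): the genNonceAuxBytes change in nonces.go replaces the old uint8 length prefix on the message with the m_prefixed form described in its updated comment: a single 0x00 byte when no message is supplied, and 0x01 || bytes(8, len(m)) || m when a message is present (an empty-but-non-nil message counts as present). The sketch below isolates just that framing; the helper name encodeMsgPrefixed is hypothetical, and it assumes the package's byteOrder is big-endian as in the BIP-style bytes(8, len(m)) encoding.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// encodeMsgPrefixed mirrors the switch added to genNonceAuxBytes: a nil slice
// means "no message" and is encoded as a single zero byte, while any non-nil
// slice (including an empty one) is encoded as 0x01 followed by an 8-byte
// big-endian length prefix and the message bytes themselves.
func encodeMsgPrefixed(msg []byte) []byte {
	var w bytes.Buffer

	if msg == nil {
		// m_prefixed = bytes(1, 0)
		w.Write([]byte{0x00})
		return w.Bytes()
	}

	// m_prefixed = bytes(1, 1) || bytes(8, len(m)) || m
	w.Write([]byte{0x01})
	_ = binary.Write(&w, binary.BigEndian, uint64(len(msg)))
	w.Write(msg)

	return w.Bytes()
}

func main() {
	fmt.Printf("nil msg:    %x\n", encodeMsgPrefixed(nil))
	fmt.Printf("empty msg:  %x\n", encodeMsgPrefixed([]byte{}))
	fmt.Printf("3-byte msg: %x\n", encodeMsgPrefixed([]byte{0xaa, 0xbb, 0xcc}))
}

Running this prints a lone 00 for the nil message and 01 followed by an 8-byte length for the present messages, which is why the nonce_gen_vectors.json cases distinguish a null msg field from an empty string and why the new switch uses fallthrough for the zero-length case.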