From a545f0e0faf4e027cbc73e7e6f0fdf8d37e72104 Mon Sep 17 00:00:00 2001 From: xyephy Date: Thu, 30 May 2024 21:27:20 +0300 Subject: [PATCH 001/101] update deprecated actions/checkout@v2 --- .github/workflows/clippy-lint.yaml | 2 +- .github/workflows/fmt.yaml | 2 +- .github/workflows/run-and-track-benchmarks-on-main.yaml | 2 +- .github/workflows/sv2-header-check.yaml | 2 +- .github/workflows/test.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/clippy-lint.yaml b/.github/workflows/clippy-lint.yaml index 6480b59d5..fa2c45fac 100644 --- a/.github/workflows/clippy-lint.yaml +++ b/.github/workflows/clippy-lint.yaml @@ -25,7 +25,7 @@ jobs: target: x86_64-unknown-linux-musl steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: actions-rs/toolchain@v1 with: profile: minimal diff --git a/.github/workflows/fmt.yaml b/.github/workflows/fmt.yaml index 6a4d900ad..b0b3ecc18 100644 --- a/.github/workflows/fmt.yaml +++ b/.github/workflows/fmt.yaml @@ -25,7 +25,7 @@ jobs: target: x86_64-unknown-linux-musl steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: actions-rs/toolchain@v1 with: profile: minimal diff --git a/.github/workflows/run-and-track-benchmarks-on-main.yaml b/.github/workflows/run-and-track-benchmarks-on-main.yaml index 54a21b9e1..33be08bc5 100644 --- a/.github/workflows/run-and-track-benchmarks-on-main.yaml +++ b/.github/workflows/run-and-track-benchmarks-on-main.yaml @@ -76,7 +76,7 @@ jobs: toolchain: 1.75.0 override: true - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Install Valgrind run: | sudo apt-get update diff --git a/.github/workflows/sv2-header-check.yaml b/.github/workflows/sv2-header-check.yaml index 800256ef7..9d5149a78 100644 --- a/.github/workflows/sv2-header-check.yaml +++ b/.github/workflows/sv2-header-check.yaml @@ -19,7 +19,7 @@ jobs: target: x86_64-unknown-linux-musl steps: - - uses: actions/checkout@v2 + - uses: 
actions/checkout@v4 - uses: actions-rs/toolchain@v1 with: diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 78c5cf37f..5b669ebc0 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -26,7 +26,7 @@ jobs: steps: - name: Install stable toolchain & components - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: profile: minimal toolchain: nightly From 002e30b5a25fed6c115e71e0c128c7076207e641 Mon Sep 17 00:00:00 2001 From: Nikolay Bryskin Date: Mon, 15 Apr 2024 16:41:46 +0000 Subject: [PATCH 002/101] fix(translator): fix HexError(OddLength) in mining.subscribe handler --- benches/benches/src/sv1/lib/client.rs | 3 +-- protocols/v1/src/lib.rs | 5 +---- protocols/v1/src/methods/client_to_server.rs | 14 ++++++-------- protocols/v1/src/methods/server_to_client.rs | 2 +- 4 files changed, 9 insertions(+), 15 deletions(-) diff --git a/benches/benches/src/sv1/lib/client.rs b/benches/benches/src/sv1/lib/client.rs index cd74a93f4..b1713a560 100644 --- a/benches/benches/src/sv1/lib/client.rs +++ b/benches/benches/src/sv1/lib/client.rs @@ -199,8 +199,7 @@ impl IsClient<'static> for Client { } } pub fn extranonce_from_hex(hex: &str) -> Extranonce<'static> { - let data = utils::decode_hex(hex).unwrap(); - Extranonce::try_from(data).expect("Failed to convert hex to U256") + Extranonce::try_from(hex).expect("Failed to convert hex to U256") } pub fn prevhash_from_hex<'a>(hex: &str) -> PrevHash<'a> { let data = utils::decode_hex(hex).unwrap(); diff --git a/protocols/v1/src/lib.rs b/protocols/v1/src/lib.rs index e21ceafba..52280ba23 100644 --- a/protocols/v1/src/lib.rs +++ b/protocols/v1/src/lib.rs @@ -333,10 +333,7 @@ pub trait IsClient<'a> { // so it doesnt really matter what the server sets the extranonce to in the mining.configure handler debug!("NOTICE: Subscribe extranonce is hardcoded by server"); let subscribe = self - .subscribe( - configure.id, - Some(Extranonce::try_from(hex::decode("08000002")?)?), - ) + 
.subscribe(configure.id, Some(Extranonce::try_from("08000002")?)) .ok(); Ok(subscribe) } diff --git a/protocols/v1/src/methods/client_to_server.rs b/protocols/v1/src/methods/client_to_server.rs index 0e837db48..ee5f11e84 100644 --- a/protocols/v1/src/methods/client_to_server.rs +++ b/protocols/v1/src/methods/client_to_server.rs @@ -173,7 +173,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JNumber(d), JNumber(e), JString(f)] => ( a.into(), b.into(), - Extranonce::try_from(hex::decode(c)?)?, + Extranonce::try_from(c.as_str())?, HexU32Be(d.as_u64().unwrap() as u32), HexU32Be(e.as_u64().unwrap() as u32), Some((f.as_str()).try_into()?), @@ -181,7 +181,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JString(d), JString(e), JString(f)] => ( a.into(), b.into(), - Extranonce::try_from(hex::decode(c)?)?, + Extranonce::try_from(c.as_str())?, (d.as_str()).try_into()?, (e.as_str()).try_into()?, Some((f.as_str()).try_into()?), @@ -189,7 +189,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JNumber(d), JNumber(e)] => ( a.into(), b.into(), - Extranonce::try_from(hex::decode(c)?)?, + Extranonce::try_from(c.as_str())?, HexU32Be(d.as_u64().unwrap() as u32), HexU32Be(e.as_u64().unwrap() as u32), None, @@ -197,7 +197,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JString(d), JString(e)] => ( a.into(), b.into(), - Extranonce::try_from(hex::decode(c)?)?, + Extranonce::try_from(c.as_str())?, (d.as_str()).try_into()?, (e.as_str()).try_into()?, None, @@ -229,7 +229,7 @@ impl Arbitrary for Submit<'static> { println!("\nEXTRA: {:?}\n", extra); let bits = Option::::arbitrary(g); println!("\nBITS: {:?}\n", bits); - let extra: Extranonce = extra.try_into().unwrap(); + let extra: Extranonce = hex::encode(extra).as_str().try_into().unwrap(); let bits = bits.map(|x| HexU32Be(x)); println!("\nBITS: {:?}\n", bits); Submit { @@ -319,9 +319,7 @@ impl<'a> TryFrom for Subscribe<'a> { let 
(agent_signature, extranonce1) = match ¶ms[..] { // bosminer subscribe message [JString(a), Null, JString(_), Null] => (a.into(), None), - [JString(a), JString(b)] => { - (a.into(), Some(Extranonce::try_from(hex::decode(b)?)?)) - } + [JString(a), JString(b)] => (a.into(), Some(Extranonce::try_from(b.as_str())?)), [JString(a)] => (a.into(), None), [] => ("".to_string(), None), _ => return Err(ParsingMethodError::wrong_args_from_value(msg.params)), diff --git a/protocols/v1/src/methods/server_to_client.rs b/protocols/v1/src/methods/server_to_client.rs index dc5e6163e..9be2a03c6 100644 --- a/protocols/v1/src/methods/server_to_client.rs +++ b/protocols/v1/src/methods/server_to_client.rs @@ -221,7 +221,7 @@ impl<'a> TryFrom for SetExtranonce<'a> { .ok_or_else(|| ParsingMethodError::not_array_from_value(msg.params.clone()))?; let (extra_nonce1, extra_nonce2_size) = match ¶ms[..] { [JString(a), JNumber(b)] => ( - Extranonce::try_from(hex::decode(a)?)?, + Extranonce::try_from(a.as_str())?, b.as_u64() .ok_or_else(|| ParsingMethodError::not_unsigned_from_value(b.clone()))? 
as usize, From 59b00c142d2641dd6b4562257666feaeaff44c6e Mon Sep 17 00:00:00 2001 From: Nikolay Bryskin Date: Wed, 24 Apr 2024 22:45:31 +0300 Subject: [PATCH 003/101] chore(v1): add tests for mining.subscribe --- protocols/v1/src/methods/client_to_server.rs | 33 ++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/protocols/v1/src/methods/client_to_server.rs b/protocols/v1/src/methods/client_to_server.rs index ee5f11e84..78f1ed3c5 100644 --- a/protocols/v1/src/methods/client_to_server.rs +++ b/protocols/v1/src/methods/client_to_server.rs @@ -714,3 +714,36 @@ fn test_version_extension_with_no_bit_count() { _ => panic!(), }; } + +#[test] +fn test_mining_subscribe_even_sized_extranonce() { + let client_message = r#"{"id":0, + "method": "mining.subscribe", + "params": ["user agent/version", "aaeeffdd"] + }"#; + let client_message: StandardRequest = serde_json::from_str(&client_message).unwrap(); + Subscribe::try_from(client_message).unwrap(); +} + +#[test] +fn test_mining_subscribe_odd_sized_extranonce() { + let client_message = r#"{"id":0, + "method": "mining.subscribe", + "params": ["user agent/version", "aeeffdd"] + }"#; + let client_message: StandardRequest = serde_json::from_str(&client_message).unwrap(); + Subscribe::try_from(client_message).unwrap(); +} + +#[test] +#[should_panic( + expected = "called `Result::unwrap()` on an `Err` value: HexError(InvalidHexCharacter { c: 'z', index: 0 })" +)] +fn test_mining_subscribe_invalid_extranonce() { + let client_message = r#"{"id":0, + "method": "mining.subscribe", + "params": ["user agent/version", "zxczxc"] + }"#; + let client_message: StandardRequest = serde_json::from_str(&client_message).unwrap(); + Subscribe::try_from(client_message).unwrap(); +} From d42add15607c7c68daa89525038d6c1fcf10fe6d Mon Sep 17 00:00:00 2001 From: fi3 Date: Sun, 2 Jun 2024 12:24:51 +0200 Subject: [PATCH 004/101] Add droppable methods for buffer pool and codec --- protocols/Cargo.lock | 42 ++++++++++----------- 
protocols/v2/codec-sv2/Cargo.toml | 2 +- protocols/v2/codec-sv2/src/decoder.rs | 5 ++- protocols/v2/codec-sv2/src/encoder.rs | 4 ++ roles/Cargo.lock | 4 +- utils/Cargo.lock | 54 +++++++++++++-------------- utils/buffer/Cargo.toml | 2 +- utils/buffer/src/buffer.rs | 8 ++++ utils/buffer/src/buffer_pool/mod.rs | 15 +++++++- utils/buffer/src/lib.rs | 1 + 10 files changed, 83 insertions(+), 54 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 9fdf24f21..fbe5ddcea 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -138,7 +138,7 @@ dependencies = [ [[package]] name = "buffer_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "aes-gcm", "serde", @@ -152,9 +152,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" +checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" [[package]] name = "cfg-if" @@ -199,7 +199,7 @@ dependencies = [ [[package]] name = "codec_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_sv2", "buffer_sv2", @@ -356,9 +356,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hex-conservative" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ed443af458ccb6d81c1e7e661545f94d3176752fb1df2f543b902a1e0f51e2" +checksum = "212ab92002354b4819390025006c897e8140934349e8635c9b077f47b4dcbd20" [[package]] name = "humantime" @@ -395,9 +395,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.154" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" +checksum = 
"97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "log" @@ -501,9 +501,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.82" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" dependencies = [ "unicode-ident", ] @@ -674,22 +674,22 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.62", + "syn 2.0.66", ] [[package]] @@ -711,7 +711,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.62", + "syn 2.0.66", ] [[package]] @@ -785,9 +785,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.62" +version = "2.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f660c3bfcefb88c538776b6685a0c472e3128b51e74d48793dc2a488196e8eb" +checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" dependencies = [ "proc-macro2", "quote", @@ -842,7 +842,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.62", + "syn 2.0.66", ] [[package]] @@ -994,6 +994,6 @@ checksum = 
"bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "zeroize" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/protocols/v2/codec-sv2/Cargo.toml b/protocols/v2/codec-sv2/Cargo.toml index 75645d56b..9e2bddf1f 100644 --- a/protocols/v2/codec-sv2/Cargo.toml +++ b/protocols/v2/codec-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "codec_sv2" -version = "1.1.0" +version = "1.2.0" authors = ["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/codec-sv2/src/decoder.rs b/protocols/v2/codec-sv2/src/decoder.rs index 760861cfc..cba5843f2 100644 --- a/protocols/v2/codec-sv2/src/decoder.rs +++ b/protocols/v2/codec-sv2/src/decoder.rs @@ -135,7 +135,7 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit let decrypted_payload = self.sv2_buffer.get_writable(end - start); decrypted_payload.copy_from_slice(&encrypted_payload.as_ref()[start..end]); self.sv2_buffer.danger_set_start(decrypted_len); - noise_codec.decrypt(&mut self.sv2_buffer).unwrap(); + noise_codec.decrypt(&mut self.sv2_buffer)?; start = end; end = (start + SV2_FRAME_CHUNK_SIZE).min(encrypted_payload_len); decrypted_len += self.sv2_buffer.as_ref().len(); @@ -161,6 +161,9 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit pub fn writable(&mut self) -> &mut [u8] { self.noise_buffer.get_writable(self.missing_noise_b) } + pub fn droppable(&self) -> bool { + self.noise_buffer.is_droppable() && self.sv2_buffer.is_droppable() + } } #[cfg(feature = "noise_sv2")] diff --git a/protocols/v2/codec-sv2/src/encoder.rs b/protocols/v2/codec-sv2/src/encoder.rs index d2855742f..e4f544a19 100644 --- a/protocols/v2/codec-sv2/src/encoder.rs +++ b/protocols/v2/codec-sv2/src/encoder.rs @@ -113,6 
+113,10 @@ impl NoiseEncoder { } Ok(()) } + + pub fn droppable(&self) -> bool { + self.noise_buffer.is_droppable() && self.sv2_buffer.is_droppable() + } } #[cfg(feature = "noise_sv2")] diff --git a/roles/Cargo.lock b/roles/Cargo.lock index 7b588075e..fbaeadc6d 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -481,7 +481,7 @@ dependencies = [ [[package]] name = "buffer_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "aes-gcm", "serde", @@ -594,7 +594,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "codec_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_sv2", "buffer_sv2", diff --git a/utils/Cargo.lock b/utils/Cargo.lock index 3c7e5d36c..6c368c956 100644 --- a/utils/Cargo.lock +++ b/utils/Cargo.lock @@ -167,7 +167,7 @@ dependencies = [ [[package]] name = "buffer_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "aes-gcm", "criterion", @@ -190,9 +190,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" +checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" [[package]] name = "cfg-if" @@ -287,9 +287,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crypto-common" @@ -343,9 +343,9 @@ dependencies = [ [[package]] name = "either" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" +checksum = 
"3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" [[package]] name = "error_handling" @@ -448,7 +448,7 @@ dependencies = [ [[package]] name = "key-utils" -version = "1.0.0" +version = "1.1.0" dependencies = [ "bs58", "secp256k1 0.28.2", @@ -464,9 +464,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.154" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "log" @@ -509,9 +509,9 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "plotters" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3" dependencies = [ "num-traits", "plotters-backend", @@ -522,15 +522,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" +checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" [[package]] name = "plotters-svg" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" dependencies = [ "plotters-backend", ] @@ -555,9 +555,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.82" +version = "1.0.85" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" dependencies = [ "unicode-ident", ] @@ -705,9 +705,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] @@ -724,13 +724,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.62", + "syn 2.0.66", ] [[package]] @@ -794,9 +794,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.62" +version = "2.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f660c3bfcefb88c538776b6685a0c472e3128b51e74d48793dc2a488196e8eb" +checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" dependencies = [ "proc-macro2", "quote", @@ -902,7 +902,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.62", + "syn 2.0.66", "wasm-bindgen-shared", ] @@ -924,7 +924,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.62", + "syn 2.0.66", "wasm-bindgen-backend", "wasm-bindgen-shared", ] diff --git a/utils/buffer/Cargo.toml b/utils/buffer/Cargo.toml index 798ba3c63..b3a8f543d 100644 --- a/utils/buffer/Cargo.toml +++ b/utils/buffer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "buffer_sv2" -version = "1.0.0" +version = "1.1.0" 
authors = ["fi3 "] edition = "2018" description = "buffer" diff --git a/utils/buffer/src/buffer.rs b/utils/buffer/src/buffer.rs index 9e611969e..f4b0dab50 100644 --- a/utils/buffer/src/buffer.rs +++ b/utils/buffer/src/buffer.rs @@ -70,6 +70,11 @@ impl Buffer for BufferFromSystemMemory { fn danger_set_start(&mut self, index: usize) { self.start = index; } + + #[inline] + fn is_droppable(&self) -> bool { + true + } } #[cfg(test)] @@ -101,6 +106,9 @@ impl Buffer for TestBufferFromMemory { fn danger_set_start(&mut self, _index: usize) { todo!() } + fn is_droppable(&self) -> bool { + true + } } impl AsRef<[u8]> for BufferFromSystemMemory { diff --git a/utils/buffer/src/buffer_pool/mod.rs b/utils/buffer/src/buffer_pool/mod.rs index a28fa46de..eae9c41ab 100644 --- a/utils/buffer/src/buffer_pool/mod.rs +++ b/utils/buffer/src/buffer_pool/mod.rs @@ -673,12 +673,25 @@ impl Buffer for BufferPool { fn danger_set_start(&mut self, index: usize) { self.start = index; } + + #[inline(always)] + fn is_droppable(&self) -> bool { + self.shared_state.load(Ordering::Relaxed) == 0 + } } #[cfg(not(test))] impl Drop for BufferPool { fn drop(&mut self) { - while self.shared_state.load(Ordering::Relaxed) != 0 {} + while self.shared_state.load(Ordering::Relaxed) != 0 { + std::hint::spin_loop(); + } + } +} + +impl BufferPool { + pub fn droppable(&self) -> bool { + self.shared_state.load(Ordering::Relaxed) == 0 } } diff --git a/utils/buffer/src/lib.rs b/utils/buffer/src/lib.rs index e184422d6..be1d3f8c3 100644 --- a/utils/buffer/src/lib.rs +++ b/utils/buffer/src/lib.rs @@ -85,4 +85,5 @@ pub trait Buffer { fn is_empty(&self) -> bool { self.len() == 0 } + fn is_droppable(&self) -> bool; } From 470a20789d12f5d4d78a5e24a684d1d1c5a20ff5 Mon Sep 17 00:00:00 2001 From: plebhash Date: Sun, 2 Jun 2024 19:29:12 -0300 Subject: [PATCH 005/101] Revert "chore(v1): add tests for mining.subscribe" This reverts commit 59b00c142d2641dd6b4562257666feaeaff44c6e. 
--- protocols/v1/src/methods/client_to_server.rs | 33 -------------------- 1 file changed, 33 deletions(-) diff --git a/protocols/v1/src/methods/client_to_server.rs b/protocols/v1/src/methods/client_to_server.rs index 78f1ed3c5..ee5f11e84 100644 --- a/protocols/v1/src/methods/client_to_server.rs +++ b/protocols/v1/src/methods/client_to_server.rs @@ -714,36 +714,3 @@ fn test_version_extension_with_no_bit_count() { _ => panic!(), }; } - -#[test] -fn test_mining_subscribe_even_sized_extranonce() { - let client_message = r#"{"id":0, - "method": "mining.subscribe", - "params": ["user agent/version", "aaeeffdd"] - }"#; - let client_message: StandardRequest = serde_json::from_str(&client_message).unwrap(); - Subscribe::try_from(client_message).unwrap(); -} - -#[test] -fn test_mining_subscribe_odd_sized_extranonce() { - let client_message = r#"{"id":0, - "method": "mining.subscribe", - "params": ["user agent/version", "aeeffdd"] - }"#; - let client_message: StandardRequest = serde_json::from_str(&client_message).unwrap(); - Subscribe::try_from(client_message).unwrap(); -} - -#[test] -#[should_panic( - expected = "called `Result::unwrap()` on an `Err` value: HexError(InvalidHexCharacter { c: 'z', index: 0 })" -)] -fn test_mining_subscribe_invalid_extranonce() { - let client_message = r#"{"id":0, - "method": "mining.subscribe", - "params": ["user agent/version", "zxczxc"] - }"#; - let client_message: StandardRequest = serde_json::from_str(&client_message).unwrap(); - Subscribe::try_from(client_message).unwrap(); -} From 1f194d070708831a69b7a6721c66ac971b7bf858 Mon Sep 17 00:00:00 2001 From: plebhash Date: Sun, 2 Jun 2024 19:29:21 -0300 Subject: [PATCH 006/101] Revert "fix(translator): fix HexError(OddLength) in mining.subscribe handler" This reverts commit 002e30b5a25fed6c115e71e0c128c7076207e641. 
--- benches/benches/src/sv1/lib/client.rs | 3 ++- protocols/v1/src/lib.rs | 5 ++++- protocols/v1/src/methods/client_to_server.rs | 14 ++++++++------ protocols/v1/src/methods/server_to_client.rs | 2 +- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/benches/benches/src/sv1/lib/client.rs b/benches/benches/src/sv1/lib/client.rs index b1713a560..cd74a93f4 100644 --- a/benches/benches/src/sv1/lib/client.rs +++ b/benches/benches/src/sv1/lib/client.rs @@ -199,7 +199,8 @@ impl IsClient<'static> for Client { } } pub fn extranonce_from_hex(hex: &str) -> Extranonce<'static> { - Extranonce::try_from(hex).expect("Failed to convert hex to U256") + let data = utils::decode_hex(hex).unwrap(); + Extranonce::try_from(data).expect("Failed to convert hex to U256") } pub fn prevhash_from_hex<'a>(hex: &str) -> PrevHash<'a> { let data = utils::decode_hex(hex).unwrap(); diff --git a/protocols/v1/src/lib.rs b/protocols/v1/src/lib.rs index 52280ba23..e21ceafba 100644 --- a/protocols/v1/src/lib.rs +++ b/protocols/v1/src/lib.rs @@ -333,7 +333,10 @@ pub trait IsClient<'a> { // so it doesnt really matter what the server sets the extranonce to in the mining.configure handler debug!("NOTICE: Subscribe extranonce is hardcoded by server"); let subscribe = self - .subscribe(configure.id, Some(Extranonce::try_from("08000002")?)) + .subscribe( + configure.id, + Some(Extranonce::try_from(hex::decode("08000002")?)?), + ) .ok(); Ok(subscribe) } diff --git a/protocols/v1/src/methods/client_to_server.rs b/protocols/v1/src/methods/client_to_server.rs index ee5f11e84..0e837db48 100644 --- a/protocols/v1/src/methods/client_to_server.rs +++ b/protocols/v1/src/methods/client_to_server.rs @@ -173,7 +173,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JNumber(d), JNumber(e), JString(f)] => ( a.into(), b.into(), - Extranonce::try_from(c.as_str())?, + Extranonce::try_from(hex::decode(c)?)?, HexU32Be(d.as_u64().unwrap() as u32), HexU32Be(e.as_u64().unwrap() as u32), 
Some((f.as_str()).try_into()?), @@ -181,7 +181,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JString(d), JString(e), JString(f)] => ( a.into(), b.into(), - Extranonce::try_from(c.as_str())?, + Extranonce::try_from(hex::decode(c)?)?, (d.as_str()).try_into()?, (e.as_str()).try_into()?, Some((f.as_str()).try_into()?), @@ -189,7 +189,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JNumber(d), JNumber(e)] => ( a.into(), b.into(), - Extranonce::try_from(c.as_str())?, + Extranonce::try_from(hex::decode(c)?)?, HexU32Be(d.as_u64().unwrap() as u32), HexU32Be(e.as_u64().unwrap() as u32), None, @@ -197,7 +197,7 @@ impl<'a> TryFrom for Submit<'a> { [JString(a), JString(b), JString(c), JString(d), JString(e)] => ( a.into(), b.into(), - Extranonce::try_from(c.as_str())?, + Extranonce::try_from(hex::decode(c)?)?, (d.as_str()).try_into()?, (e.as_str()).try_into()?, None, @@ -229,7 +229,7 @@ impl Arbitrary for Submit<'static> { println!("\nEXTRA: {:?}\n", extra); let bits = Option::::arbitrary(g); println!("\nBITS: {:?}\n", bits); - let extra: Extranonce = hex::encode(extra).as_str().try_into().unwrap(); + let extra: Extranonce = extra.try_into().unwrap(); let bits = bits.map(|x| HexU32Be(x)); println!("\nBITS: {:?}\n", bits); Submit { @@ -319,7 +319,9 @@ impl<'a> TryFrom for Subscribe<'a> { let (agent_signature, extranonce1) = match ¶ms[..] 
{ // bosminer subscribe message [JString(a), Null, JString(_), Null] => (a.into(), None), - [JString(a), JString(b)] => (a.into(), Some(Extranonce::try_from(b.as_str())?)), + [JString(a), JString(b)] => { + (a.into(), Some(Extranonce::try_from(hex::decode(b)?)?)) + } [JString(a)] => (a.into(), None), [] => ("".to_string(), None), _ => return Err(ParsingMethodError::wrong_args_from_value(msg.params)), diff --git a/protocols/v1/src/methods/server_to_client.rs b/protocols/v1/src/methods/server_to_client.rs index 9be2a03c6..dc5e6163e 100644 --- a/protocols/v1/src/methods/server_to_client.rs +++ b/protocols/v1/src/methods/server_to_client.rs @@ -221,7 +221,7 @@ impl<'a> TryFrom for SetExtranonce<'a> { .ok_or_else(|| ParsingMethodError::not_array_from_value(msg.params.clone()))?; let (extra_nonce1, extra_nonce2_size) = match ¶ms[..] { [JString(a), JNumber(b)] => ( - Extranonce::try_from(a.as_str())?, + Extranonce::try_from(hex::decode(a)?)?, b.as_u64() .ok_or_else(|| ParsingMethodError::not_unsigned_from_value(b.clone()))? 
as usize, From 3891bc8abdd5558e31697fe7286be0d9a034f006 Mon Sep 17 00:00:00 2001 From: pythcoiner Date: Fri, 7 Jun 2024 09:18:36 +0200 Subject: [PATCH 007/101] remove useless converstion after u__::arbitrary(g) --- .../v2/subprotocols/common-messages/src/lib.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/protocols/v2/subprotocols/common-messages/src/lib.rs b/protocols/v2/subprotocols/common-messages/src/lib.rs index 33b2baf2b..e720306ab 100644 --- a/protocols/v2/subprotocols/common-messages/src/lib.rs +++ b/protocols/v2/subprotocols/common-messages/src/lib.rs @@ -33,7 +33,7 @@ pub extern "C" fn _c_export_setup_conn_succ(_a: SetupConnectionSuccess) {} impl ChannelEndpointChanged { pub fn from_gen(g: &mut Gen) -> Self { ChannelEndpointChanged { - channel_id: u32::arbitrary(g).try_into().unwrap(), + channel_id: u32::arbitrary(g), } } } @@ -70,11 +70,11 @@ impl SetupConnection<'static> { SetupConnection { protocol, - min_version: u16::arbitrary(g).try_into().unwrap(), - max_version: u16::arbitrary(g).try_into().unwrap(), - flags: u32::arbitrary(g).try_into().unwrap(), + min_version: u16::arbitrary(g), + max_version: u16::arbitrary(g), + flags: u32::arbitrary(g), endpoint_host, - endpoint_port: u16::arbitrary(g).try_into().unwrap(), + endpoint_port: u16::arbitrary(g), vendor, hardware_version, firmware, @@ -92,7 +92,7 @@ impl SetupConnectionError<'static> { let error_code: binary_sv2::Str0255 = error_code.try_into().unwrap(); SetupConnectionError { - flags: u32::arbitrary(g).try_into().unwrap(), + flags: u32::arbitrary(g), error_code, } } @@ -102,8 +102,8 @@ impl SetupConnectionError<'static> { impl SetupConnectionSuccess { pub fn from_gen(g: &mut Gen) -> Self { SetupConnectionSuccess { - used_version: u16::arbitrary(g).try_into().unwrap(), - flags: u32::arbitrary(g).try_into().unwrap(), + used_version: u16::arbitrary(g), + flags: u32::arbitrary(g), } } } From 8c5bcf111d5a8403cc0383edcb2831399239ac59 Mon Sep 17 00:00:00 2001 From: 
pythcoiner Date: Fri, 7 Jun 2024 06:34:44 +0200 Subject: [PATCH 008/101] typo --- .../v2/subprotocols/common-messages/src/setup_connection.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index f0f730aae..49312b686 100644 --- a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -391,11 +391,11 @@ mod test { #[test] fn test_check_flag() { let protocol = crate::Protocol::MiningProtocol; - let flag_avaiable = 0b_0000_0000_0000_0000_0000_0000_0000_0000; + let flag_available = 0b_0000_0000_0000_0000_0000_0000_0000_0000; let flag_required = 0b_0000_0000_0000_0000_0000_0000_0000_0001; assert!(SetupConnection::check_flags( protocol, - flag_avaiable, + flag_available, flag_required )); } From 21d02a2133f13aabc0f1eab2282509e7a683b80f Mon Sep 17 00:00:00 2001 From: Sjors Provoost Date: Fri, 7 Jun 2024 20:49:42 +0200 Subject: [PATCH 009/101] Log Bitcoin target as hex, align targets --- .../roles-logic-sv2/src/channel_logic/channel_factory.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 8bf351a04..7e3f66cec 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -826,14 +826,17 @@ impl ChannelFactory { if tracing::level_enabled!(tracing::Level::DEBUG) || tracing::level_enabled!(tracing::Level::TRACE) { - debug!("Bitcoin target: {:?}", bitcoin_target); + let bitcoin_target_log: binary_sv2::U256 = bitcoin_target.clone().into(); + let mut bitcoin_target_log = bitcoin_target_log.to_vec(); + bitcoin_target_log.reverse(); + debug!("Bitcoin target : {:?}", 
bitcoin_target_log.to_hex()); let upstream_target: binary_sv2::U256 = upstream_target.clone().into(); let mut upstream_target = upstream_target.to_vec(); upstream_target.reverse(); debug!("Upstream target: {:?}", upstream_target.to_vec().to_hex()); let mut hash = hash; hash.reverse(); - debug!("Hash: {:?}", hash.to_vec().to_hex()); + debug!("Hash : {:?}", hash.to_vec().to_hex()); } let hash: Target = hash.into(); From 0b182941c847ad28530b83fcbdfc8035466ad601 Mon Sep 17 00:00:00 2001 From: pythcoiner Date: Sat, 8 Jun 2024 06:56:00 +0200 Subject: [PATCH 010/101] fix typo Messaege -> Message --- .../subprotocols/mining/src/close_channel.rs | 4 ++-- .../subprotocols/mining/src/new_mining_job.rs | 8 ++++---- .../subprotocols/mining/src/open_channel.rs | 20 +++++++++---------- .../v2/subprotocols/mining/src/reconnect.rs | 4 ++-- .../mining/src/set_custom_mining_job.rs | 12 +++++------ .../mining/src/set_extranonce_prefix.rs | 4 ++-- .../mining/src/set_group_channel.rs | 4 ++-- .../mining/src/set_new_prev_hash.rs | 4 ++-- .../v2/subprotocols/mining/src/set_target.rs | 4 ++-- .../subprotocols/mining/src/submit_shares.rs | 8 ++++---- .../subprotocols/mining/src/update_channel.rs | 8 ++++---- .../template-distribution/src/new_template.rs | 4 ++-- 12 files changed, 42 insertions(+), 42 deletions(-) diff --git a/protocols/v2/subprotocols/mining/src/close_channel.rs b/protocols/v2/subprotocols/mining/src/close_channel.rs index 4957f8266..48de6d608 100644 --- a/protocols/v2/subprotocols/mining/src/close_channel.rs +++ b/protocols/v2/subprotocols/mining/src/close_channel.rs @@ -39,9 +39,9 @@ impl<'d> GetSize for CloseChannel<'d> { #[cfg(feature = "with_serde")] impl<'a> CloseChannel<'a> { pub fn into_static(self) -> CloseChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> CloseChannel<'static> { - panic!("This function shouldn't be called by 
the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/new_mining_job.rs b/protocols/v2/subprotocols/mining/src/new_mining_job.rs index 5b720f5e1..1e876adab 100644 --- a/protocols/v2/subprotocols/mining/src/new_mining_job.rs +++ b/protocols/v2/subprotocols/mining/src/new_mining_job.rs @@ -231,18 +231,18 @@ mod tests { #[cfg(feature = "with_serde")] impl<'a> NewExtendedMiningJob<'a> { pub fn into_static(self) -> NewExtendedMiningJob<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> NewExtendedMiningJob<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> NewMiningJob<'a> { pub fn into_static(self) -> NewMiningJob<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> NewMiningJob<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/open_channel.rs b/protocols/v2/subprotocols/mining/src/open_channel.rs index 4c86f9183..d64b1d0a7 100644 --- a/protocols/v2/subprotocols/mining/src/open_channel.rs +++ b/protocols/v2/subprotocols/mining/src/open_channel.rs @@ -375,45 +375,45 @@ mod tests { #[cfg(feature = "with_serde")] impl<'a> OpenExtendedMiningChannel<'a> { pub fn into_static(self) -> OpenExtendedMiningChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> 
OpenExtendedMiningChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> OpenExtendedMiningChannelSuccess<'a> { pub fn into_static(self) -> OpenExtendedMiningChannelSuccess<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> OpenExtendedMiningChannelSuccess<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> OpenMiningChannelError<'a> { pub fn into_static(self) -> OpenMiningChannelError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> OpenMiningChannelError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> OpenStandardMiningChannel<'a> { pub fn into_static(self) -> OpenStandardMiningChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> OpenStandardMiningChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> OpenStandardMiningChannelSuccess<'a> { pub fn into_static(self) -> OpenStandardMiningChannelSuccess<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn 
as_static(&self) -> OpenStandardMiningChannelSuccess<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/reconnect.rs b/protocols/v2/subprotocols/mining/src/reconnect.rs index cb71c9da2..18201ab92 100644 --- a/protocols/v2/subprotocols/mining/src/reconnect.rs +++ b/protocols/v2/subprotocols/mining/src/reconnect.rs @@ -40,9 +40,9 @@ impl<'d> GetSize for Reconnect<'d> { #[cfg(feature = "with_serde")] impl<'a> Reconnect<'a> { pub fn into_static(self) -> Reconnect<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> Reconnect<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/set_custom_mining_job.rs b/protocols/v2/subprotocols/mining/src/set_custom_mining_job.rs index fbc12b4ba..1d937d9ab 100644 --- a/protocols/v2/subprotocols/mining/src/set_custom_mining_job.rs +++ b/protocols/v2/subprotocols/mining/src/set_custom_mining_job.rs @@ -129,27 +129,27 @@ impl<'d> GetSize for SetCustomMiningJobError<'d> { #[cfg(feature = "with_serde")] impl<'a> SetCustomMiningJob<'a> { pub fn into_static(self) -> SetCustomMiningJob<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetCustomMiningJob<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> SetCustomMiningJobError<'a> { pub fn into_static(self) -> SetCustomMiningJobError<'static> { - panic!("This function 
shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetCustomMiningJobError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl SetCustomMiningJobSuccess { pub fn into_static(self) -> SetCustomMiningJobSuccess { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetCustomMiningJobSuccess { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/set_extranonce_prefix.rs b/protocols/v2/subprotocols/mining/src/set_extranonce_prefix.rs index b8fef3259..140fdf1d3 100644 --- a/protocols/v2/subprotocols/mining/src/set_extranonce_prefix.rs +++ b/protocols/v2/subprotocols/mining/src/set_extranonce_prefix.rs @@ -31,9 +31,9 @@ impl<'d> GetSize for SetExtranoncePrefix<'d> { #[cfg(feature = "with_serde")] impl<'a> SetExtranoncePrefix<'a> { pub fn into_static(self) -> SetExtranoncePrefix<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetExtranoncePrefix<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/set_group_channel.rs b/protocols/v2/subprotocols/mining/src/set_group_channel.rs index 65338b17b..f00d8d5a3 100644 --- a/protocols/v2/subprotocols/mining/src/set_group_channel.rs +++ b/protocols/v2/subprotocols/mining/src/set_group_channel.rs @@ -42,9 +42,9 @@ impl<'d> GetSize for 
SetGroupChannel<'d> { #[cfg(feature = "with_serde")] impl<'a> SetGroupChannel<'a> { pub fn into_static(self) -> SetGroupChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetGroupChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/set_new_prev_hash.rs b/protocols/v2/subprotocols/mining/src/set_new_prev_hash.rs index 94efdcb4c..7b76b1ef2 100644 --- a/protocols/v2/subprotocols/mining/src/set_new_prev_hash.rs +++ b/protocols/v2/subprotocols/mining/src/set_new_prev_hash.rs @@ -47,9 +47,9 @@ impl<'d> GetSize for SetNewPrevHash<'d> { #[cfg(feature = "with_serde")] impl<'a> SetNewPrevHash<'a> { pub fn into_static(self) -> SetNewPrevHash<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetNewPrevHash<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/set_target.rs b/protocols/v2/subprotocols/mining/src/set_target.rs index a4804ba3b..fba42a02f 100644 --- a/protocols/v2/subprotocols/mining/src/set_target.rs +++ b/protocols/v2/subprotocols/mining/src/set_target.rs @@ -37,9 +37,9 @@ impl<'d> GetSize for SetTarget<'d> { #[cfg(feature = "with_serde")] impl<'a> SetTarget<'a> { pub fn into_static(self) -> SetTarget<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SetTarget<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); 
+ panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/submit_shares.rs b/protocols/v2/subprotocols/mining/src/submit_shares.rs index 1a6f489d6..5c8bea64f 100644 --- a/protocols/v2/subprotocols/mining/src/submit_shares.rs +++ b/protocols/v2/subprotocols/mining/src/submit_shares.rs @@ -157,18 +157,18 @@ impl<'d> GetSize for SubmitSharesError<'d> { #[cfg(feature = "with_serde")] impl<'a> SubmitSharesError<'a> { pub fn into_static(self) -> SubmitSharesError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SubmitSharesError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> SubmitSharesExtended<'a> { pub fn into_static(self) -> SubmitSharesExtended<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> SubmitSharesExtended<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/mining/src/update_channel.rs b/protocols/v2/subprotocols/mining/src/update_channel.rs index e165662c5..a8843213c 100644 --- a/protocols/v2/subprotocols/mining/src/update_channel.rs +++ b/protocols/v2/subprotocols/mining/src/update_channel.rs @@ -61,18 +61,18 @@ impl<'d> GetSize for UpdateChannelError<'d> { #[cfg(feature = "with_serde")] impl<'a> UpdateChannel<'a> { pub fn into_static(self) -> UpdateChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn 
as_static(&self) -> UpdateChannel<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } #[cfg(feature = "with_serde")] impl<'a> UpdateChannelError<'a> { pub fn into_static(self) -> UpdateChannelError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> UpdateChannelError<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } diff --git a/protocols/v2/subprotocols/template-distribution/src/new_template.rs b/protocols/v2/subprotocols/template-distribution/src/new_template.rs index c2ee52d7a..67c5b1290 100644 --- a/protocols/v2/subprotocols/template-distribution/src/new_template.rs +++ b/protocols/v2/subprotocols/template-distribution/src/new_template.rs @@ -156,10 +156,10 @@ impl<'d> GetSize for NewTemplate<'d> { #[cfg(feature = "with_serde")] impl<'a> NewTemplate<'a> { pub fn into_static(self) -> NewTemplate<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } pub fn as_static(&self) -> NewTemplate<'static> { - panic!("This function shouldn't be called by the Messaege Generator"); + panic!("This function shouldn't be called by the Message Generator"); } } From 4fd9098329c5722b6d41a6437521f5eb15bdebe7 Mon Sep 17 00:00:00 2001 From: plebhash Date: Sun, 9 Jun 2024 19:51:42 -0300 Subject: [PATCH 011/101] patch bosminer subscribe edge case --- protocols/v1/src/methods/client_to_server.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/protocols/v1/src/methods/client_to_server.rs b/protocols/v1/src/methods/client_to_server.rs index 0e837db48..669598eec 100644 --- a/protocols/v1/src/methods/client_to_server.rs +++ 
b/protocols/v1/src/methods/client_to_server.rs @@ -319,6 +319,8 @@ impl<'a> TryFrom for Subscribe<'a> { let (agent_signature, extranonce1) = match ¶ms[..] { // bosminer subscribe message [JString(a), Null, JString(_), Null] => (a.into(), None), + // bosminer subscribe message + [JString(a), Null] => (a.into(), None), [JString(a), JString(b)] => { (a.into(), Some(Extranonce::try_from(hex::decode(b)?)?)) } From 63b8d23b51378a781d951320eab79f46ee4f7026 Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 10 Jun 2024 12:09:44 +0200 Subject: [PATCH 012/101] Bump sv1 version --- protocols/v1/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protocols/v1/Cargo.toml b/protocols/v1/Cargo.toml index 1a665b3c1..0e7bd1826 100644 --- a/protocols/v1/Cargo.toml +++ b/protocols/v1/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" authors = ["user"] edition = "2018" description = "API for bridging SV1 miners to SV2 pools" From ee3c8478a24bfdba353147b0883e3918650f623c Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 10 Jun 2024 17:39:58 -0300 Subject: [PATCH 013/101] add project maturity to README --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 71df3afc7..b1b2cd119 100644 --- a/README.md +++ b/README.md @@ -69,6 +69,12 @@ Our roadmap is publicly available, outlining current and future plans. Decisions [View the SRI Roadmap](https://github.com/orgs/stratum-mining/projects/5) +### 🏅 Project Maturity + +Low-level crates (`protocols` directory) are considered **beta** software. Rust API Docs is a [work-in-progress](https://github.com/stratum-mining/stratum/issues/845), and the community should still expect small breaking API changes and patches. + +Application-level crates (`roles` directory) are considered **alpha** software, and bugs are expected. They should be used as a guide on how to consume the low-level crates as dependencies. 
+ ### 🎯 Goals The goals of this project are to provide: From f7d83786637892f7de41a856f909f71f049a2eb5 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Fri, 7 Jun 2024 13:34:05 +0300 Subject: [PATCH 014/101] Add missing `TemplateDistributionTypes` case ..to `TryFrom<(u8, &'a mut [u8])> for PoolMessages` --- protocols/v2/roles-logic-sv2/src/parsers.rs | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/parsers.rs b/protocols/v2/roles-logic-sv2/src/parsers.rs index b0f5f44a0..6eaf5e016 100644 --- a/protocols/v2/roles-logic-sv2/src/parsers.rs +++ b/protocols/v2/roles-logic-sv2/src/parsers.rs @@ -1106,11 +1106,18 @@ impl<'a> TryFrom<(u8, &'a mut [u8])> for PoolMessages<'a> { let is_common: Result = v.0.try_into(); let is_mining: Result = v.0.try_into(); let is_job_declaration: Result = v.0.try_into(); - match (is_common, is_mining, is_job_declaration) { - (Ok(_), Err(_), Err(_)) => Ok(Self::Common(v.try_into()?)), - (Err(_), Ok(_), Err(_)) => Ok(Self::Mining(v.try_into()?)), - (Err(_), Err(_), Ok(_)) => Ok(Self::JobDeclaration(v.try_into()?)), - (Err(e), Err(_), Err(_)) => Err(e), + let is_template_distribution: Result = v.0.try_into(); + match ( + is_common, + is_mining, + is_job_declaration, + is_template_distribution, + ) { + (Ok(_), Err(_), Err(_), Err(_)) => Ok(Self::Common(v.try_into()?)), + (Err(_), Ok(_), Err(_), Err(_)) => Ok(Self::Mining(v.try_into()?)), + (Err(_), Err(_), Ok(_), Err(_)) => Ok(Self::JobDeclaration(v.try_into()?)), + (Err(_), Err(_), Err(_), Ok(_)) => Ok(Self::TemplateDistribution(v.try_into()?)), + (Err(e), Err(_), Err(_), Err(_)) => Err(e), // This is an impossible state is safe to panic here _ => panic!(), } From 2efa74b65d54b62f46199ec8d723c444325859a8 Mon Sep 17 00:00:00 2001 From: fi3 Date: Wed, 19 Jun 2024 13:43:04 +0200 Subject: [PATCH 015/101] Update CpuMiner use tokio rather then async_std --- roles/Cargo.lock | 4 ++-- roles/test-utils/mining-device/Cargo.toml | 4 ++-- 
roles/test-utils/mining-device/src/main.rs | 24 ++++++++++++---------- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/roles/Cargo.lock b/roles/Cargo.lock index fbaeadc6d..e20c6b9a2 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -1341,7 +1341,6 @@ version = "0.1.1" dependencies = [ "async-channel 1.9.0", "async-recursion 0.3.2", - "async-std", "binary_sv2", "buffer_sv2", "clap", @@ -1354,6 +1353,7 @@ dependencies = [ "roles_logic_sv2", "sha2 0.10.8", "stratum-common", + "tokio", "tracing", "tracing-subscriber", ] @@ -2053,7 +2053,7 @@ dependencies = [ [[package]] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/roles/test-utils/mining-device/Cargo.toml b/roles/test-utils/mining-device/Cargo.toml index 4ce6c5037..feedba02e 100644 --- a/roles/test-utils/mining-device/Cargo.toml +++ b/roles/test-utils/mining-device/Cargo.toml @@ -12,9 +12,8 @@ codec_sv2 = { version = "^1.0.1", path = "../../../protocols/v2/codec-sv2", feat roles_logic_sv2 = { version = "1.0.0", path = "../../../protocols/v2/roles-logic-sv2" } const_sv2 = { version = "1.0.0", path = "../../../protocols/v2/const-sv2" } async-channel = "1.5.1" -async-std={version = "1.8.0", features = ["attributes"]} binary_sv2 = { version = "1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } -network_helpers_sv2 = { version = "2.0.0", path = "../../roles-utils/network-helpers", features=["async_std"] } +network_helpers_sv2 = { version = "2.0.0", path = "../../roles-utils/network-helpers", features=["tokio"] } buffer_sv2 = { version = "1.0.0", path = "../../../utils/buffer"} async-recursion = "0.3.2" rand = "0.8.4" @@ -24,3 +23,4 @@ clap = { version = "^4.5.4", features = ["derive"] } tracing = { version = "0.1" } tracing-subscriber = "0.3" sha2 = "0.10.6" +tokio = "^1.36.0" diff --git a/roles/test-utils/mining-device/src/main.rs b/roles/test-utils/mining-device/src/main.rs index 1bfbf6737..912775d0b 100644 --- 
a/roles/test-utils/mining-device/src/main.rs +++ b/roles/test-utils/mining-device/src/main.rs @@ -1,10 +1,14 @@ -use async_std::net::TcpStream; use key_utils::Secp256k1PublicKey; -use network_helpers_sv2::Connection; +use network_helpers_sv2::noise_connection_tokio::Connection; use roles_logic_sv2::utils::Id; -use std::{net::SocketAddr, sync::Arc, thread::sleep, time::Duration}; +use std::{ + net::{SocketAddr, ToSocketAddrs}, + sync::Arc, + thread::sleep, + time::Duration, +}; +use tokio::net::TcpStream; -use async_std::net::ToSocketAddrs; use clap::Parser; use rand::{thread_rng, Rng}; use std::time::Instant; @@ -57,14 +61,12 @@ async fn connect( let address = address .clone() .to_socket_addrs() - .await .expect("Invalid pool address, use one of this formats: ip:port, domain:port") .next() .expect("Invalid pool address, use one of this formats: ip:port, domain:port"); info!("Connecting to pool at {}", address); let socket = loop { - let pool = - async_std::future::timeout(Duration::from_secs(5), TcpStream::connect(address)).await; + let pool = tokio::time::timeout(Duration::from_secs(5), TcpStream::connect(address)).await; match pool { Ok(result) => match result { Ok(socket) => break socket, @@ -85,15 +87,15 @@ async fn connect( info!("Pool tcp connection established at {}", address); let address = socket.peer_addr().unwrap(); let initiator = Initiator::new(pub_key.map(|e| e.0)); - let (receiver, sender): (Receiver, Sender) = - Connection::new(socket, codec_sv2::HandshakeRole::Initiator(initiator), 10) + let (receiver, sender, _, _): (Receiver, Sender, _, _) = + Connection::new(socket, codec_sv2::HandshakeRole::Initiator(initiator)) .await .unwrap(); info!("Pool noise connection established at {}", address); Device::start(receiver, sender, address, device_id, user_id, handicap).await } -#[async_std::main] +#[tokio::main] async fn main() { let args = Args::parse(); tracing_subscriber::fmt::init(); @@ -303,7 +305,7 @@ impl Device { .unwrap(); }); - 
async_std::task::spawn(async move { + tokio::task::spawn(async move { let recv = share_recv.clone(); loop { let (nonce, job_id, version, ntime) = recv.recv().await.unwrap(); From 93b7d822840ef44435cff5f1b64ebc2fc2512772 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 15 May 2024 21:53:42 -0300 Subject: [PATCH 016/101] add test for tProxy receiving a share with old job_id --- .../messages/mining_messages.json | 25 +++++ .../message-generator/mock/upstream-mock.json | 97 ++++++++++++++++ .../test/translation-proxy-old-share.json | 105 ++++++++++++++++++ 3 files changed, 227 insertions(+) create mode 100644 test/message-generator/mock/upstream-mock.json create mode 100644 test/message-generator/test/translation-proxy-old-share.json diff --git a/test/message-generator/messages/mining_messages.json b/test/message-generator/messages/mining_messages.json index fa7385ba6..79da299c5 100644 --- a/test/message-generator/messages/mining_messages.json +++ b/test/message-generator/messages/mining_messages.json @@ -1,5 +1,30 @@ { "mining_messages": [ + { + "message": { + "type": "NewExtendedMiningJob", + "job_id": 1, + "channel_id": 0, + "min_ntime": [], + "version": 536870912, + "version_rolling_allowed": true, + "merkle_path": [], + "coinbase_tx_prefix": [2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 36, 2, 3, 15, 0], + "coinbase_tx_suffix": [255, 255, 255, 255, 2, 149, 0, 0, 0, 0, 0, 0, 0, 67, 65, 4, 70, 109, 127, 202, 229, 99, 229, 203, 9, 160, 209, 135, 11, 181, 128, 52, 72, 4, 97, 120, 121, 161, 73, 73, 207, 34, 40, 95, 27, 174, 63, 39, 103, 40, 23, 108, 60, 100, 49, 248, 238, 218, 69, 56, 220, 55, 200, 101, 226, 120, 79, 58, 158, 119, 208, 68, 243, 62, 64, 119, 151, 225, 39, 138, 172, 0, 0, 0, 0, 0, 0, 0, 0, 38, 106, 36, 170, 33, 169, 237, 226, 246, 28, 63, 113, 209, 222, 253, 63, 169, 153, 223, 163, 105, 83, 117, 92, 105, 6, 137, 121, 153, 98, 180, 139, 235, 216, 54, 151, 78, 
140, 249, 1, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "id": "new_extended_mining_job" + }, + { + "message":{ + "type": "SetNewPrevHash", + "channel_id": 0, + "job_id": 1, + "prev_hash": [91, 30, 84, 205, 18, 124, 218, 102, 28, 163, 155, 204, 173, 55, 119, 61, 224, 199, 68, 229, 144, 22, 92, 0, 53, 44, 15, 204, 200, 245, 149, 0], + "min_ntime": 1679128496, + "nbits": 545259519 + }, + "id": "set_new_prev_hash" + }, { "message": { "type": "OpenExtendedMiningChannelSuccess", diff --git a/test/message-generator/mock/upstream-mock.json b/test/message-generator/mock/upstream-mock.json new file mode 100644 index 000000000..094d2dc67 --- /dev/null +++ b/test/message-generator/mock/upstream-mock.json @@ -0,0 +1,97 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Mock an Upstream", + "Start listening on port 34254", + "Receive SetupConnection", + "Sends SetupConnection.Success", + "Receive OpenExtendedMiningChannel", + "Send OpenExtendedMiningChannel.Success", + "Send NewExtendedMiningJob" + ], + "frame_builders": [ + { + "type": "automatic", + "message_id": "test/message-generator/messages/common_messages.json::setup_connection_success_tproxy" + }, + { + "type": "automatic", + "message_id": "test/message-generator/messages/mining_messages.json::open_extended_mining_channel_success" + }, + { + "type": "automatic", + "message_id": "test/message-generator/messages/mining_messages.json::set_custom_mining_job_success" + }, + { + "type": "automatic", + "message_id": "test/message-generator/messages/mining_messages.json::submit_shares_error" + }, + { + "type": "automatic", + "message_id": "test/message-generator/messages/mining_messages.json::new_extended_mining_job" + }, + { + "type": "automatic", + "message_id": "test/message-generator/messages/mining_messages.json::set_new_prev_hash" + } + ], + "actions": [ + { + "message_ids": [], + "role": "server", + "results": [ + { + "type": 
"match_message_type", + "value": "0x00" + } + ], + "actiondoc": "Checks that a SetupConnection message is received from Downstream" + }, + { + "message_ids": ["setup_connection_success_tproxy"], + "role": "server", + "results": [ + { + "type": "match_message_type", + "value": "0x13" + } + ], + "actiondoc": "Sends SetupConnection.Success to Downstream, then checks that a OpenExtendedMiningChannel is sent from Downstream" + }, + { + "message_ids": ["open_extended_mining_channel_success"], + "role": "server", + "results": [], + "actiondoc": "Sends OpenExtendedMiningChannel.Success to Downstream" + }, + { + "message_ids": ["new_extended_mining_job"], + "role": "server", + "results": [], + "actiondoc": "Sends NewExtendedMiningJob to Downstream" + }, + { + "message_ids": ["set_new_prev_hash"], + "role": "server", + "results": [], + "actiondoc": "Sends SetNewPrevHash to Downstream" + } + ], + "setup_commands": [], + "execution_commands": [], + "cleanup_commands": [ + { + "command": "sleep", + "args": ["10000"], + "conditions": "None" + } + ], + "role": "server", + "upstream": { + "ip": "127.0.0.1", + "port": 34254, + "pub_key": "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72", + "secret_key": "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" + } +} diff --git a/test/message-generator/test/translation-proxy-old-share.json b/test/message-generator/test/translation-proxy-old-share.json new file mode 100644 index 000000000..38814382f --- /dev/null +++ b/test/message-generator/test/translation-proxy-old-share.json @@ -0,0 +1,105 @@ +{ + "version": "1", + "doc": [ + "This test does", + "Mock an Upstream", + "Run tProxy", + "tProxy receives NewExtendedMiningJob (with job_id = 1)", + "tProxy receives mining.configure, mining.subscribe, mining.authorize", + "tProxy sends mining.notify", + "tProxy receives mining.submit (old/invalid share with job_id=0)" + ], + "sv1_messages": [ + { + "message": { + "id": 1, + "method": "mining.authorize", + "params": ["username", 
"password"] + }, + "id": "mining.authorize" + }, + { + "message": { + "id": 2, + "method": "mining.submit", + "params": ["username", "0", "0000000000000000", "641577b0", "7a600640"] + }, + "id": "mining.submit" + } + ], + "frame_builders": [ + ], + "actions": [ + { + "message_ids": ["mining.authorize"], + "results": [ + { + "type": "match_message_id", + "value": 1 + } + ] + }, + { + "message_ids": ["mining.submit"], + "results": [ + { + "type": "match_message_field", + "value": [ + "mining.submit", + [ + [ + "result", + false + ] + ] + ] + } + ], + "actiondoc": "Checks that the mining.submit request (with wrong job_id) generates a response with false, indicating that the share was rejected" + } + ], + "setup_commands": [ + { + "command": "cargo", + "args": [ + "run", + "../../test/message-generator/mock/upstream-mock.json" + ], + "conditions": "None" + }, + { + "command": "cargo", + "args": [ + "run", + "-p", + "translator_sv2", + "--", + "-c", + "../test/config/tproxy-config-no-jd-sv1-cpu-md.toml" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "", + "output_location": "StdOut", + "late_condition": false, + "condition": true + } + ], + "timer_secs": 260, + "warn_no_panic": false + } + } + } + ], + "execution_commands": [ + ], + "cleanup_commands": [ + ], + "role": "client", + "downstream": { + "ip": "0.0.0.0", + "port": 34255 + } +} From 69398ae38b30c570bde45fad38054692d8a3a2f4 Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 13 May 2024 20:20:28 -0300 Subject: [PATCH 017/101] fix rejection of old sv1 shares --- .../src/lib/upstream_sv2/upstream.rs | 2 +- .../src/lib/downstream_sv1/diff_management.rs | 11 +++--- .../src/lib/downstream_sv1/downstream.rs | 35 +++++++++++-------- roles/translator/src/lib/proxy/bridge.rs | 6 ++-- .../src/lib/upstream_sv2/upstream.rs | 2 +- 5 files changed, 31 insertions(+), 25 deletions(-) diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs 
b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 857cbd308..b89db0fa6 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -144,7 +144,7 @@ impl Upstream { /// `UpstreamConnection` with a channel to send and receive messages from the SV2 Upstream /// role and uses channels provided in the function arguments to send and receive messages /// from the `Downstream`. - #[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))] + #[allow(clippy::too_many_arguments)] pub async fn new( address: SocketAddr, authority_public_key: Secp256k1PublicKey, diff --git a/roles/translator/src/lib/downstream_sv1/diff_management.rs b/roles/translator/src/lib/downstream_sv1/diff_management.rs index eb8f0af38..35a4bdafa 100644 --- a/roles/translator/src/lib/downstream_sv1/diff_management.rs +++ b/roles/translator/src/lib/downstream_sv1/diff_management.rs @@ -324,7 +324,7 @@ mod test { let initial_nominal_hashrate = measure_hashrate(5); let target = match roles_logic_sv2::utils::hash_rate_to_target( initial_nominal_hashrate, - expected_shares_per_minute.into(), + expected_shares_per_minute, ) { Ok(target) => target, Err(_) => panic!(), @@ -344,7 +344,7 @@ mod test { let calculated_share_per_min = count as f32 / (elapsed.as_secs_f32() / 60.0); // This is the error margin for a confidence of 99% given the expect number of shares per // minute TODO the review the math under it - let error_margin = get_error(expected_shares_per_minute.into()); + let error_margin = get_error(expected_shares_per_minute); let error = (calculated_share_per_min - expected_shares_per_minute as f32).abs(); assert!( error <= error_margin as f32, @@ -379,9 +379,9 @@ mod test { } let elapsed_secs = start_time.elapsed().as_secs_f64(); - let hashrate = hashes as f64 / elapsed_secs; - let nominal_hash_rate = hashrate; - nominal_hash_rate + + + hashes as f64 / elapsed_secs } fn hash(share: &mut [u8; 80]) -> Target { @@ -438,6 +438,7 @@ 
mod test { 0, downstream_conf.clone(), Arc::new(Mutex::new(upstream_config)), + "0".to_string(), ); downstream.difficulty_mgmt.min_individual_miner_hashrate = start_hashrate as f32; diff --git a/roles/translator/src/lib/downstream_sv1/downstream.rs b/roles/translator/src/lib/downstream_sv1/downstream.rs index ddf74fe58..8c833b9db 100644 --- a/roles/translator/src/lib/downstream_sv1/downstream.rs +++ b/roles/translator/src/lib/downstream_sv1/downstream.rs @@ -62,6 +62,7 @@ pub struct Downstream { extranonce2_len: usize, pub(super) difficulty_mgmt: DownstreamDifficultyConfig, pub(super) upstream_difficulty_config: Arc>, + last_job_id: String, // we usually receive a String on SV1 messages, no need to cast to u32 } impl Downstream { @@ -78,6 +79,7 @@ impl Downstream { extranonce2_len: usize, difficulty_mgmt: DownstreamDifficultyConfig, upstream_difficulty_config: Arc>, + last_job_id: String, ) -> Self { Downstream { connection_id, @@ -91,6 +93,7 @@ impl Downstream { extranonce2_len, difficulty_mgmt, upstream_difficulty_config, + last_job_id, } } /// Instantiate a new `Downstream`. 
@@ -131,6 +134,7 @@ impl Downstream { extranonce2_len, difficulty_mgmt: difficulty_config, upstream_difficulty_config, + last_job_id: "".to_string(), })); let self_ = downstream.clone(); @@ -290,9 +294,11 @@ impl Downstream { // if hashrate has changed, update difficulty management, and send new mining.set_difficulty handle_result!(tx_status_notify, Self::try_update_difficulty_settings(downstream.clone()).await); - let sv1_mining_notify_msg = handle_result!(tx_status_notify, res); - let message: json_rpc::Message = sv1_mining_notify_msg.into(); + let message: json_rpc::Message = sv1_mining_notify_msg.clone().into(); + + self_.safe_lock(|s| s.last_job_id = sv1_mining_notify_msg.job_id).unwrap(); + handle_result!(tx_status_notify, Downstream::send_message_downstream(downstream.clone(), message).await); }, _ = rx_shutdown.recv().fuse() => { @@ -491,24 +497,23 @@ impl IsServer<'static> for Downstream { /// When miner find the job which meets requested difficulty, it can submit share to the server. /// Only [Submit](client_to_server::Submit) requests for authorized user names can be submitted. 
fn handle_submit(&self, request: &client_to_server::Submit<'static>) -> bool { - info!("Down: Submitting Share"); + info!("Down: Submitting Share {:?}", request); debug!("Down: Handling mining.submit: {:?}", &request); // TODO: Check if receiving valid shares by adding diff field to Downstream - if self.first_job_received { - let to_send = SubmitShareWithChannelId { - channel_id: self.connection_id, - share: request.clone(), - extranonce: self.extranonce1.clone(), - extranonce2_len: self.extranonce2_len, - version_rolling_mask: self.version_rolling_mask.clone(), - }; - self.tx_sv1_bridge - .try_send(DownstreamMessages::SubmitShares(to_send)) - .unwrap(); + let to_send = SubmitShareWithChannelId { + channel_id: self.connection_id, + share: request.clone(), + extranonce: self.extranonce1.clone(), + extranonce2_len: self.extranonce2_len, + version_rolling_mask: self.version_rolling_mask.clone(), }; - true + self.tx_sv1_bridge + .try_send(DownstreamMessages::SubmitShares(to_send)) + .unwrap(); + + request.job_id == self.last_job_id } /// Indicates to the server that the client supports the mining.set_extranonce method. 
diff --git a/roles/translator/src/lib/proxy/bridge.rs b/roles/translator/src/lib/proxy/bridge.rs index 22aeaa18f..ee9ad4337 100644 --- a/roles/translator/src/lib/proxy/bridge.rs +++ b/roles/translator/src/lib/proxy/bridge.rs @@ -22,7 +22,7 @@ use super::super::{ }; use error_handling::handle_result; use roles_logic_sv2::{channel_logic::channel_factory::OnNewShare, Error as RolesLogicError}; -use tracing::{debug, error, info}; +use tracing::{debug, error, info, warn}; /// Bridge between the SV2 `Upstream` and SV1 `Downstream` responsible for the following messaging /// translation: @@ -235,7 +235,7 @@ impl Bridge { match res { Ok(Ok(OnNewShare::SendErrorDownstream(e))) => { - error!( + warn!( "Submit share error {:?}", std::str::from_utf8(&e.error_code.to_vec()[..]) ); @@ -595,7 +595,7 @@ mod test { previous_output: p_out, script_sig: vec![89_u8; 16].into(), sequence: bitcoin::Sequence(0), - witness: Witness::from_vec(vec![]).into(), + witness: Witness::from_vec(vec![]), }; let tx = bitcoin::Transaction { version: 1, diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index f6d192f75..856ccb45f 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -112,7 +112,7 @@ impl Upstream { /// `UpstreamConnection` with a channel to send and receive messages from the SV2 Upstream /// role and uses channels provided in the function arguments to send and receive messages /// from the `Downstream`. 
- #[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))] + #[allow(clippy::too_many_arguments)] pub async fn new( address: SocketAddr, authority_public_key: Secp256k1PublicKey, From 9bf5a145a36eefa5e6cf5c3816bd8d34ccd22554 Mon Sep 17 00:00:00 2001 From: plebhash Date: Sun, 2 Jun 2024 14:02:47 -0300 Subject: [PATCH 018/101] follow new mg template + fix job_id on sv1_messages.mining.submit --- .../translation-proxy-old-share.json | 2 +- .../translation-proxy-old-share.sh | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) rename test/message-generator/test/{ => translation-proxy-old-share}/translation-proxy-old-share.json (99%) create mode 100644 test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh diff --git a/test/message-generator/test/translation-proxy-old-share.json b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json similarity index 99% rename from test/message-generator/test/translation-proxy-old-share.json rename to test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json index 38814382f..e04771fe1 100644 --- a/test/message-generator/test/translation-proxy-old-share.json +++ b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json @@ -20,7 +20,7 @@ }, { "message": { - "id": 2, + "id": 0, "method": "mining.submit", "params": ["username", "0", "0000000000000000", "641577b0", "7a600640"] }, diff --git a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh new file mode 100644 index 000000000..30149552d --- /dev/null +++ b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh @@ -0,0 +1,11 @@ +cd roles + +cargo build -p translator_sv2 +cargo build -p + +cd ../utils/message-generator/ +cargo build + +RUST_LOG=debug cargo run 
../../test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json || { echo 'mg test failed' ; exit 1; } + +sleep 10 From 50556aedf625ec0d3832f92080eb8a8bcc6f5ce6 Mon Sep 17 00:00:00 2001 From: plebhash Date: Sun, 2 Jun 2024 14:40:52 -0300 Subject: [PATCH 019/101] fix test doc --- .../translation-proxy-old-share.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json index e04771fe1..79833f2c0 100644 --- a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json +++ b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json @@ -5,7 +5,7 @@ "Mock an Upstream", "Run tProxy", "tProxy receives NewExtendedMiningJob (with job_id = 1)", - "tProxy receives mining.configure, mining.subscribe, mining.authorize", + "tProxy receives mining.authorize", "tProxy sends mining.notify", "tProxy receives mining.submit (old/invalid share with job_id=0)" ], From e5fd09b6378a6c3a93f64bb65873759b2e7b1741 Mon Sep 17 00:00:00 2001 From: plebhash Date: Sun, 2 Jun 2024 14:52:58 -0300 Subject: [PATCH 020/101] add translation-proxy-old-share to Github Actions --- .github/workflows/coverage.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/coverage.yaml b/.github/workflows/coverage.yaml index b35d1974e..5309c26fc 100644 --- a/.github/workflows/coverage.yaml +++ b/.github/workflows/coverage.yaml @@ -121,6 +121,9 @@ jobs: - name: Run translation-proxy run: sh ./test/message-generator/test/translation-proxy/translation-proxy.sh + - name: Run translation-proxy-old-share + run: sh ./test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh + - name: Coverage report run: sh ./code-coverage-report.sh From 39603e93c147ac418696dfce3381c9619bf22239 Mon Sep 17 00:00:00 
2001 From: plebhash Date: Sun, 2 Jun 2024 16:51:56 -0300 Subject: [PATCH 021/101] fmt --- roles/translator/src/lib/downstream_sv1/diff_management.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/roles/translator/src/lib/downstream_sv1/diff_management.rs b/roles/translator/src/lib/downstream_sv1/diff_management.rs index 35a4bdafa..378134560 100644 --- a/roles/translator/src/lib/downstream_sv1/diff_management.rs +++ b/roles/translator/src/lib/downstream_sv1/diff_management.rs @@ -379,8 +379,7 @@ mod test { } let elapsed_secs = start_time.elapsed().as_secs_f64(); - - + hashes as f64 / elapsed_secs } From 0456be685d96950d8774d3a933e65fe4c4e26a19 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 5 Jun 2024 14:20:36 -0300 Subject: [PATCH 022/101] avoid sending old share upstream --- .../src/lib/downstream_sv1/downstream.rs | 29 +++++++++++-------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/roles/translator/src/lib/downstream_sv1/downstream.rs b/roles/translator/src/lib/downstream_sv1/downstream.rs index 8c833b9db..fe5a6f5a4 100644 --- a/roles/translator/src/lib/downstream_sv1/downstream.rs +++ b/roles/translator/src/lib/downstream_sv1/downstream.rs @@ -502,18 +502,23 @@ impl IsServer<'static> for Downstream { // TODO: Check if receiving valid shares by adding diff field to Downstream - let to_send = SubmitShareWithChannelId { - channel_id: self.connection_id, - share: request.clone(), - extranonce: self.extranonce1.clone(), - extranonce2_len: self.extranonce2_len, - version_rolling_mask: self.version_rolling_mask.clone(), - }; - self.tx_sv1_bridge - .try_send(DownstreamMessages::SubmitShares(to_send)) - .unwrap(); - - request.job_id == self.last_job_id + if request.job_id == self.last_job_id { + let to_send = SubmitShareWithChannelId { + channel_id: self.connection_id, + share: request.clone(), + extranonce: self.extranonce1.clone(), + extranonce2_len: self.extranonce2_len, + version_rolling_mask: 
self.version_rolling_mask.clone(), + }; + + self.tx_sv1_bridge + .try_send(DownstreamMessages::SubmitShares(to_send)) + .unwrap(); + + return true + } else { + return false + } } /// Indicates to the server that the client supports the mining.set_extranonce method. From ff817d16cdf6a05a37fcb57eb98bb57f6d7d9da6 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 5 Jun 2024 14:24:40 -0300 Subject: [PATCH 023/101] revert clippy changes --- roles/jd-client/src/lib/upstream_sv2/upstream.rs | 2 +- roles/translator/src/lib/upstream_sv2/upstream.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index b89db0fa6..857cbd308 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -144,7 +144,7 @@ impl Upstream { /// `UpstreamConnection` with a channel to send and receive messages from the SV2 Upstream /// role and uses channels provided in the function arguments to send and receive messages /// from the `Downstream`. - #[allow(clippy::too_many_arguments)] + #[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))] pub async fn new( address: SocketAddr, authority_public_key: Secp256k1PublicKey, diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index 856ccb45f..f6d192f75 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -112,7 +112,7 @@ impl Upstream { /// `UpstreamConnection` with a channel to send and receive messages from the SV2 Upstream /// role and uses channels provided in the function arguments to send and receive messages /// from the `Downstream`. 
- #[allow(clippy::too_many_arguments)] + #[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))] pub async fn new( address: SocketAddr, authority_public_key: Secp256k1PublicKey, From f317f7607499b9c36479cd74914fa9fda1551472 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 5 Jun 2024 21:08:40 -0300 Subject: [PATCH 024/101] fmt --- roles/translator/src/lib/downstream_sv1/downstream.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/translator/src/lib/downstream_sv1/downstream.rs b/roles/translator/src/lib/downstream_sv1/downstream.rs index fe5a6f5a4..b5157d605 100644 --- a/roles/translator/src/lib/downstream_sv1/downstream.rs +++ b/roles/translator/src/lib/downstream_sv1/downstream.rs @@ -515,9 +515,9 @@ impl IsServer<'static> for Downstream { .try_send(DownstreamMessages::SubmitShares(to_send)) .unwrap(); - return true + return true; } else { - return false + return false; } } From 228b869dae8da5551c67adbbf4d86d8f145b3f91 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 19 Jun 2024 08:06:24 -0300 Subject: [PATCH 025/101] fix sv1-mining-device handle_notify manipulation of job_id --- roles/test-utils/sv1-mining-device/src/job.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/roles/test-utils/sv1-mining-device/src/job.rs b/roles/test-utils/sv1-mining-device/src/job.rs index ef02b009e..12d0655bf 100644 --- a/roles/test-utils/sv1-mining-device/src/job.rs +++ b/roles/test-utils/sv1-mining-device/src/job.rs @@ -25,9 +25,7 @@ pub(crate) struct Job { impl Job { pub fn from_notify(notify_msg: server_to_client::Notify<'_>, extranonce: Vec) -> Self { - // TODO: Hard coded for demo. Should be properly translated from received Notify message - // Right now, Notify.job_id is a string, but the Job.job_id is a u32 here. 
- let job_id = 1u32; + let job_id = notify_msg.job_id.parse::().expect("expect valid job_id on String"); // Convert prev hash from Vec into expected [u32; 8] let prev_hash_vec: Vec = notify_msg.prev_hash.into(); From 08684877f72e4633459bc04e5c5c5b94589c890e Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 19 Jun 2024 09:23:46 -0300 Subject: [PATCH 026/101] update last_job_id if is_a && && last_notify.is_some() --- roles/translator/src/lib/downstream_sv1/downstream.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/roles/translator/src/lib/downstream_sv1/downstream.rs b/roles/translator/src/lib/downstream_sv1/downstream.rs index b5157d605..4361e624b 100644 --- a/roles/translator/src/lib/downstream_sv1/downstream.rs +++ b/roles/translator/src/lib/downstream_sv1/downstream.rs @@ -275,6 +275,9 @@ impl Downstream { ); let sv1_mining_notify_msg = last_notify.clone().unwrap(); + + self_.safe_lock(|s| s.last_job_id = sv1_mining_notify_msg.clone().job_id).unwrap(); + let message: json_rpc::Message = sv1_mining_notify_msg.into(); handle_result!( tx_status_notify, From 67b817d41d40f8edee5778b4b76d5b00c9b27f18 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 19 Jun 2024 09:24:49 -0300 Subject: [PATCH 027/101] reduce hashrate for tproxy-config-no-jd-sv1-cpu-md.toml --- test/config/tproxy-config-no-jd-sv1-cpu-md.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/config/tproxy-config-no-jd-sv1-cpu-md.toml b/test/config/tproxy-config-no-jd-sv1-cpu-md.toml index fdca70399..7c90479f0 100644 --- a/test/config/tproxy-config-no-jd-sv1-cpu-md.toml +++ b/test/config/tproxy-config-no-jd-sv1-cpu-md.toml @@ -35,7 +35,7 @@ coinbase_reward_sat = 5_000_000_000 # Difficulty params [downstream_difficulty_config] # hashes/s of the weakest miner that will be connecting -min_individual_miner_hashrate=100_000.0 +min_individual_miner_hashrate=100.0 # minimum number of shares needed before a mining.set_difficulty is sent for updating targets 
miner_num_submits_before_update=5 # target number of shares per minute the miner should be sending From ca141d0bd095792d5a00fca62c04549c8b2ddb4e Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 19 Jun 2024 09:32:32 -0300 Subject: [PATCH 028/101] fmt --- roles/test-utils/sv1-mining-device/src/job.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/roles/test-utils/sv1-mining-device/src/job.rs b/roles/test-utils/sv1-mining-device/src/job.rs index 12d0655bf..1d6b3d2bc 100644 --- a/roles/test-utils/sv1-mining-device/src/job.rs +++ b/roles/test-utils/sv1-mining-device/src/job.rs @@ -25,7 +25,10 @@ pub(crate) struct Job { impl Job { pub fn from_notify(notify_msg: server_to_client::Notify<'_>, extranonce: Vec) -> Self { - let job_id = notify_msg.job_id.parse::().expect("expect valid job_id on String"); + let job_id = notify_msg + .job_id + .parse::() + .expect("expect valid job_id on String"); // Convert prev hash from Vec into expected [u32; 8] let prev_hash_vec: Vec = notify_msg.prev_hash.into(); From 0fa39868a32372f98ce1fd798ab3f10130b15bd4 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 19 Jun 2024 09:45:26 -0300 Subject: [PATCH 029/101] clippy --- roles/translator/src/lib/downstream_sv1/downstream.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/translator/src/lib/downstream_sv1/downstream.rs b/roles/translator/src/lib/downstream_sv1/downstream.rs index 4361e624b..1001a7ef8 100644 --- a/roles/translator/src/lib/downstream_sv1/downstream.rs +++ b/roles/translator/src/lib/downstream_sv1/downstream.rs @@ -518,9 +518,9 @@ impl IsServer<'static> for Downstream { .try_send(DownstreamMessages::SubmitShares(to_send)) .unwrap(); - return true; + true } else { - return false; + false } } From 61b3959c3f030cd78c98816058534c86d83536b5 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 19 Jun 2024 09:47:32 -0300 Subject: [PATCH 030/101] fmt --- roles/translator/src/lib/downstream_sv1/downstream.rs | 4 +++- 1 file changed, 3 
insertions(+), 1 deletion(-) diff --git a/roles/translator/src/lib/downstream_sv1/downstream.rs b/roles/translator/src/lib/downstream_sv1/downstream.rs index 1001a7ef8..9e44a96fb 100644 --- a/roles/translator/src/lib/downstream_sv1/downstream.rs +++ b/roles/translator/src/lib/downstream_sv1/downstream.rs @@ -276,7 +276,9 @@ impl Downstream { let sv1_mining_notify_msg = last_notify.clone().unwrap(); - self_.safe_lock(|s| s.last_job_id = sv1_mining_notify_msg.clone().job_id).unwrap(); + self_ + .safe_lock(|s| s.last_job_id = sv1_mining_notify_msg.clone().job_id) + .unwrap(); let message: json_rpc::Message = sv1_mining_notify_msg.into(); handle_result!( From 0c05a518ebca9efe22cfbfa65286c5a06871bde1 Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 24 Jun 2024 11:57:38 -0300 Subject: [PATCH 031/101] fix translation-proxy-old-share.sh permission --- .../translation-proxy-old-share/translation-proxy-old-share.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh diff --git a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh old mode 100644 new mode 100755 From 569f3061db3d192e6bf2336547987a4a9a47a6c0 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 21 Jun 2024 12:59:18 -0300 Subject: [PATCH 032/101] config examples for testnet4 --- .../jd-server/config-examples/jds-config-hosted-example.toml | 4 ++-- roles/jd-server/config-examples/jds-config-local-example.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/jd-server/config-examples/jds-config-hosted-example.toml b/roles/jd-server/config-examples/jds-config-hosted-example.toml index c8a75ef0f..328cffd52 100644 --- a/roles/jd-server/config-examples/jds-config-hosted-example.toml +++ b/roles/jd-server/config-examples/jds-config-hosted-example.toml @@ 
-18,8 +18,8 @@ coinbase_outputs = [ # SRI Pool JD config listen_jd_address = "0.0.0.0:34264" # RPC config for mempool (it can be also the same TP if correctly configured) -core_rpc_url = "http://127.0.0.1" -core_rpc_port = 18332 +core_rpc_url = "http://75.119.150.111" +core_rpc_port = 48332 core_rpc_user = "username" core_rpc_pass = "password" # Time interval used for JDS mempool update diff --git a/roles/jd-server/config-examples/jds-config-local-example.toml b/roles/jd-server/config-examples/jds-config-local-example.toml index 07aabd514..a3c7622ac 100644 --- a/roles/jd-server/config-examples/jds-config-local-example.toml +++ b/roles/jd-server/config-examples/jds-config-local-example.toml @@ -19,7 +19,7 @@ coinbase_outputs = [ listen_jd_address = "127.0.0.1:34264" # RPC config for mempool (it can be also the same TP if correctly configured) core_rpc_url = "http://127.0.0.1" -core_rpc_port = 18332 +core_rpc_port = 48332 core_rpc_user = "username" core_rpc_pass = "password" # Time interval used for JDS mempool update From ae76f93d1dde3712dc25a1c1e5d6d27248f9f815 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Thu, 20 Jun 2024 13:57:29 +0300 Subject: [PATCH 033/101] Log config error in `jd-client`. 
--- roles/jd-client/src/main.rs | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/roles/jd-client/src/main.rs b/roles/jd-client/src/main.rs index ac246987e..c12a17b39 100644 --- a/roles/jd-client/src/main.rs +++ b/roles/jd-client/src/main.rs @@ -107,7 +107,10 @@ async fn main() { let proxy_config = match process_cli_args() { Ok(p) => p, - Err(_) => return, + Err(e) => { + error!("Failed to read config file: {}", e); + return; + } }; loop { @@ -205,7 +208,13 @@ async fn initialize_jd_as_solo_miner( task_collector: Arc>>, timeout: Duration, ) { - let proxy_config = process_cli_args().unwrap(); + let proxy_config = match process_cli_args() { + Ok(p) => p, + Err(e) => { + error!("Failed to read config file: {}", e); + return; + } + }; let miner_tx_out = lib::proxy_config::get_coinbase_output(&proxy_config).unwrap(); // When Downstream receive a share that meets bitcoin target it transformit in a From 20c16ea970b87731518a0fbcfdccb706bfb50fec Mon Sep 17 00:00:00 2001 From: esraa Date: Fri, 21 Jun 2024 15:31:06 +0300 Subject: [PATCH 034/101] Add commit guidance to `CONTRIBUTION.MD` --- CONTRIBUTING.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3a70a6a3e..baf13845a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -83,6 +83,8 @@ The SRI project follows an open contributor model, where anyone is welcome to co 2. **Create a Branch** 3. **Commit Your Changes** + + **Note:** Commits should cover both the issue fixed and the solution's rationale. These [guidelines](https://chris.beams.io/posts/git-commit/) should be kept in mind. 4. 
**Run Tests, Clippy, and Formatter:** From cfcb97292604a927579b14095ac6e6df400afaf2 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Wed, 26 Jun 2024 22:57:12 -0400 Subject: [PATCH 035/101] Add unit test for jds-module: lib/mods and lib/status --- roles/jd-server/src/lib/mod.rs | 73 ++++++++ roles/jd-server/src/lib/status.rs | 281 +++++++++++++++++++++++++++++- 2 files changed, 353 insertions(+), 1 deletion(-) diff --git a/roles/jd-server/src/lib/mod.rs b/roles/jd-server/src/lib/mod.rs index a76c80cf1..5fb12c75e 100644 --- a/roles/jd-server/src/lib/mod.rs +++ b/roles/jd-server/src/lib/mod.rs @@ -98,3 +98,76 @@ where _ => Err(serde::de::Error::custom("Unsupported duration unit")), } } + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::*; + + fn load_config(path: &str) -> Configuration { + let config_path = PathBuf::from(path); + assert!( + config_path.exists(), + "No config file found at {:?}", + config_path + ); + + let config_string = + std::fs::read_to_string(config_path).expect("Failed to read the config file"); + toml::from_str(&config_string).expect("Failed to parse config") + } + + #[test] + fn test_get_coinbase_output_non_empty() { + let config = load_config("config-examples/jds-config-hosted-example.toml"); + let outputs = get_coinbase_output(&config).expect("Failed to get coinbase output"); + + let expected_output = CoinbaseOutput_ { + output_script_type: "P2WPKH".to_string(), + output_script_value: + "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075".to_string(), + }; + let expected_script: Script = expected_output.try_into().unwrap(); + let expected_transaction_output = TxOut { + value: 0, + script_pubkey: expected_script, + }; + + assert_eq!(outputs[0], expected_transaction_output); + } + + #[test] + fn test_get_coinbase_output_empty() { + let mut config = load_config("config-examples/jds-config-hosted-example.toml"); + config.coinbase_outputs.clear(); + + let result = get_coinbase_output(&config); + assert!( + 
matches!(result, Err(Error::EmptyCoinbaseOutputs)), + "Expected an error for empty coinbase outputs" + ); + } + + #[test] + fn test_try_from_valid_input() { + let input = CoinbaseOutput { + output_script_type: "P2PKH".to_string(), + output_script_value: + "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075".to_string(), + }; + let result: Result = (&input).try_into(); + assert!(result.is_ok()); + } + + #[test] + fn test_try_from_invalid_input() { + let input = CoinbaseOutput { + output_script_type: "INVALID".to_string(), + output_script_value: + "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075".to_string(), + }; + let result: Result = (&input).try_into(); + assert!(matches!(result, Err(Error::UnknownOutputScriptType))); + } +} diff --git a/roles/jd-server/src/lib/status.rs b/roles/jd-server/src/lib/status.rs index 83a50026f..fe9981617 100644 --- a/roles/jd-server/src/lib/status.rs +++ b/roles/jd-server/src/lib/status.rs @@ -36,7 +36,7 @@ pub struct Status { pub state: State, } -/// this function is used to discern which componnent experienced the event. +/// this function is used to discern which component experienced the event. 
/// With this knowledge we can wrap the status message with information (`State` variants) so /// the main status loop can decide what should happen async fn send_status( @@ -129,3 +129,282 @@ pub async fn handle_error(sender: &Sender, e: JdsError) -> error_handling::Error } } } + +#[cfg(test)] +mod tests { + use std::{convert::TryInto, io::Error}; + + use super::*; + use async_channel::{bounded, RecvError}; + use roles_logic_sv2::mining_sv2::OpenMiningChannelError; + + #[tokio::test] + async fn test_send_status_downstream_listener_shutdown() { + let (tx, rx) = bounded(1); + let sender = Sender::DownstreamListener(tx); + let error = JdsError::ChannelRecv(async_channel::RecvError); + + send_status(&sender, error, error_handling::ErrorBranch::Continue).await; + match rx.recv().await { + Ok(status) => match status.state { + State::DownstreamShutdown(e) => { + assert_eq!(e.to_string(), "Channel recv failed: `RecvError`") + } + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_send_status_upstream_shutdown() { + let (tx, rx) = bounded(1); + let sender = Sender::Upstream(tx); + let error = JdsError::MempoolError(crate::mempool::error::JdsMempoolError::EmptyMempool); + let error_string = error.to_string(); + send_status(&sender, error, error_handling::ErrorBranch::Continue).await; + + match rx.recv().await { + Ok(status) => match status.state { + State::TemplateProviderShutdown(e) => assert_eq!(e.to_string(), error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_io_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::Io(Error::new(std::io::ErrorKind::Interrupted, "IO error")); + let error_string = error.to_string(); + + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + 
State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_channel_send_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::ChannelSend(Box::new("error")); + let error_string = error.to_string(); + + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_channel_receive_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::ChannelRecv(RecvError); + let error_string = error.to_string(); + + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::DownstreamShutdown(e) => assert_eq!(e.to_string(), error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_binary_sv2_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::BinarySv2(binary_sv2::Error::IoError); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_codec_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::Codec(codec_sv2::Error::InvalidStepForInitiator); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match 
status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_noise_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::Noise(noise_sv2::Error::HandshakeNotFinalized); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_roles_logic_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::RolesLogic(roles_logic_sv2::Error::BadPayloadSize); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_custom_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::Custom("error".to_string()); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_framing_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::Framing(codec_sv2::framing_sv2::Error::ExpectedHandshakeFrame); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + 
Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_poison_lock_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::PoisonLock("error".to_string()); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_impossible_to_reconstruct_block_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::ImpossibleToReconstructBlock("Impossible".to_string()); + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_no_last_declared_job_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::NoLastDeclaredJob; + let error_string = error.to_string(); + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::Healthy(e) => assert_eq!(e, error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_last_mempool_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let error = JdsError::MempoolError(crate::mempool::error::JdsMempoolError::EmptyMempool); + let error_string = error.to_string(); + 
handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::TemplateProviderShutdown(e) => assert_eq!(e.to_string(), error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } + + #[tokio::test] + async fn test_handle_error_sv2_protocol_error() { + let (tx, rx) = bounded(1); + let sender = Sender::Downstream(tx); + let inner: [u8; 32] = rand::random(); + let value = inner.to_vec().try_into().unwrap(); + let error = JdsError::Sv2ProtocolError(( + 12, + Mining::OpenMiningChannelError(OpenMiningChannelError { + request_id: 1, + error_code: value, + }), + )); + let error_string = "12"; + handle_error(&sender, error).await; + match rx.recv().await { + Ok(status) => match status.state { + State::DownstreamInstanceDropped(e) => assert_eq!(e.to_string(), error_string), + _ => panic!("Unexpected state received"), + }, + Err(_) => panic!("Failed to receive status"), + } + } +} From 29c9acc3fa99740d5125d687bb907255829a0960 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 27 Jun 2024 21:46:58 -0300 Subject: [PATCH 036/101] fix GetSize for SubmitSolutionJd the GetSize implementation is not accounting for self.version.get_size()! 
that is causing NoiseEncoder to allocate a buffer that's smaller than what is actually needed, which eventually triggers FramingSv2Error(BinarySv2Error(WriteError)) during encoding --- protocols/v2/subprotocols/job-declaration/src/submit_solution.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/protocols/v2/subprotocols/job-declaration/src/submit_solution.rs b/protocols/v2/subprotocols/job-declaration/src/submit_solution.rs index 4b7c30e4c..9687313ce 100644 --- a/protocols/v2/subprotocols/job-declaration/src/submit_solution.rs +++ b/protocols/v2/subprotocols/job-declaration/src/submit_solution.rs @@ -30,5 +30,6 @@ impl<'d> GetSize for SubmitSolutionJd<'d> { + self.ntime.get_size() + self.nonce.get_size() + self.nbits.get_size() + + self.version.get_size() } } From aaa5a3c03030e8413971de2f7b9bdfc1ca3a922c Mon Sep 17 00:00:00 2001 From: esraa Date: Sun, 23 Jun 2024 12:52:11 +0300 Subject: [PATCH 037/101] Move scripts to a folder --- .github/workflows/coverage.yaml | 4 ++-- .github/workflows/sv2-header-check.yaml | 2 +- build_header.sh | 17 ----------------- examples/interop-cpp/README.md | 2 +- examples/interop-cpp/run.sh | 2 +- .../build-on-all-workspaces.sh | 0 scripts/build_header.sh | 17 +++++++++++++++++ .../check-versioning-lib-release.sh | 0 .../clippy-on-all-workspaces.sh | 0 .../code-coverage-report.sh | 0 .../message-generator-tests.sh | 0 .../mg-codecov-tests.sh | 0 .../sv2-header-check.sh | 5 +++-- sv2-publish.sh => scripts/sv2-publish.sh | 0 tarpaulin.sh => scripts/tarpaulin.sh | 0 15 files changed, 25 insertions(+), 24 deletions(-) delete mode 100755 build_header.sh rename build-on-all-workspaces.sh => scripts/build-on-all-workspaces.sh (100%) create mode 100755 scripts/build_header.sh rename check-versioning-lib-release.sh => scripts/check-versioning-lib-release.sh (100%) rename clippy-on-all-workspaces.sh => scripts/clippy-on-all-workspaces.sh (100%) rename code-coverage-report.sh => scripts/code-coverage-report.sh (100%) rename 
message-generator-tests.sh => scripts/message-generator-tests.sh (100%) rename mg-codecov-tests.sh => scripts/mg-codecov-tests.sh (100%) rename sv2-header-check.sh => scripts/sv2-header-check.sh (96%) rename sv2-publish.sh => scripts/sv2-publish.sh (100%) rename tarpaulin.sh => scripts/tarpaulin.sh (100%) diff --git a/.github/workflows/coverage.yaml b/.github/workflows/coverage.yaml index 5309c26fc..21d56abe4 100644 --- a/.github/workflows/coverage.yaml +++ b/.github/workflows/coverage.yaml @@ -37,7 +37,7 @@ jobs: - name: Generate code coverage run: | - ./tarpaulin.sh + ./scripts/tarpaulin.sh - name: Archive Tarpaulin code coverage results uses: actions/upload-artifact@v4 @@ -125,7 +125,7 @@ jobs: run: sh ./test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh - name: Coverage report - run: sh ./code-coverage-report.sh + run: sh ./scripts/code-coverage-report.sh - name: Archive MG code coverage results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/sv2-header-check.yaml b/.github/workflows/sv2-header-check.yaml index 9d5149a78..732bd316b 100644 --- a/.github/workflows/sv2-header-check.yaml +++ b/.github/workflows/sv2-header-check.yaml @@ -29,4 +29,4 @@ jobs: - name: Check sv2 header file is up to date with commit run: | echo Check sv2 header file is up to date with commit - sh ./sv2-header-check.sh + sh ./scripts/sv2-header-check.sh diff --git a/build_header.sh b/build_header.sh deleted file mode 100755 index 3d798731b..000000000 --- a/build_header.sh +++ /dev/null @@ -1,17 +0,0 @@ -#! /bin/sh - -cargo install --version 0.20.0 cbindgen - -rm -f ./sv2.h -touch ./sv2.h - -dir=${1:-protocols} - -cd "$dir" - cbindgen --crate const_sv2 >> ../sv2.h - cbindgen --crate binary_codec_sv2 >> ../sv2.h - cbindgen --crate common_messages_sv2 >> ../sv2.h - cbindgen --crate template_distribution_sv2 >> ../sv2.h - cbindgen --crate codec_sv2 >> ../sv2.h - cbindgen --crate sv2_ffi >> ../sv2.h -cd .. 
diff --git a/examples/interop-cpp/README.md b/examples/interop-cpp/README.md index fff3df9f2..779b2fb37 100644 --- a/examples/interop-cpp/README.md +++ b/examples/interop-cpp/README.md @@ -207,7 +207,7 @@ installation phase of `sv2_ffi` is replaced and `sv2.h` and the newly built `lib in the container (they are installed in `/gnu/store/[hash]-Rust-sv2_ffi-[version]/`). The manifest it expect to find `sv2.h` in the `sv2_ffi` package. Since the `sv2.h` is created manually with -`/build_header.sh`, it is very easy to commit code with an out of date header file. To ensure all commits include +`scripts/build_header.sh`, it is very easy to commit code with an out of date header file. To ensure all commits include the most updated header file, a GitHub Actions check is planned to be added. ## Install cbindgen diff --git a/examples/interop-cpp/run.sh b/examples/interop-cpp/run.sh index fe0dbdd18..31f914e5d 100755 --- a/examples/interop-cpp/run.sh +++ b/examples/interop-cpp/run.sh @@ -14,7 +14,7 @@ cargo build \ -p sv2_ffi && \ cp ../../protocols/target/release/libsv2_ffi.a ./ -../../build_header.sh ../../protocols && mv ../../sv2.h . +../../scripts/build_header.sh ../../protocols && mv ../../scripts/sv2.h . g++ -I ./ ./template-provider/template-provider.cpp libsv2_ffi.a -lpthread -ldl diff --git a/build-on-all-workspaces.sh b/scripts/build-on-all-workspaces.sh similarity index 100% rename from build-on-all-workspaces.sh rename to scripts/build-on-all-workspaces.sh diff --git a/scripts/build_header.sh b/scripts/build_header.sh new file mode 100755 index 000000000..e0e20cd7e --- /dev/null +++ b/scripts/build_header.sh @@ -0,0 +1,17 @@ +#! 
/bin/sh + +cargo install --version 0.20.0 cbindgen + +rm -f ./scripts/sv2.h +touch ./scripts/sv2.h + +dir=${1:-../protocols} + +cd "$dir" + cbindgen --crate const_sv2 >> ../scripts/sv2.h + cbindgen --crate binary_codec_sv2 >> ../scripts/sv2.h + cbindgen --crate common_messages_sv2 >> ../scripts/sv2.h + cbindgen --crate template_distribution_sv2 >> ../scripts/sv2.h + cbindgen --crate codec_sv2 >> ../scripts/sv2.h + cbindgen --crate sv2_ffi >> ../scripts/sv2.h +cd .. diff --git a/check-versioning-lib-release.sh b/scripts/check-versioning-lib-release.sh similarity index 100% rename from check-versioning-lib-release.sh rename to scripts/check-versioning-lib-release.sh diff --git a/clippy-on-all-workspaces.sh b/scripts/clippy-on-all-workspaces.sh similarity index 100% rename from clippy-on-all-workspaces.sh rename to scripts/clippy-on-all-workspaces.sh diff --git a/code-coverage-report.sh b/scripts/code-coverage-report.sh similarity index 100% rename from code-coverage-report.sh rename to scripts/code-coverage-report.sh diff --git a/message-generator-tests.sh b/scripts/message-generator-tests.sh similarity index 100% rename from message-generator-tests.sh rename to scripts/message-generator-tests.sh diff --git a/mg-codecov-tests.sh b/scripts/mg-codecov-tests.sh similarity index 100% rename from mg-codecov-tests.sh rename to scripts/mg-codecov-tests.sh diff --git a/sv2-header-check.sh b/scripts/sv2-header-check.sh similarity index 96% rename from sv2-header-check.sh rename to scripts/sv2-header-check.sh index 6e0469143..b39b0af54 100755 --- a/sv2-header-check.sh +++ b/scripts/sv2-header-check.sh @@ -21,9 +21,10 @@ set -ex # cargo install cbindgen --force bts # cbindgen -V -cd ./protocols/v2/sv2-ffi +echo $PWD +cd protocols/v2/sv2-ffi SHA1_1=$(sha1sum sv2.h) -cd ../../.. 
+cd ../../../scripts BUILD_SCRIPT="./build_header.sh" sh ./"$BUILD_SCRIPT" diff --git a/sv2-publish.sh b/scripts/sv2-publish.sh similarity index 100% rename from sv2-publish.sh rename to scripts/sv2-publish.sh diff --git a/tarpaulin.sh b/scripts/tarpaulin.sh similarity index 100% rename from tarpaulin.sh rename to scripts/tarpaulin.sh From 7c3c45d68399a5624f358db1d70473ab8fa48208 Mon Sep 17 00:00:00 2001 From: fi3 Date: Wed, 19 Jun 2024 15:14:32 +0200 Subject: [PATCH 038/101] Add multi thread capabilities to cpu miner --- roles/Cargo.lock | 8 +- roles/test-utils/mining-device/Cargo.toml | 2 +- roles/test-utils/mining-device/README.md | 21 ++++ roles/test-utils/mining-device/src/main.rs | 134 +++++++++++++++++---- 4 files changed, 136 insertions(+), 29 deletions(-) create mode 100644 roles/test-utils/mining-device/README.md diff --git a/roles/Cargo.lock b/roles/Cargo.lock index e20c6b9a2..ea0dcfa1c 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -2117,9 +2117,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.37.0" +version = "1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" dependencies = [ "backtrace", "bytes", @@ -2136,9 +2136,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", diff --git a/roles/test-utils/mining-device/Cargo.toml b/roles/test-utils/mining-device/Cargo.toml index feedba02e..ca710fefb 100644 --- a/roles/test-utils/mining-device/Cargo.toml +++ b/roles/test-utils/mining-device/Cargo.toml @@ -23,4 +23,4 @@ clap = { version = "^4.5.4", features = 
["derive"] } tracing = { version = "0.1" } tracing-subscriber = "0.3" sha2 = "0.10.6" -tokio = "^1.36.0" +tokio = "^1.38.0" diff --git a/roles/test-utils/mining-device/README.md b/roles/test-utils/mining-device/README.md new file mode 100644 index 000000000..4065c2c30 --- /dev/null +++ b/roles/test-utils/mining-device/README.md @@ -0,0 +1,21 @@ +# CPU Sv2 mining device + +Header only sv2 cpu miner. + +``` +Usage: mining-device [OPTIONS] --address-pool + +Options: + -p, --pubkey-pool Pool pub key, when left empty the pool certificate is not checked + -i, --id-device Sometimes used by the pool to identify the device + -a, --address-pool Address of the pool in this format ip:port or domain:port + --handicap This value is used to slow down the cpu miner, it represents the number of micro-seconds that are awaited between hashes [default: 0] + --id-user User id, used when a new channel is opened, it can be used by the pool to identify the miner + -h, --help Print help + -V, --version Print version +``` + +Usage example: +``` +cargo run --release -- --address-pool 127.0.0.1:20000 --id-device device_id::SOLO::bc1qxy2kgdygjrsqtzq2n0yrf2493p83kkfjhx0wlh +``` diff --git a/roles/test-utils/mining-device/src/main.rs b/roles/test-utils/mining-device/src/main.rs index 912775d0b..e2208bff4 100644 --- a/roles/test-utils/mining-device/src/main.rs +++ b/roles/test-utils/mining-device/src/main.rs @@ -1,10 +1,13 @@ +#![allow(clippy::option_map_unit_fn)] use key_utils::Secp256k1PublicKey; use network_helpers_sv2::noise_connection_tokio::Connection; use roles_logic_sv2::utils::Id; use std::{ net::{SocketAddr, ToSocketAddrs}, - sync::Arc, - thread::sleep, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, time::Duration, }; use tokio::net::TcpStream; @@ -40,7 +43,7 @@ struct Args { address_pool: String, #[arg( long, - help = "This value is used to slow down the cpu miner, it rapresents the number of micro-seconds that are awaited between hashes", + help = "This value is used to slow 
down the cpu miner, it represents the number of micro-seconds that are awaited between hashes", default_value = "0" )] handicap: u32, @@ -75,7 +78,7 @@ async fn connect( "Failed to connect to Upstream role at {}, retrying in 5s: {}", address, e ); - sleep(Duration::from_secs(5)); + tokio::time::sleep(Duration::from_secs(5)).await; } }, Err(_) => { @@ -95,7 +98,7 @@ async fn connect( Device::start(receiver, sender, address, device_id, user_id, handicap).await } -#[tokio::main] +#[tokio::main(flavor = "current_thread")] async fn main() { let args = Args::parse(); tracing_subscriber::fmt::init(); @@ -220,6 +223,12 @@ impl ParseUpstreamCommonMessages for SetupConnectionHandler { } } +#[derive(Debug, Clone)] +struct NewWorkNotifier { + should_send: bool, + sender: Sender<()>, +} + #[derive(Debug)] pub struct Device { #[allow(dead_code)] @@ -232,13 +241,15 @@ pub struct Device { jobs: Vec>, prev_hash: Option>, sequence_numbers: Id, + notify_changes_to_mining_thread: NewWorkNotifier, } fn open_channel(device_id: Option) -> OpenStandardMiningChannel<'static> { let user_identity = device_id.unwrap_or_default().try_into().unwrap(); let id: u32 = 10; info!("Measuring CPU hashrate"); - let nominal_hash_rate = measure_hashrate(5) as f32; + let p = std::thread::available_parallelism().unwrap().get() as u32 - 3; + let nominal_hash_rate = measure_hashrate(5) as f32 * p as f32; info!("Pc hashrate is {}", nominal_hash_rate); info!("MINING DEVICE: send open channel with request id {}", id); OpenStandardMiningChannel { @@ -269,6 +280,7 @@ impl Device { .await; info!("Pool sv2 connection established at {}", addr); let miner = Arc::new(Mutex::new(Miner::new(handicap))); + let (notify_changes_to_mining_thread, update_miners) = async_channel::unbounded(); let self_ = Self { channel_opened: false, receiver: receiver.clone(), @@ -278,6 +290,10 @@ impl Device { prev_hash: None, channel_id: None, sequence_numbers: Id::new(), + notify_changes_to_mining_thread: NewWorkNotifier { + should_send: 
true, + sender: notify_changes_to_mining_thread, + }, }; let open_channel = MiningDeviceMessages::Mining(Mining::OpenStandardMiningChannel(open_channel(user_id))); @@ -288,23 +304,7 @@ impl Device { let (share_send, share_recv) = async_channel::unbounded(); - let handicap = miner.safe_lock(|m| m.handicap).unwrap(); - std::thread::spawn(move || loop { - std::thread::sleep(std::time::Duration::from_micros(handicap.into())); - if miner.safe_lock(|m| m.next_share()).unwrap().is_valid() { - let nonce = miner.safe_lock(|m| m.header.unwrap().nonce).unwrap(); - let time = miner.safe_lock(|m| m.header.unwrap().time).unwrap(); - let job_id = miner.safe_lock(|m| m.job_id).unwrap(); - let version = miner.safe_lock(|m| m.version).unwrap(); - share_send - .try_send((nonce, job_id.unwrap(), version.unwrap(), time)) - .unwrap(); - } - miner - .safe_lock(|m| m.header.as_mut().map(|h| h.nonce += 1)) - .unwrap(); - }); - + start_mining_threads(update_miners, miner, share_send); tokio::task::spawn(async move { let recv = share_recv.clone(); loop { @@ -324,6 +324,17 @@ impl Device { MiningRoutingLogic::None, ) .unwrap(); + let mut notify_changes_to_mining_thread = self_mutex + .safe_lock(|s| s.notify_changes_to_mining_thread.clone()) + .unwrap(); + if notify_changes_to_mining_thread.should_send { + notify_changes_to_mining_thread + .sender + .send(()) + .await + .unwrap(); + notify_changes_to_mining_thread.should_send = false; + }; match next { SendTo::RelayNewMessageToRemote(_, m) => { let sv2_frame: StdFrame = MiningDeviceMessages::Mining(m).try_into().unwrap(); @@ -427,6 +438,7 @@ impl ParseUpstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> fo self.miner .safe_lock(|miner| miner.new_target(m.target.to_vec())) .unwrap(); + self.notify_changes_to_mining_thread.should_send = true; Ok(SendTo::None(None)) } @@ -479,6 +491,7 @@ impl ParseUpstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> fo .safe_lock(|miner| miner.new_header(p_h, &m)) .unwrap(); 
self.jobs = vec![m.as_static()]; + self.notify_changes_to_mining_thread.should_send = true; } (true, _) => self.jobs.push(m.as_static()), (false, None) => { @@ -511,6 +524,7 @@ impl ParseUpstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> fo .unwrap(); self.jobs = vec![jobs[0].clone()]; self.prev_hash = Some(m.as_static()); + self.notify_changes_to_mining_thread.should_send = true; } _ => panic!(), } @@ -535,6 +549,7 @@ impl ParseUpstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> fo self.miner .safe_lock(|miner| miner.new_target(m.maximum_target.to_vec())) .unwrap(); + self.notify_changes_to_mining_thread.should_send = true; Ok(SendTo::None(None)) } @@ -543,7 +558,7 @@ impl ParseUpstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> fo } } -#[derive(Debug)] +#[derive(Debug, Clone)] struct Miner { header: Option, target: Option, @@ -671,3 +686,74 @@ fn generate_random_32_byte_array() -> [u8; 32] { rng.fill(&mut arr[..]); arr } + +fn start_mining_threads( + have_new_job: Receiver<()>, + miner: Arc>, + share_send: Sender<(u32, u32, u32, u32)>, +) { + tokio::task::spawn(async move { + let mut killers: Vec> = vec![]; + loop { + let available_parallelism = u32::max( + 2, + std::thread::available_parallelism().unwrap().get() as u32, + ); + let p = available_parallelism - 1; + let unit = u32::MAX / p; + while have_new_job.recv().await.is_ok() { + while let Some(killer) = killers.pop() { + killer.store(true, Ordering::Relaxed); + } + let miner = miner.safe_lock(|m| m.clone()).unwrap(); + for i in 0..p { + let mut miner = miner.clone(); + let share_send = share_send.clone(); + let killer = Arc::new(AtomicBool::new(false)); + miner.header.as_mut().map(|h| h.nonce = i * unit); + killers.push(killer.clone()); + std::thread::spawn(move || { + mine(miner, share_send, killer); + }); + } + } + } + }); +} + +fn mine(mut miner: Miner, share_send: Sender<(u32, u32, u32, u32)>, kill: Arc) { + if miner.handicap != 0 { + loop { + if 
kill.load(Ordering::Relaxed) { + break; + } + std::thread::sleep(std::time::Duration::from_micros(miner.handicap.into())); + if miner.next_share().is_valid() { + let nonce = miner.header.unwrap().nonce; + let time = miner.header.unwrap().time; + let job_id = miner.job_id.unwrap(); + let version = miner.version; + share_send + .try_send((nonce, job_id, version.unwrap(), time)) + .unwrap(); + } + miner.header.as_mut().map(|h| h.nonce += 1); + } + } else { + loop { + if miner.next_share().is_valid() { + if kill.load(Ordering::Relaxed) { + break; + } + let nonce = miner.header.unwrap().nonce; + let time = miner.header.unwrap().time; + let job_id = miner.job_id.unwrap(); + let version = miner.version; + share_send + .try_send((nonce, job_id, version.unwrap(), time)) + .unwrap(); + } + miner.header.as_mut().map(|h| h.nonce += 1); + } + } +} From 1bc0bd500deb70557b1c9f3a921e1dbbf1cd9c3c Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 24 Jun 2024 15:04:12 -0300 Subject: [PATCH 039/101] fix mg submit solution jd --- utils/message-generator/src/parser/sv2_messages.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/utils/message-generator/src/parser/sv2_messages.rs b/utils/message-generator/src/parser/sv2_messages.rs index 4919fc324..091d0c58c 100644 --- a/utils/message-generator/src/parser/sv2_messages.rs +++ b/utils/message-generator/src/parser/sv2_messages.rs @@ -372,6 +372,8 @@ pub enum JobDeclaration<'a> { ProvideMissingTransactions(ProvideMissingTransactions<'a>), #[serde(borrow)] ProvideMissingTransactionsSuccess(ProvideMissingTransactionsSuccess<'a>), + #[serde(borrow)] + SubmitSolution(SubmitSolutionJd<'a>), } impl<'a> From> for roles_logic_sv2::parsers::JobDeclaration<'a> { @@ -390,6 +392,7 @@ impl<'a> From> for roles_logic_sv2::parsers::JobDeclaration<' JobDeclaration::ProvideMissingTransactionsSuccess(m) => { Self::ProvideMissingTransactionsSuccess(m) } + JobDeclaration::SubmitSolution(m) => Self::SubmitSolution(m), } } } From 
862a25f1fc1239b1ff863bf4eb08ebdcdb7c4e4f Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Sat, 29 Jun 2024 23:20:17 -0400 Subject: [PATCH 040/101] Refactor: JobDeclarator redefined last_declare_job sender --- roles/jd-client/src/lib/downstream.rs | 8 ++-- .../src/lib/job_declarator/message_handler.rs | 20 ++++++--- roles/jd-client/src/lib/job_declarator/mod.rs | 42 ++++++++++++------- roles/jd-client/src/lib/mod.rs | 2 +- roles/jd-client/src/main.rs | 14 +++---- 5 files changed, 52 insertions(+), 34 deletions(-) diff --git a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index ddc6e5bd0..da14c7b0a 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -32,8 +32,8 @@ pub type EitherFrame = StandardEitherFrame; /// 1 to 1 connection with a downstream node that implement the mining (sub)protocol can be either /// a mining device or a downstream proxy. -/// A downstream can only be linked with an upstream at a time. Support multi upstrems for -/// downstream do no make much sense. +/// A downstream can only be linked with an upstream at a time. Support multi upstreams for +/// downstream do not make much sense. 
#[derive(Debug)] pub struct DownstreamMiningNode { receiver: Receiver, @@ -181,7 +181,7 @@ impl DownstreamMiningNode { } } - /// Send SetupConnectionSuccess to donwstream and start processing new messages coming from + /// Send SetupConnectionSuccess to downstream and start processing new messages coming from /// downstream pub async fn start( self_mutex: &Arc>, @@ -225,7 +225,7 @@ impl DownstreamMiningNode { // mining channel success fn set_channel_factory(self_mutex: Arc>) { if !self_mutex.safe_lock(|s| s.status.is_solo_miner()).unwrap() { - // Safe unwrap already checked if it contains an upstream withe `is_solo_miner` + // Safe unwrap already checked if it contains an upstream with `is_solo_miner` let upstream = self_mutex .safe_lock(|s| s.status.get_upstream().unwrap()) .unwrap(); diff --git a/roles/jd-client/src/lib/job_declarator/message_handler.rs b/roles/jd-client/src/lib/job_declarator/message_handler.rs index 7516f24f9..72d58a912 100644 --- a/roles/jd-client/src/lib/job_declarator/message_handler.rs +++ b/roles/jd-client/src/lib/job_declarator/message_handler.rs @@ -55,12 +55,20 @@ impl ParseServerJobDeclarationMessages for JobDeclarator { ) -> Result { let tx_list = self .last_declare_mining_jobs_sent - .get(&message.request_id) - .unwrap() - .clone() - .unwrap() - .tx_list - .into_inner(); + .iter() + .find_map(|entry| { + if let Some((id, last_declare_job)) = entry { + if *id == message.request_id { + Some(last_declare_job.clone().tx_list.into_inner()) + } else { + None + } + } else { + None + } + }) + .ok_or_else(|| Error::UnknownRequestId(message.request_id))?; + let unknown_tx_position_list: Vec = message.unknown_tx_position_list.into_inner(); let missing_transactions: Vec = unknown_tx_position_list .iter() diff --git a/roles/jd-client/src/lib/job_declarator/mod.rs b/roles/jd-client/src/lib/job_declarator/mod.rs index abaf852ca..5e5191cb1 100644 --- a/roles/jd-client/src/lib/job_declarator/mod.rs +++ b/roles/jd-client/src/lib/job_declarator/mod.rs 
@@ -55,7 +55,7 @@ pub struct JobDeclarator { req_ids: Id, min_extranonce_size: u16, // (Sent DeclareMiningJob, is future, template id, merkle path) - last_declare_mining_jobs_sent: HashMap>, + last_declare_mining_jobs_sent: [Option<(u32, LastDeclareJob)>; 2], last_set_new_prev_hash: Option>, set_new_prev_hash_counter: u8, #[allow(clippy::type_complexity)] @@ -115,7 +115,7 @@ impl JobDeclarator { allocated_tokens: vec![], req_ids: Id::new(), min_extranonce_size, - last_declare_mining_jobs_sent: HashMap::with_capacity(10), + last_declare_mining_jobs_sent: [None, None], last_set_new_prev_hash: None, future_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), up, @@ -130,14 +130,18 @@ impl JobDeclarator { Ok(self_) } - fn get_last_declare_job_sent(self_mutex: &Arc>, request_id: u32) -> LastDeclareJob { + fn get_last_declare_job_sent( + self_mutex: &Arc>, + request_id: u32, + ) -> Option { self_mutex .safe_lock(|s| { - s.last_declare_mining_jobs_sent - .get(&request_id) - .expect("LastDeclareJob not found") - .clone() - .expect("unreachable code") + for (id, job) in s.last_declare_mining_jobs_sent.iter().flatten() { + if *id == request_id { + return Some(job.to_owned()); + } + } + None }) .unwrap() } @@ -149,13 +153,20 @@ impl JobDeclarator { ) { self_mutex .safe_lock(|s| { - //check hashmap size in order to not let it grow indefinetely - if s.last_declare_mining_jobs_sent.len() < 10 { - s.last_declare_mining_jobs_sent.insert(request_id, Some(j)); - } else if let Some(min_key) = s.last_declare_mining_jobs_sent.keys().min().cloned() + if let Some(empty_index) = s + .last_declare_mining_jobs_sent + .iter() + .position(|entry| entry.is_none()) + { + s.last_declare_mining_jobs_sent[empty_index] = Some((request_id, j)); + } else if let Some((min_index, _)) = s + .last_declare_mining_jobs_sent + .iter() + .enumerate() + .filter_map(|(i, entry)| entry.as_ref().map(|(id, _)| (i, id))) + .min_by_key(|&(_, id)| id) { - s.last_declare_mining_jobs_sent.remove(&min_key); - 
s.last_declare_mining_jobs_sent.insert(request_id, Some(j)); + s.last_declare_mining_jobs_sent[min_index] = Some((request_id, j)); } }) .unwrap(); @@ -289,8 +300,7 @@ impl JobDeclarator { match next_message_to_send { Ok(SendTo::None(Some(JobDeclaration::DeclareMiningJobSuccess(m)))) => { let new_token = m.new_mining_job_token; - let last_declare = - Self::get_last_declare_job_sent(&self_mutex, m.request_id); + let last_declare = Self::get_last_declare_job_sent(&self_mutex, m.request_id).unwrap_or_else(|| panic!("Failed to get last declare job: job not found, Request Id: {:?}.", m.request_id)); let mut last_declare_mining_job_sent = last_declare.declare_job; let is_future = last_declare.template.future_template; let id = last_declare.template.template_id; diff --git a/roles/jd-client/src/lib/mod.rs b/roles/jd-client/src/lib/mod.rs index 489759420..455e901cf 100644 --- a/roles/jd-client/src/lib/mod.rs +++ b/roles/jd-client/src/lib/mod.rs @@ -15,7 +15,7 @@ use std::{sync::atomic::AtomicBool, time::Duration}; /// In the meantime if the context that is running the template receiver receives a SetNewPrevHash /// it wait until the value of this global is true before doing anything. /// -/// Acuire and Release memory ordering is used. +/// Acquire and Release memory ordering is used. /// /// Memory Ordering Explanation: /// We use Acquire-Release ordering instead of SeqCst or Relaxed for the following reasons: diff --git a/roles/jd-client/src/main.rs b/roles/jd-client/src/main.rs index c12a17b39..27f05f361 100644 --- a/roles/jd-client/src/main.rs +++ b/roles/jd-client/src/main.rs @@ -46,12 +46,12 @@ fn process_cli_args<'a>() -> ProxyResult<'a, ProxyConfig> { /// TODO on setupconnection with bitcoind (TP) JDC must signal that want a tx short hash list with /// the templates /// -/// TODO JDC must handle TxShortHahhList message +/// TODO JDC must handle TxShortHashList message /// /// This will start: /// 1. An Upstream, this will connect with the mining Pool /// 2. 
A listner that will wait for a mining downstream with ExtendedChannel capabilities (tproxy, -/// minin-proxy) +/// mining-proxy) /// 3. A JobDeclarator, this will connect with the job-declarator-server /// 4. A TemplateRx, this will connect with bitcoind /// @@ -78,15 +78,15 @@ fn process_cli_args<'a>() -> ProxyResult<'a, ProxyConfig> { /// Then we receive CommitMiningJobSuccess and we use the new token to send SetCustomMiningJob to /// the pool. /// When we receive SetCustomMiningJobSuccess we set in Upstream job_id equal to the one received -/// in SetCustomMiningJobSuccess so that we sill send shares upstream with the right job_id. +/// in SetCustomMiningJobSuccess so that we still send shares upstream with the right job_id. /// /// The above procedure, let us send NewExtendedMiningJob downstream right after a NewTemplate has /// been received this will reduce the time that pass from a NewTemplate and the mining-device /// starting to mine on the new job. /// /// In the case a future NewTemplate the SetCustomMiningJob is sent only if the canditate become -/// the actual NewTemplate so that we do not send a lot of usless future Job to the pool. That -/// means that SetCustomMiningJob is sent only when a NewTemplate becom "active" +/// the actual NewTemplate so that we do not send a lot of useless future Job to the pool. That +/// means that SetCustomMiningJob is sent only when a NewTemplate become "active" /// /// The JobDeclarator always have 2 avaiable token, that means that whenever a token is used to /// commit a job with upstream we require a new one. 
Having always a token when needed means that @@ -217,7 +217,7 @@ async fn initialize_jd_as_solo_miner( }; let miner_tx_out = lib::proxy_config::get_coinbase_output(&proxy_config).unwrap(); - // When Downstream receive a share that meets bitcoin target it transformit in a + // When Downstream receive a share that meets bitcoin target it transform it in a // SubmitSolution and send it to the TemplateReceiver let (send_solution, recv_solution) = bounded(10); @@ -290,7 +290,7 @@ async fn initialize_jd( port, ); - // When Downstream receive a share that meets bitcoin target it transformit in a + // When Downstream receive a share that meets bitcoin target it transform it in a // SubmitSolution and send it to the TemplateReceiver let (send_solution, recv_solution) = bounded(10); From c959f42b756111ae693ba93d88555dcaf5b1818c Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Tue, 2 Jul 2024 07:44:37 -0400 Subject: [PATCH 041/101] Add update last declare job sent method description snippet --- roles/jd-client/src/lib/job_declarator/mod.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/roles/jd-client/src/lib/job_declarator/mod.rs b/roles/jd-client/src/lib/job_declarator/mod.rs index 5e5191cb1..9fad85cf7 100644 --- a/roles/jd-client/src/lib/job_declarator/mod.rs +++ b/roles/jd-client/src/lib/job_declarator/mod.rs @@ -146,6 +146,10 @@ impl JobDeclarator { .unwrap() } + /// We maintain a window of 2 jobs. If more than 2 blocks are found, + /// the ordering will depend on the request ID. Only the 2 most recent request + /// IDs will be kept in memory, while the rest will be discarded. + /// More information can be found here: https://github.com/stratum-mining/stratum/pull/904#discussion_r1609469048 fn update_last_declare_job_sent( self_mutex: &Arc>, request_id: u32, From e4112fb1132e094c5b1007f6c14b4776f1135b9d Mon Sep 17 00:00:00 2001 From: jbesraa Date: Tue, 18 Jun 2024 12:47:41 +0300 Subject: [PATCH 042/101] Refactor `header.rs` 1. 
Removed `NoiseHeader` struct in favor of three constants defined at the top of the file. 2. Added documentation and changed visibility to `pub(crate)` where needed. 3. Removed `Header::Default` and `Sv2Frame::Default` impls as they are unused. 4. Removed `unwrap()`s --- protocols/v2/codec-sv2/src/decoder.rs | 18 ++--- protocols/v2/codec-sv2/src/encoder.rs | 4 +- protocols/v2/framing-sv2/src/error.rs | 9 ++- protocols/v2/framing-sv2/src/framing2.rs | 24 ++---- protocols/v2/framing-sv2/src/header.rs | 96 +++++++++++++++--------- 5 files changed, 84 insertions(+), 67 deletions(-) diff --git a/protocols/v2/codec-sv2/src/decoder.rs b/protocols/v2/codec-sv2/src/decoder.rs index cba5843f2..aebc0aee1 100644 --- a/protocols/v2/codec-sv2/src/decoder.rs +++ b/protocols/v2/codec-sv2/src/decoder.rs @@ -10,7 +10,7 @@ use core::marker::PhantomData; #[cfg(feature = "noise_sv2")] use framing_sv2::framing2::HandShakeFrame; #[cfg(feature = "noise_sv2")] -use framing_sv2::header::NoiseHeader; +use framing_sv2::header::{NOISE_HEADER_ENCRYPTED_SIZE, NOISE_HEADER_SIZE}; use framing_sv2::{ framing2::{EitherFrame, Frame as F_, Sv2Frame}, header::Header, @@ -58,7 +58,7 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit let hint = *msg_len - self.noise_buffer.as_ref().len(); match hint { 0 => { - self.missing_noise_b = NoiseHeader::HEADER_SIZE; + self.missing_noise_b = NOISE_HEADER_SIZE; Ok(self.while_handshaking()) } _ => { @@ -71,20 +71,20 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit let hint = if IsBuffer::len(&self.sv2_buffer) < SV2_FRAME_HEADER_SIZE { let len = IsBuffer::len(&self.noise_buffer); let src = self.noise_buffer.get_data_by_ref(len); - if src.len() < NoiseHeader::SIZE { - NoiseHeader::SIZE - src.len() + if src.len() < NOISE_HEADER_ENCRYPTED_SIZE { + NOISE_HEADER_ENCRYPTED_SIZE - src.len() } else { 0 } } else { - let src = self.sv2_buffer.get_data_by_ref_(SV2_FRAME_HEADER_SIZE); + let src = 
self.sv2_buffer.get_data_by_ref(SV2_FRAME_HEADER_SIZE); let header = Header::from_bytes(src)?; header.encrypted_len() - IsBuffer::len(&self.noise_buffer) }; match hint { 0 => { - self.missing_noise_b = NoiseHeader::SIZE; + self.missing_noise_b = NOISE_HEADER_ENCRYPTED_SIZE; self.decode_noise_frame(noise_codec) } _ => { @@ -106,14 +106,14 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit IsBuffer::len(&self.sv2_buffer), ) { // HERE THE SV2 HEADER IS READY TO BE DECRYPTED - (NoiseHeader::SIZE, 0) => { + (NOISE_HEADER_ENCRYPTED_SIZE, 0) => { let src = self.noise_buffer.get_data_owned(); - let decrypted_header = self.sv2_buffer.get_writable(NoiseHeader::SIZE); + let decrypted_header = self.sv2_buffer.get_writable(NOISE_HEADER_ENCRYPTED_SIZE); decrypted_header.copy_from_slice(src.as_ref()); self.sv2_buffer.as_ref(); noise_codec.decrypt(&mut self.sv2_buffer)?; let header = - Header::from_bytes(self.sv2_buffer.get_data_by_ref_(SV2_FRAME_HEADER_SIZE))?; + Header::from_bytes(self.sv2_buffer.get_data_by_ref(SV2_FRAME_HEADER_SIZE))?; self.missing_noise_b = header.encrypted_len(); Err(Error::MissingBytes(header.encrypted_len())) } diff --git a/protocols/v2/codec-sv2/src/encoder.rs b/protocols/v2/codec-sv2/src/encoder.rs index e4f544a19..a12342217 100644 --- a/protocols/v2/codec-sv2/src/encoder.rs +++ b/protocols/v2/codec-sv2/src/encoder.rs @@ -9,7 +9,7 @@ use core::marker::PhantomData; use framing_sv2::framing2::{EitherFrame, HandShakeFrame}; use framing_sv2::framing2::{Frame as F_, Sv2Frame}; #[allow(unused_imports)] -pub use framing_sv2::header::NoiseHeader; +pub use framing_sv2::header::NOISE_HEADER_ENCRYPTED_SIZE; #[cfg(feature = "noise_sv2")] use tracing::error; @@ -76,7 +76,7 @@ impl NoiseEncoder { } else { SV2_FRAME_CHUNK_SIZE + start - AEAD_MAC_LEN }; - let mut encrypted_len = NoiseHeader::SIZE; + let mut encrypted_len = NOISE_HEADER_ENCRYPTED_SIZE; while start < sv2.len() { let to_encrypt = self.noise_buffer.get_writable(end - start); 
diff --git a/protocols/v2/framing-sv2/src/error.rs b/protocols/v2/framing-sv2/src/error.rs index 808b47d51..44b0c95ce 100644 --- a/protocols/v2/framing-sv2/src/error.rs +++ b/protocols/v2/framing-sv2/src/error.rs @@ -24,8 +24,13 @@ impl fmt::Display for Error { ExpectedSv2Frame => { write!(f, "Expected `Sv2Frame`, received `HandshakeFrame`") } - UnexpectedHeaderLength(i) => { - write!(f, "Unexpected `Header` length: `{}`", i) + UnexpectedHeaderLength(actual_size) => { + write!( + f, + "Unexpected `Header` length: `{}`, should be equal or more to {}", + actual_size, + const_sv2::SV2_FRAME_HEADER_SIZE + ) } } } diff --git a/protocols/v2/framing-sv2/src/framing2.rs b/protocols/v2/framing-sv2/src/framing2.rs index 4ba896063..692a22af4 100644 --- a/protocols/v2/framing-sv2/src/framing2.rs +++ b/protocols/v2/framing-sv2/src/framing2.rs @@ -1,5 +1,5 @@ use crate::{ - header::{Header, NoiseHeader}, + header::{Header, NOISE_HEADER_LEN_OFFSET, NOISE_HEADER_SIZE}, Error, }; use alloc::vec::Vec; @@ -83,16 +83,6 @@ pub struct Sv2Frame { serialized: Option, } -impl Default for Sv2Frame { - fn default() -> Self { - Sv2Frame { - header: Header::default(), - payload: None, - serialized: None, - } - } -} - /// Abstraction for a Noise Handshake Frame /// Contains only a `Slice` payload with a fixed length /// Only used during Noise Handshake process @@ -253,7 +243,7 @@ impl<'a> Frame<'a, Slice> for HandShakeFrame { /// Get the Noise Frame payload #[inline] fn payload(&'a mut self) -> &'a mut [u8] { - &mut self.payload[NoiseHeader::HEADER_SIZE..] + &mut self.payload[NOISE_HEADER_SIZE..] } /// `HandShakeFrame` always returns `None`. @@ -280,17 +270,17 @@ impl<'a> Frame<'a, Slice> for HandShakeFrame { /// indicates the surplus of bytes beyond the expected size. 
#[inline] fn size_hint(bytes: &[u8]) -> isize { - if bytes.len() < NoiseHeader::HEADER_SIZE { - return (NoiseHeader::HEADER_SIZE - bytes.len()) as isize; + if bytes.len() < NOISE_HEADER_SIZE { + return (NOISE_HEADER_SIZE - bytes.len()) as isize; }; - let len_b = &bytes[NoiseHeader::LEN_OFFSET..NoiseHeader::HEADER_SIZE]; + let len_b = &bytes[NOISE_HEADER_LEN_OFFSET..NOISE_HEADER_SIZE]; let expected_len = u16::from_le_bytes([len_b[0], len_b[1]]) as usize; - if bytes.len() - NoiseHeader::HEADER_SIZE == expected_len { + if bytes.len() - NOISE_HEADER_SIZE == expected_len { 0 } else { - expected_len as isize - (bytes.len() - NoiseHeader::HEADER_SIZE) as isize + expected_len as isize - (bytes.len() - NOISE_HEADER_SIZE) as isize } } diff --git a/protocols/v2/framing-sv2/src/header.rs b/protocols/v2/framing-sv2/src/header.rs index 05272b52a..3b3226156 100644 --- a/protocols/v2/framing-sv2/src/header.rs +++ b/protocols/v2/framing-sv2/src/header.rs @@ -7,68 +7,65 @@ use binary_sv2::{Deserialize, Serialize, U24}; use const_sv2::{AEAD_MAC_LEN, SV2_FRAME_CHUNK_SIZE}; use core::convert::TryInto; +// Previously `NoiseHeader::SIZE` +pub const NOISE_HEADER_ENCRYPTED_SIZE: usize = const_sv2::ENCRYPTED_SV2_FRAME_HEADER_SIZE; +// Previously `NoiseHeader::LEN_OFFSET` +pub const NOISE_HEADER_LEN_OFFSET: usize = const_sv2::NOISE_FRAME_HEADER_LEN_OFFSET; +// Previously `NoiseHeader::HEADER_SIZE` +pub const NOISE_HEADER_SIZE: usize = const_sv2::NOISE_FRAME_HEADER_SIZE; + /// Abstraction for a SV2 Frame Header. #[derive(Debug, Serialize, Deserialize, Copy, Clone)] pub struct Header { - extension_type: u16, // TODO use specific type? - msg_type: u8, // TODO use specific type? + /// Unique identifier of the extension describing this protocol message. Most significant bit + /// (i.e.bit 15, 0-indexed, aka channel_msg) indicates a message which is specific to a channel, + /// whereas if the most significant bit is unset, the message is to be interpreted by the + /// immediate receiving device. 
Note that the channel_msg bit is ignored in the extension + /// lookup, i.e.an extension_type of 0x8ABC is for the same "extension" as 0x0ABC. If the + /// channel_msg bit is set, the first four bytes of the payload field is a U32 representing the + /// channel_id this message is destined for. Note that for the Job Declaration and Template + /// Distribution Protocols the channel_msg bit is always unset. + extension_type: u16, // fix: use U16 type + /// Unique identifier of the extension describing this protocol message + msg_type: u8, // fix: use specific type? + /// Length of the protocol message, not including this header msg_length: U24, } -impl Default for Header { - fn default() -> Self { - Header { - extension_type: 0, - msg_type: 0, - // converting 0_32 into a U24 never panic - msg_length: 0_u32.try_into().unwrap(), - } - } -} - impl Header { - pub const LEN_OFFSET: usize = const_sv2::SV2_FRAME_HEADER_LEN_OFFSET; - pub const LEN_SIZE: usize = const_sv2::SV2_FRAME_HEADER_LEN_END; - pub const LEN_END: usize = Self::LEN_OFFSET + Self::LEN_SIZE; - pub const SIZE: usize = const_sv2::SV2_FRAME_HEADER_SIZE; - /// Construct a `Header` from ray bytes + /// Construct a `Header` from raw bytes #[inline] pub fn from_bytes(bytes: &[u8]) -> Result { if bytes.len() < Self::SIZE { - return Err(Error::UnexpectedHeaderLength( - (Self::SIZE - bytes.len()) as isize, - )); + return Err(Error::UnexpectedHeaderLength(bytes.len() as isize)); }; - let extension_type = u16::from_le_bytes([bytes[0], bytes[1]]); let msg_type = bytes[2]; - let msg_length = u32::from_le_bytes([bytes[3], bytes[4], bytes[5], 0]); - + let msg_length: U24 = u32::from_le_bytes([bytes[3], bytes[4], bytes[5], 0]).try_into()?; Ok(Self { extension_type, msg_type, - // Converting and u32 with the most significant byte set to 0 to and U24 never panic - msg_length: msg_length.try_into().unwrap(), + msg_length, }) } /// Get the payload length #[allow(clippy::len_without_is_empty)] #[inline] - pub fn len(&self) -> 
usize { + pub(crate) fn len(&self) -> usize { let inner: u32 = self.msg_length.into(); inner as usize } /// Construct a `Header` from payload length, type and extension type. #[inline] - pub fn from_len(len: u32, message_type: u8, extension_type: u16) -> Option
{ + pub(crate) fn from_len(msg_length: u32, msg_type: u8, extension_type: u16) -> Option
{ Some(Self { extension_type, - msg_type: message_type, - msg_length: len.try_into().ok()?, + msg_type, + msg_length: msg_length.try_into().ok()?, }) } @@ -83,9 +80,11 @@ impl Header { } /// Check if `Header` represents a channel message + /// + /// A header can represent a channel message if the MSB(Most Significant Bit) is set. pub fn channel_msg(&self) -> bool { - let mask = 0b0000_0000_0000_0001; - self.extension_type & mask == self.extension_type + const CHANNEL_MSG_MASK: u16 = 0b0000_0000_0000_0001; + self.extension_type & CHANNEL_MSG_MASK == self.extension_type } /// Calculate the length of the encrypted `Header` @@ -100,10 +99,33 @@ impl Header { } } -pub struct NoiseHeader {} +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + + #[test] + fn test_header_from_bytes() { + let bytes = vec![0x01, 0x02, 0x03, 0x04, 0x05, 0x06]; + let header = Header::from_bytes(&bytes).unwrap(); + assert_eq!(header.extension_type, 0x0201); + assert_eq!(header.msg_type, 0x03); + assert_eq!(header.msg_length, 0x060504_u32.try_into().unwrap()); + } + + #[test] + fn test_header_from_len() { + let header = Header::from_len(0x1234, 0x56, 0x789a).unwrap(); + assert_eq!(header.extension_type, 0x789a); + assert_eq!(header.msg_type, 0x56); + assert_eq!(header.msg_length, 0x1234_u32.try_into().unwrap()); -impl NoiseHeader { - pub const SIZE: usize = const_sv2::ENCRYPTED_SV2_FRAME_HEADER_SIZE; - pub const LEN_OFFSET: usize = const_sv2::NOISE_FRAME_HEADER_LEN_OFFSET; - pub const HEADER_SIZE: usize = const_sv2::NOISE_FRAME_HEADER_SIZE; + let extension_type = 0; + let msg_type = 0x1; + let msg_length = 0x1234_u32; + let header = Header::from_len(msg_length, msg_type, extension_type).unwrap(); + assert_eq!(header.extension_type, 0); + assert_eq!(header.msg_type, 0x1); + assert_eq!(header.msg_length, 0x1234_u32.try_into().unwrap()); + } } From f0b672e5724f354c95609c1eaa073b87fff0e21d Mon Sep 17 00:00:00 2001 From: jbesraa Date: Tue, 18 Jun 2024 15:17:26 +0300 Subject: [PATCH 043/101] 
Remove `Frame` trait The `Frame` trait is used solely by a single struct and it is only adding biolerplate to the code without benifits. We could consider re-adding it in the future if needed. --- .../src/sv2/criterion_sv2_benchmark.rs | 2 +- benches/benches/src/sv2/iai_sv2_benchmark.rs | 2 +- examples/interop-cpp/src/main.rs | 2 +- examples/ping-pong-with-noise/src/node.rs | 2 +- examples/ping-pong-without-noise/src/node.rs | 2 +- examples/template-provider-test/src/main.rs | 2 +- protocols/fuzz-tests/src/main.rs | 2 +- protocols/v2/codec-sv2/src/decoder.rs | 2 +- protocols/v2/codec-sv2/src/encoder.rs | 2 +- protocols/v2/codec-sv2/src/lib.rs | 2 +- protocols/v2/framing-sv2/src/framing2.rs | 78 ++++--------------- protocols/v2/roles-logic-sv2/src/parsers.rs | 2 +- protocols/v2/sv2-ffi/src/lib.rs | 2 +- roles/jd-client/src/lib/downstream.rs | 2 +- roles/jd-client/src/lib/job_declarator/mod.rs | 1 - .../lib/job_declarator/setup_connection.rs | 2 +- .../src/lib/template_receiver/mod.rs | 2 +- .../lib/template_receiver/setup_connection.rs | 2 +- .../src/lib/upstream_sv2/upstream.rs | 2 +- roles/jd-server/src/lib/job_declarator/mod.rs | 2 +- .../mining-proxy/src/lib/downstream_mining.rs | 2 +- roles/mining-proxy/src/lib/upstream_mining.rs | 2 +- roles/pool/src/lib/mining_pool/mod.rs | 2 +- .../src/lib/mining_pool/setup_connection.rs | 1 - roles/pool/src/lib/template_receiver/mod.rs | 2 +- .../lib/template_receiver/setup_connection.rs | 1 - roles/test-utils/mining-device/src/main.rs | 2 +- .../src/lib/upstream_sv2/upstream.rs | 2 +- utils/message-generator/src/executor.rs | 2 +- utils/message-generator/src/main.rs | 2 +- utils/message-generator/src/parser/frames.rs | 2 +- 31 files changed, 41 insertions(+), 94 deletions(-) diff --git a/benches/benches/src/sv2/criterion_sv2_benchmark.rs b/benches/benches/src/sv2/criterion_sv2_benchmark.rs index 7aa35f158..18fab853d 100644 --- a/benches/benches/src/sv2/criterion_sv2_benchmark.rs +++ 
b/benches/benches/src/sv2/criterion_sv2_benchmark.rs @@ -1,4 +1,4 @@ -use codec_sv2::{Frame, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; use criterion::{black_box, Criterion}; use roles_logic_sv2::{ handlers::{common::ParseUpstreamCommonMessages, mining::ParseUpstreamMiningMessages}, diff --git a/benches/benches/src/sv2/iai_sv2_benchmark.rs b/benches/benches/src/sv2/iai_sv2_benchmark.rs index 2cab39cc4..b049b9dc4 100644 --- a/benches/benches/src/sv2/iai_sv2_benchmark.rs +++ b/benches/benches/src/sv2/iai_sv2_benchmark.rs @@ -1,4 +1,4 @@ -use codec_sv2::{Frame, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; use iai::{black_box, main}; use roles_logic_sv2::{ handlers::{common::ParseUpstreamCommonMessages, mining::ParseUpstreamMiningMessages, SendTo_}, diff --git a/examples/interop-cpp/src/main.rs b/examples/interop-cpp/src/main.rs index 34f6bef09..09950e94b 100644 --- a/examples/interop-cpp/src/main.rs +++ b/examples/interop-cpp/src/main.rs @@ -12,7 +12,7 @@ mod main_ { #[cfg(not(feature = "with_serde"))] mod main_ { - use codec_sv2::{Encoder, Frame, StandardDecoder, StandardSv2Frame}; + use codec_sv2::{Encoder, StandardDecoder, StandardSv2Frame}; use common_messages_sv2::{Protocol, SetupConnection, SetupConnectionError}; use const_sv2::{ CHANNEL_BIT_SETUP_CONNECTION, MESSAGE_TYPE_SETUP_CONNECTION, diff --git a/examples/ping-pong-with-noise/src/node.rs b/examples/ping-pong-with-noise/src/node.rs index 912d6e835..1ae042aa8 100644 --- a/examples/ping-pong-with-noise/src/node.rs +++ b/examples/ping-pong-with-noise/src/node.rs @@ -11,7 +11,7 @@ use async_std::{ }; use core::convert::TryInto; -use codec_sv2::{Frame, HandshakeRole, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{HandshakeRole, StandardEitherFrame, StandardSv2Frame}; use std::time; diff --git a/examples/ping-pong-without-noise/src/node.rs b/examples/ping-pong-without-noise/src/node.rs index 
97295d591..21edf617e 100644 --- a/examples/ping-pong-without-noise/src/node.rs +++ b/examples/ping-pong-without-noise/src/node.rs @@ -10,7 +10,7 @@ use async_std::{ task, }; -use codec_sv2::{Frame, StandardDecoder, StandardSv2Frame}; +use codec_sv2::{StandardDecoder, StandardSv2Frame}; #[derive(Debug)] enum Expected { diff --git a/examples/template-provider-test/src/main.rs b/examples/template-provider-test/src/main.rs index 78878227b..5a83aa39a 100644 --- a/examples/template-provider-test/src/main.rs +++ b/examples/template-provider-test/src/main.rs @@ -1,6 +1,6 @@ use async_channel::{Receiver, Sender}; use async_std::net::TcpStream; -use codec_sv2::{Frame, StandardEitherFrame, StandardSv2Frame, Sv2Frame}; +use codec_sv2::{StandardEitherFrame, StandardSv2Frame, Sv2Frame}; use network_helpers::PlainConnection; use roles_logic_sv2::{ parsers::{IsSv2Message, TemplateDistribution}, diff --git a/protocols/fuzz-tests/src/main.rs b/protocols/fuzz-tests/src/main.rs index c9623ecb5..bb2364c0f 100644 --- a/protocols/fuzz-tests/src/main.rs +++ b/protocols/fuzz-tests/src/main.rs @@ -2,7 +2,7 @@ use libfuzzer_sys::fuzz_target; use binary_codec_sv2::{Seq064K,U256,B0255,Seq0255}; use binary_codec_sv2::from_bytes; -use codec_sv2::{StandardDecoder,Sv2Frame,Frame}; +use codec_sv2::{StandardDecoder,Sv2Frame}; use roles_logic_sv2::parsers::PoolMessages; type F = Sv2Frame,Vec>; diff --git a/protocols/v2/codec-sv2/src/decoder.rs b/protocols/v2/codec-sv2/src/decoder.rs index aebc0aee1..4a946a1e1 100644 --- a/protocols/v2/codec-sv2/src/decoder.rs +++ b/protocols/v2/codec-sv2/src/decoder.rs @@ -12,7 +12,7 @@ use framing_sv2::framing2::HandShakeFrame; #[cfg(feature = "noise_sv2")] use framing_sv2::header::{NOISE_HEADER_ENCRYPTED_SIZE, NOISE_HEADER_SIZE}; use framing_sv2::{ - framing2::{EitherFrame, Frame as F_, Sv2Frame}, + framing2::{EitherFrame, Sv2Frame}, header::Header, }; #[cfg(feature = "noise_sv2")] diff --git a/protocols/v2/codec-sv2/src/encoder.rs 
b/protocols/v2/codec-sv2/src/encoder.rs index a12342217..9ecc37c3e 100644 --- a/protocols/v2/codec-sv2/src/encoder.rs +++ b/protocols/v2/codec-sv2/src/encoder.rs @@ -5,9 +5,9 @@ pub use const_sv2::{AEAD_MAC_LEN, SV2_FRAME_CHUNK_SIZE, SV2_FRAME_HEADER_SIZE}; #[cfg(feature = "noise_sv2")] use core::convert::TryInto; use core::marker::PhantomData; +use framing_sv2::framing2::Sv2Frame; #[cfg(feature = "noise_sv2")] use framing_sv2::framing2::{EitherFrame, HandShakeFrame}; -use framing_sv2::framing2::{Frame as F_, Sv2Frame}; #[allow(unused_imports)] pub use framing_sv2::header::NOISE_HEADER_ENCRYPTED_SIZE; diff --git a/protocols/v2/codec-sv2/src/lib.rs b/protocols/v2/codec-sv2/src/lib.rs index f5cbc013d..184c39fda 100644 --- a/protocols/v2/codec-sv2/src/lib.rs +++ b/protocols/v2/codec-sv2/src/lib.rs @@ -23,7 +23,7 @@ pub use encoder::NoiseEncoder; #[cfg(feature = "noise_sv2")] pub use framing_sv2::framing2::HandShakeFrame; -pub use framing_sv2::framing2::{Frame, Sv2Frame}; +pub use framing_sv2::framing2::Sv2Frame; #[cfg(feature = "noise_sv2")] pub use noise_sv2::{self, Initiator, NoiseCodec, Responder}; diff --git a/protocols/v2/framing-sv2/src/framing2.rs b/protocols/v2/framing-sv2/src/framing2.rs index 692a22af4..eec461fa9 100644 --- a/protocols/v2/framing-sv2/src/framing2.rs +++ b/protocols/v2/framing-sv2/src/framing2.rs @@ -1,3 +1,4 @@ +#![allow(dead_code)] use crate::{ header::{Header, NOISE_HEADER_LEN_OFFSET, NOISE_HEADER_SIZE}, Error, @@ -29,51 +30,6 @@ impl Sv2Frame { } } -pub trait Frame<'a, T: Serialize + GetSize>: Sized { - type Buffer: AsMut<[u8]>; - type Deserialized; - - /// Write the serialized `Frame` into `dst`. - fn serialize(self, dst: &mut [u8]) -> Result<(), Error>; - - /// Get the payload - fn payload(&'a mut self) -> &'a mut [u8]; - - /// Returns `Some(self.header)` when the frame has a header (`Sv2Frame`), returns `None` where it doesn't (`HandShakeFrame`). - fn get_header(&self) -> Option; - - /// Try to build a `Frame` from raw bytes. 
- /// Checks if the payload has the correct size (as stated in the `Header`). - /// Returns `Self` on success, or the number of the bytes needed to complete the frame - /// as an error. Nothing is assumed or checked about the correctness of the payload. - fn from_bytes(bytes: Self::Buffer) -> Result; - - /// Builds a `Frame` from raw bytes. - /// Does not check if the payload has the correct size (as stated in the `Header`). - /// Nothing is assumed or checked about the correctness of the payload. - fn from_bytes_unchecked(bytes: Self::Buffer) -> Self; - - /// Helps to determine if the frame size encoded in a byte array correctly representing the size of the frame. - /// - Returns `0` if the byte slice is of the expected size according to the header. - /// - Returns a negative value if the byte slice is smaller than a Noise Frame header; this value - /// represents how many bytes are missing. - /// - Returns a positive value if the byte slice is longer than expected; this value - /// indicates the surplus of bytes beyond the expected size. - fn size_hint(bytes: &[u8]) -> isize; - - /// Returns the size of the `Frame` payload. - fn encoded_length(&self) -> usize; - - /// Try to build a `Frame` from a serializable payload. - /// Returns `Some(Self)` if the size of the payload fits in the frame, `None` otherwise. - fn from_message( - message: T, - message_type: u8, - extension_type: u16, - channel_msg: bool, - ) -> Option; -} - /// Abstraction for a SV2 Frame. #[derive(Debug, Clone)] pub struct Sv2Frame { @@ -98,15 +54,12 @@ impl HandShakeFrame { } } -impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for Sv2Frame { - type Buffer = B; - type Deserialized = B; - +impl + AsRef<[u8]>> Sv2Frame { /// Write the serialized `Sv2Frame` into `dst`. /// This operation when called on an already serialized frame is very cheap. /// When called on a non serialized frame, it is not so cheap (because it serializes it). 
#[inline] - fn serialize(self, dst: &mut [u8]) -> Result<(), Error> { + pub fn serialize(self, dst: &mut [u8]) -> Result<(), Error> { if let Some(mut serialized) = self.serialized { dst.swap_with_slice(serialized.as_mut()); Ok(()) @@ -132,7 +85,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for /// This function is only intended as a fast way to get a reference to an /// already serialized payload. If the frame has not yet been /// serialized, this function should never be used (it will panic). - fn payload(&'a mut self) -> &'a mut [u8] { + pub fn payload(&mut self) -> &mut [u8] { if let Some(serialized) = self.serialized.as_mut() { &mut serialized.as_mut()[Header::SIZE..] } else { @@ -142,7 +95,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for } /// `Sv2Frame` always returns `Some(self.header)`. - fn get_header(&self) -> Option { + pub fn get_header(&self) -> Option { Some(self.header) } @@ -150,7 +103,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for /// Returns a `Sv2Frame` on success, or the number of the bytes needed to complete the frame /// as an error. `Self.serialized` is `Some`, but nothing is assumed or checked about the correctness of the payload. 
#[inline] - fn from_bytes(mut bytes: Self::Buffer) -> Result { + pub fn from_bytes(mut bytes: B) -> Result { let hint = Self::size_hint(bytes.as_mut()); if hint == 0 { @@ -161,7 +114,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for } #[inline] - fn from_bytes_unchecked(mut bytes: Self::Buffer) -> Self { + pub fn from_bytes_unchecked(mut bytes: B) -> Self { // Unchecked function caller is supposed to already know that the passed bytes are valid let header = Header::from_bytes(bytes.as_mut()).expect("Invalid header"); Self { @@ -179,7 +132,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for /// - Returns a positive value if the byte slice is longer than expected; this value /// indicates the surplus of bytes beyond the expected size. #[inline] - fn size_hint(bytes: &[u8]) -> isize { + pub fn size_hint(bytes: &[u8]) -> isize { match Header::from_bytes(bytes) { Err(_) => { // Returns how many bytes are missing from the expected frame size @@ -200,7 +153,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for /// If `Sv2Frame` is serialized, returns the length of `self.serialized`, /// otherwise, returns the length of `self.payload`. #[inline] - fn encoded_length(&self) -> usize { + pub fn encoded_length(&self) -> usize { if let Some(serialized) = self.serialized.as_ref() { serialized.as_ref().len() } else if let Some(payload) = self.payload.as_ref() { @@ -213,7 +166,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for /// Tries to build a `Sv2Frame` from a non-serialized payload. /// Returns a `Sv2Frame` if the size of the payload fits in the frame, `None` otherwise. 
- fn from_message( + pub fn from_message( message: T, message_type: u8, extension_type: u16, @@ -229,10 +182,7 @@ impl<'a, T: Serialize + GetSize, B: AsMut<[u8]> + AsRef<[u8]>> Frame<'a, T> for } } -impl<'a> Frame<'a, Slice> for HandShakeFrame { - type Buffer = Slice; - type Deserialized = &'a mut [u8]; - +impl HandShakeFrame { /// Put the Noise Frame payload into `dst` #[inline] fn serialize(mut self, dst: &mut [u8]) -> Result<(), Error> { @@ -242,7 +192,7 @@ impl<'a> Frame<'a, Slice> for HandShakeFrame { /// Get the Noise Frame payload #[inline] - fn payload(&'a mut self) -> &'a mut [u8] { + fn payload(&mut self) -> &mut [u8] { &mut self.payload[NOISE_HEADER_SIZE..] } @@ -252,12 +202,12 @@ impl<'a> Frame<'a, Slice> for HandShakeFrame { } /// Builds a `HandShakeFrame` from raw bytes. Nothing is assumed or checked about the correctness of the payload. - fn from_bytes(bytes: Self::Buffer) -> Result { + pub fn from_bytes(bytes: Slice) -> Result { Ok(Self::from_bytes_unchecked(bytes)) } #[inline] - fn from_bytes_unchecked(bytes: Self::Buffer) -> Self { + pub fn from_bytes_unchecked(bytes: Slice) -> Self { Self { payload: bytes } } diff --git a/protocols/v2/roles-logic-sv2/src/parsers.rs b/protocols/v2/roles-logic-sv2/src/parsers.rs index 6eaf5e016..4d80387c9 100644 --- a/protocols/v2/roles-logic-sv2/src/parsers.rs +++ b/protocols/v2/roles-logic-sv2/src/parsers.rs @@ -13,7 +13,7 @@ use binary_sv2::GetSize; use binary_sv2::{from_bytes, Deserialize}; -use framing_sv2::framing2::{Frame, Sv2Frame}; +use framing_sv2::framing2::Sv2Frame; use const_sv2::{ CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN, CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN_SUCCESS, diff --git a/protocols/v2/sv2-ffi/src/lib.rs b/protocols/v2/sv2-ffi/src/lib.rs index 346d497b9..9befa0ca7 100644 --- a/protocols/v2/sv2-ffi/src/lib.rs +++ b/protocols/v2/sv2-ffi/src/lib.rs @@ -4,7 +4,7 @@ use std::{ fmt::{Display, Formatter}, }; -use codec_sv2::{Encoder, Frame, StandardDecoder, StandardSv2Frame}; +use codec_sv2::{Encoder, 
StandardDecoder, StandardSv2Frame}; use common_messages_sv2::{ CSetupConnection, CSetupConnectionError, ChannelEndpointChanged, SetupConnection, SetupConnectionError, SetupConnectionSuccess, diff --git a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index da14c7b0a..5b26cef2f 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -21,7 +21,7 @@ use roles_logic_sv2::{ }; use tracing::{debug, error, info, warn}; -use codec_sv2::{Frame, HandshakeRole, Responder, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{HandshakeRole, Responder, StandardEitherFrame, StandardSv2Frame}; use key_utils::{Secp256k1PublicKey, Secp256k1SecretKey}; use stratum_common::bitcoin::{consensus::Decodable, TxOut}; diff --git a/roles/jd-client/src/lib/job_declarator/mod.rs b/roles/jd-client/src/lib/job_declarator/mod.rs index 9fad85cf7..29fb2e4f2 100644 --- a/roles/jd-client/src/lib/job_declarator/mod.rs +++ b/roles/jd-client/src/lib/job_declarator/mod.rs @@ -17,7 +17,6 @@ use tokio::task::AbortHandle; use tracing::{error, info}; use async_recursion::async_recursion; -use codec_sv2::Frame; use nohash_hasher::BuildNoHashHasher; use roles_logic_sv2::{ handlers::job_declaration::ParseServerJobDeclarationMessages, diff --git a/roles/jd-client/src/lib/job_declarator/setup_connection.rs b/roles/jd-client/src/lib/job_declarator/setup_connection.rs index 063592c40..0e7b6fd8a 100644 --- a/roles/jd-client/src/lib/job_declarator/setup_connection.rs +++ b/roles/jd-client/src/lib/job_declarator/setup_connection.rs @@ -1,5 +1,5 @@ use async_channel::{Receiver, Sender}; -use codec_sv2::{Frame, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; use roles_logic_sv2::{ common_messages_sv2::{Protocol, SetupConnection}, handlers::common::{ParseUpstreamCommonMessages, SendTo}, diff --git a/roles/jd-client/src/lib/template_receiver/mod.rs b/roles/jd-client/src/lib/template_receiver/mod.rs 
index 02d3d0497..f418318a8 100644 --- a/roles/jd-client/src/lib/template_receiver/mod.rs +++ b/roles/jd-client/src/lib/template_receiver/mod.rs @@ -1,6 +1,6 @@ use super::{job_declarator::JobDeclarator, status, PoolChangerTrigger}; use async_channel::{Receiver, Sender}; -use codec_sv2::{Frame, HandshakeRole, Initiator, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{HandshakeRole, Initiator, StandardEitherFrame, StandardSv2Frame}; use error_handling::handle_result; use key_utils::Secp256k1PublicKey; use network_helpers_sv2::noise_connection_tokio::Connection; diff --git a/roles/jd-client/src/lib/template_receiver/setup_connection.rs b/roles/jd-client/src/lib/template_receiver/setup_connection.rs index 45f48a2f4..505b945c3 100644 --- a/roles/jd-client/src/lib/template_receiver/setup_connection.rs +++ b/roles/jd-client/src/lib/template_receiver/setup_connection.rs @@ -1,5 +1,5 @@ use async_channel::{Receiver, Sender}; -use codec_sv2::{Frame, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; use roles_logic_sv2::{ common_messages_sv2::{Protocol, SetupConnection}, handlers::common::{ParseUpstreamCommonMessages, SendTo}, diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 857cbd308..b04efa335 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -11,7 +11,7 @@ use super::super::{ }; use async_channel::{Receiver, Sender}; use binary_sv2::{Seq0255, U256}; -use codec_sv2::{Frame, HandshakeRole, Initiator}; +use codec_sv2::{HandshakeRole, Initiator}; use error_handling::handle_result; use key_utils::Secp256k1PublicKey; use network_helpers_sv2::noise_connection_tokio::Connection; diff --git a/roles/jd-server/src/lib/job_declarator/mod.rs b/roles/jd-server/src/lib/job_declarator/mod.rs index 56d56223c..34d9e66de 100644 --- a/roles/jd-server/src/lib/job_declarator/mod.rs +++ 
b/roles/jd-server/src/lib/job_declarator/mod.rs @@ -2,7 +2,7 @@ pub mod message_handler; use super::{error::JdsError, mempool::JDsMempool, status, Configuration, EitherFrame, StdFrame}; use async_channel::{Receiver, Sender}; use binary_sv2::{B0255, U256}; -use codec_sv2::{Frame, HandshakeRole, Responder}; +use codec_sv2::{HandshakeRole, Responder}; use error_handling::handle_result; use key_utils::{Secp256k1PublicKey, Secp256k1SecretKey, SignatureService}; use network_helpers_sv2::noise_connection_tokio::Connection; diff --git a/roles/mining-proxy/src/lib/downstream_mining.rs b/roles/mining-proxy/src/lib/downstream_mining.rs index d4e9bcb17..188055119 100644 --- a/roles/mining-proxy/src/lib/downstream_mining.rs +++ b/roles/mining-proxy/src/lib/downstream_mining.rs @@ -17,7 +17,7 @@ use roles_logic_sv2::{ }; use tracing::info; -use codec_sv2::{Frame, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; pub type Message = MiningDeviceMessages<'static>; pub type StdFrame = StandardSv2Frame; diff --git a/roles/mining-proxy/src/lib/upstream_mining.rs b/roles/mining-proxy/src/lib/upstream_mining.rs index 61a5d0f31..e3f6eef99 100644 --- a/roles/mining-proxy/src/lib/upstream_mining.rs +++ b/roles/mining-proxy/src/lib/upstream_mining.rs @@ -6,7 +6,7 @@ use roles_logic_sv2::utils::Id; use super::downstream_mining::{Channel, DownstreamMiningNode, StdFrame as DownstreamFrame}; use async_channel::{Receiver, SendError, Sender}; use async_recursion::async_recursion; -use codec_sv2::{Frame, HandshakeRole, Initiator, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{HandshakeRole, Initiator, StandardEitherFrame, StandardSv2Frame}; use network_helpers_sv2::noise_connection_tokio::Connection; use nohash_hasher::BuildNoHashHasher; use roles_logic_sv2::{ diff --git a/roles/pool/src/lib/mining_pool/mod.rs b/roles/pool/src/lib/mining_pool/mod.rs index e189c5406..4b9d10b18 100644 --- a/roles/pool/src/lib/mining_pool/mod.rs +++ 
b/roles/pool/src/lib/mining_pool/mod.rs @@ -4,7 +4,7 @@ use super::{ }; use async_channel::{Receiver, Sender}; use binary_sv2::U256; -use codec_sv2::{Frame, HandshakeRole, Responder, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{HandshakeRole, Responder, StandardEitherFrame, StandardSv2Frame}; use error_handling::handle_result; use key_utils::{Secp256k1PublicKey, Secp256k1SecretKey, SignatureService}; use network_helpers_sv2::noise_connection_tokio::Connection; diff --git a/roles/pool/src/lib/mining_pool/setup_connection.rs b/roles/pool/src/lib/mining_pool/setup_connection.rs index cf2c06022..f0c47e9a8 100644 --- a/roles/pool/src/lib/mining_pool/setup_connection.rs +++ b/roles/pool/src/lib/mining_pool/setup_connection.rs @@ -3,7 +3,6 @@ use super::super::{ mining_pool::{EitherFrame, StdFrame}, }; use async_channel::{Receiver, Sender}; -use codec_sv2::Frame; use roles_logic_sv2::{ common_messages_sv2::{ has_requires_std_job, has_version_rolling, has_work_selection, SetupConnection, diff --git a/roles/pool/src/lib/template_receiver/mod.rs b/roles/pool/src/lib/template_receiver/mod.rs index 49d58e82a..2eeaa554f 100644 --- a/roles/pool/src/lib/template_receiver/mod.rs +++ b/roles/pool/src/lib/template_receiver/mod.rs @@ -4,7 +4,7 @@ use super::{ status, }; use async_channel::{Receiver, Sender}; -use codec_sv2::{Frame, HandshakeRole, Initiator}; +use codec_sv2::{HandshakeRole, Initiator}; use error_handling::handle_result; use key_utils::Secp256k1PublicKey; use network_helpers_sv2::noise_connection_tokio::Connection; diff --git a/roles/pool/src/lib/template_receiver/setup_connection.rs b/roles/pool/src/lib/template_receiver/setup_connection.rs index 60c3cb4f8..6687eadc6 100644 --- a/roles/pool/src/lib/template_receiver/setup_connection.rs +++ b/roles/pool/src/lib/template_receiver/setup_connection.rs @@ -3,7 +3,6 @@ use super::super::{ mining_pool::{EitherFrame, StdFrame}, }; use async_channel::{Receiver, Sender}; -use codec_sv2::Frame; use roles_logic_sv2::{ 
common_messages_sv2::{Protocol, SetupConnection}, errors::Error, diff --git a/roles/test-utils/mining-device/src/main.rs b/roles/test-utils/mining-device/src/main.rs index 1bfbf6737..763f83af5 100644 --- a/roles/test-utils/mining-device/src/main.rs +++ b/roles/test-utils/mining-device/src/main.rs @@ -110,7 +110,7 @@ async fn main() { use async_channel::{Receiver, Sender}; use binary_sv2::u256_from_int; -use codec_sv2::{Frame, Initiator, StandardEitherFrame, StandardSv2Frame}; +use codec_sv2::{Initiator, StandardEitherFrame, StandardSv2Frame}; use roles_logic_sv2::{ common_messages_sv2::{Protocol, SetupConnection, SetupConnectionSuccess}, common_properties::{IsMiningUpstream, IsUpstream}, diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index f6d192f75..6aab5978e 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -11,7 +11,7 @@ use crate::{ use async_channel::{Receiver, Sender}; use async_std::{net::TcpStream, task}; use binary_sv2::u256_from_int; -use codec_sv2::{Frame, HandshakeRole, Initiator}; +use codec_sv2::{HandshakeRole, Initiator}; use error_handling::handle_result; use key_utils::Secp256k1PublicKey; use network_helpers_sv2::Connection; diff --git a/utils/message-generator/src/executor.rs b/utils/message-generator/src/executor.rs index b69bce0e6..f31991eca 100644 --- a/utils/message-generator/src/executor.rs +++ b/utils/message-generator/src/executor.rs @@ -7,7 +7,7 @@ use crate::{ }; use async_channel::{Receiver, Sender}; use binary_sv2::Serialize; -use codec_sv2::{Frame, StandardEitherFrame as EitherFrame, Sv2Frame}; +use codec_sv2::{StandardEitherFrame as EitherFrame, Sv2Frame}; use roles_logic_sv2::parsers::{self, AnyMessage}; use std::{collections::HashMap, convert::TryInto, sync::Arc}; diff --git a/utils/message-generator/src/main.rs b/utils/message-generator/src/main.rs index 695b3919c..e2633ed23 100644 --- 
a/utils/message-generator/src/main.rs +++ b/utils/message-generator/src/main.rs @@ -451,7 +451,7 @@ mod test { into_static::into_static, net::{setup_as_downstream, setup_as_upstream}, }; - use codec_sv2::{Frame, Sv2Frame}; + use codec_sv2::Sv2Frame; use roles_logic_sv2::{ mining_sv2::{ CloseChannel, NewExtendedMiningJob, OpenExtendedMiningChannel, diff --git a/utils/message-generator/src/parser/frames.rs b/utils/message-generator/src/parser/frames.rs index 633163200..cd4c2c582 100644 --- a/utils/message-generator/src/parser/frames.rs +++ b/utils/message-generator/src/parser/frames.rs @@ -1,5 +1,5 @@ use super::sv2_messages::{message_from_path, ReplaceField}; -use codec_sv2::{buffer_sv2::Slice, Frame as _Frame, Sv2Frame}; +use codec_sv2::{buffer_sv2::Slice, Sv2Frame}; use roles_logic_sv2::parsers::AnyMessage; use serde_json::{Map, Value}; use std::{collections::HashMap, convert::TryInto}; From 7c0e456a256c56e264a472688cdd8ed4f30fa4f3 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Wed, 19 Jun 2024 10:52:29 +0300 Subject: [PATCH 044/101] Rename `framing2.rs` to `framing.rs` --- protocols/v2/codec-sv2/src/decoder.rs | 4 ++-- protocols/v2/codec-sv2/src/encoder.rs | 4 ++-- protocols/v2/codec-sv2/src/lib.rs | 6 +++--- protocols/v2/framing-sv2/src/{framing2.rs => framing.rs} | 0 protocols/v2/framing-sv2/src/lib.rs | 2 +- protocols/v2/roles-logic-sv2/src/parsers.rs | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) rename protocols/v2/framing-sv2/src/{framing2.rs => framing.rs} (100%) diff --git a/protocols/v2/codec-sv2/src/decoder.rs b/protocols/v2/codec-sv2/src/decoder.rs index 4a946a1e1..9f5f8e89a 100644 --- a/protocols/v2/codec-sv2/src/decoder.rs +++ b/protocols/v2/codec-sv2/src/decoder.rs @@ -8,11 +8,11 @@ pub use buffer_sv2::AeadBuffer; pub use const_sv2::{SV2_FRAME_CHUNK_SIZE, SV2_FRAME_HEADER_SIZE}; use core::marker::PhantomData; #[cfg(feature = "noise_sv2")] -use framing_sv2::framing2::HandShakeFrame; +use framing_sv2::framing::HandShakeFrame; #[cfg(feature = 
"noise_sv2")] use framing_sv2::header::{NOISE_HEADER_ENCRYPTED_SIZE, NOISE_HEADER_SIZE}; use framing_sv2::{ - framing2::{EitherFrame, Sv2Frame}, + framing::{EitherFrame, Sv2Frame}, header::Header, }; #[cfg(feature = "noise_sv2")] diff --git a/protocols/v2/codec-sv2/src/encoder.rs b/protocols/v2/codec-sv2/src/encoder.rs index 9ecc37c3e..39d10f00b 100644 --- a/protocols/v2/codec-sv2/src/encoder.rs +++ b/protocols/v2/codec-sv2/src/encoder.rs @@ -5,9 +5,9 @@ pub use const_sv2::{AEAD_MAC_LEN, SV2_FRAME_CHUNK_SIZE, SV2_FRAME_HEADER_SIZE}; #[cfg(feature = "noise_sv2")] use core::convert::TryInto; use core::marker::PhantomData; -use framing_sv2::framing2::Sv2Frame; +use framing_sv2::framing::Sv2Frame; #[cfg(feature = "noise_sv2")] -use framing_sv2::framing2::{EitherFrame, HandShakeFrame}; +use framing_sv2::framing::{EitherFrame, HandShakeFrame}; #[allow(unused_imports)] pub use framing_sv2::header::NOISE_HEADER_ENCRYPTED_SIZE; diff --git a/protocols/v2/codec-sv2/src/lib.rs b/protocols/v2/codec-sv2/src/lib.rs index 184c39fda..0a2492890 100644 --- a/protocols/v2/codec-sv2/src/lib.rs +++ b/protocols/v2/codec-sv2/src/lib.rs @@ -22,15 +22,15 @@ pub use encoder::Encoder; pub use encoder::NoiseEncoder; #[cfg(feature = "noise_sv2")] -pub use framing_sv2::framing2::HandShakeFrame; -pub use framing_sv2::framing2::Sv2Frame; +pub use framing_sv2::framing::HandShakeFrame; +pub use framing_sv2::framing::Sv2Frame; #[cfg(feature = "noise_sv2")] pub use noise_sv2::{self, Initiator, NoiseCodec, Responder}; pub use buffer_sv2; -pub use framing_sv2::{self, framing2::handshake_message_to_frame as h2f}; +pub use framing_sv2::{self, framing::handshake_message_to_frame as h2f}; #[cfg(feature = "noise_sv2")] #[derive(Debug)] diff --git a/protocols/v2/framing-sv2/src/framing2.rs b/protocols/v2/framing-sv2/src/framing.rs similarity index 100% rename from protocols/v2/framing-sv2/src/framing2.rs rename to protocols/v2/framing-sv2/src/framing.rs diff --git a/protocols/v2/framing-sv2/src/lib.rs 
b/protocols/v2/framing-sv2/src/lib.rs index 34fe8708b..33dd11fe2 100644 --- a/protocols/v2/framing-sv2/src/lib.rs +++ b/protocols/v2/framing-sv2/src/lib.rs @@ -23,7 +23,7 @@ extern crate alloc; /// SV2 framing types -pub mod framing2; +pub mod framing; /// SV2 framing errors pub mod error; diff --git a/protocols/v2/roles-logic-sv2/src/parsers.rs b/protocols/v2/roles-logic-sv2/src/parsers.rs index 4d80387c9..0274ce785 100644 --- a/protocols/v2/roles-logic-sv2/src/parsers.rs +++ b/protocols/v2/roles-logic-sv2/src/parsers.rs @@ -13,7 +13,7 @@ use binary_sv2::GetSize; use binary_sv2::{from_bytes, Deserialize}; -use framing_sv2::framing2::Sv2Frame; +use framing_sv2::framing::Sv2Frame; use const_sv2::{ CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN, CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN_SUCCESS, From 813fcfd8a3b2019cb3d311b6792813501e58be13 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Wed, 19 Jun 2024 12:13:13 +0300 Subject: [PATCH 045/101] Reorder `framing.rs` so `structs` are ..followed by their `impl` - Also removed double `impl HandShakeFram {}` occurance --- protocols/v2/framing-sv2/src/framing.rs | 154 ++++++++++++------------ 1 file changed, 77 insertions(+), 77 deletions(-) diff --git a/protocols/v2/framing-sv2/src/framing.rs b/protocols/v2/framing-sv2/src/framing.rs index eec461fa9..23f3c18d0 100644 --- a/protocols/v2/framing-sv2/src/framing.rs +++ b/protocols/v2/framing-sv2/src/framing.rs @@ -15,21 +15,35 @@ type Slice = Vec; #[cfg(feature = "with_buffer_pool")] type Slice = buffer_sv2::Slice; -impl Sv2Frame { - /// Maps a `Sv2Frame` to `Sv2Frame` by applying `fun`, - /// which is assumed to be a closure that converts `A` to `C` - pub fn map(self, fun: fn(A) -> C) -> Sv2Frame { - let serialized = self.serialized; - let header = self.header; - let payload = self.payload.map(fun); - Sv2Frame { - header, - payload, - serialized, +/// A wrapper to be used in a context we need a generic reference to a frame +/// but it doesn't matter which kind of frame it is (`Sv2Frame` or 
`HandShakeFrame`) +#[derive(Debug)] +pub enum EitherFrame { + HandShake(HandShakeFrame), + Sv2(Sv2Frame), +} + +impl + AsRef<[u8]>> EitherFrame { + pub fn encoded_length(&self) -> usize { + match &self { + Self::HandShake(frame) => frame.encoded_length(), + Self::Sv2(frame) => frame.encoded_length(), } } } +impl From for EitherFrame { + fn from(v: HandShakeFrame) -> Self { + Self::HandShake(v) + } +} + +impl From> for EitherFrame { + fn from(v: Sv2Frame) -> Self { + Self::Sv2(v) + } +} + /// Abstraction for a SV2 Frame. #[derive(Debug, Clone)] pub struct Sv2Frame { @@ -39,21 +53,6 @@ pub struct Sv2Frame { serialized: Option, } -/// Abstraction for a Noise Handshake Frame -/// Contains only a `Slice` payload with a fixed length -/// Only used during Noise Handshake process -#[derive(Debug)] -pub struct HandShakeFrame { - payload: Slice, -} - -impl HandShakeFrame { - /// Returns payload of `HandShakeFrame` as a `Vec` - pub fn get_payload_when_handshaking(&self) -> Vec { - self.payload[0..].to_vec() - } -} - impl + AsRef<[u8]>> Sv2Frame { /// Write the serialized `Sv2Frame` into `dst`. /// This operation when called on an already serialized frame is very cheap. 
@@ -182,6 +181,41 @@ impl + AsRef<[u8]>> Sv2Frame { } } +impl Sv2Frame { + /// Maps a `Sv2Frame` to `Sv2Frame` by applying `fun`, + /// which is assumed to be a closure that converts `A` to `C` + pub fn map(self, fun: fn(A) -> C) -> Sv2Frame { + let serialized = self.serialized; + let header = self.header; + let payload = self.payload.map(fun); + Sv2Frame { + header, + payload, + serialized, + } + } +} + +impl TryFrom> for Sv2Frame { + type Error = Error; + + fn try_from(v: EitherFrame) -> Result { + match v { + EitherFrame::Sv2(frame) => Ok(frame), + EitherFrame::HandShake(_) => Err(Error::ExpectedSv2Frame), + } + } +} + + +/// Abstraction for a Noise Handshake Frame +/// Contains only a `Slice` payload with a fixed length +/// Only used during Noise Handshake process +#[derive(Debug)] +pub struct HandShakeFrame { + payload: Slice, +} + impl HandShakeFrame { /// Put the Noise Frame payload into `dst` #[inline] @@ -196,6 +230,11 @@ impl HandShakeFrame { &mut self.payload[NOISE_HEADER_SIZE..] } + /// Returns payload of `HandShakeFrame` as a `Vec` + pub fn get_payload_when_handshaking(&self) -> Vec { + self.payload[0..].to_vec() + } + /// `HandShakeFrame` always returns `None`. 
fn get_header(&self) -> Option { None @@ -261,6 +300,18 @@ impl HandShakeFrame { } } +impl TryFrom> for HandShakeFrame { + type Error = Error; + + fn try_from(v: EitherFrame) -> Result { + match v { + EitherFrame::HandShake(frame) => Ok(frame), + EitherFrame::Sv2(_) => Err(Error::ExpectedHandshakeFrame), + } + } +} + + /// Returns a `HandShakeFrame` from a generic byte array #[allow(clippy::useless_conversion)] pub fn handshake_message_to_frame>(message: T) -> HandShakeFrame { @@ -285,57 +336,6 @@ fn update_extension_type(extension_type: u16, channel_msg: bool) -> u16 { } } -/// A wrapper to be used in a context we need a generic reference to a frame -/// but it doesn't matter which kind of frame it is (`Sv2Frame` or `HandShakeFrame`) -#[derive(Debug)] -pub enum EitherFrame { - HandShake(HandShakeFrame), - Sv2(Sv2Frame), -} - -impl + AsRef<[u8]>> EitherFrame { - pub fn encoded_length(&self) -> usize { - match &self { - Self::HandShake(frame) => frame.encoded_length(), - Self::Sv2(frame) => frame.encoded_length(), - } - } -} - -impl TryFrom> for HandShakeFrame { - type Error = Error; - - fn try_from(v: EitherFrame) -> Result { - match v { - EitherFrame::HandShake(frame) => Ok(frame), - EitherFrame::Sv2(_) => Err(Error::ExpectedHandshakeFrame), - } - } -} - -impl TryFrom> for Sv2Frame { - type Error = Error; - - fn try_from(v: EitherFrame) -> Result { - match v { - EitherFrame::Sv2(frame) => Ok(frame), - EitherFrame::HandShake(_) => Err(Error::ExpectedSv2Frame), - } - } -} - -impl From for EitherFrame { - fn from(v: HandShakeFrame) -> Self { - Self::HandShake(v) - } -} - -impl From> for EitherFrame { - fn from(v: Sv2Frame) -> Self { - Self::Sv2(v) - } -} - #[cfg(test)] use binary_sv2::binary_codec_sv2; From 4368dbd1d6a813a631301cdbfc1132bf5fb720ce Mon Sep 17 00:00:00 2001 From: jbesraa Date: Wed, 19 Jun 2024 12:26:32 +0300 Subject: [PATCH 046/101] Remove unused functions from `HandShakeFrame` --- protocols/v2/framing-sv2/src/framing.rs | 69 +------------------------ 
1 file changed, 1 insertion(+), 68 deletions(-) diff --git a/protocols/v2/framing-sv2/src/framing.rs b/protocols/v2/framing-sv2/src/framing.rs index 23f3c18d0..3ce948594 100644 --- a/protocols/v2/framing-sv2/src/framing.rs +++ b/protocols/v2/framing-sv2/src/framing.rs @@ -1,14 +1,8 @@ -#![allow(dead_code)] -use crate::{ - header::{Header, NOISE_HEADER_LEN_OFFSET, NOISE_HEADER_SIZE}, - Error, -}; +use crate::{header::Header, Error}; use alloc::vec::Vec; use binary_sv2::{to_writer, GetSize, Serialize}; use core::convert::TryFrom; -const NOISE_MAX_LEN: usize = const_sv2::NOISE_FRAME_MAX_SIZE; - #[cfg(not(feature = "with_buffer_pool"))] type Slice = Vec; @@ -217,29 +211,11 @@ pub struct HandShakeFrame { } impl HandShakeFrame { - /// Put the Noise Frame payload into `dst` - #[inline] - fn serialize(mut self, dst: &mut [u8]) -> Result<(), Error> { - dst.swap_with_slice(self.payload.as_mut()); - Ok(()) - } - - /// Get the Noise Frame payload - #[inline] - fn payload(&mut self) -> &mut [u8] { - &mut self.payload[NOISE_HEADER_SIZE..] - } - /// Returns payload of `HandShakeFrame` as a `Vec` pub fn get_payload_when_handshaking(&self) -> Vec { self.payload[0..].to_vec() } - /// `HandShakeFrame` always returns `None`. - fn get_header(&self) -> Option { - None - } - /// Builds a `HandShakeFrame` from raw bytes. Nothing is assumed or checked about the correctness of the payload. pub fn from_bytes(bytes: Slice) -> Result { Ok(Self::from_bytes_unchecked(bytes)) @@ -250,54 +226,11 @@ impl HandShakeFrame { Self { payload: bytes } } - /// After parsing the expected `HandShakeFrame` size from `bytes`, this function helps to determine if this value - /// correctly representing the size of the frame. - /// - Returns `0` if the byte slice is of the expected size according to the header. - /// - Returns a negative value if the byte slice is smaller than a Noise Frame header; this value - /// represents how many bytes are missing. 
- /// - Returns a positive value if the byte slice is longer than expected; this value - /// indicates the surplus of bytes beyond the expected size. - #[inline] - fn size_hint(bytes: &[u8]) -> isize { - if bytes.len() < NOISE_HEADER_SIZE { - return (NOISE_HEADER_SIZE - bytes.len()) as isize; - }; - - let len_b = &bytes[NOISE_HEADER_LEN_OFFSET..NOISE_HEADER_SIZE]; - let expected_len = u16::from_le_bytes([len_b[0], len_b[1]]) as usize; - - if bytes.len() - NOISE_HEADER_SIZE == expected_len { - 0 - } else { - expected_len as isize - (bytes.len() - NOISE_HEADER_SIZE) as isize - } - } - /// Returns the size of the `HandShakeFrame` payload. #[inline] fn encoded_length(&self) -> usize { self.payload.len() } - - /// Tries to build a `HandShakeFrame` frame from a byte slice. - /// Returns a `HandShakeFrame` if the size of the payload fits in the frame, `None` otherwise. - /// This is quite inefficient, and should be used only to build `HandShakeFrames` - // TODO check if is used only to build `HandShakeFrames` - #[allow(clippy::useless_conversion)] - fn from_message( - message: Slice, - _message_type: u8, - _extension_type: u16, - _channel_msg: bool, - ) -> Option { - if message.len() <= NOISE_MAX_LEN { - Some(Self { - payload: message.into(), - }) - } else { - None - } - } } impl TryFrom> for HandShakeFrame { From 5a2b01b7c0e9f4872d2dccaf30531d2b6355380f Mon Sep 17 00:00:00 2001 From: jbesraa Date: Wed, 19 Jun 2024 12:35:08 +0300 Subject: [PATCH 047/101] Rename `framing::EitherFrame` to `::Frame` --- protocols/v2/codec-sv2/src/decoder.rs | 13 +++++-------- protocols/v2/codec-sv2/src/encoder.rs | 4 ++-- protocols/v2/framing-sv2/src/framing.rs | 26 ++++++++++++------------- 3 files changed, 19 insertions(+), 24 deletions(-) diff --git a/protocols/v2/codec-sv2/src/decoder.rs b/protocols/v2/codec-sv2/src/decoder.rs index 9f5f8e89a..4d1440018 100644 --- a/protocols/v2/codec-sv2/src/decoder.rs +++ b/protocols/v2/codec-sv2/src/decoder.rs @@ -12,7 +12,7 @@ use 
framing_sv2::framing::HandShakeFrame; #[cfg(feature = "noise_sv2")] use framing_sv2::header::{NOISE_HEADER_ENCRYPTED_SIZE, NOISE_HEADER_SIZE}; use framing_sv2::{ - framing::{EitherFrame, Sv2Frame}, + framing::{Frame, Sv2Frame}, header::Header, }; #[cfg(feature = "noise_sv2")] @@ -36,7 +36,7 @@ use crate::State; #[cfg(feature = "noise_sv2")] pub type StandardNoiseDecoder = WithNoise; -pub type StandardEitherFrame = EitherFrame::Slice>; +pub type StandardEitherFrame = Frame::Slice>; pub type StandardSv2Frame = Sv2Frame::Slice>; pub type StandardDecoder = WithoutNoise; @@ -51,7 +51,7 @@ pub struct WithNoise { #[cfg(feature = "noise_sv2")] impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> WithNoise { #[inline] - pub fn next_frame(&mut self, state: &mut State) -> Result> { + pub fn next_frame(&mut self, state: &mut State) -> Result> { match state { State::HandShake(_) => unreachable!(), State::NotInitialized(msg_len) => { @@ -97,10 +97,7 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit } #[inline] - fn decode_noise_frame( - &mut self, - noise_codec: &mut NoiseCodec, - ) -> Result> { + fn decode_noise_frame(&mut self, noise_codec: &mut NoiseCodec) -> Result> { match ( IsBuffer::len(&self.noise_buffer), IsBuffer::len(&self.sv2_buffer), @@ -148,7 +145,7 @@ impl<'a, T: Serialize + GetSize + Deserialize<'a>, B: IsBuffer + AeadBuffer> Wit } } - fn while_handshaking(&mut self) -> EitherFrame { + fn while_handshaking(&mut self) -> Frame { let src = self.noise_buffer.get_data_owned().as_mut().to_vec(); // below is inffalible as noise frame length has been already checked diff --git a/protocols/v2/codec-sv2/src/encoder.rs b/protocols/v2/codec-sv2/src/encoder.rs index 39d10f00b..21618fda5 100644 --- a/protocols/v2/codec-sv2/src/encoder.rs +++ b/protocols/v2/codec-sv2/src/encoder.rs @@ -7,7 +7,7 @@ use core::convert::TryInto; use core::marker::PhantomData; use framing_sv2::framing::Sv2Frame; #[cfg(feature = "noise_sv2")] 
-use framing_sv2::framing::{EitherFrame, HandShakeFrame}; +use framing_sv2::framing::{Frame, HandShakeFrame}; #[allow(unused_imports)] pub use framing_sv2::header::NOISE_HEADER_ENCRYPTED_SIZE; @@ -43,7 +43,7 @@ pub struct NoiseEncoder { } #[cfg(feature = "noise_sv2")] -type Item = EitherFrame; +type Item = Frame; #[cfg(feature = "noise_sv2")] impl NoiseEncoder { diff --git a/protocols/v2/framing-sv2/src/framing.rs b/protocols/v2/framing-sv2/src/framing.rs index 3ce948594..616d53354 100644 --- a/protocols/v2/framing-sv2/src/framing.rs +++ b/protocols/v2/framing-sv2/src/framing.rs @@ -12,12 +12,12 @@ type Slice = buffer_sv2::Slice; /// A wrapper to be used in a context we need a generic reference to a frame /// but it doesn't matter which kind of frame it is (`Sv2Frame` or `HandShakeFrame`) #[derive(Debug)] -pub enum EitherFrame { +pub enum Frame { HandShake(HandShakeFrame), Sv2(Sv2Frame), } -impl + AsRef<[u8]>> EitherFrame { +impl + AsRef<[u8]>> Frame { pub fn encoded_length(&self) -> usize { match &self { Self::HandShake(frame) => frame.encoded_length(), @@ -26,13 +26,13 @@ impl + AsRef<[u8]>> EitherFrame { } } -impl From for EitherFrame { +impl From for Frame { fn from(v: HandShakeFrame) -> Self { Self::HandShake(v) } } -impl From> for EitherFrame { +impl From> for Frame { fn from(v: Sv2Frame) -> Self { Self::Sv2(v) } @@ -190,18 +190,17 @@ impl Sv2Frame { } } -impl TryFrom> for Sv2Frame { +impl TryFrom> for Sv2Frame { type Error = Error; - fn try_from(v: EitherFrame) -> Result { + fn try_from(v: Frame) -> Result { match v { - EitherFrame::Sv2(frame) => Ok(frame), - EitherFrame::HandShake(_) => Err(Error::ExpectedSv2Frame), + Frame::Sv2(frame) => Ok(frame), + Frame::HandShake(_) => Err(Error::ExpectedSv2Frame), } } } - /// Abstraction for a Noise Handshake Frame /// Contains only a `Slice` payload with a fixed length /// Only used during Noise Handshake process @@ -233,18 +232,17 @@ impl HandShakeFrame { } } -impl TryFrom> for HandShakeFrame { +impl TryFrom> for 
HandShakeFrame { type Error = Error; - fn try_from(v: EitherFrame) -> Result { + fn try_from(v: Frame) -> Result { match v { - EitherFrame::HandShake(frame) => Ok(frame), - EitherFrame::Sv2(_) => Err(Error::ExpectedHandshakeFrame), + Frame::HandShake(frame) => Ok(frame), + Frame::Sv2(_) => Err(Error::ExpectedHandshakeFrame), } } } - /// Returns a `HandShakeFrame` from a generic byte array #[allow(clippy::useless_conversion)] pub fn handshake_message_to_frame>(message: T) -> HandShakeFrame { From e9d3a9fbac23de1680a46a8d7b465eaef5ed3d7f Mon Sep 17 00:00:00 2001 From: esraa Date: Sun, 23 Jun 2024 13:16:42 +0300 Subject: [PATCH 048/101] Increase `test_diff_management` test total runtime Its currently producing too much false alarms which can block new pull requests as well as make it harder to find issues in new pull requests. --- roles/translator/src/lib/downstream_sv1/diff_management.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/translator/src/lib/downstream_sv1/diff_management.rs b/roles/translator/src/lib/downstream_sv1/diff_management.rs index 378134560..d4a7a47fb 100644 --- a/roles/translator/src/lib/downstream_sv1/diff_management.rs +++ b/roles/translator/src/lib/downstream_sv1/diff_management.rs @@ -320,7 +320,7 @@ mod test { #[test] fn test_diff_management() { let expected_shares_per_minute = 1000.0; - let total_run_time = std::time::Duration::from_secs(11); + let total_run_time = std::time::Duration::from_secs(30); let initial_nominal_hashrate = measure_hashrate(5); let target = match roles_logic_sv2::utils::hash_rate_to_target( initial_nominal_hashrate, @@ -342,13 +342,13 @@ mod test { } let calculated_share_per_min = count as f32 / (elapsed.as_secs_f32() / 60.0); - // This is the error margin for a confidence of 99% given the expect number of shares per + // This is the error margin for a confidence of 99.99...% given the expect number of shares per // minute TODO the review the math under it let error_margin = 
get_error(expected_shares_per_minute); let error = (calculated_share_per_min - expected_shares_per_minute as f32).abs(); assert!( error <= error_margin as f32, - "Calculated shares per minute are outside the 99% confidence interval. Error: {:?}, Error margin: {:?}, {:?}", error, error_margin,calculated_share_per_min + "Calculated shares per minute are outside the 99.99...% confidence interval. Error: {:?}, Error margin: {:?}, {:?}", error, error_margin,calculated_share_per_min ); } From 2383633e7580450f3f532783488da9cea8b9e676 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Wed, 19 Jun 2024 11:05:51 +0300 Subject: [PATCH 049/101] Add MSRV 1.75 Github workflow check --- .github/workflows/rust-msrv.yaml | 37 ++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 .github/workflows/rust-msrv.yaml diff --git a/.github/workflows/rust-msrv.yaml b/.github/workflows/rust-msrv.yaml new file mode 100644 index 000000000..3384e4a2f --- /dev/null +++ b/.github/workflows/rust-msrv.yaml @@ -0,0 +1,37 @@ +on: + push: + branches: + - main + - dev + pull_request: + branches: + - main + - dev + +name: MSRV 1.75 Check + +jobs: + + build: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + rust: + - 1.75.0 # MSRV + + steps: + - uses: actions/checkout@v2 + - uses: Swatinem/rust-cache@v1.2.0 + - uses: actions-rs/toolchain@v1 + with: + toolchain: ${{ matrix.rust }} + override: true + - name: Build Benches + run: cargo build --manifest-path=benches/Cargo.toml + - name: Build Protocols + run: cargo build --manifest-path=protocols/Cargo.toml + - name: Build Roles + run: cargo build --manifest-path=roles/Cargo.toml + - name: Build Utils + run: cargo build --manifest-path=utils/Cargo.toml From 9db50a3e5544d788935e8b55e6d71c2c84feb6a1 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Wed, 19 Jun 2024 11:49:35 +0300 Subject: [PATCH 050/101] Remove rust version pinning from `.toml` files --- protocols/Cargo.toml | 1 - roles/Cargo.toml | 1 - rust-toolchain.toml 
| 4 ---- utils/Cargo.toml | 1 - 4 files changed, 7 deletions(-) delete mode 100644 rust-toolchain.toml diff --git a/protocols/Cargo.toml b/protocols/Cargo.toml index 90d917fdd..d1f21ca36 100644 --- a/protocols/Cargo.toml +++ b/protocols/Cargo.toml @@ -2,7 +2,6 @@ name = "stratum_v2_protocols" version = "1.0.0" authors = ["The Stratum v2 Developers"] edition = "2021" -rust-version = "1.75.0" description = "The Stratum protocol defines how miners, proxies, and pools communicate to contribute hashrate to the Bitcoin network. Stratum v2 is a robust set of primitives which anyone can use to expand the protocol or implement a role." documentation = "https://github.com/stratum-mining/stratum" readme = "README.md" diff --git a/roles/Cargo.toml b/roles/Cargo.toml index 2155409ba..109cfd0ee 100644 --- a/roles/Cargo.toml +++ b/roles/Cargo.toml @@ -2,7 +2,6 @@ name = "stratum_v2_roles" version = "0.1.0" authors = ["The Stratum v2 Developers"] edition = "2021" -rust-version = "1.75.0" description = "The Stratum protocol defines how miners, proxies, and pools communicate to contribute hashrate to the Bitcoin network. Stratum v2 is a robust set of primitives which anyone can use to expand the protocol or implement a role." documentation = "https://github.com/stratum-mining/stratum" readme = "README.md" diff --git a/rust-toolchain.toml b/rust-toolchain.toml deleted file mode 100644 index ecc22580b..000000000 --- a/rust-toolchain.toml +++ /dev/null @@ -1,4 +0,0 @@ -[toolchain] -channel = "1.75.0" -components = [ "rustfmt", "clippy" ] -profile = "minimal" diff --git a/utils/Cargo.toml b/utils/Cargo.toml index 194360857..aa22ee1ae 100644 --- a/utils/Cargo.toml +++ b/utils/Cargo.toml @@ -2,7 +2,6 @@ name = "stratum_v2_utils" version = "1.0.0" authors = ["The Stratum v2 Developers"] edition = "2021" -rust-version = "1.75.0" description = "The Stratum protocol defines how miners, proxies, and pools communicate to contribute hashrate to the Bitcoin network. 
Stratum v2 is a robust set of primitives which anyone can use to expand the protocol or implement a role." documentation = "https://github.com/stratum-mining/stratum" readme = "README.md" From 92a6fbf6bda5b95285ff533d347b495197e4f7bf Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 3 Jul 2024 12:02:17 -0300 Subject: [PATCH 051/101] add ActionResult::SustainConnection --- utils/message-generator/src/executor.rs | 114 +++++++++++++----- utils/message-generator/src/main.rs | 2 + utils/message-generator/src/parser/actions.rs | 1 + 3 files changed, 89 insertions(+), 28 deletions(-) diff --git a/utils/message-generator/src/executor.rs b/utils/message-generator/src/executor.rs index f31991eca..22843e03a 100644 --- a/utils/message-generator/src/executor.rs +++ b/utils/message-generator/src/executor.rs @@ -199,35 +199,21 @@ impl Executor { result ); - // If the connection should drop at this point then let's just break the loop - // Can't do anything else after the connection drops. - if *result == ActionResult::CloseConnection { - info!( - "Waiting 1 sec to make sure that remote have time to close the connection" - ); - tokio::time::sleep(std::time::Duration::from_millis(1000)).await; - recv.recv() - .await - .expect_err("Expecting the connection to be closed: wasn't"); - success = true; - break; - } - - let message = match recv.recv().await { - Ok(message) => message, - Err(_) => { - success = false; - error!("Connection closed before receiving the message"); - break; - } - }; - - let mut message: Sv2Frame, _> = message.try_into().unwrap(); - debug!("RECV {:#?}", message); - let header = message.get_header().unwrap(); - let payload = message.payload(); match result { ActionResult::MatchMessageType(message_type) => { + let message = match recv.recv().await { + Ok(message) => message, + Err(_) => { + success = false; + error!("Connection closed before receiving the message"); + break; + } + }; + + let message: Sv2Frame, _> = message.try_into().unwrap(); + debug!("RECV 
{:#?}", message); + let header = message.get_header().unwrap(); + if header.msg_type() != *message_type { error!( "WRONG MESSAGE TYPE expected: {} received: {}", @@ -245,6 +231,20 @@ impl Executor { message_type, field_data, // Vec<(String, Sv2Type)> )) => { + let message = match recv.recv().await { + Ok(message) => message, + Err(_) => { + success = false; + error!("Connection closed before receiving the message"); + break; + } + }; + + let mut message: Sv2Frame, _> = + message.try_into().unwrap(); + debug!("RECV {:#?}", message); + let header = message.get_header().unwrap(); + let payload = message.payload(); if subprotocol.as_str() == "CommonMessages" { match (header.msg_type(), payload).try_into() { Ok(roles_logic_sv2::parsers::CommonMessages::SetupConnection(m)) => { @@ -532,6 +532,20 @@ impl Executor { message_type: _, fields, } => { + let message = match recv.recv().await { + Ok(message) => message, + Err(_) => { + success = false; + error!("Connection closed before receiving the message"); + break; + } + }; + + let mut message: Sv2Frame, _> = + message.try_into().unwrap(); + debug!("RECV {:#?}", message); + let header = message.get_header().unwrap(); + let payload = message.payload(); if subprotocol.as_str() == "CommonMessages" { match (header.msg_type(), payload).try_into() { Ok(parsers::CommonMessages::SetupConnection(m)) => { @@ -730,6 +744,19 @@ impl Executor { }; } ActionResult::MatchMessageLen(message_len) => { + let message = match recv.recv().await { + Ok(message) => message, + Err(_) => { + success = false; + error!("Connection closed before receiving the message"); + break; + } + }; + + let mut message: Sv2Frame, _> = + message.try_into().unwrap(); + debug!("RECV {:#?}", message); + let payload = message.payload(); if payload.len() != *message_len { error!( "WRONG MESSAGE len expected: {} received: {}", @@ -741,6 +768,18 @@ impl Executor { } } ActionResult::MatchExtensionType(ext_type) => { + let message = match recv.recv().await { + Ok(message) 
=> message, + Err(_) => { + success = false; + error!("Connection closed before receiving the message"); + break; + } + }; + + let message: Sv2Frame, _> = message.try_into().unwrap(); + debug!("RECV {:#?}", message); + let header = message.get_header().unwrap(); if header.ext_type() != *ext_type { error!( "WRONG EXTENSION TYPE expected: {} received: {}", @@ -752,7 +791,26 @@ impl Executor { } } ActionResult::CloseConnection => { - todo!() + info!( + "Waiting 1 sec to make sure that remote has time to close the connection" + ); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + if !recv.is_closed() { + error!("Expected connection to close, but it didn't. Test failed."); + success = false; + break; + } + } + ActionResult::SustainConnection => { + info!( + "Waiting 1 sec to make sure that remote has time to close the connection" + ); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + if recv.is_closed() { + error!("Expected connection to sustain, but it didn't. Test failed."); + success = false; + break; + } } ActionResult::None => todo!(), } diff --git a/utils/message-generator/src/main.rs b/utils/message-generator/src/main.rs index e2633ed23..327d50cbc 100644 --- a/utils/message-generator/src/main.rs +++ b/utils/message-generator/src/main.rs @@ -191,6 +191,7 @@ enum ActionResult { MatchMessageLen(usize), MatchExtensionType(u16), CloseConnection, + SustainConnection, None, } @@ -225,6 +226,7 @@ impl std::fmt::Display for ActionResult { write!(f, "MatchExtensionType: {}", extension_type) } ActionResult::CloseConnection => write!(f, "Close connection"), + ActionResult::SustainConnection => write!(f, "Sustain connection"), ActionResult::GetMessageField { subprotocol, fields, diff --git a/utils/message-generator/src/parser/actions.rs b/utils/message-generator/src/parser/actions.rs index ce84c7adf..23bf6188b 100644 --- a/utils/message-generator/src/parser/actions.rs +++ b/utils/message-generator/src/parser/actions.rs @@ -91,6 +91,7 @@ impl 
Sv2ActionParser { "close_connection" => { action_results.push(ActionResult::CloseConnection); } + "sustain_connection" => action_results.push(ActionResult::SustainConnection), "none" => { action_results.push(ActionResult::None); } From 6f80d045812aa14d1ce04076abe2fa0d5e7f4d36 Mon Sep 17 00:00:00 2001 From: plebhash Date: Tue, 16 Jul 2024 09:56:26 -0300 Subject: [PATCH 052/101] split MG CI jobs and separate them from Tarpaulin this is a paliative solution to #1028 we make MG CI slightly less worse by breaking tests into multiple jobs and allowing us to re-run false alarms in a more agile way MG Tests are also separated from Tarpaulin --- .github/workflows/coverage.yaml | 119 +----------------------- .github/workflows/mg.yaml | 160 ++++++++++++++++++++++++++++++++ 2 files changed, 161 insertions(+), 118 deletions(-) create mode 100644 .github/workflows/mg.yaml diff --git a/.github/workflows/coverage.yaml b/.github/workflows/coverage.yaml index 21d56abe4..5c20325c1 100644 --- a/.github/workflows/coverage.yaml +++ b/.github/workflows/coverage.yaml @@ -1,5 +1,4 @@ -# Performs test coverage of project's libraries using cargo-tarpaulin and the message-generator, -# and generates results using codecov.io. +# Performs test coverage of project's libraries using cargo-tarpaulin and generates results using codecov.io. # The following flags are set inside `tarpaulin.toml`: # `features = "..."`: Includes the code with the listed features. 
The following features result in a # tarpaulin error and are NOT included: derive, alloc, arbitrary-derive, attributes, and @@ -47,119 +46,3 @@ jobs: protocols/cobertura.xml roles/cobertura.xml utils/cobertura.xml - - message-generator-test: - needs: tarpaulin-test - - name: MG Test - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - uses: actions-rs/toolchain@v1 - with: - toolchain: 1.75.0 - override: true - components: llvm-tools-preview - - - name: Log data from rustc - run: rustc -Vv - - - name: Install cargo-llvm-cov - uses: taiki-e/install-action@cargo-llvm-cov - - - name: Run bad-pool-config-test - run: sh ./test/message-generator/test/bad-pool-config-test/bad-pool-config-test.sh - - - name: Run interop-jd-translator - run: sh ./test/message-generator/test/interop-jd-translator/interop-jd-translator.sh - - #- name: Run interop-jdc-change-upstream - # run: sh ./test/message-generator/test/interop-jdc-change-upstream/interop-jdc-change-upstream.sh - - - name: Run interop-proxy-with-multi-ups - run: sh ./test/message-generator/test/interop-proxy-with-multi-ups/interop-proxy-with-multi-ups.sh - - - name: Run interop-proxy-with-multi-ups-extended - run: sh ./test/message-generator/test/interop-proxy-with-multi-ups-extended/interop-proxy-with-multi-ups-extended.sh - - - name: Run jds-do-not-fail-on-wrong-tsdatasucc - run: sh ./test/message-generator/test/jds-do-not-fail-on-wrong-tsdatasucc/jds-do-not-fail-on-wrong-tsdatasucc.sh - - - name: Run jds-do-not-panic-if-jdc-close-connection - run: sh ./test/message-generator/test/jds-do-not-panic-if-jdc-close-connection/jds-do-not-panic-if-jdc-close-connection.sh - - - name: Run jds-do-not-stackoverflow-when-no-token - run: sh ./test/message-generator/test/jds-do-not-stackoverflow-when-no-token/jds-do-not-stackoverflow-when-no-token.sh - - - name: Run pool-sri-test-1-standard - run: sh ./test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.sh - - - 
name: Run pool-sri-test-close-channel - run: sh ./test/message-generator/test/pool-sri-test-close-channel/pool-sri-test-close-channel.sh - - - name: Run pool-sri-test-extended_0 - run: sh ./test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.sh - - - name: Run pool-sri-test-extended_1 - run: sh ./test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.sh - - - name: Run pool-sri-test-reject-auth - run: sh ./test/message-generator/test/pool-sri-test-reject-auth/pool-sri-test-reject-auth.sh - - - name: Run standard-coverage - run: sh ./test/message-generator/test/standard-coverage-test/standard-coverage-test.sh - - - name: Run sv1-test - run: sh ./test/message-generator/test/sv1-test/sv1-test.sh - - - name: Run translation-proxy-broke-pool - run: sh ./test/message-generator/test/translation-proxy-broke-pool/translation-proxy-broke-pool.sh - - - name: Run translation-proxy - run: sh ./test/message-generator/test/translation-proxy/translation-proxy.sh - - - name: Run translation-proxy-old-share - run: sh ./test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh - - - name: Coverage report - run: sh ./scripts/code-coverage-report.sh - - - name: Archive MG code coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-report - path: 'target/*.xml' - - - name: Archive log files - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs - path: './utils/message-generator/*.log' - - # codecov: - # needs: message-generator-test - - # name: Codecov Upload - # runs-on: ubuntu-latest - - # steps: - - # - name: Checkout repository - # uses: actions/checkout@v4 - - # - name: Download all workflow run artifacts - # uses: actions/download-artifact@v4 - - # - name: Display structure of downloaded files - # run: ls -R - - # - name: Upload to codecov.io - # uses: codecov/codecov-action@v3 - # with: - # files: coverage-report/*.xml, tarpaulin-report/*.xml - # fail_ci_if_error: 
true - # token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/mg.yaml b/.github/workflows/mg.yaml new file mode 100644 index 000000000..7a98ff242 --- /dev/null +++ b/.github/workflows/mg.yaml @@ -0,0 +1,160 @@ +# Runs all Message Generator tests in separate jobs + +name: MG Test + +on: + push: + branches: [ main, dev ] + pull_request: + branches: [ main, dev ] + +jobs: + bad-pool-config-test: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run bad-pool-config-test + run: sh ./test/message-generator/test/bad-pool-config-test/bad-pool-config-test.sh + + interop-jd-translator: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run interop-jd-translator + run: sh ./test/message-generator/test/interop-jd-translator/interop-jd-translator.sh + + interop-proxy-with-multi-ups: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run interop-proxy-with-multi-ups + run: sh ./test/message-generator/test/interop-proxy-with-multi-ups/interop-proxy-with-multi-ups.sh + + interop-proxy-with-multi-ups-extended: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run interop-proxy-with-multi-ups-extended + run: sh ./test/message-generator/test/interop-proxy-with-multi-ups-extended/interop-proxy-with-multi-ups-extended.sh + + jds-do-not-fail-on-wrong-tsdatasucc: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run jds-do-not-fail-on-wrong-tsdatasucc + run: sh ./test/message-generator/test/jds-do-not-fail-on-wrong-tsdatasucc/jds-do-not-fail-on-wrong-tsdatasucc.sh + + jds-do-not-panic-if-jdc-close-connection: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run jds-do-not-panic-if-jdc-close-connection 
+ run: sh ./test/message-generator/test/jds-do-not-panic-if-jdc-close-connection/jds-do-not-panic-if-jdc-close-connection.sh + + jds-do-not-stackoverflow-when-no-token: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run jds-do-not-stackoverflow-when-no-token + run: sh ./test/message-generator/test/jds-do-not-stackoverflow-when-no-token/jds-do-not-stackoverflow-when-no-token.sh + + pool-sri-test-1-standard: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run pool-sri-test-1-standard + run: sh ./test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.sh + + pool-sri-test-close-channel: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run pool-sri-test-close-channel + run: sh ./test/message-generator/test/pool-sri-test-close-channel/pool-sri-test-close-channel.sh + + pool-sri-test-extended_0: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run pool-sri-test-extended_0 + run: sh ./test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.sh + + pool-sri-test-extended_1: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run pool-sri-test-extended_1 + run: sh ./test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.sh + + pool-sri-test-reject-auth: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run pool-sri-test-reject-auth + run: sh ./test/message-generator/test/pool-sri-test-reject-auth/pool-sri-test-reject-auth.sh + 
+ standard-coverage: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run standard-coverage + run: sh ./test/message-generator/test/standard-coverage-test/standard-coverage-test.sh + + sv1-test: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run sv1-test + run: sh ./test/message-generator/test/sv1-test/sv1-test.sh + + translation-proxy-broke-pool: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run translation-proxy-broke-pool + run: sh ./test/message-generator/test/translation-proxy-broke-pool/translation-proxy-broke-pool.sh + + translation-proxy: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run translation-proxy + run: sh ./test/message-generator/test/translation-proxy/translation-proxy.sh + + translation-proxy-old-share: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run translation-proxy-old-share + run: sh ./test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh \ No newline at end of file From 235993d7f15b73390677e7d55651fcdba0f183c1 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 17 Jul 2024 11:30:02 -0300 Subject: [PATCH 053/101] add mg-aggregate-results Github only allows branch protection rules based on specific CI jobs. So we are adding a new job that just takes all the other MG test jobs as a prerequisite, and we use this as an umbrella rule for branch protection. 
--- .github/workflows/mg.yaml | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/workflows/mg.yaml b/.github/workflows/mg.yaml index 7a98ff242..ed41fb9de 100644 --- a/.github/workflows/mg.yaml +++ b/.github/workflows/mg.yaml @@ -157,4 +157,29 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 - name: Run translation-proxy-old-share - run: sh ./test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh \ No newline at end of file + run: sh ./test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.sh + + mg-aggregate-results: + name: "Aggregate MG Test Results" + runs-on: ubuntu-latest + needs: + - bad-pool-config-test + - interop-jd-translator + - interop-proxy-with-multi-ups + - interop-proxy-with-multi-ups-extended + - jds-do-not-fail-on-wrong-tsdatasucc + - jds-do-not-panic-if-jdc-close-connection + - jds-do-not-stackoverflow-when-no-token + - pool-sri-test-1-standard + - pool-sri-test-close-channel + - pool-sri-test-extended_0 + - pool-sri-test-extended_1 + - pool-sri-test-reject-auth + - standard-coverage + - sv1-test + - translation-proxy-broke-pool + - translation-proxy + - translation-proxy-old-share + steps: + - name: Aggregate MG Test Results + run: echo "All MG tests completed successfully" \ No newline at end of file From 9d62c72e3c147b68d76e87acc74de15782ab8396 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 17 Jul 2024 14:35:06 -0300 Subject: [PATCH 054/101] avoid skipping mg-aggregate-results on failed MG test jobs --- .github/workflows/mg.yaml | 66 ++++++++++++++++++++++++++------------- 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/.github/workflows/mg.yaml b/.github/workflows/mg.yaml index ed41fb9de..13aaef9e6 100644 --- a/.github/workflows/mg.yaml +++ b/.github/workflows/mg.yaml @@ -162,24 +162,48 @@ jobs: mg-aggregate-results: name: "Aggregate MG Test Results" runs-on: ubuntu-latest - needs: - - 
bad-pool-config-test - - interop-jd-translator - - interop-proxy-with-multi-ups - - interop-proxy-with-multi-ups-extended - - jds-do-not-fail-on-wrong-tsdatasucc - - jds-do-not-panic-if-jdc-close-connection - - jds-do-not-stackoverflow-when-no-token - - pool-sri-test-1-standard - - pool-sri-test-close-channel - - pool-sri-test-extended_0 - - pool-sri-test-extended_1 - - pool-sri-test-reject-auth - - standard-coverage - - sv1-test - - translation-proxy-broke-pool - - translation-proxy - - translation-proxy-old-share - steps: - - name: Aggregate MG Test Results - run: echo "All MG tests completed successfully" \ No newline at end of file + if: always() + needs: [ + bad-pool-config-test, + interop-jd-translator, + interop-proxy-with-multi-ups, + interop-proxy-with-multi-ups-extended, + jds-do-not-fail-on-wrong-tsdatasucc, + jds-do-not-panic-if-jdc-close-connection, + jds-do-not-stackoverflow-when-no-token, + pool-sri-test-1-standard, + pool-sri-test-close-channel, + pool-sri-test-extended_0, + pool-sri-test-extended_1, + pool-sri-test-reject-auth, + standard-coverage, + sv1-test, + translation-proxy-broke-pool, + translation-proxy, + translation-proxy-old-share + ] + steps: + - name: Aggregate Results + run: | + if [ "${{ needs.bad-pool-config-test.result }}" != "success" ] || + [ "${{ needs.interop-jd-translator.result }}" != "success" ] || + [ "${{ needs.interop-proxy-with-multi-ups.result }}" != "success" ] || + [ "${{ needs.interop-proxy-with-multi-ups-extended.result }}" != "success" ] || + [ "${{ needs.jds-do-not-fail-on-wrong-tsdatasucc.result }}" != "success" ] || + [ "${{ needs.jds-do-not-panic-if-jdc-close-connection.result }}" != "success" ] || + [ "${{ needs.jds-do-not-stackoverflow-when-no-token.result }}" != "success" ] || + [ "${{ needs.pool-sri-test-1-standard.result }}" != "success" ] || + [ "${{ needs.pool-sri-test-close-channel.result }}" != "success" ] || + [ "${{ needs.pool-sri-test-extended_0.result }}" != "success" ] || + [ "${{ 
needs.pool-sri-test-extended_1.result }}" != "success" ] || + [ "${{ needs.pool-sri-test-reject-auth.result }}" != "success" ] || + [ "${{ needs.standard-coverage.result }}" != "success" ] || + [ "${{ needs.sv1-test.result }}" != "success" ] || + [ "${{ needs.translation-proxy-broke-pool.result }}" != "success" ] || + [ "${{ needs.translation-proxy.result }}" != "success" ] || + [ "${{ needs.translation-proxy-old-share.result }}" != "success" ]; then + echo "One or more jobs failed." + exit 1 + else + echo "All MG tests completed successfully" + fi \ No newline at end of file From 5d569bbdb0f6d1dc7bc97a62abff9d6ded4b425d Mon Sep 17 00:00:00 2001 From: esraa Date: Fri, 21 Jun 2024 17:32:13 +0300 Subject: [PATCH 055/101] Add Job Declarator Client struct With the goal to write more tests, this commit moves JDC initialisation logic from `main.rs` to `lib.rs` so its easier to test and also separate the actual `lib` code from the binary. --- roles/jd-client/src/lib/mod.rs | 317 +++++++++++++++++++++++++++++++++ roles/jd-client/src/main.rs | 316 +------------------------------- 2 files changed, 322 insertions(+), 311 deletions(-) diff --git a/roles/jd-client/src/lib/mod.rs b/roles/jd-client/src/lib/mod.rs index 455e901cf..7e2fe7edc 100644 --- a/roles/jd-client/src/lib/mod.rs +++ b/roles/jd-client/src/lib/mod.rs @@ -8,6 +8,22 @@ pub mod upstream_sv2; use std::{sync::atomic::AtomicBool, time::Duration}; +use job_declarator::JobDeclarator; +use proxy_config::ProxyConfig; +use template_receiver::TemplateRx; + +use async_channel::{bounded, unbounded}; +use futures::{select, FutureExt}; +use roles_logic_sv2::utils::Mutex; +use std::{ + net::{IpAddr, SocketAddr}, + str::FromStr, + sync::Arc, +}; +use tokio::task::AbortHandle; + +use tracing::{error, info}; + /// Is used by the template receiver and the downstream. 
When a NewTemplate is received the context /// that is running the template receiver set this value to false and then the message is sent to /// the context that is running the Downstream that do something and then set it back to true. @@ -31,6 +47,307 @@ use std::{sync::atomic::AtomicBool, time::Duration}; /// between all the contexts is not necessary. pub static IS_NEW_TEMPLATE_HANDLED: AtomicBool = AtomicBool::new(true); +/// Job Declarator Client (or JDC) is the role which is Miner-side, in charge of creating new +/// mining jobs from the templates received by the Template Provider to which it is connected. It +/// declares custom jobs to the JDS, in order to start working on them. +/// JDC is also responsible for putting in action the Pool-fallback mechanism, automatically +/// switching to backup Pools in case of declared custom jobs refused by JDS (which is Pool side). +/// As a solution of last-resort, it is able to switch to Solo Mining until new safe Pools appear +/// in the market. +pub struct JobDeclaratorClient { + /// Configuration of the proxy server [`JobDeclaratorClient`] is connected to. 
+ config: ProxyConfig, +} + +impl JobDeclaratorClient { + pub fn new(config: ProxyConfig) -> Self { + Self { config } + } + + pub async fn start(self) { + let mut upstream_index = 0; + let mut interrupt_signal_future = Box::pin(tokio::signal::ctrl_c().fuse()); + + // Channel used to manage failed tasks + let (tx_status, rx_status) = unbounded(); + + let task_collector = Arc::new(Mutex::new(vec![])); + + let proxy_config = &self.config; + + loop { + let task_collector = task_collector.clone(); + let tx_status = tx_status.clone(); + if let Some(upstream) = proxy_config.upstreams.get(upstream_index) { + self.initialize_jd(tx_status.clone(), task_collector.clone(), upstream.clone()) + .await; + } else { + self.initialize_jd_as_solo_miner(tx_status.clone(), task_collector.clone()) + .await; + } + // Check all tasks if is_finished() is true, if so exit + loop { + let task_status = select! { + task_status = rx_status.recv().fuse() => task_status, + interrupt_signal = interrupt_signal_future => { + match interrupt_signal { + Ok(()) => { + info!("Interrupt received"); + }, + Err(err) => { + error!("Unable to listen for interrupt signal: {}", err); + // we also shut down in case of error + }, + } + std::process::exit(0); + } + }; + let task_status: status::Status = task_status.unwrap(); + + match task_status.state { + // Should only be sent by the downstream listener + status::State::DownstreamShutdown(err) => { + error!("SHUTDOWN from: {}", err); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + task_collector + .safe_lock(|s| { + for handle in s { + handle.abort(); + } + }) + .unwrap(); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + break; + } + status::State::UpstreamShutdown(err) => { + error!("SHUTDOWN from: {}", err); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + task_collector + .safe_lock(|s| { + for handle in s { + handle.abort(); + } + }) + .unwrap(); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; 
+ break; + } + status::State::UpstreamRogue => { + error!("Changin Pool"); + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + task_collector + .safe_lock(|s| { + for handle in s { + handle.abort(); + } + }) + .unwrap(); + upstream_index += 1; + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + break; + } + status::State::Healthy(msg) => { + info!("HEALTHY message: {}", msg); + } + } + } + } + } + + async fn initialize_jd_as_solo_miner( + &self, + tx_status: async_channel::Sender>, + task_collector: Arc>>, + ) { + let proxy_config = &self.config; + let timeout = proxy_config.timeout; + let miner_tx_out = proxy_config::get_coinbase_output(proxy_config).unwrap(); + + // When Downstream receive a share that meets bitcoin target it transformit in a + // SubmitSolution and send it to the TemplateReceiver + let (send_solution, recv_solution) = bounded(10); + + // Format `Downstream` connection address + let downstream_addr = SocketAddr::new( + IpAddr::from_str(&proxy_config.downstream_address).unwrap(), + proxy_config.downstream_port, + ); + + // Wait for downstream to connect + let downstream = downstream::listen_for_downstream_mining( + downstream_addr, + None, + send_solution, + proxy_config.withhold, + proxy_config.authority_public_key, + proxy_config.authority_secret_key, + proxy_config.cert_validity_sec, + task_collector.clone(), + status::Sender::Downstream(tx_status.clone()), + miner_tx_out.clone(), + None, + ) + .await + .unwrap(); + + // Initialize JD part + let mut parts = proxy_config.tp_address.split(':'); + let ip_tp = parts.next().unwrap().to_string(); + let port_tp = parts.next().unwrap().parse::().unwrap(); + + TemplateRx::connect( + SocketAddr::new(IpAddr::from_str(ip_tp.as_str()).unwrap(), port_tp), + recv_solution, + status::Sender::TemplateReceiver(tx_status.clone()), + None, + downstream, + task_collector, + Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), + miner_tx_out.clone(), + 
proxy_config.tp_authority_public_key, + false, + ) + .await; + } + + async fn initialize_jd( + &self, + tx_status: async_channel::Sender>, + task_collector: Arc>>, + upstream_config: proxy_config::Upstream, + ) { + let proxy_config = &self.config; + let timeout = proxy_config.timeout; + let test_only_do_not_send_solution_to_tp = proxy_config + .test_only_do_not_send_solution_to_tp + .unwrap_or(false); + + // Format `Upstream` connection address + let mut parts = upstream_config.pool_address.split(':'); + let address = parts + .next() + .unwrap_or_else(|| panic!("Invalid pool address {}", upstream_config.pool_address)); + let port = parts + .next() + .and_then(|p| p.parse::().ok()) + .unwrap_or_else(|| panic!("Invalid pool address {}", upstream_config.pool_address)); + let upstream_addr = SocketAddr::new( + IpAddr::from_str(address).unwrap_or_else(|_| { + panic!("Invalid pool address {}", upstream_config.pool_address) + }), + port, + ); + + // When Downstream receive a share that meets bitcoin target it transformit in a + // SubmitSolution and send it to the TemplateReceiver + let (send_solution, recv_solution) = bounded(10); + + // Instantiate a new `Upstream` (SV2 Pool) + let upstream = match upstream_sv2::Upstream::new( + upstream_addr, + upstream_config.authority_pubkey, + 0, // TODO + upstream_config.pool_signature.clone(), + status::Sender::Upstream(tx_status.clone()), + task_collector.clone(), + Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), + ) + .await + { + Ok(upstream) => upstream, + Err(e) => { + error!("Failed to create upstream: {}", e); + panic!() + } + }; + + // Start receiving messages from the SV2 Upstream role + if let Err(e) = upstream_sv2::Upstream::parse_incoming(upstream.clone()) { + error!("failed to create sv2 parser: {}", e); + panic!() + } + + match upstream_sv2::Upstream::setup_connection( + upstream.clone(), + proxy_config.min_supported_version, + proxy_config.max_supported_version, + ) + .await + { + Ok(_) => info!("Connected 
to Upstream!"), + Err(e) => { + error!("Failed to connect to Upstream EXITING! : {}", e); + panic!() + } + } + + // Format `Downstream` connection address + let downstream_addr = SocketAddr::new( + IpAddr::from_str(&proxy_config.downstream_address).unwrap(), + proxy_config.downstream_port, + ); + + // Initialize JD part + let mut parts = proxy_config.tp_address.split(':'); + let ip_tp = parts.next().unwrap().to_string(); + let port_tp = parts.next().unwrap().parse::().unwrap(); + + let mut parts = upstream_config.jd_address.split(':'); + let ip_jd = parts.next().unwrap().to_string(); + let port_jd = parts.next().unwrap().parse::().unwrap(); + let jd = match JobDeclarator::new( + SocketAddr::new(IpAddr::from_str(ip_jd.as_str()).unwrap(), port_jd), + upstream_config.authority_pubkey.into_bytes(), + proxy_config.clone(), + upstream.clone(), + task_collector.clone(), + ) + .await + { + Ok(c) => c, + Err(e) => { + let _ = tx_status + .send(status::Status { + state: status::State::UpstreamShutdown(e), + }) + .await; + return; + } + }; + + // Wait for downstream to connect + let downstream = downstream::listen_for_downstream_mining( + downstream_addr, + Some(upstream), + send_solution, + proxy_config.withhold, + proxy_config.authority_public_key, + proxy_config.authority_secret_key, + proxy_config.cert_validity_sec, + task_collector.clone(), + status::Sender::Downstream(tx_status.clone()), + vec![], + Some(jd.clone()), + ) + .await + .unwrap(); + + TemplateRx::connect( + SocketAddr::new(IpAddr::from_str(ip_tp.as_str()).unwrap(), port_tp), + recv_solution, + status::Sender::TemplateReceiver(tx_status.clone()), + Some(jd.clone()), + downstream, + task_collector, + Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), + vec![], + proxy_config.tp_authority_public_key, + test_only_do_not_send_solution_to_tp, + ) + .await; + } +} + #[derive(Debug)] pub struct PoolChangerTrigger { timeout: Duration, diff --git a/roles/jd-client/src/main.rs b/roles/jd-client/src/main.rs index 
27f05f361..cbc3fbb61 100644 --- a/roles/jd-client/src/main.rs +++ b/roles/jd-client/src/main.rs @@ -1,30 +1,15 @@ #![allow(special_module_name)] - mod args; mod lib; use lib::{ error::{Error, ProxyResult}, - job_declarator::JobDeclarator, proxy_config::ProxyConfig, - status, - template_receiver::TemplateRx, - PoolChangerTrigger, + status, JobDeclaratorClient, }; use args::Args; -use async_channel::{bounded, unbounded}; -use futures::{select, FutureExt}; -use roles_logic_sv2::utils::Mutex; -use std::{ - net::{IpAddr, SocketAddr}, - str::FromStr, - sync::Arc, - time::Duration, -}; -use tokio::task::AbortHandle; - -use tracing::{error, info}; +use tracing::error; /// Process CLI args, if any. #[allow(clippy::result_large_err)] @@ -96,305 +81,14 @@ fn process_cli_args<'a>() -> ProxyResult<'a, ProxyConfig> { #[tokio::main] async fn main() { tracing_subscriber::fmt::init(); - - let mut upstream_index = 0; - let mut interrupt_signal_future = Box::pin(tokio::signal::ctrl_c().fuse()); - - // Channel used to manage failed tasks - let (tx_status, rx_status) = unbounded(); - - let task_collector = Arc::new(Mutex::new(vec![])); - - let proxy_config = match process_cli_args() { - Ok(p) => p, - Err(e) => { - error!("Failed to read config file: {}", e); - return; - } - }; - - loop { - { - let task_collector = task_collector.clone(); - let tx_status = tx_status.clone(); - - if let Some(upstream) = proxy_config.upstreams.get(upstream_index) { - let initialize = initialize_jd( - tx_status.clone(), - task_collector, - upstream.clone(), - proxy_config.timeout, - ); - tokio::task::spawn(initialize); - } else { - let initialize = initialize_jd_as_solo_miner( - tx_status.clone(), - task_collector, - proxy_config.timeout, - ); - tokio::task::spawn(initialize); - } - } - // Check all tasks if is_finished() is true, if so exit - loop { - let task_status = select! 
{ - task_status = rx_status.recv().fuse() => task_status, - interrupt_signal = interrupt_signal_future => { - match interrupt_signal { - Ok(()) => { - info!("Interrupt received"); - }, - Err(err) => { - error!("Unable to listen for interrupt signal: {}", err); - // we also shut down in case of error - }, - } - std::process::exit(0); - } - }; - let task_status: status::Status = task_status.unwrap(); - - match task_status.state { - // Should only be sent by the downstream listener - status::State::DownstreamShutdown(err) => { - error!("SHUTDOWN from: {}", err); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - task_collector - .safe_lock(|s| { - for handle in s { - handle.abort(); - } - }) - .unwrap(); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - break; - } - status::State::UpstreamShutdown(err) => { - error!("SHUTDOWN from: {}", err); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - task_collector - .safe_lock(|s| { - for handle in s { - handle.abort(); - } - }) - .unwrap(); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - break; - } - status::State::UpstreamRogue => { - error!("Changin Pool"); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - task_collector - .safe_lock(|s| { - for handle in s { - handle.abort(); - } - }) - .unwrap(); - upstream_index += 1; - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - break; - } - status::State::Healthy(msg) => { - info!("HEALTHY message: {}", msg); - } - } - } - } -} -async fn initialize_jd_as_solo_miner( - tx_status: async_channel::Sender>, - task_collector: Arc>>, - timeout: Duration, -) { let proxy_config = match process_cli_args() { Ok(p) => p, Err(e) => { - error!("Failed to read config file: {}", e); - return; - } - }; - let miner_tx_out = lib::proxy_config::get_coinbase_output(&proxy_config).unwrap(); - - // When Downstream receive a share that meets bitcoin target it transform it in a - // SubmitSolution and send it to the 
TemplateReceiver - let (send_solution, recv_solution) = bounded(10); - - // Format `Downstream` connection address - let downstream_addr = SocketAddr::new( - IpAddr::from_str(&proxy_config.downstream_address).unwrap(), - proxy_config.downstream_port, - ); - - // Wait for downstream to connect - let downstream = lib::downstream::listen_for_downstream_mining( - downstream_addr, - None, - send_solution, - proxy_config.withhold, - proxy_config.authority_public_key, - proxy_config.authority_secret_key, - proxy_config.cert_validity_sec, - task_collector.clone(), - status::Sender::Downstream(tx_status.clone()), - miner_tx_out.clone(), - None, - ) - .await - .unwrap(); - - // Initialize JD part - let mut parts = proxy_config.tp_address.split(':'); - let ip_tp = parts.next().unwrap().to_string(); - let port_tp = parts.next().unwrap().parse::().unwrap(); - - TemplateRx::connect( - SocketAddr::new(IpAddr::from_str(ip_tp.as_str()).unwrap(), port_tp), - recv_solution, - status::Sender::TemplateReceiver(tx_status.clone()), - None, - downstream, - task_collector, - Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), - miner_tx_out.clone(), - proxy_config.tp_authority_public_key, - false, - ) - .await; -} - -async fn initialize_jd( - tx_status: async_channel::Sender>, - task_collector: Arc>>, - upstream_config: lib::proxy_config::Upstream, - timeout: Duration, -) { - let proxy_config = process_cli_args().unwrap(); - let test_only_do_not_send_solution_to_tp = proxy_config - .test_only_do_not_send_solution_to_tp - .unwrap_or(false); - - // Format `Upstream` connection address - let mut parts = upstream_config.pool_address.split(':'); - let address = parts - .next() - .unwrap_or_else(|| panic!("Invalid pool address {}", upstream_config.pool_address)); - let port = parts - .next() - .and_then(|p| p.parse::().ok()) - .unwrap_or_else(|| panic!("Invalid pool address {}", upstream_config.pool_address)); - let upstream_addr = SocketAddr::new( - IpAddr::from_str(address) - 
.unwrap_or_else(|_| panic!("Invalid pool address {}", upstream_config.pool_address)), - port, - ); - - // When Downstream receive a share that meets bitcoin target it transform it in a - // SubmitSolution and send it to the TemplateReceiver - let (send_solution, recv_solution) = bounded(10); - - // Instantiate a new `Upstream` (SV2 Pool) - let upstream = match lib::upstream_sv2::Upstream::new( - upstream_addr, - upstream_config.authority_pubkey, - 0, // TODO - upstream_config.pool_signature.clone(), - status::Sender::Upstream(tx_status.clone()), - task_collector.clone(), - Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), - ) - .await - { - Ok(upstream) => upstream, - Err(e) => { - error!("Failed to create upstream: {}", e); - panic!() - } - }; - - // Start receiving messages from the SV2 Upstream role - if let Err(e) = lib::upstream_sv2::Upstream::parse_incoming(upstream.clone()) { - error!("failed to create sv2 parser: {}", e); - panic!() - } - - match lib::upstream_sv2::Upstream::setup_connection( - upstream.clone(), - proxy_config.min_supported_version, - proxy_config.max_supported_version, - ) - .await - { - Ok(_) => info!("Connected to Upstream!"), - Err(e) => { - error!("Failed to connect to Upstream EXITING! 
: {}", e); - panic!() - } - } - - // Format `Downstream` connection address - let downstream_addr = SocketAddr::new( - IpAddr::from_str(&proxy_config.downstream_address).unwrap(), - proxy_config.downstream_port, - ); - - // Initialize JD part - let mut parts = proxy_config.tp_address.split(':'); - let ip_tp = parts.next().unwrap().to_string(); - let port_tp = parts.next().unwrap().parse::().unwrap(); - - let mut parts = upstream_config.jd_address.split(':'); - let ip_jd = parts.next().unwrap().to_string(); - let port_jd = parts.next().unwrap().parse::().unwrap(); - let jd = match JobDeclarator::new( - SocketAddr::new(IpAddr::from_str(ip_jd.as_str()).unwrap(), port_jd), - upstream_config.authority_pubkey.into_bytes(), - proxy_config.clone(), - upstream.clone(), - task_collector.clone(), - ) - .await - { - Ok(c) => c, - Err(e) => { - let _ = tx_status - .send(status::Status { - state: status::State::UpstreamShutdown(e), - }) - .await; + error!("Job Declarator Client Config error: {}", e); return; } }; - // Wait for downstream to connect - let downstream = lib::downstream::listen_for_downstream_mining( - downstream_addr, - Some(upstream), - send_solution, - proxy_config.withhold, - proxy_config.authority_public_key, - proxy_config.authority_secret_key, - proxy_config.cert_validity_sec, - task_collector.clone(), - status::Sender::Downstream(tx_status.clone()), - vec![], - Some(jd.clone()), - ) - .await - .unwrap(); - - TemplateRx::connect( - SocketAddr::new(IpAddr::from_str(ip_tp.as_str()).unwrap(), port_tp), - recv_solution, - status::Sender::TemplateReceiver(tx_status.clone()), - Some(jd.clone()), - downstream, - task_collector, - Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), - vec![], - proxy_config.tp_authority_public_key, - test_only_do_not_send_solution_to_tp, - ) - .await; + let jdc = JobDeclaratorClient::new(proxy_config); + jdc.start().await; } From c5617302092bd28a0852fc3cba0d903be8db98af Mon Sep 17 00:00:00 2001 From: Fi3 Date: Fri, 19 Jul 2024 
11:25:42 +0200 Subject: [PATCH 056/101] Update roles/test-utils/mining-device/src/main.rs Co-authored-by: plebhash <147345153+plebhash@users.noreply.github.com> --- roles/test-utils/mining-device/src/main.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/roles/test-utils/mining-device/src/main.rs b/roles/test-utils/mining-device/src/main.rs index e2208bff4..ba527e7c5 100644 --- a/roles/test-utils/mining-device/src/main.rs +++ b/roles/test-utils/mining-device/src/main.rs @@ -327,7 +327,11 @@ impl Device { let mut notify_changes_to_mining_thread = self_mutex .safe_lock(|s| s.notify_changes_to_mining_thread.clone()) .unwrap(); - if notify_changes_to_mining_thread.should_send { + if notify_changes_to_mining_thread.should_send + && (message_type == const_sv2::MESSAGE_TYPE_NEW_MINING_JOB + || message_type == const_sv2::MESSAGE_TYPE_SET_NEW_PREV_HASH + || message_type == const_sv2::MESSAGE_TYPE_SET_TARGET) + { notify_changes_to_mining_thread .sender .send(()) From 511b72cd37027d664f6d55e6c55d15f609ab0fc3 Mon Sep 17 00:00:00 2001 From: Johnny Santos Date: Tue, 9 Jul 2024 20:34:15 -0300 Subject: [PATCH 057/101] Add requirements on interoperability tests --- INTEROPERABILITY-TESTS.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/INTEROPERABILITY-TESTS.md b/INTEROPERABILITY-TESTS.md index 8791eff81..a78d08564 100644 --- a/INTEROPERABILITY-TESTS.md +++ b/INTEROPERABILITY-TESTS.md @@ -2,6 +2,10 @@ How to test Sv2 compliant software against the SRI implementation. +## Requirements + +- [Cargo LLVM Cov](https://github.com/taiki-e/cargo-llvm-cov#installation) + ## With Message Generator (MG) First thing you need to write a test that can be executed by the message generator. 
In order to do From 92bddbec271b01b0e1f3e5aa03fd938c6632604f Mon Sep 17 00:00:00 2001 From: Johnny Santos Date: Sat, 29 Jun 2024 21:48:38 -0300 Subject: [PATCH 058/101] Fix panic on premature exit via ctrl-c (or signal) Changes: - Add listener on exit signals - Add channel to unbind listener as well - Organize main fn to handle signals and ownership of sockets - Fix typos in docs - Format imports - Remove unused spawn - Simplifies the code a bit reducing nesting and matches with unused arms --- .../mining-proxy/src/lib/downstream_mining.rs | 114 +++++++++--------- roles/mining-proxy/src/lib/upstream_mining.rs | 34 +++--- roles/mining-proxy/src/main.rs | 48 ++++++-- 3 files changed, 112 insertions(+), 84 deletions(-) diff --git a/roles/mining-proxy/src/lib/downstream_mining.rs b/roles/mining-proxy/src/lib/downstream_mining.rs index 188055119..907b3d6b0 100644 --- a/roles/mining-proxy/src/lib/downstream_mining.rs +++ b/roles/mining-proxy/src/lib/downstream_mining.rs @@ -1,7 +1,14 @@ #![allow(dead_code)] -use super::upstream_mining::{StdFrame as UpstreamFrame, UpstreamMiningNode}; +use core::convert::TryInto; +use std::sync::Arc; + use async_channel::{Receiver, SendError, Sender}; +use tokio::{net::TcpListener, sync::oneshot::Receiver as TokioReceiver}; +use tracing::{info, warn}; + +use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; +use network_helpers_sv2::plain_connection_tokio::PlainConnection; use roles_logic_sv2::{ common_messages_sv2::{SetupConnection, SetupConnectionSuccess}, common_properties::{CommonDownstreamData, IsDownstream, IsMiningDownstream}, @@ -15,9 +22,8 @@ use roles_logic_sv2::{ routing_logic::MiningProxyRoutingLogic, utils::Mutex, }; -use tracing::info; -use codec_sv2::{StandardEitherFrame, StandardSv2Frame}; +use super::upstream_mining::{ProxyRemoteSelector, StdFrame as UpstreamFrame, UpstreamMiningNode}; pub type Message = MiningDeviceMessages<'static>; pub type StdFrame = StandardSv2Frame; @@ -25,8 +31,8 @@ pub type EitherFrame = 
StandardEitherFrame; /// 1 to 1 connection with a downstream node that implement the mining (sub)protocol can be either /// a mining device or a downstream proxy. -/// A downstream can only be linked with an upstream at a time. Support multi upstrems for -/// downstream do no make much sense. +/// A downstream can only be linked with an upstream at a time. Support multi upstreams for +/// downstream do not make much sense. #[derive(Debug)] pub struct DownstreamMiningNode { id: u32, @@ -47,12 +53,12 @@ pub enum DownstreamMiningNodeStatus { #[derive(Debug, Clone)] #[allow(clippy::enum_variant_names)] pub enum Channel { - DowntreamHomUpstreamGroup { + DownstreamHomUpstreamGroup { data: CommonDownstreamData, channel_id: u32, group_id: u32, }, - DowntreamHomUpstreamExtended { + DownstreamHomUpstreamExtended { data: CommonDownstreamData, channel_id: u32, group_id: u32, @@ -101,7 +107,7 @@ impl DownstreamMiningNodeStatus { match self { DownstreamMiningNodeStatus::Initializing => panic!(), DownstreamMiningNodeStatus::Paired(data) => { - let channel = Channel::DowntreamHomUpstreamGroup { + let channel = Channel::DownstreamHomUpstreamGroup { data: *data, channel_id, group_id, @@ -117,7 +123,7 @@ impl DownstreamMiningNodeStatus { match self { DownstreamMiningNodeStatus::Initializing => panic!(), DownstreamMiningNodeStatus::Paired(data) => { - let channel = Channel::DowntreamHomUpstreamExtended { + let channel = Channel::DownstreamHomUpstreamExtended { data: *data, channel_id, group_id, @@ -153,10 +159,6 @@ impl DownstreamMiningNodeStatus { } } -use core::convert::TryInto; -use std::sync::Arc; -use tokio::task; - impl PartialEq for DownstreamMiningNode { fn eq(&self, other: &Self) -> bool { self.id == other.id @@ -316,7 +318,7 @@ impl DownstreamMiningNode { pub fn exit(self_: Arc>) { if let Some(up) = self_.safe_lock(|s| s.upstream.clone()).unwrap() { - super::upstream_mining::UpstreamMiningNode::remove_dowstream(up, &self_); + UpstreamMiningNode::remove_dowstream(up, &self_); 
}; self_ .safe_lock(|s| { @@ -326,8 +328,6 @@ impl DownstreamMiningNode { } } -use super::upstream_mining::ProxyRemoteSelector; - /// It impl UpstreamMining cause the proxy act as an upstream node for the DownstreamMiningNode impl ParseDownstreamMiningMessages< @@ -414,14 +414,14 @@ impl match &self.status { DownstreamMiningNodeStatus::Initializing => todo!(), DownstreamMiningNodeStatus::Paired(_) => todo!(), - DownstreamMiningNodeStatus::ChannelOpened(Channel::DowntreamHomUpstreamGroup { + DownstreamMiningNodeStatus::ChannelOpened(Channel::DownstreamHomUpstreamGroup { .. }) => { let remote = self.upstream.as_ref().unwrap(); let message = Mining::SubmitSharesStandard(m); Ok(SendTo::RelayNewMessageToRemote(remote.clone(), message)) } - DownstreamMiningNodeStatus::ChannelOpened(Channel::DowntreamHomUpstreamExtended { + DownstreamMiningNodeStatus::ChannelOpened(Channel::DownstreamHomUpstreamExtended { .. }) => { // Safe unwrap is channel have been opened it means that the dowsntream is paired @@ -483,44 +483,48 @@ impl } } -use network_helpers_sv2::plain_connection_tokio::PlainConnection; -use std::net::SocketAddr; -use tokio::net::TcpListener; - -pub async fn listen_for_downstream_mining(address: SocketAddr) { - info!("Listening for downstream mining connections on {}", address); - let listner = TcpListener::bind(address).await.unwrap(); +pub async fn listen_for_downstream_mining( + listener: TcpListener, + mut shutdown_rx: TokioReceiver<()>, +) { let mut ids = roles_logic_sv2::utils::Id::new(); - - while let Ok((stream, _)) = listner.accept().await { - let (receiver, sender): (Receiver, Sender) = - PlainConnection::new(stream).await; - let node = DownstreamMiningNode::new(receiver, sender, ids.next()); - - task::spawn(async move { - let mut incoming: StdFrame = node.receiver.recv().await.unwrap().try_into().unwrap(); - let message_type = incoming.get_header().unwrap().msg_type(); - let payload = incoming.payload(); - let routing_logic = 
super::get_common_routing_logic(); - let node = Arc::new(Mutex::new(node)); - - // Call handle_setup_connection or fail - match DownstreamMiningNode::handle_message_common( - node.clone(), - message_type, - payload, - routing_logic, - ) { - Ok(SendToCommon::RelayNewMessageToRemote(_, message)) => { - let message = match message { - roles_logic_sv2::parsers::CommonMessages::SetupConnectionSuccess(m) => m, - _ => panic!(), - }; - DownstreamMiningNode::start(node, message).await + loop { + tokio::select! { + accept_result = listener.accept() => { + let (stream, _) = accept_result.expect("failed to accept downstream connection"); + let (receiver, sender): (Receiver, Sender) = + PlainConnection::new(stream).await; + let node = DownstreamMiningNode::new(receiver, sender, ids.next()); + + let mut incoming: StdFrame = + node.receiver.recv().await.unwrap().try_into().unwrap(); + let message_type = incoming.get_header().unwrap().msg_type(); + let payload = incoming.payload(); + let routing_logic = super::get_common_routing_logic(); + let node = Arc::new(Mutex::new(node)); + + // Call handle_setup_connection or fail + let common_msg = DownstreamMiningNode::handle_message_common( + node.clone(), + message_type, + payload, + routing_logic + ).expect("failed to process downstream message"); + + + if let SendToCommon::RelayNewMessageToRemote(_, relay_msg) = common_msg { + if let roles_logic_sv2::parsers::CommonMessages::SetupConnectionSuccess(setup_msg) = relay_msg { + DownstreamMiningNode::start(node, setup_msg).await; + } + } else { + warn!("Received unexpected message from downstream"); } - _ => panic!(), } - }); + _ = &mut shutdown_rx => { + info!("Closing listener"); + return; + } + } } } @@ -529,14 +533,14 @@ impl IsDownstream for DownstreamMiningNode { match self.status { DownstreamMiningNodeStatus::Initializing => panic!(), DownstreamMiningNodeStatus::Paired(data) => data, - DownstreamMiningNodeStatus::ChannelOpened(Channel::DowntreamHomUpstreamGroup { + 
DownstreamMiningNodeStatus::ChannelOpened(Channel::DownstreamHomUpstreamGroup { data, .. }) => data, DownstreamMiningNodeStatus::ChannelOpened( Channel::DowntreamNonHomUpstreamExtended { data, .. }, ) => data, - DownstreamMiningNodeStatus::ChannelOpened(Channel::DowntreamHomUpstreamExtended { + DownstreamMiningNodeStatus::ChannelOpened(Channel::DownstreamHomUpstreamExtended { data, .. }) => data, diff --git a/roles/mining-proxy/src/lib/upstream_mining.rs b/roles/mining-proxy/src/lib/upstream_mining.rs index e3f6eef99..96683d0f3 100644 --- a/roles/mining-proxy/src/lib/upstream_mining.rs +++ b/roles/mining-proxy/src/lib/upstream_mining.rs @@ -1,14 +1,16 @@ #![allow(dead_code)] -use super::EXTRANONCE_RANGE_1_LENGTH; -use roles_logic_sv2::utils::Id; +use core::convert::TryInto; +use std::{collections::HashMap, net::SocketAddr, sync::Arc, time::Duration}; -use super::downstream_mining::{Channel, DownstreamMiningNode, StdFrame as DownstreamFrame}; use async_channel::{Receiver, SendError, Sender}; use async_recursion::async_recursion; +use nohash_hasher::BuildNoHashHasher; +use tokio::{net::TcpStream, task}; +use tracing::{debug, error, info}; + use codec_sv2::{HandshakeRole, Initiator, StandardEitherFrame, StandardSv2Frame}; use network_helpers_sv2::noise_connection_tokio::Connection; -use nohash_hasher::BuildNoHashHasher; use roles_logic_sv2::{ channel_logic::{ channel_factory::{ExtendedChannelKind, OnNewShare, ProxyExtendedChannelFactory, Share}, @@ -26,14 +28,15 @@ use roles_logic_sv2::{ routing_logic::MiningProxyRoutingLogic, selectors::{DownstreamMiningSelector, ProxyDownstreamMiningSelector as Prs}, template_distribution_sv2::SubmitSolution, - utils::{GroupId, Mutex}, + utils::{GroupId, Id, Mutex}, }; -use std::{collections::HashMap, sync::Arc}; -use tokio::{net::TcpStream, task}; -use tracing::error; - use stratum_common::bitcoin::TxOut; +use super::{ + downstream_mining::{Channel, DownstreamMiningNode, StdFrame as DownstreamFrame}, + EXTRANONCE_RANGE_1_LENGTH, 
+}; + pub type Message = PoolMessages<'static>; pub type StdFrame = StandardSv2Frame; pub type EitherFrame = StandardEitherFrame; @@ -188,10 +191,6 @@ pub struct UpstreamMiningNode { reconnect: bool, } -use core::convert::TryInto; -use std::{net::SocketAddr, time::Duration}; -use tracing::{debug, info}; - /// It assume that endpoint NEVER change flags and version! /// I can open both extended and group channel with upstream. impl UpstreamMiningNode { @@ -471,8 +470,8 @@ impl UpstreamMiningNode { super::downstream_mining::DownstreamMiningNodeStatus::ChannelOpened( channel, ) => match channel { - Channel::DowntreamHomUpstreamGroup { channel_id, .. } => Some(*channel_id), - Channel::DowntreamHomUpstreamExtended { channel_id, .. } => { + Channel::DownstreamHomUpstreamGroup { channel_id, .. } => Some(*channel_id), + Channel::DownstreamHomUpstreamExtended { channel_id, .. } => { Some(*channel_id) } Channel::DowntreamNonHomUpstreamExtended { .. } => todo!(), @@ -1048,7 +1047,7 @@ impl .ok_or(Error::NoDownstreamsConnected)?; for downstream in downstreams { match downstream.safe_lock(|r| r.get_channel().clone()).unwrap() { - Channel::DowntreamHomUpstreamGroup { + Channel::DownstreamHomUpstreamGroup { channel_id, group_id, .. @@ -1257,9 +1256,10 @@ impl IsMiningUpstream for UpstreamMin #[cfg(test)] mod tests { - use super::*; use std::net::{IpAddr, Ipv4Addr}; + use super::*; + #[test] fn new_upstream_minining_node() { let id = 0; diff --git a/roles/mining-proxy/src/main.rs b/roles/mining-proxy/src/main.rs index 0725c189b..5931990ac 100644 --- a/roles/mining-proxy/src/main.rs +++ b/roles/mining-proxy/src/main.rs @@ -3,7 +3,7 @@ //! Downstream means another proxy or a mining device //! //! UpstreamMining is the trait that a proxy must implement in order to -//! understant Downstream mining messages. +//! understand Downstream mining messages. //! //! DownstreamMining is the trait that a proxy must implement in order to //! 
understand Upstream mining messages @@ -18,12 +18,15 @@ //! A Downstream that signal the incapacity to handle group channels can open only one channel. //! #![allow(special_module_name)] -mod lib; +use std::{net::SocketAddr, sync::Arc}; + +use tokio::{net::TcpListener, sync::oneshot}; +use tracing::{error, info}; use lib::Config; use roles_logic_sv2::utils::{GroupId, Mutex}; -use std::{net::SocketAddr, sync::Arc}; -use tracing::{error, info}; + +mod lib; mod args { use std::path::PathBuf; @@ -89,12 +92,12 @@ mod args { } /// 1. the proxy scan all the upstreams and map them -/// 2. donwstream open a connetcion with proxy +/// 2. downstream open a connection with proxy /// 3. downstream send SetupConnection -/// 4. a mining_channle::Upstream is created +/// 4. a mining_channels::Upstream is created /// 5. upstream_mining::UpstreamMiningNodes is used to pair this downstream with the most suitable /// upstream -/// 6. mining_channle::Upstream create a new downstream_mining::DownstreamMiningNode embedding +/// 6. mining_channels::Upstream create a new downstream_mining::DownstreamMiningNode embedding /// itself in it /// 7. 
normal operation between the paired downstream_mining::DownstreamMiningNode and /// upstream_mining::UpstreamMiningNode begin @@ -126,16 +129,37 @@ async fn main() { lib::initialize_r_logic(&config.upstreams, group_id, config.clone()).await, )) .expect("BUG: Failed to set ROUTING_LOGIC"); - info!("PROXY INITIALIZING"); + + info!("Initializing upstream scanner"); lib::initialize_upstreams(config.min_supported_version, config.max_supported_version).await; - info!("PROXY INITIALIZED"); + info!("Initializing downstream listener"); - // Wait for downstream connection let socket = SocketAddr::new( config.listen_address.parse().unwrap(), config.listen_mining_port, ); + let listener = TcpListener::bind(socket).await.unwrap(); + + info!("Listening for downstream mining connections on {}", socket); + + let (shutdown_tx, shutdown_rx) = oneshot::channel(); + + let (_, res) = tokio::join!( + // Wait for downstream connection + lib::downstream_mining::listen_for_downstream_mining(listener, shutdown_rx), + // handle SIGTERM/QUIT / ctrl+c + tokio::spawn(async { + tokio::signal::ctrl_c() + .await + .expect("Failed to listen to signals"); + let _ = shutdown_tx.send(()); + info!("Interrupt received"); + }) + ); + + if let Err(e) = res { + panic!("Failed to wait for clean exit: {:?}", e); + } - info!("PROXY INITIALIZED"); - crate::lib::downstream_mining::listen_for_downstream_mining(socket).await + info!("Shutdown done"); } From 5b5d28e4662be0866d3cbe317983e57f951bd1f5 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Tue, 9 Jul 2024 11:15:52 +0300 Subject: [PATCH 059/101] Remove dead code from `downstream_mining.rs` (cherry picked from commit f91be81b129b3f2008d0168fc1599881094f40ce) Co-authored-by: jbesraa --- .../mining-proxy/src/lib/downstream_mining.rs | 53 +------------------ roles/mining-proxy/src/lib/upstream_mining.rs | 1 - 2 files changed, 2 insertions(+), 52 deletions(-) diff --git a/roles/mining-proxy/src/lib/downstream_mining.rs 
b/roles/mining-proxy/src/lib/downstream_mining.rs index 907b3d6b0..da8901c49 100644 --- a/roles/mining-proxy/src/lib/downstream_mining.rs +++ b/roles/mining-proxy/src/lib/downstream_mining.rs @@ -1,7 +1,4 @@ -#![allow(dead_code)] - -use core::convert::TryInto; -use std::sync::Arc; +use std::{convert::TryInto, sync::Arc}; use async_channel::{Receiver, SendError, Sender}; use tokio::{net::TcpListener, sync::oneshot::Receiver as TokioReceiver}; @@ -39,7 +36,6 @@ pub struct DownstreamMiningNode { receiver: Receiver, sender: Sender, pub status: DownstreamMiningNodeStatus, - pub prev_job_id: Option, upstream: Option>>, } @@ -61,14 +57,6 @@ pub enum Channel { DownstreamHomUpstreamExtended { data: CommonDownstreamData, channel_id: u32, - group_id: u32, - }, - // Below variant is not supported cause do not have much sense - // DowntreamNonHomUpstreamGroup { data: CommonDownstreamData, group_ids: Vec, extended_ids: Vec}, - DowntreamNonHomUpstreamExtended { - data: CommonDownstreamData, - group_ids: Vec, - extended_ids: Vec, }, } @@ -119,14 +107,13 @@ impl DownstreamMiningNodeStatus { } } - fn open_channel_for_down_hom_up_extended(&mut self, channel_id: u32, group_id: u32) { + fn open_channel_for_down_hom_up_extended(&mut self, channel_id: u32, _group_id: u32) { match self { DownstreamMiningNodeStatus::Initializing => panic!(), DownstreamMiningNodeStatus::Paired(data) => { let channel = Channel::DownstreamHomUpstreamExtended { data: *data, channel_id, - group_id, }; let self_ = Self::ChannelOpened(channel); let _ = std::mem::replace(self, self_); @@ -134,29 +121,6 @@ impl DownstreamMiningNodeStatus { DownstreamMiningNodeStatus::ChannelOpened(..) 
=> panic!("Channel already opened"), } } - - fn add_extended_from_non_hom_for_up_extended(&mut self, id: u32) { - match self { - DownstreamMiningNodeStatus::Initializing => panic!(), - DownstreamMiningNodeStatus::Paired(data) => { - let channel = Channel::DowntreamNonHomUpstreamExtended { - data: *data, - group_ids: vec![], - extended_ids: vec![id], - }; - let self_ = Self::ChannelOpened(channel); - let _ = std::mem::replace(self, self_); - } - DownstreamMiningNodeStatus::ChannelOpened( - Channel::DowntreamNonHomUpstreamExtended { extended_ids, .. }, - ) => { - if !extended_ids.contains(&id) { - extended_ids.push(id) - } - } - _ => panic!(), - } - } } impl PartialEq for DownstreamMiningNode { @@ -179,16 +143,12 @@ impl DownstreamMiningNode { self.status .open_channel_for_down_hom_up_extended(channel_id, group_id); } - pub fn add_extended_from_non_hom_for_up_extended(&mut self, id: u32) { - self.status.add_extended_from_non_hom_for_up_extended(id); - } pub fn new(receiver: Receiver, sender: Sender, id: u32) -> Self { Self { receiver, sender, status: DownstreamMiningNodeStatus::Initializing, - prev_job_id: None, upstream: None, id, } @@ -430,12 +390,6 @@ impl let res = UpstreamMiningNode::handle_std_shr(remote.clone(), m).unwrap(); Ok(SendTo::Respond(res)) } - DownstreamMiningNodeStatus::ChannelOpened( - Channel::DowntreamNonHomUpstreamExtended { .. }, - ) => { - // unreachable cause the proxy do not support this kind of channel - unreachable!(); - } } } @@ -537,9 +491,6 @@ impl IsDownstream for DownstreamMiningNode { data, .. }) => data, - DownstreamMiningNodeStatus::ChannelOpened( - Channel::DowntreamNonHomUpstreamExtended { data, .. }, - ) => data, DownstreamMiningNodeStatus::ChannelOpened(Channel::DownstreamHomUpstreamExtended { data, .. 
diff --git a/roles/mining-proxy/src/lib/upstream_mining.rs b/roles/mining-proxy/src/lib/upstream_mining.rs index 96683d0f3..5ad012d10 100644 --- a/roles/mining-proxy/src/lib/upstream_mining.rs +++ b/roles/mining-proxy/src/lib/upstream_mining.rs @@ -474,7 +474,6 @@ impl UpstreamMiningNode { Channel::DownstreamHomUpstreamExtended { channel_id, .. } => { Some(*channel_id) } - Channel::DowntreamNonHomUpstreamExtended { .. } => todo!(), }, }) .unwrap() From fd098189be547f6c278ddd5fecafbb830f2fe653 Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 1 Jul 2024 11:44:25 -0300 Subject: [PATCH 060/101] avoid JDS dropping connection on SubmitSolution with missing txs this is the actual fix for #912 use handle_result! macro following suggestion by @lorbax https://github.com/stratum-mining/stratum/pull/1025#issuecomment-2245767959 the implementation diverged a bit from the suggestion, but it was still a good reminder that we should leverage `handle_result!` macro here --- roles/jd-server/src/lib/job_declarator/mod.rs | 45 ++++++++----------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/roles/jd-server/src/lib/job_declarator/mod.rs b/roles/jd-server/src/lib/job_declarator/mod.rs index 34d9e66de..dbfce89a6 100644 --- a/roles/jd-server/src/lib/job_declarator/mod.rs +++ b/roles/jd-server/src/lib/job_declarator/mod.rs @@ -280,23 +280,18 @@ impl JobDeclaratorDownstream { match Self::collect_txs_in_job(self_mutex.clone()) { Ok(_) => { info!("All transactions in downstream job are recognized correctly by the JD Server"); - let hexdata = - match JobDeclaratorDownstream::get_block_hex( - self_mutex.clone(), - message, - ) { - Ok(inner) => inner, - Err(e) => { - error!( - "Received solution but encountered error: {:?}", - e - ); - recv.close(); - //TODO should we brake it? 
- break; - } - }; - let _ = new_block_sender.send(hexdata).await; + match JobDeclaratorDownstream::get_block_hex( + self_mutex.clone(), + message, + ) { + Ok(hexdata) => { + let _ = + new_block_sender.send(hexdata).await; + } + Err(e) => { + handle_result!(tx_status, Err(*e)) + } + }; } Err(error) => { error!("Missing transactions: {:?}", error); @@ -316,22 +311,20 @@ impl JobDeclaratorDownstream { .unwrap(); tokio::select! { _ = JDsMempool::add_tx_data_to_mempool(mempool, retrieve_transactions) => { - let hexdata = match JobDeclaratorDownstream::get_block_hex( + match JobDeclaratorDownstream::get_block_hex( self_mutex.clone(), message.clone(), ) { - Ok(inner) => inner, + Ok(hexdata) => { + let _ = new_block_sender.send(hexdata).await; + }, Err(e) => { - error!( - "Error retrieving transactions: {:?}", - e + handle_result!( + tx_status, + Err(*e) ); - recv.close(); - //TODO should we brake it? - break; } }; - let _ = new_block_sender.send(hexdata).await; } _ = tokio::time::sleep(Duration::from_secs(60)) => {} }; From a1977702c3067873b3a218765bcd8d9af2c71be9 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 12 Jul 2024 16:27:11 -0300 Subject: [PATCH 061/101] add MG test for #912 --- .github/workflows/mg.yaml | 12 ++ .../jds-config.toml | 20 +++ ...olution-while-processing-declared-job.json | 146 ++++++++++++++++++ ...olution-while-processing-declared-job.json | 79 ++++++++++ ...-solution-while-processing-declared-job.sh | 9 ++ 5 files changed, 266 insertions(+) create mode 100644 test/config/jds-receive-solution-while-processing-declared-job/jds-config.toml create mode 100644 test/message-generator/mock/jdc-mock-jds-receive-solution-while-processing-declared-job.json create mode 100644 test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.json create mode 100755 
test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.sh diff --git a/.github/workflows/mg.yaml b/.github/workflows/mg.yaml index 13aaef9e6..8ee27a9c3 100644 --- a/.github/workflows/mg.yaml +++ b/.github/workflows/mg.yaml @@ -67,6 +67,16 @@ jobs: - name: Run jds-do-not-stackoverflow-when-no-token run: sh ./test/message-generator/test/jds-do-not-stackoverflow-when-no-token/jds-do-not-stackoverflow-when-no-token.sh + jds-receive-solution-while-processing-declared-job: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install cargo-llvm-cov + run: cargo install cargo-llvm-cov + - name: Run jds-receive-solution-while-processing-declared-job + run: sh ./test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.sh + pool-sri-test-1-standard: runs-on: ubuntu-latest steps: @@ -171,6 +181,7 @@ jobs: jds-do-not-fail-on-wrong-tsdatasucc, jds-do-not-panic-if-jdc-close-connection, jds-do-not-stackoverflow-when-no-token, + jds-receive-solution-while-processing-declared-job, pool-sri-test-1-standard, pool-sri-test-close-channel, pool-sri-test-extended_0, @@ -192,6 +203,7 @@ jobs: [ "${{ needs.jds-do-not-fail-on-wrong-tsdatasucc.result }}" != "success" ] || [ "${{ needs.jds-do-not-panic-if-jdc-close-connection.result }}" != "success" ] || [ "${{ needs.jds-do-not-stackoverflow-when-no-token.result }}" != "success" ] || + [ "${{ needs.jds-receive-solution-while-processing-declared-job.result }}" != "success" ] || [ "${{ needs.pool-sri-test-1-standard.result }}" != "success" ] || [ "${{ needs.pool-sri-test-close-channel.result }}" != "success" ] || [ "${{ needs.pool-sri-test-extended_0.result }}" != "success" ] || diff --git a/test/config/jds-receive-solution-while-processing-declared-job/jds-config.toml b/test/config/jds-receive-solution-while-processing-declared-job/jds-config.toml new 
file mode 100644 index 000000000..576b35443 --- /dev/null +++ b/test/config/jds-receive-solution-while-processing-declared-job/jds-config.toml @@ -0,0 +1,20 @@ +# SRI Pool config +authority_public_key = "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" +authority_secret_key = "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" +cert_validity_sec = 3600 + +# list of compressed or uncompressed pubkeys for coinbase payout (only supports 1 item in the array at this point) +coinbase_outputs = [ + { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, +] + +listen_jd_address = "127.0.0.1:34264" + +core_rpc_url = "" +core_rpc_port = 48332 +core_rpc_user = "" +core_rpc_pass = "" +# Time interval used for JDS mempool update +[mempool_update_interval] +unit = "secs" +value = 1 diff --git a/test/message-generator/mock/jdc-mock-jds-receive-solution-while-processing-declared-job.json b/test/message-generator/mock/jdc-mock-jds-receive-solution-while-processing-declared-job.json new file mode 100644 index 000000000..67148be87 --- /dev/null +++ b/test/message-generator/mock/jdc-mock-jds-receive-solution-while-processing-declared-job.json @@ -0,0 +1,146 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Mock JDC", + "Send SetupConnection and await for SetupConnection.Success", + "Send AllocateMiningJobToken0 and await for AllocateMiningJobToken.Success0", + "Send AllocateMiningJobToken1 and await for AllocateMiningJobToken.Success1", + "Send DeclareMiningJob and await for ProvideMissingTransactions", + "Send SubmitSolution and assert that connection was sustained" + ], + "job_declaration_messages": [ + { + "message": { + "type": "AllocateMiningJobToken", + "user_identifier": "", + "request_id": 0, + "coinbase_tx_outputs": [] + }, + "id": "allocate_mining_job_token0" + }, + { + "message": { + "type": "AllocateMiningJobToken", + "user_identifier": "", + "request_id": 1, + "coinbase_tx_outputs": [] + }, 
+ "id": "allocate_mining_job_token1" + }, + { + "message": { + "type": "DeclareMiningJob", + "request_id": 0, + "mining_job_token": [1, 0, 0, 0], + "version": 0, + "coinbase_prefix": [2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 55, 2, 128, 121, 0, 83, 116, 114, 97, 116, 117, 109, 32, 118, 50, 32, 83, 82, 73, 32, 80, 111, 111, 108], + "coinbase_suffix": [255, 255, 255, 255, 2, 168, 247, 5, 42, 1, 0, 0, 0, 22, 0, 20, 235, 225, 183, 220, 194, 147, 204, 170, 14, 231, 67, 168, 111, 137, 223, 130, 88, 194, 8, 252, 0, 0, 0, 0, 0, 0, 0, 0, 38, 106, 36, 170, 33, 169, 237, 226, 201, 13, 62, 213, 94, 164, 53, 216, 76, 246, 14, 110, 125, 255, 48, 66, 12, 220, 90, 217, 209, 75, 129, 37, 185, 117, 116, 254, 30, 81, 159, 1, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + "tx_short_hash_nonce": 0, + "tx_short_hash_list": [[95, 135, 113, 8, 147, 179]], + "tx_hash_list_hash": [133, 189, 184, 91, 252, 203, 225, 42, 233, 16, 77, 119, 76, 134, 93, 189, 192, 159, 221, 130, 150, 196, 18, 32, 54, 212, 138, 255, 57, 63, 118, 74], + "excess_data": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "id": "declare_mining_job" + }, + { + "message": { + "type": "SubmitSolution", + "extranonce": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0], + "prev_hash": [184, 103, 138, 88, 153, 105, 236, 29, 123, 246, 107, 203, 1, 33, 10, 122, 188, 139, 218, 141, 62, 177, 158, 101, 125, 92, 214, 150, 199, 220, 29, 8], + "ntime": 0, + "nonce": 0, + "nbits": 0, + "version": 0 + }, + "id": "submit_solution" + } + ], + "frame_builders": [ + { + "type": "automatic", + "message_id": "test/message-generator/messages/common_messages.json::setup_connection_job_declarator" + }, + { + "type": "automatic", + "message_id": "allocate_mining_job_token0" + }, + { + 
"type": "automatic", + "message_id": "allocate_mining_job_token1" + }, + { + "type": "automatic", + "message_id": "declare_mining_job" + }, + { + "type": "automatic", + "message_id": "submit_solution" + } + ], + "actions": [ + { + "message_ids": ["setup_connection_job_declarator"], + "role": "client", + "results": [ + { + "type": "match_message_type", + "value": "0x01" + } + ], + "actiondoc": "This action sends SetupConnection and awaits for a SetupConnection.Success" + }, + { + "message_ids": ["allocate_mining_job_token0"], + "role": "client", + "results": [ + { + "type": "match_message_type", + "value": "0x51" + } + ], + "actiondoc": "This action sends AllocateMiningJobToken0 and awaits for a AllocateMiningJobToken.Success0" + }, + { + "message_ids": ["allocate_mining_job_token1"], + "role": "client", + "results": [ + { + "type": "match_message_type", + "value": "0x51" + } + ], + "actiondoc": "This action sends AllocateMiningJobToken1 and awaits for a AllocateMiningJobToken.Success1" + }, + { + "message_ids": ["declare_mining_job"], + "role": "client", + "results": [ + { + "type": "match_message_type", + "value": "0x55" + } + ], + "actiondoc": "This action sends DeclareMiningJob and awaits for a ProvideMissingTransactions" + }, + { + "message_ids": ["submit_solution"], + "role": "client", + "results": [ + { + "type": "sustain_connection" + } + ], + "actiondoc": "This action sends SubmitSolution and asserts that connection was sustained" + } + ], + "setup_commands": [], + "execution_commands": [], + "cleanup_commands": [], + "role": "client", + "downstream": { + "ip": "127.0.0.1", + "port": 34264, + "pub_key": "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" + } +} diff --git a/test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.json b/test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.json new file mode 
100644 index 000000000..27807bb82 --- /dev/null +++ b/test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.json @@ -0,0 +1,79 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Launch real JDS", + "Mock a JDC that sends SubmitSolution before ProvideMissingTransactionsSuccess" + ], + "frame_builders": [ + ], + "actions": [ + ], + "setup_commands": [ + { + "command": "cargo", + "args": [ + "llvm-cov", + "--no-report", + "run", + "-p", + "jd_server", + "--", + "-c", + "../test/config/jds-receive-solution-while-processing-declared-job/jds-config.toml" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "JD INITIALIZED", + "output_location": "StdOut", + "late_condition": false, + "condition": true + } + ], + "timer_secs": 300, + "warn_no_panic": false + } + } + }, + { + "command": "cargo", + "args": [ + "run", + "../../test/message-generator/mock/jdc-mock-jds-receive-solution-while-processing-declared-job.json" + ], + + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "TEST FAIL", + "output_location": "StdErr", + "condition": false, + "late_condition": false + } + ], + "timer_secs": 600, + "warn_no_panic": true + } + } + }, + { + "command": "sleep", + "args": ["1000"], + "conditions": "None" + } + ], + "execution_commands": [ + ], + "cleanup_commands": [ + { + "command": "pkill", + "args": ["-f", "jd_server", "-SIGINT"], + "late_condition": false, + "conditions": "None" + } + ], + "role": "none" +} diff --git a/test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.sh b/test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.sh new file mode 100755 index 000000000..39c8559ef --- /dev/null +++ 
b/test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.sh @@ -0,0 +1,9 @@ +cd roles +cargo llvm-cov --no-report -p jd_server + +cd ../utils/message-generator/ +cargo build + +RUST_LOG=debug cargo run ../../test/message-generator/test/jds-receive-solution-while-processing-declared-job/jds-receive-solution-while-processing-declared-job.json || { echo 'mg test failed' ; exit 1; } + +sleep 10 From 362276441f68403763d6dcafc46fb518be125313 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 26 Jul 2024 19:54:45 -0400 Subject: [PATCH 062/101] test_diff_management: adjust total_run_time 60s --- roles/translator/src/lib/downstream_sv1/diff_management.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/translator/src/lib/downstream_sv1/diff_management.rs b/roles/translator/src/lib/downstream_sv1/diff_management.rs index d4a7a47fb..3ee03c76f 100644 --- a/roles/translator/src/lib/downstream_sv1/diff_management.rs +++ b/roles/translator/src/lib/downstream_sv1/diff_management.rs @@ -320,7 +320,7 @@ mod test { #[test] fn test_diff_management() { let expected_shares_per_minute = 1000.0; - let total_run_time = std::time::Duration::from_secs(30); + let total_run_time = std::time::Duration::from_secs(60); let initial_nominal_hashrate = measure_hashrate(5); let target = match roles_logic_sv2::utils::hash_rate_to_target( initial_nominal_hashrate, From 3fd1ff979addedc58f3e29648c58d534a06ab8f1 Mon Sep 17 00:00:00 2001 From: xyephy Date: Mon, 20 May 2024 23:35:25 +0300 Subject: [PATCH 063/101] Add flag check logic for Job declarator protocol --- benches/Cargo.lock | 6 +-- protocols/Cargo.lock | 2 +- .../common-messages/src/setup_connection.rs | 41 +++++++++++++++++-- 3 files changed, 41 insertions(+), 8 deletions(-) diff --git a/benches/Cargo.lock b/benches/Cargo.lock index 946b7c02b..396474838 100644 --- a/benches/Cargo.lock +++ b/benches/Cargo.lock @@ -373,7 +373,7 @@ dependencies = [ 
[[package]] name = "buffer_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "aes-gcm", ] @@ -497,7 +497,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "codec_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_sv2", "buffer_sv2", @@ -1564,7 +1564,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index fbe5ddcea..31450eea4 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -744,7 +744,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index 49312b686..bc1bb59bf 100644 --- a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -84,8 +84,23 @@ impl<'decoder> SetupConnection<'decoder> { work_selection && version_rolling } - // TODO - _ => todo!(), + Protocol::JobDeclarationProtocol => { + let available = available_flags.reverse_bits(); + let required = required_flags.reverse_bits(); + + let requires_async_job_mining_passed = (required >> 31) & 1 > 0; + let requires_async_job_mining_self = (available >> 31) & 1 > 0; + + let specific_flags_check = + !requires_async_job_mining_self || requires_async_job_mining_passed; + let general_flags_check = (available & required) == required; + + specific_flags_check && general_flags_check + } + Protocol::TemplateDistributionProtocol | Protocol::JobDistributionProtocol => { + // Assuming these protocols do not define flags + false + } } } @@ -391,13 
+406,31 @@ mod test { #[test] fn test_check_flag() { let protocol = crate::Protocol::MiningProtocol; - let flag_available = 0b_0000_0000_0000_0000_0000_0000_0000_0000; + let flag_avaiable = 0b_0000_0000_0000_0000_0000_0000_0000_0000; let flag_required = 0b_0000_0000_0000_0000_0000_0000_0000_0001; assert!(SetupConnection::check_flags( protocol, - flag_available, + flag_avaiable, flag_required )); + + let protocol = crate::Protocol::JobDeclarationProtocol; + + let available_flags = 0b_1000_0000_0000_0000_0000_0000_0000_0000; + let required_flags = 0b_1000_0000_0000_0000_0000_0000_0000_0000; + assert!(SetupConnection::check_flags( + protocol, + available_flags, + required_flags + )); + + let available_flags = 0b_0000_0000_0000_0000_0000_0000_0000_0000; + let required_flags = 0b_1000_0000_0000_0000_0000_0000_0000_0000; + assert!(!SetupConnection::check_flags( + protocol, + available_flags, + required_flags + )); } #[test] From bebd825f62a8a0ef67662365108ce134d97156a8 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Fri, 5 Jul 2024 07:36:19 -0400 Subject: [PATCH 064/101] Add MG test to test the flag change in setup connection --- .../messages/common_messages.json | 16 +++++ .../job-declarator-mock-invalid-flag.json | 38 ++++++++++ .../job-declarator-flag-test.json | 71 +++++++++++++++++++ .../job-declarator-flag-test.sh | 9 +++ 4 files changed, 134 insertions(+) create mode 100644 test/message-generator/mock/job-declarator-mock-invalid-flag.json create mode 100644 test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json create mode 100755 test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh diff --git a/test/message-generator/messages/common_messages.json b/test/message-generator/messages/common_messages.json index ff5d200dd..80ee1c68c 100644 --- a/test/message-generator/messages/common_messages.json +++ b/test/message-generator/messages/common_messages.json @@ -48,6 +48,22 @@ }, "id": "setup_connection_job_declarator" 
}, + { + "message": { + "type": "SetupConnection", + "protocol": 1, + "min_version": 2, + "max_version": 2, + "flags": 6, + "endpoint_host": "", + "endpoint_port": 0, + "vendor": "", + "hardware_version": "", + "firmware": "", + "device_id": "" + }, + "id": "setup_connection_job_declarator_with_no_async_flag" + }, { "message": { "type": "SetupConnectionSuccess", diff --git a/test/message-generator/mock/job-declarator-mock-invalid-flag.json b/test/message-generator/mock/job-declarator-mock-invalid-flag.json new file mode 100644 index 000000000..a14f0554b --- /dev/null +++ b/test/message-generator/mock/job-declarator-mock-invalid-flag.json @@ -0,0 +1,38 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Soft mock of JD", + "Connect to JDS", + "Receive Setup Connection Error as incorrect bits are set" + ], + "frame_builders": [ + { + "type": "automatic", + "message_id": "test/message-generator/messages/common_messages.json::setup_connection_job_declarator_with_no_async_flag" + } + ], + "actions": [ + { + "message_ids": ["setup_connection_job_declarator_with_no_async_flag"], + "role": "client", + "results": [ + { + "type": "match_message_type", + "value": "0x01" + } + + ], + "actiondoc": "This action sends SetupConnection and checks that .Success" + } + ], + "setup_commands": [], + "execution_commands": [], + "cleanup_commands": [], + "role": "client", + "downstream": { + "ip": "127.0.0.1", + "port": 34264, + "pub_key": "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" + } +} diff --git a/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json b/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json new file mode 100644 index 000000000..feb4cf571 --- /dev/null +++ b/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json @@ -0,0 +1,71 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Launch the jd-server" + ], + "frame_builders": [ + ], + "actions": [ + ], + 
"setup_commands": [ + { + "command": "cargo", + "args": [ + "llvm-cov", + "--no-report", + "run", + "-p", + "jd_server", + "--", + "-c", + "../test/config/jds-do-not-fail-on-wrong-txdatasucc/jds-config.toml" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "JD INITIALIZED", + "output_location": "StdOut", + "late_condition": false, + "condition": true + } + ], + "timer_secs": 300, + "warn_no_panic": false + } + } + }, + { + "command": "cargo", + "args": [ + "run", + "../../test/message-generator/mock/job-declarator-mock-invalid-flag.json" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "MATCHED MESSAGE TYPE 1", + "output_location": "StdOut", + "late_condition": false, + "condition": true + } + ], + "timer_secs": 600, + "warn_no_panic": false + } + } + } + ], + "execution_commands": [ + ], + "cleanup_commands": [ + { + "command": "pkill", + "args": ["-f", "jd_server", "-SIGINT"], + "conditions": "None" + } + ], + "role": "none" +} diff --git a/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh b/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh new file mode 100755 index 000000000..e11ba44a6 --- /dev/null +++ b/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh @@ -0,0 +1,9 @@ +cd roles +cargo llvm-cov --no-report -p pool_sv2 + +cd ../utils/message-generator/ +cargo build + +RUST_LOG=debug cargo run ../../test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json || { echo 'mg test failed' ; exit 1; } + +sleep 10 From bac8c2e44e28ab9dda66cbbe254f6a50bdb77d08 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Sat, 6 Jul 2024 04:21:02 -0400 Subject: [PATCH 065/101] Add flag check at setup connection --- benches/Cargo.lock | 6 +- protocols/Cargo.lock | 2 +- .../common-messages/src/setup_connection.rs | 20 +- roles/Cargo.lock | 2 +- roles/jd-server/src/lib/job_declarator/mod.rs | 175 
++++++++++++------ .../jds-config.toml | 20 ++ .../messages/common_messages.json | 2 +- ...s-setup-connection-mock-invalid-flag.json} | 7 +- .../jds-setup-connection-flag-test.json} | 4 +- .../jds-setup-connection-flag-test.sh | 9 + .../job-declarator-flag-test.sh | 9 - 11 files changed, 167 insertions(+), 89 deletions(-) create mode 100644 test/config/jds-setup-connection-flag-test/jds-config.toml rename test/message-generator/mock/{job-declarator-mock-invalid-flag.json => jdc-mock-jds-setup-connection-mock-invalid-flag.json} (91%) rename test/message-generator/test/{job-declarator-flag-test/job-declarator-flag-test.json => jds-setup-connection-flag-test/jds-setup-connection-flag-test.json} (90%) create mode 100755 test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh delete mode 100755 test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh diff --git a/benches/Cargo.lock b/benches/Cargo.lock index 396474838..946b7c02b 100644 --- a/benches/Cargo.lock +++ b/benches/Cargo.lock @@ -373,7 +373,7 @@ dependencies = [ [[package]] name = "buffer_sv2" -version = "1.1.0" +version = "1.0.0" dependencies = [ "aes-gcm", ] @@ -497,7 +497,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "codec_sv2" -version = "1.2.0" +version = "1.1.0" dependencies = [ "binary_sv2", "buffer_sv2", @@ -1564,7 +1564,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "sv1_api" -version = "1.0.1" +version = "1.0.0" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 31450eea4..fbe5ddcea 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -744,7 +744,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "sv1_api" -version = "1.0.1" +version = "1.0.0" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff 
--git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index bc1bb59bf..32acaa8fd 100644 --- a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -54,11 +54,11 @@ pub struct SetupConnection<'decoder> { impl<'decoder> SetupConnection<'decoder> { pub fn set_requires_standard_job(&mut self) { - self.flags |= 0b_0000_0000_0000_0000_0000_0000_0000_0001 + self.flags |= 0b_0000_0000_0000_0000_0000_0000_0000_0001; } pub fn set_async_job_nogotiation(&mut self) { - self.flags |= 0b_0000_0000_0000_0000_0000_0000_0000_0001 + self.flags |= 0b_0000_0000_0000_0000_0000_0000_0000_0001; } /// Check if passed flags support self flag @@ -71,11 +71,11 @@ impl<'decoder> SetupConnection<'decoder> { Protocol::MiningProtocol => { let available = available_flags.reverse_bits(); let required_flags = required_flags.reverse_bits(); - let requires_work_selection_passed = (required_flags >> 30) > 0; - let requires_version_rolling_passed = (required_flags >> 29) > 0; + let requires_work_selection_passed = required_flags >> 30 > 0; + let requires_version_rolling_passed = required_flags >> 29 > 0; - let requires_work_selection_self = (available >> 30) > 0; - let requires_version_rolling_self = (available >> 29) > 0; + let requires_work_selection_self = available >> 30 > 0; + let requires_version_rolling_self = available >> 29 > 0; let work_selection = !requires_work_selection_self || requires_work_selection_passed; @@ -98,7 +98,7 @@ impl<'decoder> SetupConnection<'decoder> { specific_flags_check && general_flags_check } Protocol::TemplateDistributionProtocol | Protocol::JobDistributionProtocol => { - // Assuming these protocols do not define flags + // These protocols do not define flags for setting up a connection. 
false } } @@ -325,7 +325,7 @@ impl<'decoder> binary_sv2::Decodable<'decoder> for Protocol { fn get_structure( _: &[u8], ) -> core::result::Result, binary_sv2::Error> { - let field: FieldMarker = 0_u8.into(); + let field: FieldMarker = (0_u8).into(); Ok(alloc::vec![field]) } fn from_decoded_fields( @@ -406,11 +406,11 @@ mod test { #[test] fn test_check_flag() { let protocol = crate::Protocol::MiningProtocol; - let flag_avaiable = 0b_0000_0000_0000_0000_0000_0000_0000_0000; + let flag_available = 0b_0000_0000_0000_0000_0000_0000_0000_0000; let flag_required = 0b_0000_0000_0000_0000_0000_0000_0000_0001; assert!(SetupConnection::check_flags( protocol, - flag_avaiable, + flag_available, flag_required )); diff --git a/roles/Cargo.lock b/roles/Cargo.lock index ea0dcfa1c..bf54dcc03 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -2053,7 +2053,7 @@ dependencies = [ [[package]] name = "sv1_api" -version = "1.0.1" +version = "1.0.0" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/roles/jd-server/src/lib/job_declarator/mod.rs b/roles/jd-server/src/lib/job_declarator/mod.rs index dbfce89a6..2c230c75b 100644 --- a/roles/jd-server/src/lib/job_declarator/mod.rs +++ b/roles/jd-server/src/lib/job_declarator/mod.rs @@ -3,12 +3,15 @@ use super::{error::JdsError, mempool::JDsMempool, status, Configuration, EitherF use async_channel::{Receiver, Sender}; use binary_sv2::{B0255, U256}; use codec_sv2::{HandshakeRole, Responder}; +use core::panic; use error_handling::handle_result; use key_utils::{Secp256k1PublicKey, Secp256k1SecretKey, SignatureService}; use network_helpers_sv2::noise_connection_tokio::Connection; use nohash_hasher::BuildNoHashHasher; use roles_logic_sv2::{ - common_messages_sv2::SetupConnectionSuccess, + common_messages_sv2::{ + Protocol, SetupConnection, SetupConnectionError, SetupConnectionSuccess, + }, handlers::job_declaration::{ParseClientJobDeclarationMessages, SendTo}, job_declaration_sv2::{DeclareMiningJob, SubmitSolutionJd}, 
parsers::{JobDeclaration, PoolMessages as JdsMessages}, @@ -175,8 +178,10 @@ impl JobDeclaratorDownstream { for transaction in job_transactions { match transaction { TransactionState::PresentInMempool(txid) => known_transactions.push(txid), - TransactionState::Missing => continue, - }; + TransactionState::Missing => { + continue; + } + } } known_transactions } @@ -230,26 +235,26 @@ impl JobDeclaratorDownstream { Ok(SendTo::Respond(m)) => { match m { JobDeclaration::AllocateMiningJobToken(_) => { - error!("Send unexpected message: AMJT") + error!("Send unexpected message: AMJT"); } JobDeclaration::AllocateMiningJobTokenSuccess(_) => { - debug!("Send message: AMJTS") + debug!("Send message: AMJTS"); } JobDeclaration::DeclareMiningJob(_) => { error!("Send unexpected message: DMJ"); } JobDeclaration::DeclareMiningJobError(_) => { - debug!("Send nmessage: DMJE") + debug!("Send nmessage: DMJE"); } JobDeclaration::DeclareMiningJobSuccess(_) => { debug!("Send message: DMJS. Updating the JDS mempool."); Self::send_txs_to_mempool(self_mutex.clone()).await; } JobDeclaration::IdentifyTransactions(_) => { - debug!("Send message: IT") + debug!("Send message: IT"); } JobDeclaration::IdentifyTransactionsSuccess(_) => { - error!("Send unexpected message: ITS") + error!("Send unexpected message: ITS"); } JobDeclaration::ProvideMissingTransactions(_) => { debug!("Send message: PMT. Updating the JDS mempool."); @@ -266,10 +271,17 @@ impl JobDeclaratorDownstream { error!("JD Server: unexpected relay new message {:?}", message); } Ok(SendTo::RelayNewMessageToRemote(remote, message)) => { - error!("JD Server: unexpected relay new message to remote. Remote: {:?}, Message: {:?}", remote, message); + error!( + "JD Server: unexpected relay new message to remote. Remote: {:?}, Message: {:?}", + remote, + message + ); } Ok(SendTo::RelaySameMessageToRemote(remote)) => { - error!("JD Server: unexpected relay same message to remote. 
Remote: {:?}", remote); + error!( + "JD Server: unexpected relay same message to remote. Remote: {:?}", + remote + ); } Ok(SendTo::Multiple(multiple)) => { error!("JD Server: unexpected multiple messages: {:?}", multiple); @@ -279,26 +291,33 @@ impl JobDeclaratorDownstream { Some(JobDeclaration::SubmitSolution(message)) => { match Self::collect_txs_in_job(self_mutex.clone()) { Ok(_) => { - info!("All transactions in downstream job are recognized correctly by the JD Server"); - match JobDeclaratorDownstream::get_block_hex( - self_mutex.clone(), - message, - ) { - Ok(hexdata) => { - let _ = - new_block_sender.send(hexdata).await; - } - Err(e) => { - handle_result!(tx_status, Err(*e)) - } - }; + info!( + "All transactions in downstream job are recognized correctly by the JD Server" + ); + let hexdata = + match JobDeclaratorDownstream::get_block_hex( + self_mutex.clone(), + message, + ) { + Ok(inner) => inner, + Err(e) => { + error!( + "Received solution but encountered error: {:?}", + e + ); + recv.close(); + //TODO should we brake it? 
+ break; + } + }; + let _ = new_block_sender.send(hexdata).await; } Err(error) => { error!("Missing transactions: {:?}", error); // TODO print here the ip of the downstream let known_transactions = JobDeclaratorDownstream::get_transactions_in_job( - self_mutex.clone(), + self_mutex.clone() ); let retrieve_transactions = AddTrasactionsToMempoolInner { @@ -327,7 +346,7 @@ impl JobDeclaratorDownstream { }; } _ = tokio::time::sleep(Duration::from_secs(60)) => {} - }; + } } }; } @@ -430,54 +449,94 @@ impl JobDeclarator { new_block_sender: Sender, sender_add_txs_to_mempool: Sender, ) { - let listner = TcpListener::bind(&config.listen_jd_address).await.unwrap(); - while let Ok((stream, _)) = listner.accept().await { + let listener = TcpListener::bind(&config.listen_jd_address).await.unwrap(); + + while let Ok((stream, _)) = listener.accept().await { let responder = Responder::from_authority_kp( &config.authority_public_key.into_bytes(), &config.authority_secret_key.into_bytes(), std::time::Duration::from_secs(config.cert_validity_sec), ) .unwrap(); + let addr = stream.peer_addr(); if let Ok((receiver, sender, _, _)) = Connection::new(stream, HandshakeRole::Responder(responder)).await { - let setup_message_from_proxy_jd = receiver.recv().await.unwrap(); - info!( - "Setup connection message from proxy: {:?}", - setup_message_from_proxy_jd - ); + match receiver.recv().await { + Ok(EitherFrame::Sv2(mut sv2_message)) => { + debug!("Received SV2 message: {:?}", sv2_message); + let payload = sv2_message.payload(); - let setup_connection_success_to_proxy = SetupConnectionSuccess { - used_version: 2, - // Setup flags for async_mining_allowed - flags: 0b_0000_0000_0000_0000_0000_0000_0000_0001, - }; - let sv2_frame: StdFrame = - JdsMessages::Common(setup_connection_success_to_proxy.into()) - .try_into() - .unwrap(); - let sv2_frame = sv2_frame.into(); - info!("Sending success message for proxy"); - sender.send(sv2_frame).await.unwrap(); + if let Ok(setup_connection) = + 
binary_sv2::from_bytes::(payload) + { + let flag = setup_connection.flags; + let is_valid = SetupConnection::check_flags( + Protocol::JobDeclarationProtocol, + flag, + 1, + ); - let jddownstream = Arc::new(Mutex::new(JobDeclaratorDownstream::new( - receiver.clone(), - sender.clone(), - &config, - mempool.clone(), - // each downstream has its own sender (multi producer single consumer) - sender_add_txs_to_mempool.clone(), - ))); + if is_valid { + let success_message = SetupConnectionSuccess { + used_version: 2, + flags: 0b_0000_0000_0000_0000_0000_0000_0000_0001, + }; + info!("Sending success message for proxy"); + let sv2_frame: StdFrame = JdsMessages::Common(success_message.into()) + .try_into() + .expect("Failed to convert setup connection response message to standard frame"); + + sender.send(sv2_frame.into()).await.unwrap(); + + let jddownstream = + Arc::new(Mutex::new(JobDeclaratorDownstream::new( + receiver.clone(), + sender.clone(), + &config, + mempool.clone(), + sender_add_txs_to_mempool.clone(), // each downstream has its own sender (multi producer single consumer) + ))); + + JobDeclaratorDownstream::start( + jddownstream, + status_tx.clone(), + new_block_sender.clone(), + ); + } else { + let error_message = SetupConnectionError { + flags: flag, + error_code: "unsupported-feature-flags" + .to_string() + .into_bytes() + .try_into() + .unwrap(), + }; + info!("Sending error message for proxy"); + let sv2_frame: StdFrame = JdsMessages::Common(error_message.into()) + .try_into() + .expect("Failed to convert setup connection response message to standard frame"); - JobDeclaratorDownstream::start( - jddownstream, - status_tx.clone(), - new_block_sender.clone(), - ); + sender.send(sv2_frame.into()).await.unwrap(); + } + } else { + error!("Error parsing SetupConnection message"); + } + } + Ok(EitherFrame::HandShake(handshake_message)) => { + error!( + "Unexpected handshake message from upstream: {:?} at {:?}", + handshake_message, addr + ); + } + Err(e) => { + 
error!("Error receiving message: {:?}", e); + } + } } else { - error!("Can not connect {:?}", addr); + error!("Cannot connect to {:?}", addr); } } } diff --git a/test/config/jds-setup-connection-flag-test/jds-config.toml b/test/config/jds-setup-connection-flag-test/jds-config.toml new file mode 100644 index 000000000..acda73227 --- /dev/null +++ b/test/config/jds-setup-connection-flag-test/jds-config.toml @@ -0,0 +1,20 @@ +# SRI Pool config +authority_public_key = "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" +authority_secret_key = "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" +cert_validity_sec = 3600 + +# list of compressed or uncompressed pubkeys for coinbase payout (only supports 1 item in the array at this point) +coinbase_outputs = [ + { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, +] + +listen_jd_address = "127.0.0.1:34264" + +core_rpc_url = "" +core_rpc_port = 18332 +core_rpc_user = "" +core_rpc_pass = "" +# Time interval used for JDS mempool update +[mempool_update_interval] +unit = "secs" +value = 1 \ No newline at end of file diff --git a/test/message-generator/messages/common_messages.json b/test/message-generator/messages/common_messages.json index 80ee1c68c..65cc32a9a 100644 --- a/test/message-generator/messages/common_messages.json +++ b/test/message-generator/messages/common_messages.json @@ -54,7 +54,7 @@ "protocol": 1, "min_version": 2, "max_version": 2, - "flags": 6, + "flags": 0, "endpoint_host": "", "endpoint_port": 0, "vendor": "", diff --git a/test/message-generator/mock/job-declarator-mock-invalid-flag.json b/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json similarity index 91% rename from test/message-generator/mock/job-declarator-mock-invalid-flag.json rename to test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json index a14f0554b..8cb2338d7 100644 --- 
a/test/message-generator/mock/job-declarator-mock-invalid-flag.json +++ b/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json @@ -2,7 +2,7 @@ "version": "2", "doc": [ "This test does", - "Soft mock of JD", + "Soft mock of JDC", "Connect to JDS", "Receive Setup Connection Error as incorrect bits are set" ], @@ -19,11 +19,10 @@ "results": [ { "type": "match_message_type", - "value": "0x01" + "value": "0x02" } - ], - "actiondoc": "This action sends SetupConnection and checks that .Success" + "actiondoc": "This action sends SetupConnection and checks that .Error" } ], "setup_commands": [], diff --git a/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json b/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json similarity index 90% rename from test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json rename to test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json index feb4cf571..403b1eb4f 100644 --- a/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json +++ b/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json @@ -19,7 +19,7 @@ "jd_server", "--", "-c", - "../test/config/jds-do-not-fail-on-wrong-txdatasucc/jds-config.toml" + "../test/config/jds-setup-connection-flag-test/jds-config.toml" ], "conditions": { "WithConditions": { @@ -40,7 +40,7 @@ "command": "cargo", "args": [ "run", - "../../test/message-generator/mock/job-declarator-mock-invalid-flag.json" + "../../test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json" ], "conditions": { "WithConditions": { diff --git a/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh b/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh new file mode 100755 index 000000000..b32716fe3 --- 
/dev/null +++ b/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh @@ -0,0 +1,9 @@ +cd roles +cargo llvm-cov --no-report -p pool_sv2 + +cd ../utils/message-generator/ +cargo build + +RUST_LOG=debug cargo run ../../test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json || { echo 'mg test failed' ; exit 1; } + +sleep 10 diff --git a/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh b/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh deleted file mode 100755 index e11ba44a6..000000000 --- a/test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.sh +++ /dev/null @@ -1,9 +0,0 @@ -cd roles -cargo llvm-cov --no-report -p pool_sv2 - -cd ../utils/message-generator/ -cargo build - -RUST_LOG=debug cargo run ../../test/message-generator/test/job-declarator-flag-test/job-declarator-flag-test.json || { echo 'mg test failed' ; exit 1; } - -sleep 10 From d018b88a6c0ae84ef442e2cfe5d90fe8758e5e9a Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Thu, 18 Jul 2024 12:36:40 -0400 Subject: [PATCH 066/101] Change MG test with flag filter logic --- .../common-messages/src/setup_connection.rs | 22 ++++++++----------- roles/jd-server/src/lib/job_declarator/mod.rs | 2 +- ...ds-setup-connection-mock-invalid-flag.json | 13 +++++++++-- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index 32acaa8fd..0c5522d6b 100644 --- a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -91,11 +91,15 @@ impl<'decoder> SetupConnection<'decoder> { let requires_async_job_mining_passed = (required >> 31) & 1 > 0; let requires_async_job_mining_self = (available >> 31) & 1 > 0; - let specific_flags_check = - 
!requires_async_job_mining_self || requires_async_job_mining_passed; - let general_flags_check = (available & required) == required; - - specific_flags_check && general_flags_check + match ( + requires_async_job_mining_self, + requires_async_job_mining_passed, + ) { + (true, true) => true, + (true, false) => true, + (false, true) => false, + (false, false) => true, + } } Protocol::TemplateDistributionProtocol | Protocol::JobDistributionProtocol => { // These protocols do not define flags for setting up a connection. @@ -423,14 +427,6 @@ mod test { available_flags, required_flags )); - - let available_flags = 0b_0000_0000_0000_0000_0000_0000_0000_0000; - let required_flags = 0b_1000_0000_0000_0000_0000_0000_0000_0000; - assert!(!SetupConnection::check_flags( - protocol, - available_flags, - required_flags - )); } #[test] diff --git a/roles/jd-server/src/lib/job_declarator/mod.rs b/roles/jd-server/src/lib/job_declarator/mod.rs index 2c230c75b..aaf94e353 100644 --- a/roles/jd-server/src/lib/job_declarator/mod.rs +++ b/roles/jd-server/src/lib/job_declarator/mod.rs @@ -475,8 +475,8 @@ impl JobDeclarator { let flag = setup_connection.flags; let is_valid = SetupConnection::check_flags( Protocol::JobDeclarationProtocol, - flag, 1, + flag, ); if is_valid { diff --git a/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json b/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json index 8cb2338d7..633be8d07 100644 --- a/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json +++ b/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json @@ -18,8 +18,17 @@ "role": "client", "results": [ { - "type": "match_message_type", - "value": "0x02" + "type": "match_message_field", + "value": [ + "CommonMessages", + "SetupConnectionSuccess", + [ + [ + "flags", + {"U32": 1} + ] + ] + ] } ], "actiondoc": "This action sends SetupConnection and checks that .Error" From 
61337970a0a110438c7142045fb58251808837b4 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 18 Jul 2024 16:00:19 -0300 Subject: [PATCH 067/101] introduce `async_mining_allowed` as JDS config parameter before issue #853 was reported, JDS would simply support async jobs by default and completely ignore this flag. but checking this flag implies that JDS could either support async jobs or not, and that is what this commit does. a new `asyn_mining_allowed` parameter is introduced to the TOML config files, and that is used when: - checking for the flags of `SetupConnection` messages - responding to `AllocateMiningJobToken` messages --- .../config-examples/jds-config-hosted-example.toml | 3 +++ .../config-examples/jds-config-local-example.toml | 3 +++ roles/jd-server/src/lib/job_declarator/message_handler.rs | 2 +- roles/jd-server/src/lib/job_declarator/mod.rs | 8 ++++++-- roles/jd-server/src/lib/mod.rs | 6 ++++++ 5 files changed, 19 insertions(+), 3 deletions(-) diff --git a/roles/jd-server/config-examples/jds-config-hosted-example.toml b/roles/jd-server/config-examples/jds-config-hosted-example.toml index 328cffd52..999a69ae3 100644 --- a/roles/jd-server/config-examples/jds-config-hosted-example.toml +++ b/roles/jd-server/config-examples/jds-config-hosted-example.toml @@ -1,3 +1,6 @@ +# Async Job Support +async_mining_allowed = true + # SRI Pool config authority_public_key = "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" authority_secret_key = "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" diff --git a/roles/jd-server/config-examples/jds-config-local-example.toml b/roles/jd-server/config-examples/jds-config-local-example.toml index a3c7622ac..dc8ce0555 100644 --- a/roles/jd-server/config-examples/jds-config-local-example.toml +++ b/roles/jd-server/config-examples/jds-config-local-example.toml @@ -1,3 +1,6 @@ +# Async Job Support +async_mining_allowed = true + # SRI Pool config authority_public_key = "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" 
authority_secret_key = "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" diff --git a/roles/jd-server/src/lib/job_declarator/message_handler.rs b/roles/jd-server/src/lib/job_declarator/message_handler.rs index 96887bdd0..b71eb96ca 100644 --- a/roles/jd-server/src/lib/job_declarator/message_handler.rs +++ b/roles/jd-server/src/lib/job_declarator/message_handler.rs @@ -49,7 +49,7 @@ impl ParseClientJobDeclarationMessages for JobDeclaratorDownstream { request_id: message.request_id, mining_job_token: token.to_le_bytes().to_vec().try_into().unwrap(), coinbase_output_max_additional_size: 100, - async_mining_allowed: true, + async_mining_allowed: self.async_mining_allowed, coinbase_output: self.coinbase_output.clone().try_into().unwrap(), }; let message_enum = JobDeclaration::AllocateMiningJobTokenSuccess(message_success); diff --git a/roles/jd-server/src/lib/job_declarator/mod.rs b/roles/jd-server/src/lib/job_declarator/mod.rs index aaf94e353..d1dfcdf05 100644 --- a/roles/jd-server/src/lib/job_declarator/mod.rs +++ b/roles/jd-server/src/lib/job_declarator/mod.rs @@ -47,6 +47,7 @@ pub struct AddTrasactionsToMempool { #[derive(Debug)] pub struct JobDeclaratorDownstream { + async_mining_allowed: bool, sender: Sender, receiver: Receiver, // TODO this should be computed for each new template so that fees are included @@ -70,6 +71,7 @@ pub struct JobDeclaratorDownstream { impl JobDeclaratorDownstream { pub fn new( + async_mining_allowed: bool, receiver: Receiver, sender: Sender, config: &Configuration, @@ -89,6 +91,7 @@ impl JobDeclaratorDownstream { .expect("Invalid coinbase output in config"); Self { + async_mining_allowed, receiver, sender, coinbase_output, @@ -475,14 +478,14 @@ impl JobDeclarator { let flag = setup_connection.flags; let is_valid = SetupConnection::check_flags( Protocol::JobDeclarationProtocol, - 1, + config.async_mining_allowed as u32, flag, ); if is_valid { let success_message = SetupConnectionSuccess { used_version: 2, - flags: 
0b_0000_0000_0000_0000_0000_0000_0000_0001, + flags: (setup_connection.flags & 1u32), }; info!("Sending success message for proxy"); let sv2_frame: StdFrame = JdsMessages::Common(success_message.into()) @@ -493,6 +496,7 @@ impl JobDeclarator { let jddownstream = Arc::new(Mutex::new(JobDeclaratorDownstream::new( + (setup_connection.flags & 1u32) != 0u32, // this takes a bool instead of u32 receiver.clone(), sender.clone(), &config, diff --git a/roles/jd-server/src/lib/mod.rs b/roles/jd-server/src/lib/mod.rs index 5fb12c75e..e71a3b476 100644 --- a/roles/jd-server/src/lib/mod.rs +++ b/roles/jd-server/src/lib/mod.rs @@ -57,6 +57,8 @@ pub struct CoinbaseOutput { #[derive(Debug, Deserialize, Clone)] pub struct Configuration { + #[serde(default = "default_true")] + pub async_mining_allowed: bool, pub listen_jd_address: String, pub authority_public_key: Secp256k1PublicKey, pub authority_secret_key: Secp256k1SecretKey, @@ -70,6 +72,10 @@ pub struct Configuration { pub mempool_update_interval: Duration, } +fn default_true() -> bool { + true +} + fn duration_from_toml<'de, D>(deserializer: D) -> Result where D: serde::Deserializer<'de>, From 7bb7811c952018815d07ec59cbac2a0a3b3e96e8 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 18 Jul 2024 16:25:44 -0300 Subject: [PATCH 068/101] fix MG tests for JD SetupConnection flags as already described in the previous commit, we introduced a new JDS config parameter (`async_mining_allowed`) so now, we need 2 separate tests: - a JDS that supports async jobs - a JDS that does not support async jobs and for each test, we need a mock that: - sends a `SetupConnection` with flag 0 and asserts the expected outcome - sends a `SetupConnection` with flag 1 and asserts the expected outcome if JDS does not support async jobs and receives a `SetupConnection` with flag 1, the expected outcome is `SetupConnection.Error` in all other cases, the expected outcome is a `SetupConnection.Success` with the same flag as the original `SetupConnection` --- 
.../jds-config-with-async-support.toml} | 3 + .../jds-config-without-async-support.toml | 23 +++++ ...flag-0-for-jds-setupconnection-tests.json} | 8 +- ...ds-setupconnection-with-async-support.json | 46 ++++++++++ ...setupconnection-without-async-support.json | 40 ++++++++ ...s-setupconnection-with-async-support.json} | 31 ++++++- ...jds-setupconnection-with-async-support.sh} | 2 +- ...setupconnection-without-async-support.json | 92 +++++++++++++++++++ ...s-setupconnection-without-async-support.sh | 9 ++ 9 files changed, 244 insertions(+), 10 deletions(-) rename test/config/{jds-setup-connection-flag-test/jds-config.toml => jds-setupconnection-flag-test/jds-config-with-async-support.toml} (93%) create mode 100644 test/config/jds-setupconnection-flag-test/jds-config-without-async-support.toml rename test/message-generator/mock/{jdc-mock-jds-setup-connection-mock-invalid-flag.json => jdc-mock-flag-0-for-jds-setupconnection-tests.json} (81%) create mode 100644 test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-with-async-support.json create mode 100644 test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-without-async-support.json rename test/message-generator/test/{jds-setup-connection-flag-test/jds-setup-connection-flag-test.json => jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.json} (62%) rename test/message-generator/test/{jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh => jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.sh} (58%) create mode 100644 test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.json create mode 100755 test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.sh diff --git a/test/config/jds-setup-connection-flag-test/jds-config.toml b/test/config/jds-setupconnection-flag-test/jds-config-with-async-support.toml similarity 
index 93% rename from test/config/jds-setup-connection-flag-test/jds-config.toml rename to test/config/jds-setupconnection-flag-test/jds-config-with-async-support.toml index acda73227..379fee561 100644 --- a/test/config/jds-setup-connection-flag-test/jds-config.toml +++ b/test/config/jds-setupconnection-flag-test/jds-config-with-async-support.toml @@ -1,3 +1,6 @@ +# Async Job Support +async_mining_allowed = true + # SRI Pool config authority_public_key = "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" authority_secret_key = "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" diff --git a/test/config/jds-setupconnection-flag-test/jds-config-without-async-support.toml b/test/config/jds-setupconnection-flag-test/jds-config-without-async-support.toml new file mode 100644 index 000000000..c84655276 --- /dev/null +++ b/test/config/jds-setupconnection-flag-test/jds-config-without-async-support.toml @@ -0,0 +1,23 @@ +# Async Job Support +async_mining_allowed = false + +# SRI Pool config +authority_public_key = "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" +authority_secret_key = "mkDLTBBRxdBv998612qipDYoTK3YUrqLe8uWw7gu3iXbSrn2n" +cert_validity_sec = 3600 + +# list of compressed or uncompressed pubkeys for coinbase payout (only supports 1 item in the array at this point) +coinbase_outputs = [ + { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, +] + +listen_jd_address = "127.0.0.1:34264" + +core_rpc_url = "" +core_rpc_port = 18332 +core_rpc_user = "" +core_rpc_pass = "" +# Time interval used for JDS mempool update +[mempool_update_interval] +unit = "secs" +value = 1 \ No newline at end of file diff --git a/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json b/test/message-generator/mock/jdc-mock-flag-0-for-jds-setupconnection-tests.json similarity index 81% rename from test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json rename 
to test/message-generator/mock/jdc-mock-flag-0-for-jds-setupconnection-tests.json index 633be8d07..e80141297 100644 --- a/test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json +++ b/test/message-generator/mock/jdc-mock-flag-0-for-jds-setupconnection-tests.json @@ -3,8 +3,8 @@ "doc": [ "This test does", "Soft mock of JDC", - "Connect to JDS", - "Receive Setup Connection Error as incorrect bits are set" + "Connect to JDS (where it doesn't matter if it supports async jobs or not)", + "Send SetupConnection with flag 0 (no async jobs) and expect SetupConnection.Success with flag 0" ], "frame_builders": [ { @@ -25,13 +25,13 @@ [ [ "flags", - {"U32": 1} + { "U32": 0 } ] ] ] } ], - "actiondoc": "This action sends SetupConnection and checks that .Error" + "actiondoc": "This action sends SetupConnection and expects SetupConnection.Success with flag 0" } ], "setup_commands": [], diff --git a/test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-with-async-support.json b/test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-with-async-support.json new file mode 100644 index 000000000..edbd83e13 --- /dev/null +++ b/test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-with-async-support.json @@ -0,0 +1,46 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Soft mock of JDC", + "Connect to JDS (that supports async jobs)", + "Send SetupConnection with flag 1 (async jobs) and expect SetupConnection.Success with flag 1" + ], + "frame_builders": [ + { + "type": "automatic", + "message_id": "test/message-generator/messages/common_messages.json::setup_connection_job_declarator" + } + ], + "actions": [ + { + "message_ids": ["setup_connection_job_declarator"], + "role": "client", + "results": [ + { + "type": "match_message_field", + "value": [ + "CommonMessages", + "SetupConnectionSuccess", + [ + [ + "flags", + { "U32": 1 } + ] + ] + ] + } + ], + "actiondoc": "This action sends SetupConnection and expects 
SetupConnection.Success with flag 1" + } + ], + "setup_commands": [], + "execution_commands": [], + "cleanup_commands": [], + "role": "client", + "downstream": { + "ip": "127.0.0.1", + "port": 34264, + "pub_key": "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" + } +} diff --git a/test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-without-async-support.json b/test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-without-async-support.json new file mode 100644 index 000000000..eeaf19ca5 --- /dev/null +++ b/test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-without-async-support.json @@ -0,0 +1,40 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Soft mock of JDC", + "Connect to JDS (that does not support async jobs)", + "Send SetupConnection with flag 1 (async jobs) and expect SetupConnection.Error" + ], + "frame_builders": [ + { + "type": "automatic", + "message_id": "test/message-generator/messages/common_messages.json::setup_connection_job_declarator" + } + ], + "actions": [ + { + "message_ids": ["setup_connection_job_declarator"], + "role": "client", + "results": [ + { + "type": "match_message_type", + "value": "0x02" + }, + { + "type": "close_connection" + } + ], + "actiondoc": "This action sends SetupConnection and expects SetupConnection.Error" + } + ], + "setup_commands": [], + "execution_commands": [], + "cleanup_commands": [], + "role": "client", + "downstream": { + "ip": "127.0.0.1", + "port": 34264, + "pub_key": "9auqWEzQDVyd2oe1JVGFLMLHZtCo2FFqZwtKA5gd9xbuEu7PH72" + } +} diff --git a/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json b/test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.json similarity index 62% rename from test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json rename to 
test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.json index 403b1eb4f..c0d781d69 100644 --- a/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json +++ b/test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.json @@ -19,7 +19,7 @@ "jd_server", "--", "-c", - "../test/config/jds-setup-connection-flag-test/jds-config.toml" + "../test/config/jds-setupconnection-flag-test/jds-config-with-async-support.toml" ], "conditions": { "WithConditions": { @@ -40,16 +40,37 @@ "command": "cargo", "args": [ "run", - "../../test/message-generator/mock/jdc-mock-jds-setup-connection-mock-invalid-flag.json" + "../../test/message-generator/mock/jdc-mock-flag-0-for-jds-setupconnection-tests.json" ], "conditions": { "WithConditions": { "conditions": [ { - "output_string": "MATCHED MESSAGE TYPE 1", - "output_location": "StdOut", + "output_string": "TEST FAILED", + "output_location": "StdErr", "late_condition": false, - "condition": true + "condition": false + } + ], + "timer_secs": 600, + "warn_no_panic": false + } + } + }, + { + "command": "cargo", + "args": [ + "run", + "../../test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-with-async-support.json" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "TEST FAILED", + "output_location": "StdErr", + "late_condition": false, + "condition": false } ], "timer_secs": 600, diff --git a/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh b/test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.sh similarity index 58% rename from test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh rename to test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.sh index 
b32716fe3..87dcf97e9 100755 --- a/test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.sh +++ b/test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.sh @@ -4,6 +4,6 @@ cargo llvm-cov --no-report -p pool_sv2 cd ../utils/message-generator/ cargo build -RUST_LOG=debug cargo run ../../test/message-generator/test/jds-setup-connection-flag-test/jds-setup-connection-flag-test.json || { echo 'mg test failed' ; exit 1; } +RUST_LOG=debug cargo run ../../test/message-generator/test/jds-setupconnection-with-async-support/jds-setupconnection-with-async-support.json || { echo 'mg test failed' ; exit 1; } sleep 10 diff --git a/test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.json b/test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.json new file mode 100644 index 000000000..15eb2e978 --- /dev/null +++ b/test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.json @@ -0,0 +1,92 @@ +{ + "version": "2", + "doc": [ + "This test does", + "Launch the jd-server" + ], + "frame_builders": [ + ], + "actions": [ + ], + "setup_commands": [ + { + "command": "cargo", + "args": [ + "llvm-cov", + "--no-report", + "run", + "-p", + "jd_server", + "--", + "-c", + "../test/config/jds-setupconnection-flag-test/jds-config-without-async-support.toml" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "JD INITIALIZED", + "output_location": "StdOut", + "late_condition": false, + "condition": true + } + ], + "timer_secs": 300, + "warn_no_panic": false + } + } + }, + { + "command": "cargo", + "args": [ + "run", + "../../test/message-generator/mock/jdc-mock-flag-0-for-jds-setupconnection-tests.json" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "TEST FAILED", + 
"output_location": "StdErr", + "late_condition": false, + "condition": false + } + ], + "timer_secs": 600, + "warn_no_panic": false + } + } + }, + { + "command": "cargo", + "args": [ + "run", + "../../test/message-generator/mock/jdc-mock-flag-1-for-jds-setupconnection-without-async-support.json" + ], + "conditions": { + "WithConditions": { + "conditions": [ + { + "output_string": "TEST FAILED", + "output_location": "StdErr", + "late_condition": false, + "condition": false + } + ], + "timer_secs": 600, + "warn_no_panic": false + } + } + } + ], + "execution_commands": [ + ], + "cleanup_commands": [ + { + "command": "pkill", + "args": ["-f", "jd_server", "-SIGINT"], + "conditions": "None" + } + ], + "role": "none" +} diff --git a/test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.sh b/test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.sh new file mode 100755 index 000000000..151cec4ff --- /dev/null +++ b/test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.sh @@ -0,0 +1,9 @@ +cd roles +cargo llvm-cov --no-report -p pool_sv2 + +cd ../utils/message-generator/ +cargo build + +RUST_LOG=debug cargo run ../../test/message-generator/test/jds-setupconnection-without-async-support/jds-setupconnection-without-async-support.json || { echo 'mg test failed' ; exit 1; } + +sleep 10 From 51a20d8b079ad5935c5682df8cc7c471158a7c20 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Thu, 25 Jul 2024 07:03:12 -0400 Subject: [PATCH 069/101] Add check flags protocols docs --- .../common-messages/src/setup_connection.rs | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index 0c5522d6b..763907b0b 100644 --- 
a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -69,6 +69,17 @@ impl<'decoder> SetupConnection<'decoder> { // [1] [1] -> true // [0] [1] -> false Protocol::MiningProtocol => { + // Evaluates protocol requirements based on flag bits. + // + // Checks if the current protocol meets the required flags for work selection and version rolling + // by reversing the bits of `available_flags` and `required_flags`. It extracts the 30th and 29th + // bits to determine if work selection and version rolling are needed. + // + // Returns `true` if: + // - The work selection requirement is satisfied or not needed. + // - The version rolling requirement is satisfied or not needed. + // + // Otherwise, returns `false`. let available = available_flags.reverse_bits(); let required_flags = required_flags.reverse_bits(); let requires_work_selection_passed = required_flags >> 30 > 0; @@ -85,6 +96,13 @@ impl<'decoder> SetupConnection<'decoder> { work_selection && version_rolling } Protocol::JobDeclarationProtocol => { + // Determines if asynchronous job mining is required based on flag bits. + // + // Reverses the bits of `available_flags` and `required_flags`, extracts the 31st bit from each, + // and evaluates if the condition is met using these bits. Returns `true` or `false` based on: + // - True if `requires_async_job_mining_self` is true, or both are true. + // - False if `requires_async_job_mining_self` is false and `requires_async_job_mining_passed` is true. + // - True otherwise. 
let available = available_flags.reverse_bits(); let required = required_flags.reverse_bits(); From e2471f1174cbda5926b0c29e93cb3797f3c13d31 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Thu, 25 Jul 2024 07:39:32 -0400 Subject: [PATCH 070/101] Remove JOB DISTRIBUTION INSTANCES and renamed SV2_JOB_NEG_PROTOCOL_DISCRIMINANT to SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT --- protocols/v2/const-sv2/src/lib.rs | 3 +- .../common-messages/src/setup_connection.rs | 16 +- protocols/v2/sv2-ffi/sv2.h | 10 +- scripts/sv2.h | 634 ++++++++++++++++++ 4 files changed, 644 insertions(+), 19 deletions(-) create mode 100644 scripts/sv2.h diff --git a/protocols/v2/const-sv2/src/lib.rs b/protocols/v2/const-sv2/src/lib.rs index 1a32c1b8f..507f63532 100644 --- a/protocols/v2/const-sv2/src/lib.rs +++ b/protocols/v2/const-sv2/src/lib.rs @@ -39,9 +39,8 @@ pub const NOISE_HASHED_PROTOCOL_NAME_CHACHA: [u8; 32] = [ pub const NOISE_SUPPORTED_CIPHERS_MESSAGE: [u8; 5] = [1, 0x47, 0x53, 0x45, 0x41]; pub const SV2_MINING_PROTOCOL_DISCRIMINANT: u8 = 0; -pub const SV2_JOB_NEG_PROTOCOL_DISCRIMINANT: u8 = 1; +pub const SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT: u8 = 1; pub const SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT: u8 = 2; -pub const SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT: u8 = 3; // COMMON MESSAGES TYPES pub const MESSAGE_TYPE_SETUP_CONNECTION: u8 = 0x0; diff --git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index 763907b0b..5bc6adb0e 100644 --- a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -7,8 +7,8 @@ use binary_sv2::{ }; use binary_sv2::{Deserialize, GetSize, Serialize, Str0255}; use const_sv2::{ - SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT, SV2_JOB_NEG_PROTOCOL_DISCRIMINANT, - SV2_MINING_PROTOCOL_DISCRIMINANT, SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, + SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT, 
SV2_MINING_PROTOCOL_DISCRIMINANT, + SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, }; use core::convert::TryFrom; #[cfg(not(feature = "with_serde"))] @@ -320,18 +320,16 @@ impl<'a> From> for CSetupConnectionError { } /// MiningProtocol = [`SV2_MINING_PROTOCOL_DISCRIMINANT`], -/// JobDeclarationProtocol = [`SV2_JOB_NEG_PROTOCOL_DISCRIMINANT`], +/// JobDeclarationProtocol = [`SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT`], /// TemplateDistributionProtocol = [`SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT`], -/// JobDistributionProtocol = [`SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT`], #[cfg_attr(feature = "with_serde", derive(Serialize_repr, Deserialize_repr))] #[derive(Debug, Clone, Copy, PartialEq, Eq)] #[repr(u8)] #[allow(clippy::enum_variant_names)] pub enum Protocol { MiningProtocol = SV2_MINING_PROTOCOL_DISCRIMINANT, - JobDeclarationProtocol = SV2_JOB_NEG_PROTOCOL_DISCRIMINANT, + JobDeclarationProtocol = SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT, TemplateDistributionProtocol = SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, - JobDistributionProtocol = SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT, } #[cfg(not(feature = "with_serde"))] @@ -366,9 +364,8 @@ impl TryFrom for Protocol { fn try_from(value: u8) -> Result { match value { SV2_MINING_PROTOCOL_DISCRIMINANT => Ok(Protocol::MiningProtocol), - SV2_JOB_NEG_PROTOCOL_DISCRIMINANT => Ok(Protocol::JobDeclarationProtocol), + SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT => Ok(Protocol::JobDeclarationProtocol), SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT => Ok(Protocol::TemplateDistributionProtocol), - SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT => Ok(Protocol::JobDistributionProtocol), _ => Err(()), } } @@ -385,9 +382,8 @@ impl From for u8 { fn from(val: Protocol) -> Self { match val { Protocol::MiningProtocol => SV2_MINING_PROTOCOL_DISCRIMINANT, - Protocol::JobDeclarationProtocol => SV2_JOB_NEG_PROTOCOL_DISCRIMINANT, + Protocol::JobDeclarationProtocol => SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT, Protocol::TemplateDistributionProtocol => 
SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, - Protocol::JobDistributionProtocol => SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT, } } } diff --git a/protocols/v2/sv2-ffi/sv2.h b/protocols/v2/sv2-ffi/sv2.h index 8aa7fdae3..8c941fff9 100644 --- a/protocols/v2/sv2-ffi/sv2.h +++ b/protocols/v2/sv2-ffi/sv2.h @@ -38,12 +38,10 @@ static const uintptr_t INITIATOR_EXPECTED_HANDSHAKE_MESSAGE_SIZE = ((ELLSWIFT_EN static const uint8_t SV2_MINING_PROTOCOL_DISCRIMINANT = 0; -static const uint8_t SV2_JOB_NEG_PROTOCOL_DISCRIMINANT = 1; +static const uint8_t SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT = 1; static const uint8_t SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT = 2; -static const uint8_t SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT = 3; - static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION = 0; static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION_SUCCESS = 1; @@ -271,14 +269,12 @@ void _c_export_cvec2(CVec2 _a); #include /// MiningProtocol = [`SV2_MINING_PROTOCOL_DISCRIMINANT`], -/// JobDeclarationProtocol = [`SV2_JOB_NEG_PROTOCOL_DISCRIMINANT`], +/// JobDeclarationProtocol = [`SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT`], /// TemplateDistributionProtocol = [`SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT`], -/// JobDistributionProtocol = [`SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT`], enum class Protocol : uint8_t { MiningProtocol = SV2_MINING_PROTOCOL_DISCRIMINANT, - JobDeclarationProtocol = SV2_JOB_NEG_PROTOCOL_DISCRIMINANT, + JobDeclarationProtocol = SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT, TemplateDistributionProtocol = SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, - JobDistributionProtocol = SV2_JOB_DISTR_PROTOCOL_DISCRIMINANT, }; /// ## ChannelEndpointChanged (Server -> Client) diff --git a/scripts/sv2.h b/scripts/sv2.h new file mode 100644 index 000000000..8c941fff9 --- /dev/null +++ b/scripts/sv2.h @@ -0,0 +1,634 @@ +#include +#include +#include +#include +#include + +static const uint16_t EXTENSION_TYPE_NO_EXTENSION = 0; + +static const uintptr_t SV2_FRAME_HEADER_SIZE = 6; + +static const uintptr_t 
SV2_FRAME_HEADER_LEN_OFFSET = 3; + +static const uintptr_t SV2_FRAME_HEADER_LEN_END = 3; + +static const uintptr_t SV2_FRAME_CHUNK_SIZE = 65535; + +static const uintptr_t AEAD_MAC_LEN = 16; + +static const uintptr_t ENCRYPTED_SV2_FRAME_HEADER_SIZE = (SV2_FRAME_HEADER_SIZE + AEAD_MAC_LEN); + +static const uintptr_t NOISE_FRAME_HEADER_SIZE = 2; + +static const uintptr_t NOISE_FRAME_HEADER_LEN_OFFSET = 0; + +static const uintptr_t ELLSWIFT_ENCODING_SIZE = 64; + +static const uintptr_t RESPONDER_EXPECTED_HANDSHAKE_MESSAGE_SIZE = ELLSWIFT_ENCODING_SIZE; + +static const uintptr_t MAC = 16; + +static const uintptr_t ENCRYPTED_ELLSWIFT_ENCODING_SIZE = (ELLSWIFT_ENCODING_SIZE + MAC); + +static const uintptr_t SIGNATURE_NOISE_MESSAGE_SIZE = 74; + +static const uintptr_t ENCRYPTED_SIGNATURE_NOISE_MESSAGE_SIZE = (SIGNATURE_NOISE_MESSAGE_SIZE + MAC); + +static const uintptr_t INITIATOR_EXPECTED_HANDSHAKE_MESSAGE_SIZE = ((ELLSWIFT_ENCODING_SIZE + ENCRYPTED_ELLSWIFT_ENCODING_SIZE) + ENCRYPTED_SIGNATURE_NOISE_MESSAGE_SIZE); + +static const uint8_t SV2_MINING_PROTOCOL_DISCRIMINANT = 0; + +static const uint8_t SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT = 1; + +static const uint8_t SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT = 2; + +static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION = 0; + +static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION_SUCCESS = 1; + +static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION_ERROR = 2; + +static const uint8_t MESSAGE_TYPE_CHANNEL_ENDPOINT_CHANGED = 3; + +static const uint8_t MESSAGE_TYPE_COINBASE_OUTPUT_DATA_SIZE = 112; + +static const uint8_t MESSAGE_TYPE_NEW_TEMPLATE = 113; + +static const uint8_t MESSAGE_TYPE_SET_NEW_PREV_HASH = 114; + +static const uint8_t MESSAGE_TYPE_REQUEST_TRANSACTION_DATA = 115; + +static const uint8_t MESSAGE_TYPE_REQUEST_TRANSACTION_DATA_SUCCESS = 116; + +static const uint8_t MESSAGE_TYPE_REQUEST_TRANSACTION_DATA_ERROR = 117; + +static const uint8_t MESSAGE_TYPE_SUBMIT_SOLUTION = 118; + +static const uint8_t 
MESSAGE_TYPE_ALLOCATE_MINING_JOB_TOKEN = 80; + +static const uint8_t MESSAGE_TYPE_ALLOCATE_MINING_JOB_TOKEN_SUCCESS = 81; + +static const uint8_t MESSAGE_TYPE_DECLARE_MINING_JOB = 87; + +static const uint8_t MESSAGE_TYPE_DECLARE_MINING_JOB_SUCCESS = 88; + +static const uint8_t MESSAGE_TYPE_DECLARE_MINING_JOB_ERROR = 89; + +static const uint8_t MESSAGE_TYPE_IDENTIFY_TRANSACTIONS = 83; + +static const uint8_t MESSAGE_TYPE_IDENTIFY_TRANSACTIONS_SUCCESS = 84; + +static const uint8_t MESSAGE_TYPE_PROVIDE_MISSING_TRANSACTIONS = 85; + +static const uint8_t MESSAGE_TYPE_PROVIDE_MISSING_TRANSACTIONS_SUCCESS = 86; + +static const uint8_t MESSAGE_TYPE_SUBMIT_SOLUTION_JD = 96; + +static const uint8_t MESSAGE_TYPE_CLOSE_CHANNEL = 24; + +/// This has been cahnged before was 0x1e it can be that old Sv2 implementation still use the old +/// one but this means that old impl are not following Sv2 spec +static const uint8_t MESSAGE_TYPE_NEW_EXTENDED_MINING_JOB = 31; + +static const uint8_t MESSAGE_TYPE_NEW_MINING_JOB = 21; + +static const uint8_t MESSAGE_TYPE_OPEN_EXTENDED_MINING_CHANNEL = 19; + +static const uint8_t MESSAGE_TYPE_OPEN_EXTENDED_MINING_CHANNEL_SUCCES = 20; + +static const uint8_t MESSAGE_TYPE_OPEN_MINING_CHANNEL_ERROR = 18; + +static const uint8_t MESSAGE_TYPE_OPEN_STANDARD_MINING_CHANNEL = 16; + +static const uint8_t MESSAGE_TYPE_OPEN_STANDARD_MINING_CHANNEL_SUCCESS = 17; + +static const uint8_t MESSAGE_TYPE_RECONNECT = 37; + +static const uint8_t MESSAGE_TYPE_SET_CUSTOM_MINING_JOB = 34; + +static const uint8_t MESSAGE_TYPE_SET_CUSTOM_MINING_JOB_ERROR = 36; + +static const uint8_t MESSAGE_TYPE_SET_CUSTOM_MINING_JOB_SUCCESS = 35; + +static const uint8_t MESSAGE_TYPE_SET_EXTRANONCE_PREFIX = 25; + +static const uint8_t MESSAGE_TYPE_SET_GROUP_CHANNEL = 38; + +static const uint8_t MESSAGE_TYPE_MINING_SET_NEW_PREV_HASH = 32; + +static const uint8_t MESSAGE_TYPE_SET_TARGET = 33; + +static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_ERROR = 29; + +static const uint8_t 
MESSAGE_TYPE_SUBMIT_SHARES_EXTENDED = 27; + +static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_STANDARD = 26; + +static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_SUCCESS = 28; + +static const uint8_t MESSAGE_TYPE_UPDATE_CHANNEL = 22; + +static const uint8_t MESSAGE_TYPE_UPDATE_CHANNEL_ERROR = 23; + +static const bool CHANNEL_BIT_SETUP_CONNECTION = false; + +static const bool CHANNEL_BIT_SETUP_CONNECTION_SUCCESS = false; + +static const bool CHANNEL_BIT_SETUP_CONNECTION_ERROR = false; + +static const bool CHANNEL_BIT_CHANNEL_ENDPOINT_CHANGED = true; + +static const bool CHANNEL_BIT_COINBASE_OUTPUT_DATA_SIZE = false; + +static const bool CHANNEL_BIT_NEW_TEMPLATE = false; + +static const bool CHANNEL_BIT_SET_NEW_PREV_HASH = false; + +static const bool CHANNEL_BIT_REQUEST_TRANSACTION_DATA = false; + +static const bool CHANNEL_BIT_REQUEST_TRANSACTION_DATA_SUCCESS = false; + +static const bool CHANNEL_BIT_REQUEST_TRANSACTION_DATA_ERROR = false; + +static const bool CHANNEL_BIT_SUBMIT_SOLUTION = false; + +static const bool CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN = false; + +static const bool CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN_SUCCESS = false; + +static const bool CHANNEL_BIT_DECLARE_MINING_JOB = false; + +static const bool CHANNEL_BIT_DECLARE_MINING_JOB_SUCCESS = false; + +static const bool CHANNEL_BIT_DECLARE_MINING_JOB_ERROR = false; + +static const bool CHANNEL_BIT_IDENTIFY_TRANSACTIONS = false; + +static const bool CHANNEL_BIT_IDENTIFY_TRANSACTIONS_SUCCESS = false; + +static const bool CHANNEL_BIT_PROVIDE_MISSING_TRANSACTIONS = false; + +static const bool CHANNEL_BIT_PROVIDE_MISSING_TRANSACTIONS_SUCCESS = false; + +static const bool CHANNEL_BIT_SUBMIT_SOLUTION_JD = true; + +static const bool CHANNEL_BIT_CLOSE_CHANNEL = true; + +static const bool CHANNEL_BIT_NEW_EXTENDED_MINING_JOB = true; + +static const bool CHANNEL_BIT_NEW_MINING_JOB = true; + +static const bool CHANNEL_BIT_OPEN_EXTENDED_MINING_CHANNEL = false; + +static const bool 
CHANNEL_BIT_OPEN_EXTENDED_MINING_CHANNEL_SUCCES = false; + +static const bool CHANNEL_BIT_OPEN_MINING_CHANNEL_ERROR = false; + +static const bool CHANNEL_BIT_OPEN_STANDARD_MINING_CHANNEL = false; + +static const bool CHANNEL_BIT_OPEN_STANDARD_MINING_CHANNEL_SUCCESS = false; + +static const bool CHANNEL_BIT_RECONNECT = false; + +static const bool CHANNEL_BIT_SET_CUSTOM_MINING_JOB = false; + +static const bool CHANNEL_BIT_SET_CUSTOM_MINING_JOB_ERROR = false; + +static const bool CHANNEL_BIT_SET_CUSTOM_MINING_JOB_SUCCESS = false; + +static const bool CHANNEL_BIT_SET_EXTRANONCE_PREFIX = true; + +static const bool CHANNEL_BIT_SET_GROUP_CHANNEL = false; + +static const bool CHANNEL_BIT_MINING_SET_NEW_PREV_HASH = true; + +static const bool CHANNEL_BIT_SET_TARGET = true; + +static const bool CHANNEL_BIT_SUBMIT_SHARES_ERROR = true; + +static const bool CHANNEL_BIT_SUBMIT_SHARES_EXTENDED = true; + +static const bool CHANNEL_BIT_SUBMIT_SHARES_STANDARD = true; + +static const bool CHANNEL_BIT_SUBMIT_SHARES_SUCCESS = true; + +static const bool CHANNEL_BIT_UPDATE_CHANNEL = true; + +static const bool CHANNEL_BIT_UPDATE_CHANNEL_ERROR = true; +#include +#include +#include +#include +#include + +struct CVec { + uint8_t *data; + uintptr_t len; + uintptr_t capacity; +}; + +struct CVec2 { + CVec *data; + uintptr_t len; + uintptr_t capacity; +}; + +struct U24 { + uint32_t _0; +}; + +extern "C" { + +/// Given a C allocated buffer return a rust allocated CVec +/// +/// # Safety +/// +CVec cvec_from_buffer(const uint8_t *data, uintptr_t len); + +/// # Safety +/// +CVec2 init_cvec2(); + +/// The caller is reponsible for NOT adding duplicate cvecs to the cvec2 structure, +/// as this can lead to double free errors when the message is dropped. 
+/// # Safety +/// +void cvec2_push(CVec2 *cvec2, CVec cvec); + +void _c_export_u24(U24 _a); + +void _c_export_cvec(CVec _a); + +void _c_export_cvec2(CVec2 _a); + +} // extern "C" +#include +#include +#include +#include +#include + +/// MiningProtocol = [`SV2_MINING_PROTOCOL_DISCRIMINANT`], +/// JobDeclarationProtocol = [`SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT`], +/// TemplateDistributionProtocol = [`SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT`], +enum class Protocol : uint8_t { + MiningProtocol = SV2_MINING_PROTOCOL_DISCRIMINANT, + JobDeclarationProtocol = SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT, + TemplateDistributionProtocol = SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, +}; + +/// ## ChannelEndpointChanged (Server -> Client) +/// When a channel’s upstream or downstream endpoint changes and that channel had previously +/// sent messages with [channel_msg] bitset of unknown extension_type, the intermediate proxy +/// MUST send a [`ChannelEndpointChanged`] message. Upon receipt thereof, any extension state +/// (including version negotiation and the presence of support for a given extension) MUST be +/// reset and version/presence negotiation must begin again. +/// +struct ChannelEndpointChanged { + /// The channel which has changed endpoint. + uint32_t channel_id; +}; + +/// ## SetupConnection.Success (Server -> Client) +/// Response to [`SetupConnection`] message if the server accepts the connection. The client is +/// required to verify the set of feature flags that the server supports and act accordingly. +struct SetupConnectionSuccess { + /// Selected version proposed by the connecting node that the upstream + /// node supports. This version will be used on the connection for the rest + /// of its life. + uint16_t used_version; + /// Flags indicating optional protocol features the server supports. Each + /// protocol from [`Protocol`] field has its own values/flags. 
+ uint32_t flags; +}; + +struct CSetupConnection { + Protocol protocol; + uint16_t min_version; + uint16_t max_version; + uint32_t flags; + CVec endpoint_host; + uint16_t endpoint_port; + CVec vendor; + CVec hardware_version; + CVec firmware; + CVec device_id; +}; + +struct CSetupConnectionError { + uint32_t flags; + CVec error_code; +}; + +extern "C" { + +void _c_export_channel_endpoint_changed(ChannelEndpointChanged _a); + +void _c_export_setup_conn_succ(SetupConnectionSuccess _a); + +void free_setup_connection(CSetupConnection s); + +void free_setup_connection_error(CSetupConnectionError s); + +} // extern "C" +#include +#include +#include +#include +#include + +/// ## CoinbaseOutputDataSize (Client -> Server) +/// Ultimately, the pool is responsible for adding coinbase transaction outputs for payouts and +/// other uses, and thus the Template Provider will need to consider this additional block size +/// when selecting transactions for inclusion in a block (to not create an invalid, oversized block). +/// Thus, this message is used to indicate that some additional space in the block/coinbase +/// transaction be reserved for the pool’s use (while always assuming the pool will use the entirety +/// of available coinbase space). +/// The Job Declarator MUST discover the maximum serialized size of the additional outputs which +/// will be added by the pool(s) it intends to use this work. It then MUST communicate the +/// maximum such size to the Template Provider via this message. The Template Provider MUST +/// NOT provide NewWork messages which would represent consensus-invalid blocks once this +/// additional size — along with a maximally-sized (100 byte) coinbase field — is added. Further, +/// the Template Provider MUST consider the maximum additional bytes required in the output +/// count variable-length integer in the coinbase transaction when complying with the size limits. 
+struct CoinbaseOutputDataSize { + /// The maximum additional serialized bytes which the pool will add in + /// coinbase transaction outputs. + uint32_t coinbase_output_max_additional_size; +}; + +/// ## RequestTransactionData (Client -> Server) +/// A request sent by the Job Declarator to the Template Provider which requests the set of +/// transaction data for all transactions (excluding the coinbase transaction) included in a block, as +/// well as any additional data which may be required by the Pool to validate the work. +struct RequestTransactionData { + /// The template_id corresponding to a NewTemplate message. + uint64_t template_id; +}; + +struct CNewTemplate { + uint64_t template_id; + bool future_template; + uint32_t version; + uint32_t coinbase_tx_version; + CVec coinbase_prefix; + uint32_t coinbase_tx_input_sequence; + uint64_t coinbase_tx_value_remaining; + uint32_t coinbase_tx_outputs_count; + CVec coinbase_tx_outputs; + uint32_t coinbase_tx_locktime; + CVec2 merkle_path; +}; + +struct CRequestTransactionDataSuccess { + uint64_t template_id; + CVec excess_data; + CVec2 transaction_list; +}; + +struct CRequestTransactionDataError { + uint64_t template_id; + CVec error_code; +}; + +struct CSetNewPrevHash { + uint64_t template_id; + CVec prev_hash; + uint32_t header_timestamp; + uint32_t n_bits; + CVec target; +}; + +struct CSubmitSolution { + uint64_t template_id; + uint32_t version; + uint32_t header_timestamp; + uint32_t header_nonce; + CVec coinbase_tx; +}; + +extern "C" { + +void _c_export_coinbase_out(CoinbaseOutputDataSize _a); + +void _c_export_req_tx_data(RequestTransactionData _a); + +void free_new_template(CNewTemplate s); + +void free_request_tx_data_success(CRequestTransactionDataSuccess s); + +void free_request_tx_data_error(CRequestTransactionDataError s); + +void free_set_new_prev_hash(CSetNewPrevHash s); + +void free_submit_solution(CSubmitSolution s); + +} // extern "C" +#include +#include +#include +#include +#include + +struct 
CError { + enum class Tag { + /// Errors from the `binary_sv2` crate + BinarySv2Error, + /// Errors from the `framing_sv2` crate + FramingSv2Error, + /// Errors if there are missing bytes in the Noise protocol + MissingBytes, + /// Errors from the `noise_sv2` crate + NoiseSv2Error, + /// `snow` errors + AeadError, + /// Error if Noise protocol state is not as expected + UnexpectedNoiseState, + InvalidStepForResponder, + InvalidStepForInitiator, + NotInHandShakeState, + FramingError, + }; + + struct MissingBytes_Body { + uintptr_t _0; + }; + + Tag tag; + union { + MissingBytes_Body missing_bytes; + }; +}; + +extern "C" { + +/// Here only to force cbindgen to create header for CError +CError export_cerror(); + +} // extern "C" +#include +#include +#include +#include +#include + +struct DecoderWrapper; + +struct EncoderWrapper; + +struct CSv2Message { + enum class Tag { + CoinbaseOutputDataSize, + NewTemplate, + RequestTransactionData, + RequestTransactionDataError, + RequestTransactionDataSuccess, + SetNewPrevHash, + SubmitSolution, + ChannelEndpointChanged, + SetupConnection, + SetupConnectionError, + SetupConnectionSuccess, + }; + + struct CoinbaseOutputDataSize_Body { + CoinbaseOutputDataSize _0; + }; + + struct NewTemplate_Body { + CNewTemplate _0; + }; + + struct RequestTransactionData_Body { + RequestTransactionData _0; + }; + + struct RequestTransactionDataError_Body { + CRequestTransactionDataError _0; + }; + + struct RequestTransactionDataSuccess_Body { + CRequestTransactionDataSuccess _0; + }; + + struct SetNewPrevHash_Body { + CSetNewPrevHash _0; + }; + + struct SubmitSolution_Body { + CSubmitSolution _0; + }; + + struct ChannelEndpointChanged_Body { + ChannelEndpointChanged _0; + }; + + struct SetupConnection_Body { + CSetupConnection _0; + }; + + struct SetupConnectionError_Body { + CSetupConnectionError _0; + }; + + struct SetupConnectionSuccess_Body { + SetupConnectionSuccess _0; + }; + + Tag tag; + union { + CoinbaseOutputDataSize_Body 
coinbase_output_data_size; + NewTemplate_Body new_template; + RequestTransactionData_Body request_transaction_data; + RequestTransactionDataError_Body request_transaction_data_error; + RequestTransactionDataSuccess_Body request_transaction_data_success; + SetNewPrevHash_Body set_new_prev_hash; + SubmitSolution_Body submit_solution; + ChannelEndpointChanged_Body channel_endpoint_changed; + SetupConnection_Body setup_connection; + SetupConnectionError_Body setup_connection_error; + SetupConnectionSuccess_Body setup_connection_success; + }; +}; + +struct Sv2Error { + enum class Tag { + BinaryError, + CodecError, + EncoderBusy, + InvalidSv2Frame, + MissingBytes, + PayloadTooBig, + Unknown, + }; + + struct BinaryError_Body { + CError _0; + }; + + struct CodecError_Body { + CError _0; + }; + + struct PayloadTooBig_Body { + CVec _0; + }; + + Tag tag; + union { + BinaryError_Body binary_error; + CodecError_Body codec_error; + PayloadTooBig_Body payload_too_big; + }; +}; + +template +struct CResult { + enum class Tag { + Ok, + Err, + }; + + struct Ok_Body { + T _0; + }; + + struct Err_Body { + E _0; + }; + + Tag tag; + union { + Ok_Body ok; + Err_Body err; + }; +}; + +extern "C" { + +void drop_sv2_message(CSv2Message s); + +/// This function does nothing unless there is some heap allocated data owned by the C side that +/// needs to be dropped (specifically a `CVec`). In this case, `free_vec` is used in order to drop +/// that memory. 
+void drop_sv2_error(Sv2Error s); + +bool is_ok(const CResult *cresult); + +EncoderWrapper *new_encoder(); + +void flush_encoder(EncoderWrapper *encoder); + +void free_decoder(DecoderWrapper *decoder); + +/// # Safety +/// +CResult encode(CSv2Message *message, EncoderWrapper *encoder); + +DecoderWrapper *new_decoder(); + +CVec get_writable(DecoderWrapper *decoder); + +CResult next_frame(DecoderWrapper *decoder); + +} // extern "C" From acf9534d876690a4a9dd95c305d88d875edafb03 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Fri, 26 Jul 2024 23:33:29 -0400 Subject: [PATCH 071/101] Corrected build_header.sh file --- scripts/build_header.sh | 5 +- scripts/sv2.h | 634 ---------------------------------------- 2 files changed, 2 insertions(+), 637 deletions(-) delete mode 100644 scripts/sv2.h diff --git a/scripts/build_header.sh b/scripts/build_header.sh index e0e20cd7e..046723cca 100755 --- a/scripts/build_header.sh +++ b/scripts/build_header.sh @@ -1,9 +1,8 @@ #! /bin/sh - cargo install --version 0.20.0 cbindgen -rm -f ./scripts/sv2.h -touch ./scripts/sv2.h +rm -f ./sv2.h +touch ./sv2.h dir=${1:-../protocols} diff --git a/scripts/sv2.h b/scripts/sv2.h deleted file mode 100644 index 8c941fff9..000000000 --- a/scripts/sv2.h +++ /dev/null @@ -1,634 +0,0 @@ -#include -#include -#include -#include -#include - -static const uint16_t EXTENSION_TYPE_NO_EXTENSION = 0; - -static const uintptr_t SV2_FRAME_HEADER_SIZE = 6; - -static const uintptr_t SV2_FRAME_HEADER_LEN_OFFSET = 3; - -static const uintptr_t SV2_FRAME_HEADER_LEN_END = 3; - -static const uintptr_t SV2_FRAME_CHUNK_SIZE = 65535; - -static const uintptr_t AEAD_MAC_LEN = 16; - -static const uintptr_t ENCRYPTED_SV2_FRAME_HEADER_SIZE = (SV2_FRAME_HEADER_SIZE + AEAD_MAC_LEN); - -static const uintptr_t NOISE_FRAME_HEADER_SIZE = 2; - -static const uintptr_t NOISE_FRAME_HEADER_LEN_OFFSET = 0; - -static const uintptr_t ELLSWIFT_ENCODING_SIZE = 64; - -static const uintptr_t RESPONDER_EXPECTED_HANDSHAKE_MESSAGE_SIZE = 
ELLSWIFT_ENCODING_SIZE; - -static const uintptr_t MAC = 16; - -static const uintptr_t ENCRYPTED_ELLSWIFT_ENCODING_SIZE = (ELLSWIFT_ENCODING_SIZE + MAC); - -static const uintptr_t SIGNATURE_NOISE_MESSAGE_SIZE = 74; - -static const uintptr_t ENCRYPTED_SIGNATURE_NOISE_MESSAGE_SIZE = (SIGNATURE_NOISE_MESSAGE_SIZE + MAC); - -static const uintptr_t INITIATOR_EXPECTED_HANDSHAKE_MESSAGE_SIZE = ((ELLSWIFT_ENCODING_SIZE + ENCRYPTED_ELLSWIFT_ENCODING_SIZE) + ENCRYPTED_SIGNATURE_NOISE_MESSAGE_SIZE); - -static const uint8_t SV2_MINING_PROTOCOL_DISCRIMINANT = 0; - -static const uint8_t SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT = 1; - -static const uint8_t SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT = 2; - -static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION = 0; - -static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION_SUCCESS = 1; - -static const uint8_t MESSAGE_TYPE_SETUP_CONNECTION_ERROR = 2; - -static const uint8_t MESSAGE_TYPE_CHANNEL_ENDPOINT_CHANGED = 3; - -static const uint8_t MESSAGE_TYPE_COINBASE_OUTPUT_DATA_SIZE = 112; - -static const uint8_t MESSAGE_TYPE_NEW_TEMPLATE = 113; - -static const uint8_t MESSAGE_TYPE_SET_NEW_PREV_HASH = 114; - -static const uint8_t MESSAGE_TYPE_REQUEST_TRANSACTION_DATA = 115; - -static const uint8_t MESSAGE_TYPE_REQUEST_TRANSACTION_DATA_SUCCESS = 116; - -static const uint8_t MESSAGE_TYPE_REQUEST_TRANSACTION_DATA_ERROR = 117; - -static const uint8_t MESSAGE_TYPE_SUBMIT_SOLUTION = 118; - -static const uint8_t MESSAGE_TYPE_ALLOCATE_MINING_JOB_TOKEN = 80; - -static const uint8_t MESSAGE_TYPE_ALLOCATE_MINING_JOB_TOKEN_SUCCESS = 81; - -static const uint8_t MESSAGE_TYPE_DECLARE_MINING_JOB = 87; - -static const uint8_t MESSAGE_TYPE_DECLARE_MINING_JOB_SUCCESS = 88; - -static const uint8_t MESSAGE_TYPE_DECLARE_MINING_JOB_ERROR = 89; - -static const uint8_t MESSAGE_TYPE_IDENTIFY_TRANSACTIONS = 83; - -static const uint8_t MESSAGE_TYPE_IDENTIFY_TRANSACTIONS_SUCCESS = 84; - -static const uint8_t MESSAGE_TYPE_PROVIDE_MISSING_TRANSACTIONS = 85; - -static const 
uint8_t MESSAGE_TYPE_PROVIDE_MISSING_TRANSACTIONS_SUCCESS = 86; - -static const uint8_t MESSAGE_TYPE_SUBMIT_SOLUTION_JD = 96; - -static const uint8_t MESSAGE_TYPE_CLOSE_CHANNEL = 24; - -/// This has been cahnged before was 0x1e it can be that old Sv2 implementation still use the old -/// one but this means that old impl are not following Sv2 spec -static const uint8_t MESSAGE_TYPE_NEW_EXTENDED_MINING_JOB = 31; - -static const uint8_t MESSAGE_TYPE_NEW_MINING_JOB = 21; - -static const uint8_t MESSAGE_TYPE_OPEN_EXTENDED_MINING_CHANNEL = 19; - -static const uint8_t MESSAGE_TYPE_OPEN_EXTENDED_MINING_CHANNEL_SUCCES = 20; - -static const uint8_t MESSAGE_TYPE_OPEN_MINING_CHANNEL_ERROR = 18; - -static const uint8_t MESSAGE_TYPE_OPEN_STANDARD_MINING_CHANNEL = 16; - -static const uint8_t MESSAGE_TYPE_OPEN_STANDARD_MINING_CHANNEL_SUCCESS = 17; - -static const uint8_t MESSAGE_TYPE_RECONNECT = 37; - -static const uint8_t MESSAGE_TYPE_SET_CUSTOM_MINING_JOB = 34; - -static const uint8_t MESSAGE_TYPE_SET_CUSTOM_MINING_JOB_ERROR = 36; - -static const uint8_t MESSAGE_TYPE_SET_CUSTOM_MINING_JOB_SUCCESS = 35; - -static const uint8_t MESSAGE_TYPE_SET_EXTRANONCE_PREFIX = 25; - -static const uint8_t MESSAGE_TYPE_SET_GROUP_CHANNEL = 38; - -static const uint8_t MESSAGE_TYPE_MINING_SET_NEW_PREV_HASH = 32; - -static const uint8_t MESSAGE_TYPE_SET_TARGET = 33; - -static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_ERROR = 29; - -static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_EXTENDED = 27; - -static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_STANDARD = 26; - -static const uint8_t MESSAGE_TYPE_SUBMIT_SHARES_SUCCESS = 28; - -static const uint8_t MESSAGE_TYPE_UPDATE_CHANNEL = 22; - -static const uint8_t MESSAGE_TYPE_UPDATE_CHANNEL_ERROR = 23; - -static const bool CHANNEL_BIT_SETUP_CONNECTION = false; - -static const bool CHANNEL_BIT_SETUP_CONNECTION_SUCCESS = false; - -static const bool CHANNEL_BIT_SETUP_CONNECTION_ERROR = false; - -static const bool CHANNEL_BIT_CHANNEL_ENDPOINT_CHANGED = true; - 
-static const bool CHANNEL_BIT_COINBASE_OUTPUT_DATA_SIZE = false; - -static const bool CHANNEL_BIT_NEW_TEMPLATE = false; - -static const bool CHANNEL_BIT_SET_NEW_PREV_HASH = false; - -static const bool CHANNEL_BIT_REQUEST_TRANSACTION_DATA = false; - -static const bool CHANNEL_BIT_REQUEST_TRANSACTION_DATA_SUCCESS = false; - -static const bool CHANNEL_BIT_REQUEST_TRANSACTION_DATA_ERROR = false; - -static const bool CHANNEL_BIT_SUBMIT_SOLUTION = false; - -static const bool CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN = false; - -static const bool CHANNEL_BIT_ALLOCATE_MINING_JOB_TOKEN_SUCCESS = false; - -static const bool CHANNEL_BIT_DECLARE_MINING_JOB = false; - -static const bool CHANNEL_BIT_DECLARE_MINING_JOB_SUCCESS = false; - -static const bool CHANNEL_BIT_DECLARE_MINING_JOB_ERROR = false; - -static const bool CHANNEL_BIT_IDENTIFY_TRANSACTIONS = false; - -static const bool CHANNEL_BIT_IDENTIFY_TRANSACTIONS_SUCCESS = false; - -static const bool CHANNEL_BIT_PROVIDE_MISSING_TRANSACTIONS = false; - -static const bool CHANNEL_BIT_PROVIDE_MISSING_TRANSACTIONS_SUCCESS = false; - -static const bool CHANNEL_BIT_SUBMIT_SOLUTION_JD = true; - -static const bool CHANNEL_BIT_CLOSE_CHANNEL = true; - -static const bool CHANNEL_BIT_NEW_EXTENDED_MINING_JOB = true; - -static const bool CHANNEL_BIT_NEW_MINING_JOB = true; - -static const bool CHANNEL_BIT_OPEN_EXTENDED_MINING_CHANNEL = false; - -static const bool CHANNEL_BIT_OPEN_EXTENDED_MINING_CHANNEL_SUCCES = false; - -static const bool CHANNEL_BIT_OPEN_MINING_CHANNEL_ERROR = false; - -static const bool CHANNEL_BIT_OPEN_STANDARD_MINING_CHANNEL = false; - -static const bool CHANNEL_BIT_OPEN_STANDARD_MINING_CHANNEL_SUCCESS = false; - -static const bool CHANNEL_BIT_RECONNECT = false; - -static const bool CHANNEL_BIT_SET_CUSTOM_MINING_JOB = false; - -static const bool CHANNEL_BIT_SET_CUSTOM_MINING_JOB_ERROR = false; - -static const bool CHANNEL_BIT_SET_CUSTOM_MINING_JOB_SUCCESS = false; - -static const bool CHANNEL_BIT_SET_EXTRANONCE_PREFIX = 
true; - -static const bool CHANNEL_BIT_SET_GROUP_CHANNEL = false; - -static const bool CHANNEL_BIT_MINING_SET_NEW_PREV_HASH = true; - -static const bool CHANNEL_BIT_SET_TARGET = true; - -static const bool CHANNEL_BIT_SUBMIT_SHARES_ERROR = true; - -static const bool CHANNEL_BIT_SUBMIT_SHARES_EXTENDED = true; - -static const bool CHANNEL_BIT_SUBMIT_SHARES_STANDARD = true; - -static const bool CHANNEL_BIT_SUBMIT_SHARES_SUCCESS = true; - -static const bool CHANNEL_BIT_UPDATE_CHANNEL = true; - -static const bool CHANNEL_BIT_UPDATE_CHANNEL_ERROR = true; -#include -#include -#include -#include -#include - -struct CVec { - uint8_t *data; - uintptr_t len; - uintptr_t capacity; -}; - -struct CVec2 { - CVec *data; - uintptr_t len; - uintptr_t capacity; -}; - -struct U24 { - uint32_t _0; -}; - -extern "C" { - -/// Given a C allocated buffer return a rust allocated CVec -/// -/// # Safety -/// -CVec cvec_from_buffer(const uint8_t *data, uintptr_t len); - -/// # Safety -/// -CVec2 init_cvec2(); - -/// The caller is reponsible for NOT adding duplicate cvecs to the cvec2 structure, -/// as this can lead to double free errors when the message is dropped. 
-/// # Safety -/// -void cvec2_push(CVec2 *cvec2, CVec cvec); - -void _c_export_u24(U24 _a); - -void _c_export_cvec(CVec _a); - -void _c_export_cvec2(CVec2 _a); - -} // extern "C" -#include -#include -#include -#include -#include - -/// MiningProtocol = [`SV2_MINING_PROTOCOL_DISCRIMINANT`], -/// JobDeclarationProtocol = [`SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT`], -/// TemplateDistributionProtocol = [`SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT`], -enum class Protocol : uint8_t { - MiningProtocol = SV2_MINING_PROTOCOL_DISCRIMINANT, - JobDeclarationProtocol = SV2_JOB_DECLARATION_PROTOCOL_DISCRIMINANT, - TemplateDistributionProtocol = SV2_TEMPLATE_DISTR_PROTOCOL_DISCRIMINANT, -}; - -/// ## ChannelEndpointChanged (Server -> Client) -/// When a channel’s upstream or downstream endpoint changes and that channel had previously -/// sent messages with [channel_msg] bitset of unknown extension_type, the intermediate proxy -/// MUST send a [`ChannelEndpointChanged`] message. Upon receipt thereof, any extension state -/// (including version negotiation and the presence of support for a given extension) MUST be -/// reset and version/presence negotiation must begin again. -/// -struct ChannelEndpointChanged { - /// The channel which has changed endpoint. - uint32_t channel_id; -}; - -/// ## SetupConnection.Success (Server -> Client) -/// Response to [`SetupConnection`] message if the server accepts the connection. The client is -/// required to verify the set of feature flags that the server supports and act accordingly. -struct SetupConnectionSuccess { - /// Selected version proposed by the connecting node that the upstream - /// node supports. This version will be used on the connection for the rest - /// of its life. - uint16_t used_version; - /// Flags indicating optional protocol features the server supports. Each - /// protocol from [`Protocol`] field has its own values/flags. 
- uint32_t flags; -}; - -struct CSetupConnection { - Protocol protocol; - uint16_t min_version; - uint16_t max_version; - uint32_t flags; - CVec endpoint_host; - uint16_t endpoint_port; - CVec vendor; - CVec hardware_version; - CVec firmware; - CVec device_id; -}; - -struct CSetupConnectionError { - uint32_t flags; - CVec error_code; -}; - -extern "C" { - -void _c_export_channel_endpoint_changed(ChannelEndpointChanged _a); - -void _c_export_setup_conn_succ(SetupConnectionSuccess _a); - -void free_setup_connection(CSetupConnection s); - -void free_setup_connection_error(CSetupConnectionError s); - -} // extern "C" -#include -#include -#include -#include -#include - -/// ## CoinbaseOutputDataSize (Client -> Server) -/// Ultimately, the pool is responsible for adding coinbase transaction outputs for payouts and -/// other uses, and thus the Template Provider will need to consider this additional block size -/// when selecting transactions for inclusion in a block (to not create an invalid, oversized block). -/// Thus, this message is used to indicate that some additional space in the block/coinbase -/// transaction be reserved for the pool’s use (while always assuming the pool will use the entirety -/// of available coinbase space). -/// The Job Declarator MUST discover the maximum serialized size of the additional outputs which -/// will be added by the pool(s) it intends to use this work. It then MUST communicate the -/// maximum such size to the Template Provider via this message. The Template Provider MUST -/// NOT provide NewWork messages which would represent consensus-invalid blocks once this -/// additional size — along with a maximally-sized (100 byte) coinbase field — is added. Further, -/// the Template Provider MUST consider the maximum additional bytes required in the output -/// count variable-length integer in the coinbase transaction when complying with the size limits. 
-struct CoinbaseOutputDataSize { - /// The maximum additional serialized bytes which the pool will add in - /// coinbase transaction outputs. - uint32_t coinbase_output_max_additional_size; -}; - -/// ## RequestTransactionData (Client -> Server) -/// A request sent by the Job Declarator to the Template Provider which requests the set of -/// transaction data for all transactions (excluding the coinbase transaction) included in a block, as -/// well as any additional data which may be required by the Pool to validate the work. -struct RequestTransactionData { - /// The template_id corresponding to a NewTemplate message. - uint64_t template_id; -}; - -struct CNewTemplate { - uint64_t template_id; - bool future_template; - uint32_t version; - uint32_t coinbase_tx_version; - CVec coinbase_prefix; - uint32_t coinbase_tx_input_sequence; - uint64_t coinbase_tx_value_remaining; - uint32_t coinbase_tx_outputs_count; - CVec coinbase_tx_outputs; - uint32_t coinbase_tx_locktime; - CVec2 merkle_path; -}; - -struct CRequestTransactionDataSuccess { - uint64_t template_id; - CVec excess_data; - CVec2 transaction_list; -}; - -struct CRequestTransactionDataError { - uint64_t template_id; - CVec error_code; -}; - -struct CSetNewPrevHash { - uint64_t template_id; - CVec prev_hash; - uint32_t header_timestamp; - uint32_t n_bits; - CVec target; -}; - -struct CSubmitSolution { - uint64_t template_id; - uint32_t version; - uint32_t header_timestamp; - uint32_t header_nonce; - CVec coinbase_tx; -}; - -extern "C" { - -void _c_export_coinbase_out(CoinbaseOutputDataSize _a); - -void _c_export_req_tx_data(RequestTransactionData _a); - -void free_new_template(CNewTemplate s); - -void free_request_tx_data_success(CRequestTransactionDataSuccess s); - -void free_request_tx_data_error(CRequestTransactionDataError s); - -void free_set_new_prev_hash(CSetNewPrevHash s); - -void free_submit_solution(CSubmitSolution s); - -} // extern "C" -#include -#include -#include -#include -#include - -struct 
CError { - enum class Tag { - /// Errors from the `binary_sv2` crate - BinarySv2Error, - /// Errors from the `framing_sv2` crate - FramingSv2Error, - /// Errors if there are missing bytes in the Noise protocol - MissingBytes, - /// Errors from the `noise_sv2` crate - NoiseSv2Error, - /// `snow` errors - AeadError, - /// Error if Noise protocol state is not as expected - UnexpectedNoiseState, - InvalidStepForResponder, - InvalidStepForInitiator, - NotInHandShakeState, - FramingError, - }; - - struct MissingBytes_Body { - uintptr_t _0; - }; - - Tag tag; - union { - MissingBytes_Body missing_bytes; - }; -}; - -extern "C" { - -/// Here only to force cbindgen to create header for CError -CError export_cerror(); - -} // extern "C" -#include -#include -#include -#include -#include - -struct DecoderWrapper; - -struct EncoderWrapper; - -struct CSv2Message { - enum class Tag { - CoinbaseOutputDataSize, - NewTemplate, - RequestTransactionData, - RequestTransactionDataError, - RequestTransactionDataSuccess, - SetNewPrevHash, - SubmitSolution, - ChannelEndpointChanged, - SetupConnection, - SetupConnectionError, - SetupConnectionSuccess, - }; - - struct CoinbaseOutputDataSize_Body { - CoinbaseOutputDataSize _0; - }; - - struct NewTemplate_Body { - CNewTemplate _0; - }; - - struct RequestTransactionData_Body { - RequestTransactionData _0; - }; - - struct RequestTransactionDataError_Body { - CRequestTransactionDataError _0; - }; - - struct RequestTransactionDataSuccess_Body { - CRequestTransactionDataSuccess _0; - }; - - struct SetNewPrevHash_Body { - CSetNewPrevHash _0; - }; - - struct SubmitSolution_Body { - CSubmitSolution _0; - }; - - struct ChannelEndpointChanged_Body { - ChannelEndpointChanged _0; - }; - - struct SetupConnection_Body { - CSetupConnection _0; - }; - - struct SetupConnectionError_Body { - CSetupConnectionError _0; - }; - - struct SetupConnectionSuccess_Body { - SetupConnectionSuccess _0; - }; - - Tag tag; - union { - CoinbaseOutputDataSize_Body 
coinbase_output_data_size; - NewTemplate_Body new_template; - RequestTransactionData_Body request_transaction_data; - RequestTransactionDataError_Body request_transaction_data_error; - RequestTransactionDataSuccess_Body request_transaction_data_success; - SetNewPrevHash_Body set_new_prev_hash; - SubmitSolution_Body submit_solution; - ChannelEndpointChanged_Body channel_endpoint_changed; - SetupConnection_Body setup_connection; - SetupConnectionError_Body setup_connection_error; - SetupConnectionSuccess_Body setup_connection_success; - }; -}; - -struct Sv2Error { - enum class Tag { - BinaryError, - CodecError, - EncoderBusy, - InvalidSv2Frame, - MissingBytes, - PayloadTooBig, - Unknown, - }; - - struct BinaryError_Body { - CError _0; - }; - - struct CodecError_Body { - CError _0; - }; - - struct PayloadTooBig_Body { - CVec _0; - }; - - Tag tag; - union { - BinaryError_Body binary_error; - CodecError_Body codec_error; - PayloadTooBig_Body payload_too_big; - }; -}; - -template -struct CResult { - enum class Tag { - Ok, - Err, - }; - - struct Ok_Body { - T _0; - }; - - struct Err_Body { - E _0; - }; - - Tag tag; - union { - Ok_Body ok; - Err_Body err; - }; -}; - -extern "C" { - -void drop_sv2_message(CSv2Message s); - -/// This function does nothing unless there is some heap allocated data owned by the C side that -/// needs to be dropped (specifically a `CVec`). In this case, `free_vec` is used in order to drop -/// that memory. 
-void drop_sv2_error(Sv2Error s); - -bool is_ok(const CResult *cresult); - -EncoderWrapper *new_encoder(); - -void flush_encoder(EncoderWrapper *encoder); - -void free_decoder(DecoderWrapper *decoder); - -/// # Safety -/// -CResult encode(CSv2Message *message, EncoderWrapper *encoder); - -DecoderWrapper *new_decoder(); - -CVec get_writable(DecoderWrapper *decoder); - -CResult next_frame(DecoderWrapper *decoder); - -} // extern "C" From ddad29f7dc659ee5c52c9e6a56d4727e28b4de6f Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Tue, 30 Jul 2024 00:45:31 -0400 Subject: [PATCH 072/101] remove rebase conflicts --- .../v2/subprotocols/common-messages/src/setup_connection.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs index 5bc6adb0e..f214802ea 100644 --- a/protocols/v2/subprotocols/common-messages/src/setup_connection.rs +++ b/protocols/v2/subprotocols/common-messages/src/setup_connection.rs @@ -119,7 +119,7 @@ impl<'decoder> SetupConnection<'decoder> { (false, false) => true, } } - Protocol::TemplateDistributionProtocol | Protocol::JobDistributionProtocol => { + Protocol::TemplateDistributionProtocol => { // These protocols do not define flags for setting up a connection. 
false } From df6b94a53bc12d6fdb847bb09f4c84b27c669356 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 1 Aug 2024 14:46:43 -0400 Subject: [PATCH 073/101] JDC parse incoming mining messages from upstream after SetupConnection `Upstream::parse_incoming` only calls `handle_message_mining`, which is not able to handle a `SetupConnection.Success` (because it belongs to the "Common Messages" category, not "Mining Messages") --- roles/jd-client/src/lib/mod.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/roles/jd-client/src/lib/mod.rs b/roles/jd-client/src/lib/mod.rs index 7e2fe7edc..db52a6c39 100644 --- a/roles/jd-client/src/lib/mod.rs +++ b/roles/jd-client/src/lib/mod.rs @@ -261,12 +261,6 @@ impl JobDeclaratorClient { } }; - // Start receiving messages from the SV2 Upstream role - if let Err(e) = upstream_sv2::Upstream::parse_incoming(upstream.clone()) { - error!("failed to create sv2 parser: {}", e); - panic!() - } - match upstream_sv2::Upstream::setup_connection( upstream.clone(), proxy_config.min_supported_version, @@ -281,6 +275,12 @@ impl JobDeclaratorClient { } } + // Start receiving messages from the SV2 Upstream role + if let Err(e) = upstream_sv2::Upstream::parse_incoming(upstream.clone()) { + error!("failed to create sv2 parser: {}", e); + panic!() + } + // Format `Downstream` connection address let downstream_addr = SocketAddr::new( IpAddr::from_str(&proxy_config.downstream_address).unwrap(), From f7ef878981eaebc11229c95204859a7c2a2a16ad Mon Sep 17 00:00:00 2001 From: plebhash Date: Sat, 25 May 2024 18:46:28 -0300 Subject: [PATCH 074/101] make no_std into an optional feature --- protocols/v2/binary-sv2/serde-sv2/Cargo.toml | 3 +++ protocols/v2/binary-sv2/serde-sv2/src/lib.rs | 2 +- protocols/v2/codec-sv2/Cargo.toml | 1 + protocols/v2/codec-sv2/src/lib.rs | 2 +- protocols/v2/const-sv2/Cargo.toml | 3 +++ protocols/v2/const-sv2/src/lib.rs | 2 +- protocols/v2/framing-sv2/Cargo.toml | 1 + protocols/v2/framing-sv2/src/lib.rs | 2 
+- protocols/v2/subprotocols/common-messages/Cargo.toml | 1 + protocols/v2/subprotocols/common-messages/src/lib.rs | 2 +- protocols/v2/subprotocols/job-declaration/Cargo.toml | 1 + protocols/v2/subprotocols/job-declaration/src/lib.rs | 2 +- protocols/v2/subprotocols/mining/Cargo.toml | 1 + protocols/v2/subprotocols/mining/src/lib.rs | 2 +- protocols/v2/subprotocols/template-distribution/Cargo.toml | 1 + protocols/v2/subprotocols/template-distribution/src/lib.rs | 2 +- 16 files changed, 20 insertions(+), 8 deletions(-) diff --git a/protocols/v2/binary-sv2/serde-sv2/Cargo.toml b/protocols/v2/binary-sv2/serde-sv2/Cargo.toml index 5e01eb2d3..07c52d664 100644 --- a/protocols/v2/binary-sv2/serde-sv2/Cargo.toml +++ b/protocols/v2/binary-sv2/serde-sv2/Cargo.toml @@ -13,3 +13,6 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } buffer_sv2 = {version = "^1.0.0", path = "../../../../utils/buffer"} + +[features] +no_std = [] diff --git a/protocols/v2/binary-sv2/serde-sv2/src/lib.rs b/protocols/v2/binary-sv2/serde-sv2/src/lib.rs index a573bf3bb..004a4dfce 100644 --- a/protocols/v2/binary-sv2/serde-sv2/src/lib.rs +++ b/protocols/v2/binary-sv2/serde-sv2/src/lib.rs @@ -72,7 +72,7 @@ //! [rkyv1]: https://docs.rs/rkyv/0.4.3/rkyv //! 
[rkyv2]: https://davidkoloski.me/blog/rkyv-is-faster-than/ -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] #[macro_use] extern crate alloc; diff --git a/protocols/v2/codec-sv2/Cargo.toml b/protocols/v2/codec-sv2/Cargo.toml index 9e2bddf1f..b3db39783 100644 --- a/protocols/v2/codec-sv2/Cargo.toml +++ b/protocols/v2/codec-sv2/Cargo.toml @@ -21,3 +21,4 @@ tracing = { version = "0.1"} [features] with_serde = ["binary_sv2/with_serde", "serde", "framing_sv2/with_serde", "buffer_sv2/with_serde"] with_buffer_pool = ["framing_sv2/with_buffer_pool"] +no_std = [] \ No newline at end of file diff --git a/protocols/v2/codec-sv2/src/lib.rs b/protocols/v2/codec-sv2/src/lib.rs index 0a2492890..c0594f9c1 100644 --- a/protocols/v2/codec-sv2/src/lib.rs +++ b/protocols/v2/codec-sv2/src/lib.rs @@ -1,4 +1,4 @@ -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] extern crate alloc; diff --git a/protocols/v2/const-sv2/Cargo.toml b/protocols/v2/const-sv2/Cargo.toml index af3067193..8736d91e0 100644 --- a/protocols/v2/const-sv2/Cargo.toml +++ b/protocols/v2/const-sv2/Cargo.toml @@ -14,3 +14,6 @@ secp256k1 = { version = "0.28.2", default-features = false, features =["hashes", #[dev-dependencies] #cbindgen = "0.16.0" + +[features] +no_std = [] diff --git a/protocols/v2/const-sv2/src/lib.rs b/protocols/v2/const-sv2/src/lib.rs index 507f63532..c9cda8277 100644 --- a/protocols/v2/const-sv2/src/lib.rs +++ b/protocols/v2/const-sv2/src/lib.rs @@ -1,5 +1,5 @@ //! 
Central repository for all the sv2 constants -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] pub const EXTENSION_TYPE_NO_EXTENSION: u16 = 0; diff --git a/protocols/v2/framing-sv2/Cargo.toml b/protocols/v2/framing-sv2/Cargo.toml index 67465688d..04f4c53dd 100644 --- a/protocols/v2/framing-sv2/Cargo.toml +++ b/protocols/v2/framing-sv2/Cargo.toml @@ -17,5 +17,6 @@ binary_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/binary-sv2/bina buffer_sv2 = { version = "^1.0.0", path = "../../../utils/buffer", optional=true } [features] +no_std = [] with_serde = ["binary_sv2/with_serde", "serde", "buffer_sv2/with_serde"] with_buffer_pool = ["binary_sv2/with_buffer_pool", "buffer_sv2"] diff --git a/protocols/v2/framing-sv2/src/lib.rs b/protocols/v2/framing-sv2/src/lib.rs index 33dd11fe2..cf792e65d 100644 --- a/protocols/v2/framing-sv2/src/lib.rs +++ b/protocols/v2/framing-sv2/src/lib.rs @@ -19,7 +19,7 @@ //! //! The `with_serde` feature flag is only used for the Message Generator, and deprecated for any other kind of usage. It will likely be fully deprecated in the future. 
-#![no_std] +#![cfg_attr(feature = "no_std", no_std)] extern crate alloc; /// SV2 framing types diff --git a/protocols/v2/subprotocols/common-messages/Cargo.toml b/protocols/v2/subprotocols/common-messages/Cargo.toml index 2f803c25e..3bbde6bfc 100644 --- a/protocols/v2/subprotocols/common-messages/Cargo.toml +++ b/protocols/v2/subprotocols/common-messages/Cargo.toml @@ -18,5 +18,6 @@ quickcheck_macros = { version = "1", optional=true } serde_repr = {version= "0.1.10", optional=true} [features] +no_std = [] with_serde = ["binary_sv2/with_serde", "serde", "serde_repr"] prop_test = ["quickcheck"] diff --git a/protocols/v2/subprotocols/common-messages/src/lib.rs b/protocols/v2/subprotocols/common-messages/src/lib.rs index e720306ab..cc0df87a2 100644 --- a/protocols/v2/subprotocols/common-messages/src/lib.rs +++ b/protocols/v2/subprotocols/common-messages/src/lib.rs @@ -1,4 +1,4 @@ -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] //! Common messages for [stratum v2][Sv2] //! The following protocol messages are common across all of the sv2 (sub)protocols. diff --git a/protocols/v2/subprotocols/job-declaration/Cargo.toml b/protocols/v2/subprotocols/job-declaration/Cargo.toml index 9db52a792..f09173fb6 100644 --- a/protocols/v2/subprotocols/job-declaration/Cargo.toml +++ b/protocols/v2/subprotocols/job-declaration/Cargo.toml @@ -14,4 +14,5 @@ binary_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/binary-sv2/bi const_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/const-sv2"} [features] +no_std = [] with_serde = ["binary_sv2/with_serde", "serde"] diff --git a/protocols/v2/subprotocols/job-declaration/src/lib.rs b/protocols/v2/subprotocols/job-declaration/src/lib.rs index 2f4bc42cc..fa03ca00a 100644 --- a/protocols/v2/subprotocols/job-declaration/src/lib.rs +++ b/protocols/v2/subprotocols/job-declaration/src/lib.rs @@ -1,4 +1,4 @@ -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] //! # Job Declaration Protocol //! 
diff --git a/protocols/v2/subprotocols/mining/Cargo.toml b/protocols/v2/subprotocols/mining/Cargo.toml index 493b77d63..4e84417bd 100644 --- a/protocols/v2/subprotocols/mining/Cargo.toml +++ b/protocols/v2/subprotocols/mining/Cargo.toml @@ -20,4 +20,5 @@ quickcheck = "1.0.3" quickcheck_macros = "1" [features] +no_std = [] with_serde = ["binary_sv2/with_serde", "serde"] diff --git a/protocols/v2/subprotocols/mining/src/lib.rs b/protocols/v2/subprotocols/mining/src/lib.rs index bfd0e4114..e82809a79 100644 --- a/protocols/v2/subprotocols/mining/src/lib.rs +++ b/protocols/v2/subprotocols/mining/src/lib.rs @@ -1,4 +1,4 @@ -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] //! # Mining Protocol //! ## Channels diff --git a/protocols/v2/subprotocols/template-distribution/Cargo.toml b/protocols/v2/subprotocols/template-distribution/Cargo.toml index 38cd6f56f..0b8a36a9c 100644 --- a/protocols/v2/subprotocols/template-distribution/Cargo.toml +++ b/protocols/v2/subprotocols/template-distribution/Cargo.toml @@ -17,5 +17,6 @@ quickcheck = { version = "1.0.3", optional=true } quickcheck_macros = { version = "1", optional=true } [features] +no_std = [] with_serde = ["binary_sv2/with_serde", "serde"] prop_test = ["quickcheck"] diff --git a/protocols/v2/subprotocols/template-distribution/src/lib.rs b/protocols/v2/subprotocols/template-distribution/src/lib.rs index 2f1549f2d..11f33afe3 100644 --- a/protocols/v2/subprotocols/template-distribution/src/lib.rs +++ b/protocols/v2/subprotocols/template-distribution/src/lib.rs @@ -1,4 +1,4 @@ -#![no_std] +#![cfg_attr(feature = "no_std", no_std)] //! # Template Distribution Protocol //! 
The Template Distribution protocol is used to receive updates of the block template to use in From bcf33305d2c1f332e76ed52970a5a8f0b8b3c9c3 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 8 Aug 2024 21:54:55 -0300 Subject: [PATCH 075/101] impl<'a> From>> for Seq0255<'a, U256<'a>> required to unblock https://github.com/stratum-mining/stratum/pull/985#issuecomment-2234328927 --- protocols/Cargo.lock | 4 ++-- protocols/v2/binary-sv2/serde-sv2/Cargo.toml | 2 +- .../serde-sv2/src/primitives/sequences/seq0255.rs | 9 +++++++++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index fbe5ddcea..ba38358d6 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -716,7 +716,7 @@ dependencies = [ [[package]] name = "serde_sv2" -version = "1.0.0" +version = "1.0.1" dependencies = [ "buffer_sv2", "serde", @@ -744,7 +744,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/protocols/v2/binary-sv2/serde-sv2/Cargo.toml b/protocols/v2/binary-sv2/serde-sv2/Cargo.toml index 07c52d664..2dcc744c8 100644 --- a/protocols/v2/binary-sv2/serde-sv2/Cargo.toml +++ b/protocols/v2/binary-sv2/serde-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "serde_sv2" -version = "1.0.0" +version = "1.0.1" authors = ["fi3 "] edition = "2018" description = "Serlializer and Deserializer for Stratum V2 data format" diff --git a/protocols/v2/binary-sv2/serde-sv2/src/primitives/sequences/seq0255.rs b/protocols/v2/binary-sv2/serde-sv2/src/primitives/sequences/seq0255.rs index 2f01bce3c..331edb34e 100644 --- a/protocols/v2/binary-sv2/serde-sv2/src/primitives/sequences/seq0255.rs +++ b/protocols/v2/binary-sv2/serde-sv2/src/primitives/sequences/seq0255.rs @@ -259,6 +259,15 @@ impl<'a> From> for Seq0255<'a, u32> { } } +impl<'a> From>> for Seq0255<'a, U256<'a>> { + fn from(v: Vec>) -> Self { 
+ Seq0255 { + seq: None, + data: Some(v), + } + } +} + impl<'a> From> for Vec { fn from(v: Seq0255) -> Self { if let Some(inner) = v.data { From 12df8e2b7e46312bcc4b5e92507e3598c44a02a6 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 8 Aug 2024 22:17:28 -0300 Subject: [PATCH 076/101] import TryInto on template_distribution_sv2 requirement to unblock https://github.com/stratum-mining/stratum/pull/985#issuecomment-2234328927 --- protocols/Cargo.lock | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 2 +- protocols/v2/subprotocols/template-distribution/Cargo.toml | 2 +- .../v2/subprotocols/template-distribution/src/new_template.rs | 2 ++ protocols/v2/sv2-ffi/Cargo.toml | 2 +- 5 files changed, 6 insertions(+), 4 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index ba38358d6..8eea1ef61 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -796,7 +796,7 @@ dependencies = [ [[package]] name = "template_distribution_sv2" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "const_sv2", diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index 4254d6a1f..1e20f03ba 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -14,7 +14,7 @@ serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = binary_sv2 = {version = "^1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2", default-features = true } common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^1.0.0" } mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^1.0.0" } -template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.0" } +template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } job_declaration_sv2 = { path = "../../../protocols/v2/subprotocols/job-declaration", 
version = "^1.0.0" } const_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/const-sv2"} framing_sv2 = { version = "^1.1.0", path = "../../../protocols/v2/framing-sv2" } diff --git a/protocols/v2/subprotocols/template-distribution/Cargo.toml b/protocols/v2/subprotocols/template-distribution/Cargo.toml index 0b8a36a9c..9aa2c1788 100644 --- a/protocols/v2/subprotocols/template-distribution/Cargo.toml +++ b/protocols/v2/subprotocols/template-distribution/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "template_distribution_sv2" -version = "1.0.0" +version = "1.0.1" authors = ["fi3 "] edition = "2018" description = "Sv2 template distribution subprotocol" diff --git a/protocols/v2/subprotocols/template-distribution/src/new_template.rs b/protocols/v2/subprotocols/template-distribution/src/new_template.rs index 67c5b1290..6c1f3b435 100644 --- a/protocols/v2/subprotocols/template-distribution/src/new_template.rs +++ b/protocols/v2/subprotocols/template-distribution/src/new_template.rs @@ -7,6 +7,8 @@ use binary_sv2::Error; use binary_sv2::{Deserialize, Seq0255, Serialize, B0255, B064K, U256}; #[cfg(not(feature = "with_serde"))] use core::convert::TryInto; +#[cfg(all(feature = "with_serde", not(feature = "no_std")))] +use std::convert::TryInto; /// ## NewTemplate (Server -> Client) /// The primary template-providing function. 
Note that the coinbase_tx_outputs bytes will appear diff --git a/protocols/v2/sv2-ffi/Cargo.toml b/protocols/v2/sv2-ffi/Cargo.toml index 7930f5805..f8c2fe328 100644 --- a/protocols/v2/sv2-ffi/Cargo.toml +++ b/protocols/v2/sv2-ffi/Cargo.toml @@ -15,7 +15,7 @@ codec_sv2 = { path = "../../../protocols/v2/codec-sv2", version = "^1.0.0" } const_sv2 = { path = "../../../protocols/v2/const-sv2", version = "^1.0.0" } binary_sv2 = { path = "../../../protocols/v2/binary-sv2/binary-sv2", version = "^1.0.0" } common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^1.0.0" } -template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.0" } +template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } [dev-dependencies] quickcheck = "1.0.3" From 100a331a3bcda0721835ac4f5ca17c8554df79ef Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 9 Aug 2024 12:49:27 -0300 Subject: [PATCH 077/101] enable framing_sv2/with_serde feature for roles_logic_sv2 requirement to unblock https://github.com/stratum-mining/stratum/pull/985#issuecomment-2234328927 --- protocols/v2/roles-logic-sv2/Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index 1e20f03ba..d7692bf47 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -35,7 +35,8 @@ with_serde = [ "serde", "common_messages_sv2/with_serde", "template_distribution_sv2/with_serde", "job_declaration_sv2/with_serde", -"mining_sv2/with_serde"] +"mining_sv2/with_serde", +"framing_sv2/with_serde"] prop_test = ["template_distribution_sv2/prop_test"] # Code coverage tools may conflict with the nopanic logic, so we can disable it when needed disable_nopanic = [] From 99443cec45fd056332e572e558d111da99eafb5e Mon Sep 17 00:00:00 2001 From: bit-aloo Date: 
Fri, 9 Aug 2024 07:01:40 -0400 Subject: [PATCH 078/101] Remove TOML support from jd-client crate - Removed dependencies related to TOML configuration. - Updated configuration handling to use ext-config crate instead. - Refactored code to eliminate TOML-specific logic. --- roles/jd-client/Cargo.toml | 2 +- roles/jd-client/src/lib/error.rs | 19 +++++++------------ roles/jd-client/src/lib/status.rs | 6 +++--- roles/jd-client/src/main.rs | 31 +++++++++++++++++++++---------- 4 files changed, 32 insertions(+), 26 deletions(-) diff --git a/roles/jd-client/Cargo.toml b/roles/jd-client/Cargo.toml index 4c79016d0..4afaf4fc4 100644 --- a/roles/jd-client/Cargo.toml +++ b/roles/jd-client/Cargo.toml @@ -23,7 +23,7 @@ roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-s serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } futures = "0.3.25" tokio = { version = "1", features = ["full"] } -toml = { version = "0.5.6", git = "https://github.com/diondokter/toml-rs", default-features = false, rev = "c4161aa" } +ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = { version = "0.1" } tracing-subscriber = { version = "0.3" } error_handling = { version = "1.0.0", path = "../../utils/error-handling" } diff --git a/roles/jd-client/src/lib/error.rs b/roles/jd-client/src/lib/error.rs index 685137457..c3e24bd4d 100644 --- a/roles/jd-client/src/lib/error.rs +++ b/roles/jd-client/src/lib/error.rs @@ -1,3 +1,4 @@ +use ext_config::ConfigError; use std::fmt; use roles_logic_sv2::mining_sv2::{ExtendedExtranonce, NewExtendedMiningJob, SetCustomMiningJob}; @@ -30,8 +31,8 @@ pub enum Error<'a> { VecToSlice32(Vec), /// Errors on bad CLI argument input. BadCliArgs, - /// Errors on bad `toml` deserialize. - BadTomlDeserialize(toml::de::Error), + /// Errors on bad `config` TOML deserialize. + BadConfigDeserialize(ConfigError), /// Errors from `binary_sv2` crate. 
BinarySv2(binary_sv2::Error), /// Errors on bad noise handshake. @@ -63,7 +64,7 @@ impl<'a> fmt::Display for Error<'a> { use Error::*; match self { BadCliArgs => write!(f, "Bad CLI arg input"), - BadTomlDeserialize(ref e) => write!(f, "Bad `toml` deserialize: `{:?}`", e), + BadConfigDeserialize(ref e) => write!(f, "Bad `config` TOML deserialize: `{:?}`", e), BinarySv2(ref e) => write!(f, "Binary SV2 error: `{:?}`", e), CodecNoise(ref e) => write!(f, "Noise error: `{:?}", e), FramingSv2(ref e) => write!(f, "Framing SV2 error: `{:?}`", e), @@ -119,9 +120,9 @@ impl<'a> From for Error<'a> { } } -impl<'a> From for Error<'a> { - fn from(e: toml::de::Error) -> Self { - Error::BadTomlDeserialize(e) +impl<'a> From for Error<'a> { + fn from(e: ConfigError) -> Self { + Error::BadConfigDeserialize(e) } } @@ -209,12 +210,6 @@ impl<'a> } } -impl<'a> From> for Error<'a> { - fn from(e: Vec) -> Self { - Error::VecToSlice32(e) - } -} - impl<'a> From for Error<'a> { fn from(e: ParseLengthError) -> Self { Error::Uint256Conversion(e) diff --git a/roles/jd-client/src/lib/status.rs b/roles/jd-client/src/lib/status.rs index 44e6056d2..292a4037a 100644 --- a/roles/jd-client/src/lib/status.rs +++ b/roles/jd-client/src/lib/status.rs @@ -84,7 +84,7 @@ async fn send_status( outcome } -// this is called by `error_handling::handle_result!` +// This is called by `error_handling::handle_result!` pub async fn handle_error( sender: &Sender, e: error::Error<'static>, @@ -94,8 +94,8 @@ pub async fn handle_error( Error::VecToSlice32(_) => send_status(sender, e, error_handling::ErrorBranch::Break).await, // Errors on bad CLI argument input. Error::BadCliArgs => send_status(sender, e, error_handling::ErrorBranch::Break).await, - // Errors on bad `toml` deserialize. - Error::BadTomlDeserialize(_) => { + // Errors on bad `config` TOML deserialize. + Error::BadConfigDeserialize(_) => { send_status(sender, e, error_handling::ErrorBranch::Break).await } // Errors from `binary_sv2` crate. 
diff --git a/roles/jd-client/src/main.rs b/roles/jd-client/src/main.rs index cbc3fbb61..763fb30be 100644 --- a/roles/jd-client/src/main.rs +++ b/roles/jd-client/src/main.rs @@ -9,20 +9,31 @@ use lib::{ }; use args::Args; +use ext_config::{Config, File, FileFormat}; use tracing::error; -/// Process CLI args, if any. +/// Process CLI args and load configuration. #[allow(clippy::result_large_err)] fn process_cli_args<'a>() -> ProxyResult<'a, ProxyConfig> { - let args = match Args::from_args() { - Ok(cfg) => cfg, - Err(help) => { - error!("{}", help); - return Err(Error::BadCliArgs); - } - }; - let config_file = std::fs::read_to_string(args.config_path)?; - Ok(toml::from_str::(&config_file)?) + // Parse CLI arguments + let args = Args::from_args().map_err(|help| { + error!("{}", help); + Error::BadCliArgs + })?; + + // Build configuration from the provided file path + let config_path = args.config_path.to_str().ok_or_else(|| { + error!("Invalid configuration path."); + Error::BadCliArgs + })?; + + let settings = Config::builder() + .add_source(File::new(config_path, FileFormat::Toml)) + .build()?; + + // Deserialize settings into ProxyConfig + let config = settings.try_deserialize::()?; + Ok(config) } /// TODO on the setup phase JDC must send a random nonce to bitcoind and JDS used for the tx From a6bfa3cf3bbb915ee21941bee458a3bb4d05cf90 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Fri, 9 Aug 2024 07:02:25 -0400 Subject: [PATCH 079/101] Remove TOML support from jd-server crate - Removed dependencies related to TOML configuration. - Updated configuration handling to use ext-config crate instead. - Refactored code to eliminate TOML-specific logic. 
--- roles/jd-server/Cargo.toml | 4 ++-- roles/jd-server/src/lib/mod.rs | 13 +++++++++---- roles/jd-server/src/main.rs | 14 ++++++++++---- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/roles/jd-server/Cargo.toml b/roles/jd-server/Cargo.toml index 6f55ece06..a3c66c872 100644 --- a/roles/jd-server/Cargo.toml +++ b/roles/jd-server/Cargo.toml @@ -21,7 +21,7 @@ noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } tokio = { version = "1", features = ["full"] } -toml = { version = "0.5.6", git = "https://github.com/diondokter/toml-rs", default-features = false, rev = "c4161aa" } +ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = { version = "0.1" } tracing-subscriber = "0.3" error_handling = { version = "1.0.0", path = "../../utils/error-handling" } @@ -31,4 +31,4 @@ serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = hashbrown = { version = "0.11", default-features = false, features = ["ahash", "serde"] } key-utils = { version = "^1.0.0", path = "../../utils/key-utils" } rpc_sv2 = { version = "1.0.0", path = "../roles-utils/rpc" } -hex = "0.4.3" +hex = "0.4.3" \ No newline at end of file diff --git a/roles/jd-server/src/lib/mod.rs b/roles/jd-server/src/lib/mod.rs index e71a3b476..07f7c6603 100644 --- a/roles/jd-server/src/lib/mod.rs +++ b/roles/jd-server/src/lib/mod.rs @@ -104,9 +104,9 @@ where _ => Err(serde::de::Error::custom("Unsupported duration unit")), } } - #[cfg(test)] mod tests { + use ext_config::{Config, File, FileFormat}; use std::path::PathBuf; use super::*; @@ -119,9 +119,14 @@ mod tests { config_path ); - let config_string = - std::fs::read_to_string(config_path).expect("Failed to read the config file"); - toml::from_str(&config_string).expect("Failed to parse config") + let config_path = config_path.to_str().unwrap(); + + let settings = Config::builder() 
+ .add_source(File::new(&config_path, FileFormat::Toml)) + .build() + .expect("Failed to build config"); + + settings.try_deserialize().expect("Failed to parse config") } #[test] diff --git a/roles/jd-server/src/main.rs b/roles/jd-server/src/main.rs index 9d56b491f..ac030445a 100644 --- a/roles/jd-server/src/main.rs +++ b/roles/jd-server/src/main.rs @@ -11,6 +11,7 @@ use tokio::{select, task}; use tracing::{error, info, warn}; mod lib; +use ext_config::{Config, File, FileFormat}; use lib::job_declarator::JobDeclarator; mod args { @@ -87,17 +88,22 @@ async fn main() { } }; + let config_path = args.config_path.to_str().expect("Invalid config path"); + // Load config - let config: Configuration = match std::fs::read_to_string(&args.config_path) { - Ok(c) => match toml::from_str(&c) { + let config: Configuration = match Config::builder() + .add_source(File::new(config_path, FileFormat::Toml)) + .build() + { + Ok(settings) => match settings.try_deserialize::() { Ok(c) => c, Err(e) => { - error!("Failed to parse config: {}", e); + error!("Failed to deserialize config: {}", e); return; } }, Err(e) => { - error!("Failed to read config: {}", e); + error!("Failed to build config: {}", e); return; } }; From aed6fc187265e6ec1cd2d6e8c861af9033dd6607 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Fri, 9 Aug 2024 07:03:19 -0400 Subject: [PATCH 080/101] Remove TOML support from mining-proxy crate - Removed dependencies related to TOML configuration. - Updated configuration handling to use ext-config crate instead. - Refactored code to eliminate TOML-specific logic. 
--- roles/mining-proxy/Cargo.toml | 2 +- roles/mining-proxy/src/lib/mod.rs | 4 ++-- roles/mining-proxy/src/main.rs | 23 ++++++++++++++++------- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/roles/mining-proxy/Cargo.toml b/roles/mining-proxy/Cargo.toml index 951645fde..9cd8664bc 100644 --- a/roles/mining-proxy/Cargo.toml +++ b/roles/mining-proxy/Cargo.toml @@ -25,7 +25,7 @@ once_cell = "1.12.0" roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } tokio = { version = "1", features = ["full"] } -toml = { version = "0.5.6", git = "https://github.com/diondokter/toml-rs", default-features = false, rev = "c4161aa" } +ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = {version = "0.1"} tracing-subscriber = {version = "0.3"} nohash-hasher = "0.2.0" diff --git a/roles/mining-proxy/src/lib/mod.rs b/roles/mining-proxy/src/lib/mod.rs index b9a9fd2f8..8c7a8563d 100644 --- a/roles/mining-proxy/src/lib/mod.rs +++ b/roles/mining-proxy/src/lib/mod.rs @@ -97,7 +97,7 @@ pub enum ChannelKind { } #[derive(Debug, Deserialize, Clone)] -pub struct Config { +pub struct Configuration { pub upstreams: Vec, pub listen_address: String, pub listen_mining_port: u16, @@ -110,7 +110,7 @@ pub struct Config { pub async fn initialize_r_logic( upstreams: &[UpstreamMiningValues], group_id: Arc>, - config: Config, + config: Configuration, ) -> RLogic { let channel_ids = Arc::new(Mutex::new(Id::new())); let mut upstream_mining_nodes = Vec::with_capacity(upstreams.len()); diff --git a/roles/mining-proxy/src/main.rs b/roles/mining-proxy/src/main.rs index 5931990ac..575098fe7 100644 --- a/roles/mining-proxy/src/main.rs +++ b/roles/mining-proxy/src/main.rs @@ -23,7 +23,8 @@ use std::{net::SocketAddr, sync::Arc}; use tokio::{net::TcpListener, sync::oneshot}; use tracing::{error, info}; -use lib::Config; +use ext_config::{Config, File, 
FileFormat}; +use lib::Configuration; use roles_logic_sv2::utils::{GroupId, Mutex}; mod lib; @@ -112,13 +113,21 @@ async fn main() { } }; - // Scan all the upstreams and map them - let config_file = std::fs::read_to_string(args.config_path.clone()) - .unwrap_or_else(|_| panic!("Can not open {:?}", args.config_path)); - let config = match toml::from_str::(&config_file) { - Ok(cfg) => cfg, + let config_path = args.config_path.to_str().expect("Invalid config path"); + + let config: Configuration = match Config::builder() + .add_source(File::new(config_path, FileFormat::Toml)) + .build() + { + Ok(settings) => match settings.try_deserialize::() { + Ok(c) => c, + Err(e) => { + error!("Failed to deserialize config: {}", e); + return; + } + }, Err(e) => { - error!("Failed to parse config file: {}", e); + error!("Failed to build config: {}", e); return; } }; From 5ee4b497b3c442c342d396ec2a2874a11ed6ff4a Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Fri, 9 Aug 2024 07:21:06 -0400 Subject: [PATCH 081/101] Remove TOML support from translator crate - Removed dependencies related to TOML configuration. - Updated configuration handling to use ext-config crate instead. - Refactored code to eliminate TOML-specific logic. 
--- roles/translator/Cargo.toml | 3 +-- roles/translator/src/lib/error.rs | 13 +++++++------ roles/translator/src/lib/status.rs | 4 ++-- roles/translator/src/main.rs | 29 ++++++++++++++++++++--------- 4 files changed, 30 insertions(+), 19 deletions(-) diff --git a/roles/translator/Cargo.toml b/roles/translator/Cargo.toml index 182370cbd..56b009423 100644 --- a/roles/translator/Cargo.toml +++ b/roles/translator/Cargo.toml @@ -26,7 +26,7 @@ serde = { version = "1.0.89", default-features = false, features = ["derive", "a serde_json = { version = "1.0.64", default-features = false, features = ["alloc"] } futures = "0.3.25" tokio = { version = "1", features = ["full"] } -toml = { version = "0.5.6", git = "https://github.com/diondokter/toml-rs", default-features = false, rev = "c4161aa" } +ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = { version = "0.1" } tracing-subscriber = { version = "0.3" } v1 = { version = "^1.0.0", path = "../../protocols/v1", package="sv1_api" } @@ -36,7 +36,6 @@ tokio-util = { version = "0.7.10", features = ["codec"] } async-compat = "0.2.1" - [dev-dependencies] rand = "0.8.4" sha2 = "0.10.6" diff --git a/roles/translator/src/lib/error.rs b/roles/translator/src/lib/error.rs index debad1819..8abd61690 100644 --- a/roles/translator/src/lib/error.rs +++ b/roles/translator/src/lib/error.rs @@ -1,3 +1,4 @@ +use ext_config::ConfigError; use roles_logic_sv2::{ mining_sv2::{ExtendedExtranonce, NewExtendedMiningJob, SetCustomMiningJob}, parsers::Mining, @@ -38,8 +39,8 @@ pub enum Error<'a> { BadCliArgs, /// Errors on bad `serde_json` serialize/deserialize. BadSerdeJson(serde_json::Error), - /// Errors on bad `toml` deserialize. - BadTomlDeserialize(toml::de::Error), + /// Errors on bad `config` TOML deserialize. + BadConfigDeserialize(ConfigError), /// Errors from `binary_sv2` crate. BinarySv2(binary_sv2::Error), /// Errors on bad noise handshake. 
@@ -83,7 +84,7 @@ impl<'a> fmt::Display for Error<'a> { match self { BadCliArgs => write!(f, "Bad CLI arg input"), BadSerdeJson(ref e) => write!(f, "Bad serde json: `{:?}`", e), - BadTomlDeserialize(ref e) => write!(f, "Bad `toml` deserialize: `{:?}`", e), + BadConfigDeserialize(ref e) => write!(f, "Bad `config` TOML deserialize: `{:?}`", e), BinarySv2(ref e) => write!(f, "Binary SV2 error: `{:?}`", e), CodecNoise(ref e) => write!(f, "Noise error: `{:?}", e), FramingSv2(ref e) => write!(f, "Framing SV2 error: `{:?}`", e), @@ -159,9 +160,9 @@ impl<'a> From for Error<'a> { } } -impl<'a> From<toml::de::Error> for Error<'a> { - fn from(e: toml::de::Error) -> Self { - Error::BadTomlDeserialize(e) +impl<'a> From<ConfigError> for Error<'a> { + fn from(e: ConfigError) -> Self { + Error::BadConfigDeserialize(e) } } diff --git a/roles/translator/src/lib/status.rs b/roles/translator/src/lib/status.rs index 3ecbcc634..4cdd770e2 100644 --- a/roles/translator/src/lib/status.rs +++ b/roles/translator/src/lib/status.rs @@ -113,8 +113,8 @@ pub async fn handle_error( Error::BadCliArgs => send_status(sender, e, error_handling::ErrorBranch::Break).await, // Errors on bad `serde_json` serialize/deserialize. Error::BadSerdeJson(_) => send_status(sender, e, error_handling::ErrorBranch::Break).await, - // Errors on bad `toml` deserialize. - Error::BadTomlDeserialize(_) => { + // Errors on bad `config` TOML deserialize. + Error::BadConfigDeserialize(_) => { send_status(sender, e, error_handling::ErrorBranch::Break).await } // Errors from `binary_sv2` crate. diff --git a/roles/translator/src/main.rs b/roles/translator/src/main.rs index c1307a5a2..f958c3fc8 100644 --- a/roles/translator/src/main.rs +++ b/roles/translator/src/main.rs @@ -16,6 +16,7 @@ use std::{ sync::Arc, }; +use ext_config::{Config, File, FileFormat}; use tokio::{sync::broadcast, task}; use v1::server_to_client; @@ -24,15 +25,25 @@ use tracing::{debug, error, info}; /// Process CLI args, if any. 
#[allow(clippy::result_large_err)] fn process_cli_args<'a>() -> ProxyResult<'a, ProxyConfig> { - let args = match Args::from_args() { - Ok(cfg) => cfg, - Err(help) => { - error!("{}", help); - return Err(Error::BadCliArgs); - } - }; - let config_file = std::fs::read_to_string(args.config_path)?; - Ok(toml::from_str::<ProxyConfig>(&config_file)?) + // Parse CLI arguments + let args = Args::from_args().map_err(|help| { + error!("{}", help); + Error::BadCliArgs + })?; + + // Build configuration from the provided file path + let config_path = args.config_path.to_str().ok_or_else(|| { + error!("Invalid configuration path."); + Error::BadCliArgs + })?; + + let settings = Config::builder() + .add_source(File::new(config_path, FileFormat::Toml)) + .build()?; + + // Deserialize settings into ProxyConfig + let config = settings.try_deserialize::<ProxyConfig>()?; + Ok(config) } #[tokio::main] From 4d7a93b83edbe084ec2c0d33b04f8739d8755e53 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Fri, 9 Aug 2024 07:23:30 -0400 Subject: [PATCH 082/101] Remove TOML support from pool crate - Removed dependencies related to TOML configuration. - Updated configuration handling to use ext-config crate instead. - Refactored code to eliminate TOML-specific logic. 
--- roles/pool/Cargo.toml | 2 +- roles/pool/src/lib/mining_pool/mod.rs | 28 ++++++++++++++++++++++----- roles/pool/src/main.rs | 14 ++++++++++---- 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/roles/pool/Cargo.toml b/roles/pool/Cargo.toml index 3b62c74e1..14eee1613 100644 --- a/roles/pool/Cargo.toml +++ b/roles/pool/Cargo.toml @@ -23,7 +23,7 @@ rand = "0.8.4" roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } tokio = { version = "1", features = ["full"] } -toml = { version = "0.5.6", git = "https://github.com/diondokter/toml-rs", default-features = false, rev = "c4161aa" } +ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = { version = "0.1" } tracing-subscriber = "0.3" async-recursion = "1.0.0" diff --git a/roles/pool/src/lib/mining_pool/mod.rs b/roles/pool/src/lib/mining_pool/mod.rs index 4b9d10b18..0db3f8a5f 100644 --- a/roles/pool/src/lib/mining_pool/mod.rs +++ b/roles/pool/src/lib/mining_pool/mod.rs @@ -657,23 +657,41 @@ impl Pool { #[cfg(test)] mod test { use binary_sv2::{B0255, B064K}; + use ext_config::{Config, File, FileFormat}; use std::convert::TryInto; + use tracing::error; use stratum_common::{ bitcoin, bitcoin::{util::psbt::serialize::Serialize, Transaction, Witness}, }; + use super::Configuration; + // this test is used to verify the `coinbase_tx_prefix` and `coinbase_tx_suffix` values tested against in // message generator `stratum/test/message-generator/test/pool-sri-test-extended.json` #[test] fn test_coinbase_outputs_from_config() { + let config_path = "./config-examples/pool-config-local-tp-example.toml"; + // Load config - let config: super::Configuration = toml::from_str( - &std::fs::read_to_string("./config-examples/pool-config-local-tp-example.toml") - .unwrap(), - ) - .unwrap(); + let config: Configuration = match Config::builder() + 
.add_source(File::new(&config_path, FileFormat::Toml)) + .build() + { + Ok(settings) => match settings.try_deserialize::<Configuration>() { + Ok(c) => c, + Err(e) => { + error!("Failed to deserialize config: {}", e); + return; + } + }, + Err(e) => { + error!("Failed to build config: {}", e); + return; + } + }; + // template from message generator test (mock TP template) let _extranonce_len = 3; let coinbase_prefix = vec![3, 76, 163, 38, 0]; diff --git a/roles/pool/src/main.rs b/roles/pool/src/main.rs index 169243c23..55d6e117a 100644 --- a/roles/pool/src/main.rs +++ b/roles/pool/src/main.rs @@ -9,6 +9,7 @@ use lib::{ template_receiver::TemplateRx, }; +use ext_config::{Config, File, FileFormat}; use tokio::select; mod args { @@ -86,17 +87,22 @@ async fn main() { } }; + let config_path = args.config_path.to_str().expect("Invalid config path"); + // Load config - let config: Configuration = match std::fs::read_to_string(&args.config_path) { - Ok(c) => match toml::from_str(&c) { + let config: Configuration = match Config::builder() + .add_source(File::new(config_path, FileFormat::Toml)) + .build() + { + Ok(settings) => match settings.try_deserialize::<Configuration>() { Ok(c) => c, Err(e) => { - error!("Failed to parse config: {}", e); + error!("Failed to deserialize config: {}", e); return; } }, Err(e) => { - error!("Failed to read config: {}", e); + error!("Failed to build config: {}", e); return; } }; From 86b6a557fc694a0659238389e37cebe325242605 Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 8 Aug 2024 15:07:15 -0300 Subject: [PATCH 083/101] bump framing_sv2 version --- benches/Cargo.toml | 2 +- protocols/Cargo.lock | 2 +- protocols/v2/codec-sv2/Cargo.toml | 2 +- protocols/v2/framing-sv2/Cargo.toml | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 2 +- roles/Cargo.lock | 4 ++-- roles/jd-client/Cargo.toml | 2 +- roles/translator/Cargo.toml | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/benches/Cargo.toml b/benches/Cargo.toml index ecba50ab8..525c526e5 100644 --- 
a/benches/Cargo.toml +++ b/benches/Cargo.toml @@ -12,7 +12,7 @@ serde_json = { version = "1.0.64", default-features = false, features = ["alloc" iai="0.1" mining_sv2 = { path = "../protocols/v2/subprotocols/mining", version = "^1.0.0" } roles_logic_sv2 = { path = "../protocols/v2/roles-logic-sv2", version = "^1.0.0" } -framing_sv2 = { version = "1.1.0", path = "../protocols/v2/framing-sv2" } +framing_sv2 = { version = "2.0.0", path = "../protocols/v2/framing-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } num-bigint = "0.4.3" num-traits = "0.2.15" diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 8eea1ef61..5eb7a37cf 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -290,7 +290,7 @@ dependencies = [ [[package]] name = "framing_sv2" -version = "1.1.0" +version = "2.0.0" dependencies = [ "binary_sv2", "buffer_sv2", diff --git a/protocols/v2/codec-sv2/Cargo.toml b/protocols/v2/codec-sv2/Cargo.toml index b3db39783..577bf9c79 100644 --- a/protocols/v2/codec-sv2/Cargo.toml +++ b/protocols/v2/codec-sv2/Cargo.toml @@ -9,7 +9,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional = true } -framing_sv2 = { version = "1.1.0", path = "../../../protocols/v2/framing-sv2" } +framing_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/framing-sv2" } noise_sv2 = { version = "1.0", path = "../../../protocols/v2/noise-sv2", optional=true} binary_sv2 = { version = "1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } const_sv2 = { version = "1.0.0", path = "../../../protocols/v2/const-sv2"} diff --git a/protocols/v2/framing-sv2/Cargo.toml b/protocols/v2/framing-sv2/Cargo.toml index 04f4c53dd..13a401d0b 100644 --- a/protocols/v2/framing-sv2/Cargo.toml +++ b/protocols/v2/framing-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "framing_sv2" -version = "1.1.0" +version = "2.0.0" authors = ["fi3 "] edition = "2018" 
description = "Sv2 frames" diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index d7692bf47..ae8541556 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -17,7 +17,7 @@ mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^1 template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } job_declaration_sv2 = { path = "../../../protocols/v2/subprotocols/job-declaration", version = "^1.0.0" } const_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/const-sv2"} -framing_sv2 = { version = "^1.1.0", path = "../../../protocols/v2/framing-sv2" } +framing_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/framing-sv2" } tracing = { version = "0.1"} chacha20poly1305 = { version = "0.10.1"} nohash-hasher = "0.2.0" diff --git a/roles/Cargo.lock b/roles/Cargo.lock index bf54dcc03..686e2f589 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -800,7 +800,7 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "framing_sv2" -version = "1.1.0" +version = "2.0.0" dependencies = [ "binary_sv2", "buffer_sv2", @@ -2053,7 +2053,7 @@ dependencies = [ [[package]] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/roles/jd-client/Cargo.toml b/roles/jd-client/Cargo.toml index 4afaf4fc4..dc2d46079 100644 --- a/roles/jd-client/Cargo.toml +++ b/roles/jd-client/Cargo.toml @@ -17,7 +17,7 @@ async-recursion = "0.3.2" binary_sv2 = { version = "^1.0.0", path = "../../protocols/v2/binary-sv2/binary-sv2" } buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2", "with_buffer_pool"] } -framing_sv2 = { version = "^1.1.0", path = "../../protocols/v2/framing-sv2" } +framing_sv2 = { version = 
"^2.0.0", path = "../../protocols/v2/framing-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features=["with_tokio", "with_buffer_pool"] } roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } diff --git a/roles/translator/Cargo.toml b/roles/translator/Cargo.toml index 56b009423..f835f8dd5 100644 --- a/roles/translator/Cargo.toml +++ b/roles/translator/Cargo.toml @@ -18,7 +18,7 @@ async-std = { version = "1.12.0", features = ["attributes"] } binary_sv2 = { version = "^1.0.0", path = "../../protocols/v2/binary-sv2/binary-sv2" } buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2", "with_buffer_pool"] } -framing_sv2 = { version = "^1.1.0", path = "../../protocols/v2/framing-sv2" } +framing_sv2 = { version = "^2.0.0", path = "../../protocols/v2/framing-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features=["async_std", "with_buffer_pool"] } once_cell = "1.12.0" roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } From 4162eb65986842dbdfed401b2208518865872f7e Mon Sep 17 00:00:00 2001 From: plebhash Date: Thu, 8 Aug 2024 15:30:05 -0300 Subject: [PATCH 084/101] bump codec_sv2 version --- protocols/Cargo.lock | 2 +- protocols/v2/codec-sv2/Cargo.toml | 2 +- protocols/v2/codec-sv2/src/lib.rs | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 5eb7a37cf..127fad336 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -199,7 +199,7 @@ dependencies = [ [[package]] name = "codec_sv2" -version = "1.2.0" +version = "1.2.1" dependencies = [ "binary_sv2", "buffer_sv2", diff --git a/protocols/v2/codec-sv2/Cargo.toml b/protocols/v2/codec-sv2/Cargo.toml index 
577bf9c79..d90d721ad 100644 --- a/protocols/v2/codec-sv2/Cargo.toml +++ b/protocols/v2/codec-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "codec_sv2" -version = "1.2.0" +version = "1.2.1" authors = ["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/codec-sv2/src/lib.rs b/protocols/v2/codec-sv2/src/lib.rs index c0594f9c1..a4eec1efd 100644 --- a/protocols/v2/codec-sv2/src/lib.rs +++ b/protocols/v2/codec-sv2/src/lib.rs @@ -1,5 +1,7 @@ #![cfg_attr(feature = "no_std", no_std)] +pub use framing_sv2::framing::Frame; + extern crate alloc; #[cfg(feature = "noise_sv2")] From ee0a93644182801b343c40f3568e07ec54f7bde9 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 9 Aug 2024 15:42:54 -0300 Subject: [PATCH 085/101] bump const_sv2 version --- protocols/Cargo.lock | 2 +- protocols/v2/codec-sv2/Cargo.toml | 2 +- protocols/v2/const-sv2/Cargo.toml | 2 +- protocols/v2/framing-sv2/Cargo.toml | 2 +- protocols/v2/noise-sv2/Cargo.toml | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 2 +- .../subprotocols/common-messages/Cargo.toml | 2 +- .../subprotocols/job-declaration/Cargo.toml | 2 +- protocols/v2/subprotocols/mining/Cargo.toml | 2 +- .../template-distribution/Cargo.toml | 2 +- protocols/v2/sv2-ffi/Cargo.toml | 2 +- roles/Cargo.lock | 333 ++++++++++++++++-- roles/jd-server/Cargo.toml | 2 +- roles/mining-proxy/Cargo.toml | 2 +- roles/pool/Cargo.toml | 2 +- roles/roles-utils/network-helpers/Cargo.toml | 2 +- roles/test-utils/mining-device/Cargo.toml | 2 +- utils/message-generator/Cargo.toml | 2 +- 18 files changed, 322 insertions(+), 45 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 127fad336..4cb7bb6fd 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -224,7 +224,7 @@ dependencies = [ [[package]] name = "const_sv2" -version = "1.0.0" +version = "2.0.0" dependencies = [ "secp256k1 0.28.2", ] diff --git a/protocols/v2/codec-sv2/Cargo.toml b/protocols/v2/codec-sv2/Cargo.toml index d90d721ad..20b0288af 100644 
--- a/protocols/v2/codec-sv2/Cargo.toml +++ b/protocols/v2/codec-sv2/Cargo.toml @@ -12,7 +12,7 @@ serde = { version = "1.0.89", default-features = false, optional = true } framing_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/framing-sv2" } noise_sv2 = { version = "1.0", path = "../../../protocols/v2/noise-sv2", optional=true} binary_sv2 = { version = "1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } -const_sv2 = { version = "1.0.0", path = "../../../protocols/v2/const-sv2"} +const_sv2 = { version = "2.0.0", path = "../../../protocols/v2/const-sv2"} buffer_sv2 = { version = "1.0.0", path = "../../../utils/buffer"} tracing = { version = "0.1"} diff --git a/protocols/v2/const-sv2/Cargo.toml b/protocols/v2/const-sv2/Cargo.toml index 8736d91e0..1a74cfa0b 100644 --- a/protocols/v2/const-sv2/Cargo.toml +++ b/protocols/v2/const-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "const_sv2" -version = "1.0.0" +version = "2.0.0" authors = ["fi3 "] edition = "2018" description = "Sv2 constatnts" diff --git a/protocols/v2/framing-sv2/Cargo.toml b/protocols/v2/framing-sv2/Cargo.toml index 13a401d0b..4450a4653 100644 --- a/protocols/v2/framing-sv2/Cargo.toml +++ b/protocols/v2/framing-sv2/Cargo.toml @@ -12,7 +12,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional = true } -const_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/const-sv2"} +const_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/const-sv2"} binary_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } buffer_sv2 = { version = "^1.0.0", path = "../../../utils/buffer", optional=true } diff --git a/protocols/v2/noise-sv2/Cargo.toml b/protocols/v2/noise-sv2/Cargo.toml index 7316fddb8..c627bc1af 100644 --- a/protocols/v2/noise-sv2/Cargo.toml +++ b/protocols/v2/noise-sv2/Cargo.toml @@ -13,7 +13,7 @@ rand = {version = "0.8.5", default-features = false, features = 
["std","std_rng" aes-gcm = "0.10.2" chacha20poly1305 = "0.10.1" rand_chacha = "0.3.1" -const_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/const-sv2"} +const_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/const-sv2"} [dev-dependencies] quickcheck = "1.0.3" diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index ae8541556..297fe72d6 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -16,7 +16,7 @@ common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messag mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^1.0.0" } template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } job_declaration_sv2 = { path = "../../../protocols/v2/subprotocols/job-declaration", version = "^1.0.0" } -const_sv2 = { version = "^1.0.0", path = "../../../protocols/v2/const-sv2"} +const_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/const-sv2"} framing_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/framing-sv2" } tracing = { version = "0.1"} chacha20poly1305 = { version = "0.10.1"} diff --git a/protocols/v2/subprotocols/common-messages/Cargo.toml b/protocols/v2/subprotocols/common-messages/Cargo.toml index 3bbde6bfc..ad3262fb5 100644 --- a/protocols/v2/subprotocols/common-messages/Cargo.toml +++ b/protocols/v2/subprotocols/common-messages/Cargo.toml @@ -12,7 +12,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional= true } binary_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/binary-sv2/binary-sv2" } -const_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/const-sv2"} +const_sv2 = {version = "^2.0.0", path = "../../../../protocols/v2/const-sv2"} quickcheck = { version = "1.0.3", optional=true } quickcheck_macros = { version = "1", 
optional=true } serde_repr = {version= "0.1.10", optional=true} diff --git a/protocols/v2/subprotocols/job-declaration/Cargo.toml b/protocols/v2/subprotocols/job-declaration/Cargo.toml index f09173fb6..2ee4313f4 100644 --- a/protocols/v2/subprotocols/job-declaration/Cargo.toml +++ b/protocols/v2/subprotocols/job-declaration/Cargo.toml @@ -11,7 +11,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional= true } binary_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/binary-sv2/binary-sv2" } -const_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/const-sv2"} +const_sv2 = {version = "^2.0.0", path = "../../../../protocols/v2/const-sv2"} [features] no_std = [] diff --git a/protocols/v2/subprotocols/mining/Cargo.toml b/protocols/v2/subprotocols/mining/Cargo.toml index 4e84417bd..c98bb7ec9 100644 --- a/protocols/v2/subprotocols/mining/Cargo.toml +++ b/protocols/v2/subprotocols/mining/Cargo.toml @@ -13,7 +13,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional= true } binary_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/binary-sv2/binary-sv2" } -const_sv2 = {version = "^1.0.0", path = "../../../../protocols/v2/const-sv2"} +const_sv2 = {version = "^2.0.0", path = "../../../../protocols/v2/const-sv2"} [dev-dependencies] quickcheck = "1.0.3" diff --git a/protocols/v2/subprotocols/template-distribution/Cargo.toml b/protocols/v2/subprotocols/template-distribution/Cargo.toml index 9aa2c1788..b348f8954 100644 --- a/protocols/v2/subprotocols/template-distribution/Cargo.toml +++ b/protocols/v2/subprotocols/template-distribution/Cargo.toml @@ -12,7 +12,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional= true } binary_sv2 = { version = "^1.0.0", path = 
"../../../../protocols/v2/binary-sv2/binary-sv2" } -const_sv2 = { version = "^1.0.0", path = "../../../../protocols/v2/const-sv2"} +const_sv2 = { version = "^2.0.0", path = "../../../../protocols/v2/const-sv2"} quickcheck = { version = "1.0.3", optional=true } quickcheck_macros = { version = "1", optional=true } diff --git a/protocols/v2/sv2-ffi/Cargo.toml b/protocols/v2/sv2-ffi/Cargo.toml index f8c2fe328..3906af416 100644 --- a/protocols/v2/sv2-ffi/Cargo.toml +++ b/protocols/v2/sv2-ffi/Cargo.toml @@ -12,7 +12,7 @@ crate-type = ["staticlib"] [dependencies] codec_sv2 = { path = "../../../protocols/v2/codec-sv2", version = "^1.0.0" } -const_sv2 = { path = "../../../protocols/v2/const-sv2", version = "^1.0.0" } +const_sv2 = { path = "../../../protocols/v2/const-sv2", version = "^2.0.0" } binary_sv2 = { path = "../../../protocols/v2/binary-sv2/binary-sv2", version = "^1.0.0" } common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^1.0.0" } template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } diff --git a/roles/Cargo.lock b/roles/Cargo.lock index 686e2f589..7abea3787 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -52,12 +52,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "ahash" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217" - [[package]] name = "ahash" version = "0.7.8" @@ -316,6 +310,17 @@ version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" +[[package]] +name = "async-trait" +version = "0.1.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.61", +] + [[package]] name = 
"atomic-waker" version = "1.1.2" @@ -437,6 +442,9 @@ name = "bitflags" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +dependencies = [ + "serde", +] [[package]] name = "block-buffer" @@ -594,7 +602,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "codec_sv2" -version = "1.2.0" +version = "1.2.1" dependencies = [ "binary_sv2", "buffer_sv2", @@ -628,13 +636,62 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "config" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7328b20597b53c2454f0b1919720c25c7339051c02b72b7e05409e00b14132be" +dependencies = [ + "async-trait", + "convert_case", + "json5", + "lazy_static", + "nom", + "pathdiff", + "ron", + "rust-ini", + "serde", + "serde_json", + "toml", + "yaml-rust", +] + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom", + "once_cell", + "tiny-keccak", +] + [[package]] name = "const_sv2" -version = "1.0.0" +version = "2.0.0" dependencies = [ "secp256k1 0.28.2", ] +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "cpufeatures" version = "0.2.12" @@ -650,6 +707,12 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-common" version = "0.1.6" @@ -696,6 +759,15 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + [[package]] name = "env_logger" version = "0.7.1" @@ -995,23 +1067,19 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.7.2" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96282e96bfcd3da0d3aa9938bedf1e50df3269b6db08b4876d2da0bb1a0841cf" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" dependencies = [ - "ahash 0.3.8", - "autocfg", + "ahash", + "serde", ] [[package]] name = "hashbrown" -version = "0.11.2" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash 0.7.8", - "serde", -] +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" [[package]] name = "hashbrown" @@ -1208,6 +1276,7 @@ dependencies = [ "binary_sv2", "buffer_sv2", "codec_sv2", + "config", "error_handling", "framing_sv2", "futures", @@ -1218,7 +1287,6 @@ dependencies = [ "serde", "stratum-common", "tokio", - "toml", "tracing", "tracing-subscriber", ] @@ -1231,6 +1299,7 @@ dependencies = [ "binary_sv2", "buffer_sv2", "codec_sv2", + "config", "const_sv2", "error_handling", "hashbrown 0.11.2", @@ -1246,7 +1315,6 @@ dependencies = [ "serde_json", "stratum-common", "tokio", - "toml", "tracing", "tracing-subscriber", ] @@ -1268,6 +1336,17 @@ dependencies 
= [ "wasm-bindgen", ] +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + [[package]] name = "key-utils" version = "1.1.0" @@ -1298,6 +1377,12 @@ version = "0.2.154" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + [[package]] name = "linux-raw-sys" version = "0.3.8" @@ -1335,6 +1420,12 @@ version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "mining-device" version = "0.1.1" @@ -1367,6 +1458,7 @@ dependencies = [ "binary_sv2", "buffer_sv2", "codec_sv2", + "config", "const_sv2", "futures", "key-utils", @@ -1377,7 +1469,6 @@ dependencies = [ "serde", "stratum-common", "tokio", - "toml", "tracing", "tracing-subscriber", ] @@ -1443,6 +1534,16 @@ dependencies = [ "secp256k1 0.28.2", ] +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -1512,6 +1613,16 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "ordered-multimap" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ed8acf08e98e744e5384c8bc63ceb0364e68a6854187221c18df61c4797690e" +dependencies = [ + "dlv-list", + "hashbrown 0.13.2", +] + [[package]] name = "overload" version = "0.1.1" @@ -1547,6 +1658,57 @@ dependencies = [ "windows-targets 0.52.5", ] +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + +[[package]] +name = "pest" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.61", +] + +[[package]] +name = "pest_meta" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.8", +] + [[package]] name = "pin-project" version = "1.1.5" @@ -1653,6 +1815,7 @@ dependencies = [ "binary_sv2", "buffer_sv2", "codec_sv2", + "config", "const_sv2", "error_handling", "hex", @@ -1665,7 +1828,6 @@ dependencies = [ "serde", "stratum-common", "tokio", - "toml", "tracing", 
"tracing-subscriber", ] @@ -1796,6 +1958,18 @@ dependencies = [ "tracing", ] +[[package]] +name = "ron" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" +dependencies = [ + "base64", + "bitflags 2.5.0", + "serde", + "serde_derive", +] + [[package]] name = "rpc_sv2" version = "1.0.0" @@ -1810,6 +1984,16 @@ dependencies = [ "stratum-common", ] +[[package]] +name = "rust-ini" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e2a3bcec1f113553ef1c88aae6c020a369d03d55b58de9869a0908930385091" +dependencies = [ + "cfg-if", + "ordered-multimap", +] + [[package]] name = "rustc-demangle" version = "0.1.24" @@ -1925,9 +2109,18 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +dependencies = [ + "serde", +] + [[package]] name = "serde_sv2" -version = "1.0.0" +version = "1.0.1" dependencies = [ "buffer_sv2", "serde", @@ -2090,7 +2283,7 @@ dependencies = [ [[package]] name = "template_distribution_sv2" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "const_sv2", @@ -2105,6 +2298,26 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "thiserror" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.61", +] + [[package]] name = "thread_local" version = "1.1.8" @@ -2115,6 +2328,15 @@ dependencies = [ 
"once_cell", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tokio" version = "1.38.0" @@ -2160,11 +2382,36 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.6" -source = "git+https://github.com/diondokter/toml-rs?rev=c4161aa#c4161aa70202b3992dbec79b76e7a8659713b604" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ - "hashbrown 0.7.2", + "indexmap", "serde", + "serde_spanned", + "toml_datetime", + "winnow", ] [[package]] @@ -2264,6 +2511,7 @@ dependencies = [ "binary_sv2", "buffer_sv2", "codec_sv2", + "config", "error_handling", "framing_sv2", "futures", @@ -2279,7 +2527,6 @@ dependencies = [ "sv1_api", "tokio", "tokio-util", - "toml", "tracing", "tracing-subscriber", ] @@ -2296,12 +2543,24 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + [[package]] name = "unicode-ident" version = "1.0.12" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + [[package]] name = "universal-hash" version = "0.5.1" @@ -2603,6 +2862,24 @@ version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +[[package]] +name = "winnow" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +dependencies = [ + "memchr", +] + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + [[package]] name = "zeroize" version = "1.7.0" diff --git a/roles/jd-server/Cargo.toml b/roles/jd-server/Cargo.toml index a3c66c872..e8d82438a 100644 --- a/roles/jd-server/Cargo.toml +++ b/roles/jd-server/Cargo.toml @@ -15,7 +15,7 @@ async-channel = "1.5.1" binary_sv2 = { version = "^1.0.0", path = "../../protocols/v2/binary-sv2/binary-sv2" } buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2"] } -const_sv2 = { version = "^1.0.0", path = "../../protocols/v2/const-sv2" } +const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features = ["with_tokio"] } noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" diff --git a/roles/mining-proxy/Cargo.toml b/roles/mining-proxy/Cargo.toml index 
9cd8664bc..8ffb5d095 100644 --- a/roles/mining-proxy/Cargo.toml +++ b/roles/mining-proxy/Cargo.toml @@ -18,7 +18,7 @@ async-recursion = "0.3.2" binary_sv2 = { version = "^1.0.0", path = "../../protocols/v2/binary-sv2/binary-sv2" } buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2", "with_buffer_pool"] } -const_sv2 = { version = "^1.0.0", path = "../../protocols/v2/const-sv2" } +const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } futures = "0.3.19" network_helpers_sv2 = {version = "2.0.0", path = "../roles-utils/network-helpers", features = ["with_tokio","with_buffer_pool"] } once_cell = "1.12.0" diff --git a/roles/pool/Cargo.toml b/roles/pool/Cargo.toml index 14eee1613..c253e6b3e 100644 --- a/roles/pool/Cargo.toml +++ b/roles/pool/Cargo.toml @@ -16,7 +16,7 @@ async-channel = "1.5.1" binary_sv2 = { version = "^1.0.0", path = "../../protocols/v2/binary-sv2/binary-sv2" } buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2"] } -const_sv2 = { version = "^1.0.0", path = "../../protocols/v2/const-sv2" } +const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features =["with_tokio","with_buffer_pool"] } noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" diff --git a/roles/roles-utils/network-helpers/Cargo.toml b/roles/roles-utils/network-helpers/Cargo.toml index e2c69806a..b4af8f293 100644 --- a/roles/roles-utils/network-helpers/Cargo.toml +++ b/roles/roles-utils/network-helpers/Cargo.toml @@ -15,7 +15,7 @@ async-channel = { version = "1.8.0", optional = true } tokio = { version = "1", features = ["full"], optional = true } binary_sv2 = { version = "^1.0.0", path = 
"../../../protocols/v2/binary-sv2/binary-sv2", optional = true } codec_sv2 = { version = "1.0.1", path = "../../../protocols/v2/codec-sv2", features=["noise_sv2"], optional = true } -const_sv2 = {version = "1.0.0", path = "../../../protocols/v2/const-sv2"} +const_sv2 = {version = "2.0.0", path = "../../../protocols/v2/const-sv2"} serde = { version = "1.0.89", features = ["derive"], default-features = false, optional = true } tracing = { version = "0.1" } futures = "0.3.28" diff --git a/roles/test-utils/mining-device/Cargo.toml b/roles/test-utils/mining-device/Cargo.toml index ca710fefb..a59719504 100644 --- a/roles/test-utils/mining-device/Cargo.toml +++ b/roles/test-utils/mining-device/Cargo.toml @@ -10,7 +10,7 @@ publish = false stratum-common = { version = "1.0.0", path = "../../../common" } codec_sv2 = { version = "^1.0.1", path = "../../../protocols/v2/codec-sv2", features=["noise_sv2"] } roles_logic_sv2 = { version = "1.0.0", path = "../../../protocols/v2/roles-logic-sv2" } -const_sv2 = { version = "1.0.0", path = "../../../protocols/v2/const-sv2" } +const_sv2 = { version = "2.0.0", path = "../../../protocols/v2/const-sv2" } async-channel = "1.5.1" binary_sv2 = { version = "1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../../roles-utils/network-helpers", features=["tokio"] } diff --git a/utils/message-generator/Cargo.toml b/utils/message-generator/Cargo.toml index b1a8a119d..f97d74f0d 100644 --- a/utils/message-generator/Cargo.toml +++ b/utils/message-generator/Cargo.toml @@ -9,7 +9,7 @@ edition = "2021" async-channel = "1.8.0" binary_sv2 = { version = "1.0.0", path = "../../protocols/v2/binary-sv2/binary-sv2", features = ["with_serde"] } codec_sv2 = { version = "1.0.0", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2","with_buffer_pool","with_serde"] } -const_sv2 = { version = "1.0.0", path = "../../protocols/v2/const-sv2" } +const_sv2 = { version = "2.0.0", path = 
"../../protocols/v2/const-sv2" } load_file = "1.0.1" network_helpers_sv2 = { version = "2.0.0", path = "../../roles/roles-utils/network-helpers", features = ["with_tokio","with_serde"] } roles_logic_sv2 = { version = "1.0.0", path = "../../protocols/v2/roles-logic-sv2", features = ["with_serde"] } From 4234557c729d263f12371325a861098244c6073f Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 9 Aug 2024 17:20:10 -0300 Subject: [PATCH 086/101] bump common_messages_sv2 version --- protocols/Cargo.lock | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 2 +- protocols/v2/subprotocols/common-messages/Cargo.toml | 2 +- protocols/v2/sv2-ffi/Cargo.toml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 4cb7bb6fd..ba8fbddaa 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -212,7 +212,7 @@ dependencies = [ [[package]] name = "common_messages_sv2" -version = "1.0.0" +version = "2.0.0" dependencies = [ "binary_sv2", "const_sv2", diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index 297fe72d6..383dfa5c0 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -12,7 +12,7 @@ repository = "https://github.com/stratum-mining/stratum" stratum-common = { version="1.0.0", path = "../../../common", features=["bitcoin"]} serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false, optional = true} binary_sv2 = {version = "^1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2", default-features = true } -common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^1.0.0" } +common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^2.0.0" } mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^1.0.0" } template_distribution_sv2 = { path = 
"../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } job_declaration_sv2 = { path = "../../../protocols/v2/subprotocols/job-declaration", version = "^1.0.0" } diff --git a/protocols/v2/subprotocols/common-messages/Cargo.toml b/protocols/v2/subprotocols/common-messages/Cargo.toml index ad3262fb5..783c208df 100644 --- a/protocols/v2/subprotocols/common-messages/Cargo.toml +++ b/protocols/v2/subprotocols/common-messages/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "common_messages_sv2" -version = "1.0.0" +version = "2.0.0" authors = ["fi3 "] edition = "2018" description = "Sv2 subprotocol common messages" diff --git a/protocols/v2/sv2-ffi/Cargo.toml b/protocols/v2/sv2-ffi/Cargo.toml index 3906af416..6498b1b5b 100644 --- a/protocols/v2/sv2-ffi/Cargo.toml +++ b/protocols/v2/sv2-ffi/Cargo.toml @@ -14,7 +14,7 @@ crate-type = ["staticlib"] codec_sv2 = { path = "../../../protocols/v2/codec-sv2", version = "^1.0.0" } const_sv2 = { path = "../../../protocols/v2/const-sv2", version = "^2.0.0" } binary_sv2 = { path = "../../../protocols/v2/binary-sv2/binary-sv2", version = "^1.0.0" } -common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^1.0.0" } +common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^2.0.0" } template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } [dev-dependencies] From dd88aaceadd537eecf7bf1835e0ae04efbd510f8 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 9 Aug 2024 18:18:19 -0300 Subject: [PATCH 087/101] bump roles_logic_sv2 version --- protocols/Cargo.lock | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index ba8fbddaa..420dc7645 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -606,7 +606,7 @@ checksum = 
"adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "roles_logic_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_sv2", "chacha20poly1305", diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index 383dfa5c0..28eee2f57 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roles_logic_sv2" -version = "1.1.0" +version = "1.2.0" edition = "2018" description = "Common handlers for use within SV2 roles" license = "MIT OR Apache-2.0" From ae3260035cdade54e087b7dfac1b2307108cb428 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 9 Aug 2024 19:04:37 -0300 Subject: [PATCH 088/101] bump binary_sv2 version --- protocols/Cargo.lock | 4 ++-- protocols/v2/binary-sv2/binary-sv2/Cargo.toml | 2 +- .../v2/subprotocols/template-distribution/Cargo.toml | 4 ++-- roles/Cargo.lock | 8 ++++---- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 420dc7645..94b6710b8 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -85,7 +85,7 @@ dependencies = [ [[package]] name = "binary_sv2" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_codec_sv2", "derive_codec_sv2", @@ -796,7 +796,7 @@ dependencies = [ [[package]] name = "template_distribution_sv2" -version = "1.0.1" +version = "1.0.2" dependencies = [ "binary_sv2", "const_sv2", diff --git a/protocols/v2/binary-sv2/binary-sv2/Cargo.toml b/protocols/v2/binary-sv2/binary-sv2/Cargo.toml index ae7849aa9..1fd78fcc1 100644 --- a/protocols/v2/binary-sv2/binary-sv2/Cargo.toml +++ b/protocols/v2/binary-sv2/binary-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "binary_sv2" -version = "1.0.0" +version = "1.0.1" authors = ["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/subprotocols/template-distribution/Cargo.toml 
b/protocols/v2/subprotocols/template-distribution/Cargo.toml index b348f8954..b0120836d 100644 --- a/protocols/v2/subprotocols/template-distribution/Cargo.toml +++ b/protocols/v2/subprotocols/template-distribution/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "template_distribution_sv2" -version = "1.0.1" +version = "1.0.2" authors = ["fi3 "] edition = "2018" description = "Sv2 template distribution subprotocol" @@ -11,7 +11,7 @@ repository = "https://github.com/stratum-mining/stratum" [dependencies] serde = { version = "1.0.89", default-features = false, optional= true } -binary_sv2 = { version = "^1.0.0", path = "../../../../protocols/v2/binary-sv2/binary-sv2" } +binary_sv2 = { version = "^1.0.1", path = "../../../../protocols/v2/binary-sv2/binary-sv2" } const_sv2 = { version = "^2.0.0", path = "../../../../protocols/v2/const-sv2"} quickcheck = { version = "1.0.3", optional=true } quickcheck_macros = { version = "1", optional=true } diff --git a/roles/Cargo.lock b/roles/Cargo.lock index 7abea3787..97cf8e6c8 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -380,7 +380,7 @@ dependencies = [ [[package]] name = "binary_sv2" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_codec_sv2", "derive_codec_sv2", @@ -621,7 +621,7 @@ checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" [[package]] name = "common_messages_sv2" -version = "1.0.0" +version = "2.0.0" dependencies = [ "binary_sv2", "const_sv2", @@ -1942,7 +1942,7 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "roles_logic_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_sv2", "chacha20poly1305", @@ -2283,7 +2283,7 @@ dependencies = [ [[package]] name = "template_distribution_sv2" -version = "1.0.1" +version = "1.0.2" dependencies = [ "binary_sv2", "const_sv2", From 57976344d25c0465d4c0843828aa4fd2d6e7fb11 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Wed, 19 Jun 2024 20:58:42 -0400 Subject: [PATCH 
089/101] Added semver ci workflow yaml --- .github/workflows/semver-check.yaml | 131 ++++++++++++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 .github/workflows/semver-check.yaml diff --git a/.github/workflows/semver-check.yaml b/.github/workflows/semver-check.yaml new file mode 100644 index 000000000..45ee8419f --- /dev/null +++ b/.github/workflows/semver-check.yaml @@ -0,0 +1,131 @@ +name: Semver Check + +on: + push: + branches: + - "main" + - "dev" + pull_request: + branches: + - "main" + - "dev" + +jobs: + semver-check: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Install Rust 1.75.0 + uses: actions-rs/toolchain@v1 + with: + toolchain: 1.75.0 + override: true + + - name: Cache Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-registry- + + - name: Cache Cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-index- + + - name: Install dependencies + run: sudo apt-get update && sudo apt-get install -y cmake + + - name: Install cargo-semver-checks + run: cargo install cargo-semver-checks --locked + + - name: Run semver checks for common + working-directory: common + run: cargo semver-checks + + - name: Run semver checks for utils/buffer + working-directory: utils/buffer + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/binary-sv2/no-serde-sv2/codec + working-directory: protocols/v2/binary-sv2/no-serde-sv2/codec + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/binary-sv2/serde-sv2 + working-directory: protocols/v2/binary-sv2/serde-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/binary-sv2/binary-sv2 + working-directory: 
protocols/v2/binary-sv2/binary-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/const-sv2 + working-directory: protocols/v2/const-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/framing-sv2 + working-directory: protocols/v2/framing-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/noise-sv2 + working-directory: protocols/v2/noise-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/codec-sv2 + working-directory: protocols/v2/codec-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/subprotocols/common-messages + working-directory: protocols/v2/subprotocols/common-messages + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/subprotocols/job-declaration + working-directory: protocols/v2/subprotocols/job-declaration + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/subprotocols/mining + working-directory: protocols/v2/subprotocols/mining + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/subprotocols/template-distribution + working-directory: protocols/v2/subprotocols/template-distribution + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/sv2-ffi + working-directory: protocols/v2/sv2-ffi + run: cargo semver-checks + + - name: Run semver checks for protocols/v2/roles-logic-sv2 + working-directory: protocols/v2/roles-logic-sv2 + run: cargo semver-checks + + - name: Run semver checks for protocols/v1 + working-directory: protocols/v1 + run: cargo semver-checks + + - name: Run semver checks for utils/bip32-key-derivation + working-directory: utils/bip32-key-derivation + run: cargo semver-checks + + - name: Run semver checks for utils/error-handling + working-directory: utils/error-handling + run: cargo semver-checks + + - name: Run semver checks for utils/key-utils + working-directory: utils/key-utils + run: cargo semver-checks + + - name: Run 
semver checks for roles/roles-utils/network-helpers + working-directory: roles/roles-utils/network-helpers + run: cargo semver-checks + + - name: Run semver checks for roles/roles-utils/rpc + working-directory: roles/roles-utils/rpc + run: cargo semver-checks \ No newline at end of file From a74262aed1fbc2b96e6149a1a1f453d14dfbd637 Mon Sep 17 00:00:00 2001 From: lorban Date: Mon, 10 Jun 2024 22:56:16 +0200 Subject: [PATCH 090/101] TProxy restart if gests disconnected by upstream - Add `start` function and put starting logic there. - Every AbortHandle of each task is stored in a collector, which is a mutex. - Added `kill_tasks` function that takes in input this mutex, access it, pop each JoinHandle and kill the corresponding task. When receive an UpstreamShutdown does the following: 1. waits a random amount of time s 0>, + task_collector: Arc>>, ) { let stream = std::sync::Arc::new(stream); @@ -150,11 +151,12 @@ impl Downstream { let rx_shutdown_clone = rx_shutdown.clone(); let tx_shutdown_clone = tx_shutdown.clone(); let tx_status_reader = tx_status.clone(); + let task_collector_mining_device = task_collector.clone(); // Task to read from SV1 Mining Device Client socket via `socket_reader`. Depending on the // SV1 message received, a message response is sent directly back to the SV1 Downstream // role, or the message is sent upwards to the Bridge for translation into a SV2 message // and then sent to the SV2 Upstream role. 
- let _socket_reader_task = task::spawn(async move { + let socket_reader_task = tokio::task::spawn(async move { let reader = BufReader::new(&*socket_reader); let mut messages = FramedRead::new( async_compat::Compat::new(reader), @@ -205,15 +207,22 @@ impl Downstream { kill(&tx_shutdown_clone).await; warn!("Downstream: Shutting down sv1 downstream reader"); }); + let _ = task_collector_mining_device.safe_lock(|a| { + a.push(( + socket_reader_task.abort_handle(), + "socket_reader_task".to_string(), + )) + }); let rx_shutdown_clone = rx_shutdown.clone(); let tx_shutdown_clone = tx_shutdown.clone(); let tx_status_writer = tx_status.clone(); let host_ = host.clone(); + let task_collector_new_sv1_message_no_transl = task_collector.clone(); // Task to receive SV1 message responses to SV1 messages that do NOT need translation. // These response messages are sent directly to the SV1 Downstream role. - let _socket_writer_task = task::spawn(async move { + let socket_writer_task = tokio::task::spawn(async move { loop { select! { res = receiver_outgoing.recv().fuse() => { @@ -242,11 +251,18 @@ impl Downstream { &host_ ); }); + let _ = task_collector_new_sv1_message_no_transl.safe_lock(|a| { + a.push(( + socket_writer_task.abort_handle(), + "socket_writer_task".to_string(), + )) + }); let tx_status_notify = tx_status; let self_ = downstream.clone(); - let _notify_task = task::spawn(async move { + let task_collector_notify_task = task_collector.clone(); + let notify_task = tokio::task::spawn(async move { let timeout_timer = std::time::Instant::now(); let mut first_sent = false; loop { @@ -329,10 +345,14 @@ impl Downstream { &host ); }); + + let _ = task_collector_notify_task + .safe_lock(|a| a.push((notify_task.abort_handle(), "notify_task".to_string()))); } /// Accept connections from one or more SV1 Downstream roles (SV1 Mining Devices) and create a /// new `Downstream` for each connection. 
+ #[allow(clippy::too_many_arguments)] pub fn accept_connections( downstream_addr: SocketAddr, tx_sv1_submit: Sender, @@ -341,8 +361,11 @@ impl Downstream { bridge: Arc>, downstream_difficulty_config: DownstreamDifficultyConfig, upstream_difficulty_config: Arc>, + task_collector: Arc>>, ) { - task::spawn(async move { + let task_collector_downstream = task_collector.clone(); + + let accept_connections = tokio::task::spawn(async move { let downstream_listener = TcpListener::bind(downstream_addr).await.unwrap(); let mut downstream_incoming = downstream_listener.incoming(); @@ -369,6 +392,7 @@ impl Downstream { host, downstream_difficulty_config.clone(), upstream_difficulty_config.clone(), + task_collector_downstream.clone(), ) .await; } @@ -378,6 +402,12 @@ impl Downstream { } } }); + let _ = task_collector.safe_lock(|a| { + a.push(( + accept_connections.abort_handle(), + "accept_connections".to_string(), + )) + }); } /// As SV1 messages come in, determines if the message response needs to be translated to SV2 diff --git a/roles/translator/src/lib/proxy/bridge.rs b/roles/translator/src/lib/proxy/bridge.rs index ee9ad4337..74db21111 100644 --- a/roles/translator/src/lib/proxy/bridge.rs +++ b/roles/translator/src/lib/proxy/bridge.rs @@ -1,5 +1,4 @@ use async_channel::{Receiver, Sender}; -use async_std::task; use roles_logic_sv2::{ channel_logic::channel_factory::{ExtendedChannelKind, ProxyExtendedChannelFactory, Share}, mining_sv2::{ @@ -9,7 +8,7 @@ use roles_logic_sv2::{ utils::{GroupId, Mutex}, }; use std::sync::Arc; -use tokio::sync::broadcast; +use tokio::{sync::broadcast, task::AbortHandle}; use v1::{client_to_server::Submit, server_to_client, utils::HexU32Be}; use super::super::{ @@ -64,6 +63,7 @@ pub struct Bridge { last_p_hash: Option>, target: Arc>>, last_job_id: u32, + task_collector: Arc>>, } impl Bridge { @@ -79,6 +79,7 @@ impl Bridge { extranonces: ExtendedExtranonce, target: Arc>>, up_id: u32, + task_collector: Arc>>, ) -> Arc> { let ids = 
Arc::new(Mutex::new(GroupId::new())); let share_per_min = 1.0; @@ -107,6 +108,7 @@ impl Bridge { last_p_hash: None, target, last_job_id: 0, + task_collector, })) } @@ -162,10 +164,12 @@ impl Bridge { /// Receives a `DownstreamMessages` message from the `Downstream`, handles based on the /// variant received. fn handle_downstream_messages(self_: Arc>) { + let task_collector_handle_downstream = + self_.safe_lock(|b| b.task_collector.clone()).unwrap(); let (rx_sv1_downstream, tx_status) = self_ .safe_lock(|s| (s.rx_sv1_downstream.clone(), s.tx_status.clone())) .unwrap(); - task::spawn(async move { + let handle_downstream = tokio::task::spawn(async move { loop { let msg = handle_result!(tx_status, rx_sv1_downstream.clone().recv().await); @@ -185,6 +189,12 @@ impl Bridge { }; } }); + let _ = task_collector_handle_downstream.safe_lock(|a| { + a.push(( + handle_downstream.abort_handle(), + "handle_downstream_message".to_string(), + )) + }); } /// receives a `SetDownstreamTarget` and updates the downstream target for the channel #[allow(clippy::result_large_err)] @@ -367,6 +377,8 @@ impl Bridge { /// corresponding `job_id` has already been received. If this is not the case, an error has /// occurred on the Upstream pool role and the connection will close. 
fn handle_new_prev_hash(self_: Arc>) { + let task_collector_handle_new_prev_hash = + self_.safe_lock(|b| b.task_collector.clone()).unwrap(); let (tx_sv1_notify, rx_sv2_set_new_prev_hash, tx_status) = self_ .safe_lock(|s| { ( @@ -377,7 +389,7 @@ impl Bridge { }) .unwrap(); debug!("Starting handle_new_prev_hash task"); - task::spawn(async move { + let handle_new_prev_hash = tokio::task::spawn(async move { loop { // Receive `SetNewPrevHash` from `Upstream` let sv2_set_new_prev_hash: SetNewPrevHash = @@ -397,6 +409,12 @@ impl Bridge { ) } }); + let _ = task_collector_handle_new_prev_hash.safe_lock(|a| { + a.push(( + handle_new_prev_hash.abort_handle(), + "handle_new_prev_hash".to_string(), + )) + }); } async fn handle_new_extended_mining_job_( @@ -460,6 +478,8 @@ impl Bridge { /// `SetNewPrevHash` `job_id`, an error has occurred on the Upstream pool role and the /// connection will close. fn handle_new_extended_mining_job(self_: Arc>) { + let task_collector_new_extended_mining_job = + self_.safe_lock(|b| b.task_collector.clone()).unwrap(); let (tx_sv1_notify, rx_sv2_new_ext_mining_job, tx_status) = self_ .safe_lock(|s| { ( @@ -470,7 +490,7 @@ impl Bridge { }) .unwrap(); debug!("Starting handle_new_extended_mining_job task"); - task::spawn(async move { + let handle_new_extended_mining_job = tokio::task::spawn(async move { loop { // Receive `NewExtendedMiningJob` from `Upstream` let sv2_new_extended_mining_job: NewExtendedMiningJob = handle_result!( @@ -494,6 +514,12 @@ impl Bridge { .store(true, std::sync::atomic::Ordering::SeqCst); } }); + let _ = task_collector_new_extended_mining_job.safe_lock(|a| { + a.push(( + handle_new_extended_mining_job.abort_handle(), + "handle_new_extended_mining_job".to_string(), + )) + }); } } pub struct OpenSv1Downstream { @@ -543,6 +569,7 @@ mod test { rx_sv1_notify, }; + let task_collector = Arc::new(Mutex::new(vec![])); let b = Bridge::new( rx_sv1_submit, tx_sv2_submit_shares_ext, @@ -553,6 +580,7 @@ mod test { extranonces, 
Arc::new(Mutex::new(upstream_target)), 1, + task_collector, ); (b, interface) } diff --git a/roles/translator/src/lib/status.rs b/roles/translator/src/lib/status.rs index 4cdd770e2..e8af6883e 100644 --- a/roles/translator/src/lib/status.rs +++ b/roles/translator/src/lib/status.rs @@ -48,6 +48,7 @@ pub enum State<'a> { DownstreamShutdown(Error<'a>), BridgeShutdown(Error<'a>), UpstreamShutdown(Error<'a>), + UpstreamTryReconnect(Error<'a>), Healthy(String), } @@ -83,13 +84,22 @@ async fn send_status( .await .unwrap_or(()); } - Sender::Upstream(tx) => { - tx.send(Status { - state: State::UpstreamShutdown(e), - }) - .await - .unwrap_or(()); - } + Sender::Upstream(tx) => match e { + Error::ChannelErrorReceiver(_) => { + tx.send(Status { + state: State::UpstreamTryReconnect(e), + }) + .await + .unwrap_or(()); + } + _ => { + tx.send(Status { + state: State::UpstreamShutdown(e), + }) + .await + .unwrap_or(()); + } + }, Sender::TemplateReceiver(tx) => { tx.send(Status { state: State::UpstreamShutdown(e), diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index 6aab5978e..613517432 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -9,7 +9,7 @@ use crate::{ upstream_sv2::{EitherFrame, Message, StdFrame, UpstreamConnection}, }; use async_channel::{Receiver, Sender}; -use async_std::{net::TcpStream, task}; +use async_std::net::TcpStream; use binary_sv2::u256_from_int; use codec_sv2::{HandshakeRole, Initiator}; use error_handling::handle_result; @@ -36,8 +36,10 @@ use roles_logic_sv2::{ use std::{ net::SocketAddr, sync::{atomic::AtomicBool, Arc}, - thread::sleep, - time::Duration, +}; +use tokio::{ + task::AbortHandle, + time::{sleep, Duration}, }; use tracing::{error, info, warn}; @@ -98,6 +100,7 @@ pub struct Upstream { // and the upstream just needs to occasionally check if it has changed more than // than the configured percentage pub(super) 
difficulty_config: Arc>, + task_collector: Arc>>, } impl PartialEq for Upstream { @@ -124,6 +127,7 @@ impl Upstream { tx_status: status::Sender, target: Arc>>, difficulty_config: Arc>, + task_collector: Arc>>, ) -> ProxyResult<'static, Arc>> { // Connect to the SV2 Upstream role retry connection every 5 seconds. let socket = loop { @@ -135,7 +139,7 @@ impl Upstream { address, e ); - sleep(Duration::from_secs(5)); + sleep(Duration::from_secs(5)).await; } } }; @@ -171,6 +175,7 @@ impl Upstream { tx_status, target, difficulty_config, + task_collector, }))) } @@ -259,6 +264,9 @@ impl Upstream { #[allow(clippy::result_large_err)] pub fn parse_incoming(self_: Arc>) -> ProxyResult<'static, ()> { let clone = self_.clone(); + let task_collector = self_.safe_lock(|s| s.task_collector.clone()).unwrap(); + let collector1 = task_collector.clone(); + let collector2 = task_collector.clone(); let ( tx_frame, tx_sv2_extranonce, @@ -281,16 +289,22 @@ impl Upstream { { let self_ = self_.clone(); let tx_status = tx_status.clone(); - task::spawn(async move { + let start_diff_management = tokio::task::spawn(async move { // No need to start diff management immediatly - async_std::task::sleep(Duration::from_secs(10)).await; + sleep(Duration::from_secs(10)).await; loop { handle_result!(tx_status, Self::try_update_hashrate(self_.clone()).await); } }); + let _ = collector1.safe_lock(|a| { + a.push(( + start_diff_management.abort_handle(), + "start_diff_management".to_string(), + )) + }); } - task::spawn(async move { + let parse_incoming = tokio::task::spawn(async move { loop { // Waiting to receive a message from the SV2 Upstream role let incoming = handle_result!(tx_status, recv.recv().await); @@ -433,6 +447,8 @@ impl Upstream { } } }); + let _ = collector2 + .safe_lock(|a| a.push((parse_incoming.abort_handle(), "parse_incoming".to_string()))); Ok(()) } @@ -459,6 +475,7 @@ impl Upstream { #[allow(clippy::result_large_err)] pub fn handle_submit(self_: Arc>) -> ProxyResult<'static, ()> { + 
let task_collector = self_.safe_lock(|s| s.task_collector.clone()).unwrap(); let clone = self_.clone(); let (tx_frame, receiver, tx_status) = clone .safe_lock(|s| { @@ -470,7 +487,7 @@ impl Upstream { }) .map_err(|_| PoisonLock)?; - task::spawn(async move { + let handle_submit = tokio::task::spawn(async move { loop { let mut sv2_submit: SubmitSharesExtended = handle_result!(tx_status, receiver.recv().await); @@ -506,6 +523,9 @@ impl Upstream { ); } }); + let _ = task_collector + .safe_lock(|a| a.push((handle_submit.abort_handle(), "handle_submit".to_string()))); + Ok(()) } diff --git a/roles/translator/src/main.rs b/roles/translator/src/main.rs index f958c3fc8..fc8e91de9 100644 --- a/roles/translator/src/main.rs +++ b/roles/translator/src/main.rs @@ -3,13 +3,13 @@ mod args; mod lib; use args::Args; +use async_channel::{bounded, unbounded}; use error::{Error, ProxyResult}; +use futures::{select, FutureExt}; use lib::{downstream_sv1, error, proxy, proxy_config, status, upstream_sv2}; use proxy_config::ProxyConfig; +use rand::Rng; use roles_logic_sv2::utils::Mutex; - -use async_channel::{bounded, unbounded}; -use futures::{select, FutureExt}; use std::{ net::{IpAddr, SocketAddr}, str::FromStr, @@ -17,11 +17,11 @@ use std::{ }; use ext_config::{Config, File, FileFormat}; -use tokio::{sync::broadcast, task}; +use tokio::{sync::broadcast, task, task::AbortHandle, time::Duration}; use v1::server_to_client; use crate::status::{State, Status}; -use tracing::{debug, error, info}; +use tracing::{debug, error, info, warn}; /// Process CLI args, if any. 
#[allow(clippy::result_large_err)] fn process_cli_args<'a>() -> ProxyResult<'a, ProxyConfig> { @@ -54,22 +54,121 @@ async fn main() { Ok(p) => p, Err(e) => panic!("failed to load config: {}", e), }; - info!("PC: {:?}", &proxy_config); + info!("Proxy Config: {:?}", &proxy_config); let (tx_status, rx_status) = unbounded(); - // `tx_sv1_bridge` sender is used by `Downstream` to send a `DownstreamMessages` message to - // `Bridge` via the `rx_sv1_downstream` receiver - // (Sender, Receiver) - let (tx_sv1_bridge, rx_sv1_downstream) = unbounded(); + let target = Arc::new(Mutex::new(vec![0; 32])); + + // Sender/Receiver to send SV1 `mining.notify` message from the `Bridge` to the `Downstream` + let (tx_sv1_notify, _rx_sv1_notify): ( + broadcast::Sender, + broadcast::Receiver, + ) = broadcast::channel(10); + + let task_collector: Arc>> = Arc::new(Mutex::new(Vec::new())); + start( + tx_sv1_notify.clone(), + target.clone(), + tx_status.clone(), + task_collector.clone(), + proxy_config.clone(), + ) + .await; + + debug!("Starting up signal listener"); + let task_collector_ = task_collector.clone(); + + let mut interrupt_signal_future = Box::pin(tokio::signal::ctrl_c().fuse()); + debug!("Starting up status listener"); + // Check all tasks if is_finished() is true, if so exit + loop { + let task_status = select! 
{ + task_status = rx_status.recv().fuse() => task_status, + interrupt_signal = interrupt_signal_future => { + match interrupt_signal { + Ok(()) => { + info!("Interrupt received"); + }, + Err(err) => { + error!("Unable to listen for interrupt signal: {}", err); + // we also shut down in case of error + }, + } + break; + } + }; + let task_status: Status = task_status.unwrap(); + + match task_status.state { + // Should only be sent by the downstream listener + State::DownstreamShutdown(err) => { + error!("SHUTDOWN from: {}", err); + break; + } + State::BridgeShutdown(err) => { + error!("SHUTDOWN from: {}", err); + break; + } + State::UpstreamShutdown(err) => { + error!("SHUTDOWN from: {}", err); + break; + } + State::UpstreamTryReconnect(err) => { + error!("SHUTDOWN from: {}", err); + + // wait a random amount of time between 0 and 3000ms + // if all the downstreams try to reconnect at the same time, the upstream may fail + let mut rng = rand::thread_rng(); + let wait_time = rng.gen_range(0..=3000); + tokio::time::sleep(Duration::from_millis(wait_time)).await; + + // kill al the tasks + let task_collector_aborting = task_collector_.clone(); + kill_tasks(task_collector_aborting.clone()); + + warn!("Trying reconnecting to upstream"); + start( + tx_sv1_notify.clone(), + target.clone(), + tx_status.clone(), + task_collector_.clone(), + proxy_config.clone(), + ) + .await; + } + State::Healthy(msg) => { + info!("HEALTHY message: {}", msg); + } + } + } +} + +fn kill_tasks(task_collector: Arc>>) { + let _ = task_collector.safe_lock(|t| { + while let Some(handle) = t.pop() { + handle.0.abort(); + warn!("Killed task: {:?}", handle.1); + } + }); +} + +async fn start<'a>( + tx_sv1_notify: broadcast::Sender>, + target: Arc>>, + tx_status: async_channel::Sender>, + task_collector: Arc>>, + proxy_config: ProxyConfig, +) { // Sender/Receiver to send a SV2 `SubmitSharesExtended` from the `Bridge` to the `Upstream` // (Sender>, Receiver>) let (tx_sv2_submit_shares_ext, 
rx_sv2_submit_shares_ext) = bounded(10); - // Sender/Receiver to send a SV2 `SetNewPrevHash` message from the `Upstream` to the `Bridge` - // (Sender>, Receiver>) - let (tx_sv2_set_new_prev_hash, rx_sv2_set_new_prev_hash) = bounded(10); + // `tx_sv1_bridge` sender is used by `Downstream` to send a `DownstreamMessages` message to + // `Bridge` via the `rx_sv1_downstream` receiver + // (Sender, Receiver) + let (tx_sv1_bridge, rx_sv1_downstream) = unbounded(); // Sender/Receiver to send a SV2 `NewExtendedMiningJob` message from the `Upstream` to the // `Bridge` @@ -80,13 +179,10 @@ async fn main() { // passed to the `Downstream` upon a Downstream role connection // (Sender, Receiver) let (tx_sv2_extranonce, rx_sv2_extranonce) = bounded(1); - let target = Arc::new(Mutex::new(vec![0; 32])); - // Sender/Receiver to send SV1 `mining.notify` message from the `Bridge` to the `Downstream` - let (tx_sv1_notify, _rx_sv1_notify): ( - broadcast::Sender, - broadcast::Receiver, - ) = broadcast::channel(10); + // Sender/Receiver to send a SV2 `SetNewPrevHash` message from the `Upstream` to the `Bridge` + // (Sender>, Receiver>) + let (tx_sv2_set_new_prev_hash, rx_sv2_set_new_prev_hash) = bounded(10); // Format `Upstream` connection address let upstream_addr = SocketAddr::new( @@ -96,7 +192,7 @@ async fn main() { ); let diff_config = Arc::new(Mutex::new(proxy_config.upstream_difficulty_config.clone())); - + let task_collector_upstream = task_collector.clone(); // Instantiate a new `Upstream` (SV2 Pool) let upstream = match upstream_sv2::Upstream::new( upstream_addr, @@ -109,6 +205,7 @@ async fn main() { status::Sender::Upstream(tx_status.clone()), target.clone(), diff_config.clone(), + task_collector_upstream, ) .await { @@ -118,12 +215,12 @@ async fn main() { return; } }; - + let task_collector_init_task = task_collector.clone(); // Spawn a task to do all of this init work so that the main thread // can listen for signals and failures on the status channel. 
This // allows for the tproxy to fail gracefully if any of these init tasks //fail - task::spawn(async move { + let task = task::spawn(async move { // Connect to the SV2 Upstream role match upstream_sv2::Upstream::connect( upstream.clone(), @@ -163,6 +260,7 @@ async fn main() { async_std::task::sleep(std::time::Duration::from_millis(100)).await; } + let task_collector_bridge = task_collector_init_task.clone(); // Instantiate a new `Bridge` and begins handling incoming messages let b = proxy::Bridge::new( rx_sv1_downstream, @@ -174,6 +272,7 @@ async fn main() { extended_extranonce, target, up_id, + task_collector_bridge, ); proxy::Bridge::start(b.clone()); @@ -183,6 +282,7 @@ async fn main() { proxy_config.downstream_port, ); + let task_collector_downstream = task_collector_init_task.clone(); // Accept connections from one or more SV1 Downstream roles (SV1 Mining Devices) downstream_sv1::Downstream::accept_connections( downstream_addr, @@ -192,49 +292,8 @@ async fn main() { b, proxy_config.downstream_difficulty_config, diff_config, + task_collector_downstream, ); }); // End of init task - - debug!("Starting up signal listener"); - let mut interrupt_signal_future = Box::pin(tokio::signal::ctrl_c().fuse()); - debug!("Starting up status listener"); - - // Check all tasks if is_finished() is true, if so exit - loop { - let task_status = select! 
{ - task_status = rx_status.recv().fuse() => task_status, - interrupt_signal = interrupt_signal_future => { - match interrupt_signal { - Ok(()) => { - info!("Interrupt received"); - }, - Err(err) => { - error!("Unable to listen for interrupt signal: {}", err); - // we also shut down in case of error - }, - } - break; - } - }; - let task_status: Status = task_status.unwrap(); - - match task_status.state { - // Should only be sent by the downstream listener - State::DownstreamShutdown(err) => { - error!("SHUTDOWN from: {}", err); - break; - } - State::BridgeShutdown(err) => { - error!("SHUTDOWN from: {}", err); - break; - } - State::UpstreamShutdown(err) => { - error!("SHUTDOWN from: {}", err); - break; - } - State::Healthy(msg) => { - info!("HEALTHY message: {}", msg); - } - } - } + let _ = task_collector.safe_lock(|t| t.push((task.abort_handle(), "init task".to_string()))); } From 3e891be21d9d40da15e69a052f70d802642ac731 Mon Sep 17 00:00:00 2001 From: plebhash Date: Tue, 13 Aug 2024 17:15:32 -0300 Subject: [PATCH 091/101] bypass cargo semver-checks --all-features for roles_logic_sv2 --- .github/workflows/semver-check.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/semver-check.yaml b/.github/workflows/semver-check.yaml index 45ee8419f..d822dbaac 100644 --- a/.github/workflows/semver-check.yaml +++ b/.github/workflows/semver-check.yaml @@ -104,7 +104,7 @@ jobs: - name: Run semver checks for protocols/v2/roles-logic-sv2 working-directory: protocols/v2/roles-logic-sv2 - run: cargo semver-checks + run: cargo semver-checks --default-features - name: Run semver checks for protocols/v1 working-directory: protocols/v1 From 56a93342f28c157b62b303504e1b427c0355c2f0 Mon Sep 17 00:00:00 2001 From: plebhash Date: Tue, 13 Aug 2024 17:25:25 -0300 Subject: [PATCH 092/101] patch roles_logic_sv2 with_serde flag --- protocols/Cargo.lock | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) 
diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 94b6710b8..9e04903ff 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -606,7 +606,7 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "roles_logic_sv2" -version = "1.2.0" +version = "1.2.1" dependencies = [ "binary_sv2", "chacha20poly1305", diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index 28eee2f57..2b7c34610 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roles_logic_sv2" -version = "1.2.0" +version = "1.2.1" edition = "2018" description = "Common handlers for use within SV2 roles" license = "MIT OR Apache-2.0" @@ -35,8 +35,7 @@ with_serde = [ "serde", "common_messages_sv2/with_serde", "template_distribution_sv2/with_serde", "job_declaration_sv2/with_serde", -"mining_sv2/with_serde", -"framing_sv2/with_serde"] +"mining_sv2/with_serde"] prop_test = ["template_distribution_sv2/prop_test"] # Code coverage tools may conflict with the nopanic logic, so we can disable it when needed disable_nopanic = [] From f64deca860d19a1de4b42edeca49c9f109800384 Mon Sep 17 00:00:00 2001 From: plebhash Date: Wed, 14 Aug 2024 19:50:58 -0300 Subject: [PATCH 093/101] use rust stable for semver CI --- .github/workflows/semver-check.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/semver-check.yaml b/.github/workflows/semver-check.yaml index d822dbaac..9a2a45ad3 100644 --- a/.github/workflows/semver-check.yaml +++ b/.github/workflows/semver-check.yaml @@ -18,10 +18,10 @@ jobs: - name: Checkout repository uses: actions/checkout@v2 - - name: Install Rust 1.75.0 + - name: Install Rust uses: actions-rs/toolchain@v1 with: - toolchain: 1.75.0 + toolchain: stable override: true - name: Cache Cargo registry From ef04f978ab151d8bafde45a3c47d64d3b3bcbd1d Mon Sep 17 00:00:00 2001 From: plebhash 
Date: Thu, 15 Aug 2024 16:03:13 -0300 Subject: [PATCH 094/101] update roles lockfile --- roles/Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/Cargo.lock b/roles/Cargo.lock index 97cf8e6c8..e0dfec0c1 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -1942,7 +1942,7 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "roles_logic_sv2" -version = "1.2.0" +version = "1.2.1" dependencies = [ "binary_sv2", "chacha20poly1305", From d479f94e4b7c967c16d5098d78b41737863ee896 Mon Sep 17 00:00:00 2001 From: lorban Date: Fri, 12 Jul 2024 11:28:29 +0200 Subject: [PATCH 095/101] jdc fallback to solo-mining fixed the fallback to solo-mining in the case that the upstream sends a `SubmitShareError` on a valid share AND there are no other available upstreams in the JDC config. --- benches/Cargo.lock | 6 +++--- roles/jd-client/src/lib/downstream.rs | 26 ++++++++++++++++++-------- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/benches/Cargo.lock b/benches/Cargo.lock index 946b7c02b..396474838 100644 --- a/benches/Cargo.lock +++ b/benches/Cargo.lock @@ -373,7 +373,7 @@ dependencies = [ [[package]] name = "buffer_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "aes-gcm", ] @@ -497,7 +497,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "codec_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_sv2", "buffer_sv2", @@ -1564,7 +1564,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "sv1_api" -version = "1.0.0" +version = "1.0.1" dependencies = [ "binary_sv2", "bitcoin_hashes 0.3.2", diff --git a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index 5b26cef2f..92e5a874a 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -47,7 +47,7 @@ pub struct DownstreamMiningNode { 
miner_coinbase_output: Vec, // used to retreive the job id of the share that we send upstream last_template_id: u64, - jd: Option>>, + pub jd: Option>>, } #[allow(clippy::large_enum_variant)] @@ -376,12 +376,13 @@ impl DownstreamMiningNode { let to_send = to_send.into_values(); for message in to_send { let message = if let Mining::NewExtendedMiningJob(job) = message { - let jd = self_mutex.safe_lock(|s| s.jd.clone()).unwrap().unwrap(); - jd.safe_lock(|jd| jd.coinbase_tx_prefix = job.coinbase_tx_prefix.clone()) - .unwrap(); - jd.safe_lock(|jd| jd.coinbase_tx_suffix = job.coinbase_tx_suffix.clone()) + if let Some(jd) = self_mutex.safe_lock(|s| s.jd.clone()).unwrap() { + jd.safe_lock(|jd| { + jd.coinbase_tx_prefix = job.coinbase_tx_prefix.clone(); + jd.coinbase_tx_suffix = job.coinbase_tx_suffix.clone(); + }) .unwrap(); - + } Mining::NewExtendedMiningJob(job) } else { message @@ -514,7 +515,7 @@ impl fn handle_update_channel( &mut self, - _: UpdateChannel, + m: UpdateChannel, ) -> Result, Error> { if !self.status.is_solo_miner() { // Safe unwrap alreay checked if it cointains upstream with is_solo_miner @@ -522,7 +523,16 @@ impl self.status.get_upstream().unwrap(), )) } else { - todo!() + let maximum_target = + roles_logic_sv2::utils::hash_rate_to_target(m.nominal_hash_rate.into(), 10.0)?; + self.status + .get_channel() + .update_target_for_channel(m.channel_id, maximum_target.clone().into()); + let set_target = SetTarget { + channel_id: m.channel_id, + maximum_target, + }; + Ok(SendTo::Respond(Mining::SetTarget(set_target))) } } From cd493ee2b9291d42fad9565f55d3c409ecabb57d Mon Sep 17 00:00:00 2001 From: GitGab19 Date: Fri, 16 Aug 2024 23:43:39 +0200 Subject: [PATCH 096/101] take first coinbase output before encoding - solo mining fmt --- roles/jd-client/src/lib/template_receiver/mod.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/roles/jd-client/src/lib/template_receiver/mod.rs b/roles/jd-client/src/lib/template_receiver/mod.rs 
index f418318a8..cfc1eec0a 100644 --- a/roles/jd-client/src/lib/template_receiver/mod.rs +++ b/roles/jd-client/src/lib/template_receiver/mod.rs @@ -57,9 +57,16 @@ impl TemplateRx { test_only_do_not_send_solution_to_tp: bool, ) { let mut encoded_outputs = vec![]; - miner_coinbase_outputs - .consensus_encode(&mut encoded_outputs) - .expect("Invalid coinbase output in config"); + // jd is set to None in initialize_jd_as_solo_miner (in this case we need to take the first output as done by JDS) + if jd.is_none() { + miner_coinbase_outputs[0] + .consensus_encode(&mut encoded_outputs) + .expect("Invalid coinbase output in config"); + } else { + miner_coinbase_outputs + .consensus_encode(&mut encoded_outputs) + .expect("Invalid coinbase output in config"); + } let stream = tokio::net::TcpStream::connect(address).await.unwrap(); let initiator = match authority_public_key { From d581c2f0f5c2e6fb1155c448f478ad2ad8dce888 Mon Sep 17 00:00:00 2001 From: plebhash Date: Fri, 16 Aug 2024 15:52:16 -0300 Subject: [PATCH 097/101] rm release-bin.yaml --- .github/workflows/release-bin.yaml | 419 ----------------------------- 1 file changed, 419 deletions(-) delete mode 100644 .github/workflows/release-bin.yaml diff --git a/.github/workflows/release-bin.yaml b/.github/workflows/release-bin.yaml deleted file mode 100644 index 87f2eaa3e..000000000 --- a/.github/workflows/release-bin.yaml +++ /dev/null @@ -1,419 +0,0 @@ -# This workflow is used to create a new release with a binary distribution or SRI roles -# If the binary releases fails due to not having tags, force run the `autoversion` workflow -# on the main branch and merge the resulting PR to create the tags and move them to the main branch. 
- -name: Release Binaries - -on: - # Manually run by going to "Actions/Release" in Github and running the workflow - workflow_dispatch: - -jobs: - release_pool: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-20.04, macos-latest] - steps: - - uses: actions/checkout@v4 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - - name: Set env - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Compile Native - run: cargo build --release --locked --manifest-path=roles/pool/Cargo.toml - - - name: Install cross - run: cargo install cross - - - name: Compile Binaries for aarch64-unknown-linux-gnu - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/pool/Cargo.toml --target aarch64-unknown-linux-gnu - - - name: Compile Binaries for arm-unknown-linux-gnueabi - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/pool/Cargo.toml --target arm-unknown-linux-gnueabi - - - name: Install aarch64-apple-darwin target - if: matrix.os == 'macos-latest' - run: rustup target add aarch64-apple-darwin - - - name: Compile MacOS ARM64 - if: matrix.os == 'macos-latest' - run: cargo build --release --locked --manifest-path=roles/pool/Cargo.toml --target=aarch64-apple-darwin - - - name: Upload Linux x86-64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/pool_sv2 - asset_name: pool-sv2-${{ env.RELEASE_VERSION }}-x86_64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS x86-64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/pool_sv2 - asset_name: pool-sv2-${{ env.RELEASE_VERSION }}-x86_64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux aarch64 binaries to 
release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-unknown-linux-gnu/release/pool_sv2 - asset_name: pool-sv2-${{ env.RELEASE_VERSION }}-aarch64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux ARM binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/arm-unknown-linux-gnueabi/release/pool_sv2 - asset_name: pool-sv2-${{ env.RELEASE_VERSION }}-arm-linux-gnueabi - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS ARM64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-apple-darwin/release/pool_sv2 - asset_name: pool-sv2-${{ env.RELEASE_VERSION }}-aarch64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - release_jdc: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ ubuntu-20.04, macos-latest ] - steps: - - uses: actions/checkout@v4 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - name: Compile Native - run: cargo build --release --locked --manifest-path=roles/jd-client/Cargo.toml - - - name: Set env - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Install cross - run: cargo install cross - - - name: Compile Binaries for aarch64-unknown-linux-gnu - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/jd-client/Cargo.toml --target aarch64-unknown-linux-gnu - - - name: Compile Binaries for arm-unknown-linux-gnueabi - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/jd-client/Cargo.toml --target arm-unknown-linux-gnueabi - - - name: Install aarch64-apple-darwin target - if: matrix.os == 'macos-latest' - run: rustup target add aarch64-apple-darwin 
- - - name: Compile MacOS ARM64 - if: matrix.os == 'macos-latest' - run: cargo build --release --locked --manifest-path=roles/jd-client/Cargo.toml --target=aarch64-apple-darwin - - - name: Upload Linux x86-64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/jd_client - asset_name: jd-client-sv2-${{ env.RELEASE_VERSION }}-x86_64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux aarch64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-unknown-linux-gnu/release/jd_client - asset_name: jd-client-sv2-${{ env.RELEASE_VERSION }}-aarch64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux ARM binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/arm-unknown-linux-gnueabi/release/jd_client - asset_name: jd-client-sv2-${{ env.RELEASE_VERSION }}-arm-linux-gnueabi - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS x86-64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/jd_client - asset_name: jd-client-sv2-${{ env.RELEASE_VERSION }}-x86_64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS ARM64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-apple-darwin/release/jd_client - asset_name: jd-client-sv2-${{ env.RELEASE_VERSION }}-aarch64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - release_jds: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ ubuntu-20.04, macos-latest ] - steps: - - uses: 
actions/checkout@v4 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - - name: Set env - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Compile Native - run: cargo build --release --locked --manifest-path=roles/jd-server/Cargo.toml - - - name: Install cross - run: cargo install cross - - - name: Compile Binaries for aarch64-unknown-linux-gnu - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/jd-server/Cargo.toml --target aarch64-unknown-linux-gnu - - - name: Compile Binaries for arm-unknown-linux-gnueabi - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/jd-server/Cargo.toml --target arm-unknown-linux-gnueabi - - - name: Install aarch64-apple-darwin target - if: matrix.os == 'macos-latest' - run: rustup target add aarch64-apple-darwin - - - name: Compile MacOS ARM64 - if: matrix.os == 'macos-latest' - run: cargo build --release --locked --manifest-path=roles/jd-server/Cargo.toml --target=aarch64-apple-darwin - - - name: Upload Linux x86-64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/jd_server - asset_name: jd-server-sv2-${{ env.RELEASE_VERSION }}-x86_64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux aarch64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-unknown-linux-gnu/release/jd_server - asset_name: jd-server-sv2-${{ env.RELEASE_VERSION }}-aarch64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux ARM binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/arm-unknown-linux-gnueabi/release/jd_server - 
asset_name: jd-server-sv2-${{ env.RELEASE_VERSION }}-arm-linux-gnueabi - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS x86-64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/jd_server - asset_name: jd-server-sv2-${{ env.RELEASE_VERSION }}-x86_64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS ARM64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-apple-darwin/release/jd_server - asset_name: jd-server-sv2-${{ env.RELEASE_VERSION }}-aarch64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - release_proxy: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ ubuntu-20.04, macos-latest ] - steps: - - uses: actions/checkout@v4 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - - name: Set env - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Compile Native - run: cargo build --release --locked --manifest-path=roles/mining-proxy/Cargo.toml - - - name: Install cross - run: cargo install cross - - - name: Compile Binaries for aarch64-unknown-linux-gnu - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/mining-proxy/Cargo.toml --target aarch64-unknown-linux-gnu - - - name: Compile Binaries for arm-unknown-linux-gnueabi - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/mining-proxy/Cargo.toml --target arm-unknown-linux-gnueabi - - - name: Install aarch64-apple-darwin target - if: matrix.os == 'macos-latest' - run: rustup target add aarch64-apple-darwin - - - name: Compile MacOS ARM64 - if: matrix.os == 'macos-latest' - run: cargo build --release --locked --manifest-path=roles/mining-proxy/Cargo.toml --target=aarch64-apple-darwin - - - name: 
Upload Linux x86-64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/mining_proxy_sv2 - asset_name: mining-proxy-sv2-${{ env.RELEASE_VERSION }}-x86_64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux aarch64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-unknown-linux-gnu/release/mining_proxy_sv2 - asset_name: mining-proxy-sv2-${{ env.RELEASE_VERSION }}-aarch64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux ARM binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/arm-unknown-linux-gnueabi/release/mining_proxy_sv2 - asset_name: mining-proxy-sv2-${{ env.RELEASE_VERSION }}-arm-linux-gnueabi - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS x86-64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/mining_proxy_sv2 - asset_name: mining-proxy-sv2-${{ env.RELEASE_VERSION }}-x86_64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS ARM64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-apple-darwin/release/mining_proxy_sv2 - asset_name: mining-proxy-sv2-${{ env.RELEASE_VERSION }}-aarch64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - release_translator: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ ubuntu-20.04, macos-latest ] - steps: - - uses: actions/checkout@v4 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - - name: Set env - run: echo 
"RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Compile Native - run: cargo build --release --locked --manifest-path=roles/translator/Cargo.toml - - - name: Install cross - run: cargo install cross - - - name: Compile Binaries for aarch64-unknown-linux-gnu - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/translator/Cargo.toml --target aarch64-unknown-linux-gnu - - - name: Compile Binaries for arm-unknown-linux-gnueabi - if: matrix.os == 'ubuntu-20.04' - run: cross build --release --locked --manifest-path=roles/translator/Cargo.toml --target arm-unknown-linux-gnueabi - - - name: Install aarch64-apple-darwin target - if: matrix.os == 'macos-latest' - run: rustup target add aarch64-apple-darwin - - - name: Compile MacOS ARM64 - if: matrix.os == 'macos-latest' - run: cargo build --release --locked --manifest-path=roles/translator/Cargo.toml --target=aarch64-apple-darwin - - - name: Upload Linux x86-64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/translator_sv2 - asset_name: translator-sv2-${{ env.RELEASE_VERSION }}-x86_64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux aarch64 binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-unknown-linux-gnu/release/translator_sv2 - asset_name: translator-sv2-${{ env.RELEASE_VERSION }}-aarch64-linux-gnu - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload Linux ARM binaries to release - if: matrix.os == 'ubuntu-20.04' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/arm-unknown-linux-gnueabi/release/translator_sv2 - asset_name: translator-sv2-${{ env.RELEASE_VERSION }}-arm-linux-gnueabi - tag: ${{ env.RELEASE_VERSION }} - - - name: 
Upload MacOS x86-64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/release/translator_sv2 - asset_name: translator-sv2-${{ env.RELEASE_VERSION }}-x86_64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} - - - name: Upload MacOS ARM64 binaries to release - if: matrix.os == 'macos-latest' - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: roles/target/aarch64-apple-darwin/release/translator_sv2 - asset_name: translator-sv2-${{ env.RELEASE_VERSION }}-aarch64-apple-darwin - tag: ${{ env.RELEASE_VERSION }} From 8e50099a18d9ca185d8356fd37447ac6e800ba31 Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 19 Aug 2024 11:03:54 +0200 Subject: [PATCH 098/101] Add support for data types defined by sv2 extensions In no-serde-sv2 an sv2 sequence is generic over T. If we want this sequence to be Deserialize we need T to be Fixed and GerMarker. This 2 traits were private since all the sv2 types are already defined in no-serde-sv2. But if we want to use sv2 types defined in an sv2 extensions we need to make these traits public. The Encodable dervive macro in derive_codec implement GetSize for the passed struct. But GetSize is also a blanket implementation for every type that implement Fixed. So if we implement Fixed for our new sv2 type and then we derive Encodable (commonly renamed Serialize) we get an error. This commit add an attribute to Encodable called already_sized if the struct that we want derive Encodable is market as already_sized the macro will not implement GetSize for it. This commit also bump minor version of derive_codec and no-serde-sv2 consequentially also of binary-sv2 since it reexport the above libs. 
--- protocols/Cargo.lock | 6 +- protocols/v2/binary-sv2/binary-sv2/Cargo.toml | 2 +- .../binary-sv2/no-serde-sv2/codec/Cargo.toml | 2 +- .../binary-sv2/no-serde-sv2/codec/src/lib.rs | 8 +-- .../no-serde-sv2/derive_codec/Cargo.toml | 2 +- .../no-serde-sv2/derive_codec/src/lib.rs | 59 +++++++++++++------ roles/Cargo.lock | 4 +- 7 files changed, 52 insertions(+), 31 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 9e04903ff..1f853f4ad 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -77,7 +77,7 @@ checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" [[package]] name = "binary_codec_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "buffer_sv2", "quickcheck", @@ -85,7 +85,7 @@ dependencies = [ [[package]] name = "binary_sv2" -version = "1.0.1" +version = "1.1.0" dependencies = [ "binary_codec_sv2", "derive_codec_sv2", @@ -260,7 +260,7 @@ dependencies = [ [[package]] name = "derive_codec_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "binary_codec_sv2", ] diff --git a/protocols/v2/binary-sv2/binary-sv2/Cargo.toml b/protocols/v2/binary-sv2/binary-sv2/Cargo.toml index 1fd78fcc1..b8742ad48 100644 --- a/protocols/v2/binary-sv2/binary-sv2/Cargo.toml +++ b/protocols/v2/binary-sv2/binary-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "binary_sv2" -version = "1.0.1" +version = "1.1.0" authors = ["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml b/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml index 5f1f83e12..d564c96be 100644 --- a/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml +++ b/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "binary_codec_sv2" -version = "1.0.0" +version = "1.1.0" authors = ["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs 
b/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs index 8f7d96e62..2199d47e7 100644 --- a/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs +++ b/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs @@ -25,14 +25,14 @@ use std::io::{Error as E, ErrorKind}; mod codec; mod datatypes; pub use datatypes::{ - PubKey, Seq0255, Seq064K, ShortTxId, Signature, Str0255, Sv2Option, U32AsRef, B016M, B0255, - B032, B064K, U24, U256, + PubKey, Seq0255, Seq064K, ShortTxId, Signature, Str0255, Sv2DataType, Sv2Option, U32AsRef, + B016M, B0255, B032, B064K, U24, U256, }; pub use crate::codec::{ - decodable::Decodable, + decodable::{Decodable, GetMarker}, encodable::{Encodable, EncodableField}, - GetSize, SizeHint, + Fixed, GetSize, SizeHint, }; #[allow(clippy::wrong_self_convention)] diff --git a/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/Cargo.toml b/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/Cargo.toml index 62e62bd9c..93f202d8b 100644 --- a/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/Cargo.toml +++ b/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "derive_codec_sv2" -version = "1.0.0" +version = "1.1.0" authors = ["fi3 "] edition = "2018" description = "Derive macro for Sv2 binary format serializer and deserializer" diff --git a/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/src/lib.rs b/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/src/lib.rs index 6821abb6a..aef883ba1 100644 --- a/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/src/lib.rs +++ b/protocols/v2/binary-sv2/no-serde-sv2/derive_codec/src/lib.rs @@ -2,6 +2,24 @@ extern crate proc_macro; use core::iter::FromIterator; use proc_macro::{Group, TokenStream, TokenTree}; +fn is_already_sized(item: TokenStream) -> bool { + let stream = item.into_iter(); + + for next in stream { + if let TokenTree::Group(g) = next.clone() { + if g.delimiter() == proc_macro::Delimiter::Bracket { + for t in g.stream().into_iter() { + if let TokenTree::Ident(i) = 
t { + if i.to_string() == "already_sized" { + return true; + } + } + } + } + } + } + false +} fn remove_attributes(item: TokenStream) -> TokenStream { let stream = item.into_iter(); let mut is_attribute = false; @@ -356,8 +374,9 @@ fn get_static_generics(gen: &str) -> &str { } } -#[proc_macro_derive(Encodable)] +#[proc_macro_derive(Encodable, attributes(already_sized))] pub fn encodable(item: TokenStream) -> TokenStream { + let is_already_sized = is_already_sized(item.clone()); let parsed_struct = get_struct_properties(item); let fields = parsed_struct.fields.clone(); @@ -392,6 +411,23 @@ pub fn encodable(item: TokenStream) -> TokenStream { "<'decoder>".to_string() }; + let get_size = if is_already_sized { + String::new() + } else { + format!( + " + impl{} GetSize for {}{} {{ + fn get_size(&self) -> usize {{ + let mut size = 0; + {} + size + }} + }} + ", + impl_generics, parsed_struct.name, parsed_struct.generics, sizes + ) + }; + let result = format!( "mod impl_parse_encodable_{} {{ @@ -408,14 +444,7 @@ pub fn encodable(item: TokenStream) -> TokenStream { }} }} - - impl{} GetSize for {}{} {{ - fn get_size(&self) -> usize {{ - let mut size = 0; - {} - size - }} - }} + {} }}", // imports @@ -428,16 +457,8 @@ pub fn encodable(item: TokenStream) -> TokenStream { parsed_struct.name, parsed_struct.generics, field_into_decoded_field, - // impl Encodable for Struct - //impl{} Encodable<'decoder> for {}{} {{}} - //impl_generics, - //parsed_struct.name, - //parsed_struct.generics, - // impl GetSize for Struct - impl_generics, - parsed_struct.name, - parsed_struct.generics, - sizes, + // impl get_size + get_size, ); //println!("{}", result); diff --git a/roles/Cargo.lock b/roles/Cargo.lock index e0dfec0c1..d46de28ea 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -373,14 +373,14 @@ checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" [[package]] name = "binary_codec_sv2" -version = "1.0.0" +version = "1.1.0" dependencies = [ "buffer_sv2", ] 
[[package]] name = "binary_sv2" -version = "1.0.1" +version = "1.1.0" dependencies = [ "binary_codec_sv2", "derive_codec_sv2", From 89819ea21b4397dbd9bc1890245d44657d789d8b Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 19 Aug 2024 19:59:36 +0200 Subject: [PATCH 099/101] Update binary-sv2/no-serde-sv2 export EncodablePrimitive In order to implement Encodable (aka Serialize) for sv2 types defined by extensions EncodablePrimitive is needed. --- protocols/Cargo.lock | 4 ++-- protocols/v2/binary-sv2/binary-sv2/Cargo.toml | 2 +- protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml | 2 +- protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/protocols/Cargo.lock b/protocols/Cargo.lock index 1f853f4ad..03934402e 100644 --- a/protocols/Cargo.lock +++ b/protocols/Cargo.lock @@ -77,7 +77,7 @@ checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" [[package]] name = "binary_codec_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "buffer_sv2", "quickcheck", ] [[package]] name = "binary_sv2" -version = "1.1.0" +version = "1.2.0" dependencies = [ "binary_codec_sv2", "derive_codec_sv2", diff --git a/protocols/v2/binary-sv2/binary-sv2/Cargo.toml b/protocols/v2/binary-sv2/binary-sv2/Cargo.toml index b8742ad48..922d491a2 100644 --- a/protocols/v2/binary-sv2/binary-sv2/Cargo.toml +++ b/protocols/v2/binary-sv2/binary-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "binary_sv2" -version = "1.1.0" +version = "1.2.0" authors = ["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml b/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml index d564c96be..5394f336c 100644 --- a/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml +++ b/protocols/v2/binary-sv2/no-serde-sv2/codec/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "binary_codec_sv2" -version = "1.1.0" +version = "1.2.0" authors = 
["fi3 "] edition = "2018" description = "Sv2 data format" diff --git a/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs b/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs index 2199d47e7..929fb07a0 100644 --- a/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs +++ b/protocols/v2/binary-sv2/no-serde-sv2/codec/src/lib.rs @@ -58,7 +58,7 @@ pub mod decodable { } pub mod encodable { - pub use crate::codec::encodable::{Encodable, EncodableField}; + pub use crate::codec::encodable::{Encodable, EncodableField, EncodablePrimitive}; } #[macro_use] From f883bded85ed425a845766432d776d8ccda9821e Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 19 Aug 2024 13:04:30 -0300 Subject: [PATCH 100/101] rm scripts/check-versioning-lib-release.sh --- .github/workflows/release-libs.yaml | 8 ---- scripts/check-versioning-lib-release.sh | 60 ------------------------- 2 files changed, 68 deletions(-) delete mode 100755 scripts/check-versioning-lib-release.sh diff --git a/.github/workflows/release-libs.yaml b/.github/workflows/release-libs.yaml index ac7883dba..a6fcc776b 100644 --- a/.github/workflows/release-libs.yaml +++ b/.github/workflows/release-libs.yaml @@ -22,14 +22,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Run check-versioning-lib-release.sh - run: | - ./check-versioning-lib-release.sh - if [ $? 
-eq 1 ]; then - echo "Script returned exit code 1, halting the workflow" - exit 1 - fi - - uses: actions/checkout@v4 - uses: actions-rs/toolchain@v1 with: diff --git a/scripts/check-versioning-lib-release.sh b/scripts/check-versioning-lib-release.sh deleted file mode 100755 index 5fdf6eb91..000000000 --- a/scripts/check-versioning-lib-release.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -git fetch --all - -crates=( -"utils/buffer" -"protocols/v2/binary-sv2/no-serde-sv2/derive_codec" -"protocols/v2/binary-sv2/no-serde-sv2/codec" -"protocols/v2/binary-sv2/serde-sv2" -"protocols/v2/binary-sv2/binary-sv2" -"protocols/v2/const-sv2" -"protocols/v2/framing-sv2" -"protocols/v2/noise-sv2" -"protocols/v2/codec-sv2" -"protocols/v2/subprotocols/common-messages" -"protocols/v2/subprotocols/job-declaration" -"protocols/v2/subprotocols/mining" -"protocols/v2/subprotocols/template-distribution" -"protocols/v2/sv2-ffi" -"protocols/v2/roles-logic-sv2" -"protocols/v1" -"utils/bip32-key-derivation" -"utils/error-handling" -"utils/key-utils" -"roles/roles-utils/network-helpers" -"roles/roles-utils/rpc" -"roles/jd-client" -"roles/jd-server" -"roles/mining-proxy" -"roles/pool" -"roles/translator" -) - -# Loop through each crate -for crate in "${crates[@]}"; do - cd "$crate" - - # Check if the branches exist locally, if not, create them - git show-ref --verify --quiet refs/remotes/origin/main || { echo "Branch 'main' not found."; exit 1; } - git show-ref --verify --quiet refs/remotes/origin/dev || { echo "Branch 'dev' not found."; exit 1; } - - # Check if there were any changes between dev and main - git diff --quiet "origin/dev" "origin/main" -- . - if [ $? 
-ne 0 ]; then - - # Check if crate versions on dev and main are identical - version_dev=$(git show origin/dev:./Cargo.toml | awk -F' = ' '$1 == "version" {gsub(/[ "]+/, "", $2); print $2}') - version_main=$(git show origin/main:./Cargo.toml | awk -F' = ' '$1 == "version" {gsub(/[ "]+/, "", $2); print $2}') - if [ "$version_dev" = "$version_main" ]; then - # this prevents the release PR from being merged, since we do `exit 1`, effectively stopping the Github CI - echo "Changes detected in crate $crate between dev and main branches! Versions on dev and main branches are identical ($version_dev), so you should bump the crate version on dev before merging into main." - exit 1 - else - # this creates a log of version changes, useful for release logs - echo "Changes detected in crate $crate between dev and main branches! Version in dev is: ($version_dev), while version in main is ($version_main)." - fi - fi - - cd - >/dev/null -done \ No newline at end of file From b3dde39b327de55003efbec3f1e32655d16504e0 Mon Sep 17 00:00:00 2001 From: plebhash Date: Mon, 19 Aug 2024 13:16:11 -0300 Subject: [PATCH 101/101] only release libs on merges to main --- .github/workflows/release-libs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-libs.yaml b/.github/workflows/release-libs.yaml index a6fcc776b..24e925196 100644 --- a/.github/workflows/release-libs.yaml +++ b/.github/workflows/release-libs.yaml @@ -11,7 +11,7 @@ name: Release Libs on: - pull_request: + push: branches: - main