Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions fuzz/fuzz_targets/chanmon_fail_consistency.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ use lightning::ln::channelmonitor;
use lightning::ln::channelmonitor::{ChannelMonitorUpdateErr, HTLCUpdate};
use lightning::ln::channelmanager::{ChannelManager, PaymentHash, PaymentPreimage};
use lightning::ln::router::{Route, RouteHop};
use lightning::ln::msgs::{CommitmentUpdate, ChannelMessageHandler, ErrorAction, HandleError, UpdateAddHTLC};
use lightning::ln::msgs::{CommitmentUpdate, ChannelMessageHandler, ErrorAction, HandleError, UpdateAddHTLC, LocalFeatures};
use lightning::util::{reset_rng_state, fill_bytes, events};
use lightning::util::logger::Logger;
use lightning::util::config::UserConfig;
Expand Down Expand Up @@ -168,7 +168,7 @@ pub fn do_test(data: &[u8]) {
} else { panic!("Wrong event type"); }
};

$dest.handle_open_channel(&$source.get_our_node_id(), &open_channel).unwrap();
$dest.handle_open_channel(&$source.get_our_node_id(), LocalFeatures::new(), &open_channel).unwrap();
let accept_channel = {
let events = $dest.get_and_clear_pending_msg_events();
assert_eq!(events.len(), 1);
Expand All @@ -177,7 +177,7 @@ pub fn do_test(data: &[u8]) {
} else { panic!("Wrong event type"); }
};

$source.handle_accept_channel(&$dest.get_our_node_id(), &accept_channel).unwrap();
$source.handle_accept_channel(&$dest.get_our_node_id(), LocalFeatures::new(), &accept_channel).unwrap();
{
let events = $source.get_and_clear_pending_events();
assert_eq!(events.len(), 1);
Expand Down
70 changes: 35 additions & 35 deletions src/ln/chanmon_update_fail_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
use ln::channelmanager::{RAACommitmentOrder, PaymentPreimage, PaymentHash};
use ln::channelmonitor::ChannelMonitorUpdateErr;
use ln::msgs;
use ln::msgs::ChannelMessageHandler;
use ln::msgs::{ChannelMessageHandler, LocalFeatures};
use util::events::{Event, EventsProvider, MessageSendEvent, MessageSendEventsProvider};
use util::errors::APIError;

Expand All @@ -18,8 +18,8 @@ use ln::functional_test_utils::*;
#[test]
fn test_simple_monitor_permanent_update_fail() {
// Test that we handle a simple permanent monitor update failure
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

let route = nodes[0].router.get_route(&nodes[1].node.get_our_node_id(), None, &Vec::new(), 1000000, TEST_FINAL_CLTV).unwrap();
let (_, payment_hash_1) = get_payment_preimage_hash!(nodes[0]);
Expand Down Expand Up @@ -48,8 +48,8 @@ fn test_simple_monitor_permanent_update_fail() {
fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
// Test that we can recover from a simple temporary monitor update failure optionally with
// a disconnect in between
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

let route = nodes[0].router.get_route(&nodes[1].node.get_our_node_id(), None, &Vec::new(), 1000000, TEST_FINAL_CLTV).unwrap();
let (payment_preimage_1, payment_hash_1) = get_payment_preimage_hash!(nodes[0]);
Expand Down Expand Up @@ -147,8 +147,8 @@ fn do_test_monitor_temporary_update_fail(disconnect_count: usize) {
// * We then walk through more message exchanges to get the original update_add_htlc
// through, swapping message ordering based on disconnect_count & 8 and optionally
// disconnect/reconnecting based on disconnect_count.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

let (payment_preimage_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);

Expand Down Expand Up @@ -473,8 +473,8 @@ fn test_monitor_temporary_update_fail_c() {
#[test]
fn test_monitor_update_fail_cs() {
// Tests handling of a monitor update failure when processing an incoming commitment_signed
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

let route = nodes[0].router.get_route(&nodes[1].node.get_our_node_id(), None, &Vec::new(), 1000000, TEST_FINAL_CLTV).unwrap();
let (payment_preimage, our_payment_hash) = get_payment_preimage_hash!(nodes[0]);
Expand Down Expand Up @@ -552,8 +552,8 @@ fn test_monitor_update_fail_no_rebroadcast() {
// Tests handling of a monitor update failure when no message rebroadcasting on
// test_restore_channel_monitor() is required. Backported from
// chanmon_fail_consistency fuzz tests.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

let route = nodes[0].router.get_route(&nodes[1].node.get_our_node_id(), None, &Vec::new(), 1000000, TEST_FINAL_CLTV).unwrap();
let (payment_preimage_1, our_payment_hash) = get_payment_preimage_hash!(nodes[0]);
Expand Down Expand Up @@ -594,8 +594,8 @@ fn test_monitor_update_fail_no_rebroadcast() {
fn test_monitor_update_raa_while_paused() {
// Tests handling of an RAA while monitor updating has already been marked failed.
// Backported from chanmon_fail_consistency fuzz tests as this used to be broken.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

send_payment(&nodes[0], &[&nodes[1]], 5000000);

Expand Down Expand Up @@ -661,9 +661,9 @@ fn test_monitor_update_raa_while_paused() {

fn do_test_monitor_update_fail_raa(test_ignore_second_cs: bool) {
// Tests handling of a monitor update failure when processing an incoming RAA
let mut nodes = create_network(3);
create_announced_chan_between_nodes(&nodes, 0, 1);
let chan_2 = create_announced_chan_between_nodes(&nodes, 1, 2);
let mut nodes = create_network(3, &[None, None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());
let chan_2 = create_announced_chan_between_nodes(&nodes, 1, 2, LocalFeatures::new(), LocalFeatures::new());

// Rebalance a bit so that we can send backwards from 2 to 1.
send_payment(&nodes[0], &[&nodes[1], &nodes[2]], 5000000);
Expand Down Expand Up @@ -914,9 +914,9 @@ fn test_monitor_update_fail_reestablish() {
// Simple test for message retransmission after monitor update failure on
// channel_reestablish generating a monitor update (which comes from freeing holding cell
// HTLCs).
let mut nodes = create_network(3);
create_announced_chan_between_nodes(&nodes, 0, 1);
create_announced_chan_between_nodes(&nodes, 1, 2);
let mut nodes = create_network(3, &[None, None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());
create_announced_chan_between_nodes(&nodes, 1, 2, LocalFeatures::new(), LocalFeatures::new());

let (our_payment_preimage, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1000000);

Expand Down Expand Up @@ -992,8 +992,8 @@ fn raa_no_response_awaiting_raa_state() {
// due to a previous monitor update failure, we still set AwaitingRemoteRevoke on the channel
// in question (assuming it intends to respond with a CS after monitor updating is restored).
// Backported from chanmon_fail_consistency fuzz tests as this used to be broken.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

let route = nodes[0].router.get_route(&nodes[1].node.get_our_node_id(), None, &Vec::new(), 1000000, TEST_FINAL_CLTV).unwrap();
let (payment_preimage_1, payment_hash_1) = get_payment_preimage_hash!(nodes[0]);
Expand Down Expand Up @@ -1105,8 +1105,8 @@ fn claim_while_disconnected_monitor_update_fail() {
// Backported from chanmon_fail_consistency fuzz tests as an unmerged version of the handling
// code introduced a regression in this test (specifically, this caught a removal of the
// channel_reestablish handling ensuring the order was sensical given the messages used).
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

// Forward a payment for B to claim
let (payment_preimage_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
Expand Down Expand Up @@ -1220,8 +1220,8 @@ fn monitor_failed_no_reestablish_response() {
// response to a commitment_signed.
// Backported from chanmon_fail_consistency fuzz tests as it caught a long-standing
// debug_assert!() failure in channel_reestablish handling.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

// Route the payment and deliver the initial commitment_signed (with a monitor update failure
// on receipt).
Expand Down Expand Up @@ -1286,8 +1286,8 @@ fn first_message_on_recv_ordering() {
// have no pending response but will want to send a RAA/CS (with the updates for the second
// payment applied).
// Backported from chanmon_fail_consistency fuzz tests as it caught a bug here.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

// Route the first payment outbound, holding the last RAA for B until we are set up so that we
// can deliver it and fail the monitor update.
Expand Down Expand Up @@ -1371,9 +1371,9 @@ fn test_monitor_update_fail_claim() {
// update to claim the payment. We then send a payment C->B->A, making the forward of this
// payment from B to A fail due to the paused channel. Finally, we restore the channel monitor
// updating and claim the payment on B.
let mut nodes = create_network(3);
let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1);
create_announced_chan_between_nodes(&nodes, 1, 2);
let mut nodes = create_network(3, &[None, None, None]);
let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());
create_announced_chan_between_nodes(&nodes, 1, 2, LocalFeatures::new(), LocalFeatures::new());

// Rebalance a bit so that we can send backwards from 3 to 2.
send_payment(&nodes[0], &[&nodes[1], &nodes[2]], 5000000);
Expand Down Expand Up @@ -1441,9 +1441,9 @@ fn test_monitor_update_on_pending_forwards() {
// We do this with a simple 3-node network, sending a payment from A to C and one from C to A.
// The payment from A to C will be failed by C and pending a back-fail to A, while the payment
// from C to A will be pending a forward to A.
let mut nodes = create_network(3);
create_announced_chan_between_nodes(&nodes, 0, 1);
create_announced_chan_between_nodes(&nodes, 1, 2);
let mut nodes = create_network(3, &[None, None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());
create_announced_chan_between_nodes(&nodes, 1, 2, LocalFeatures::new(), LocalFeatures::new());

// Rebalance a bit so that we can send backwards from 3 to 1.
send_payment(&nodes[0], &[&nodes[1], &nodes[2]], 5000000);
Expand Down Expand Up @@ -1505,8 +1505,8 @@ fn monitor_update_claim_fail_no_response() {
// to channel being AwaitingRAA).
// Backported from chanmon_fail_consistency fuzz tests as an unmerged version of the handling
// code was broken.
let mut nodes = create_network(2);
create_announced_chan_between_nodes(&nodes, 0, 1);
let mut nodes = create_network(2, &[None, None]);
create_announced_chan_between_nodes(&nodes, 0, 1, LocalFeatures::new(), LocalFeatures::new());

// Forward a payment for B to claim
let (payment_preimage_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
Expand Down
55 changes: 49 additions & 6 deletions src/ln/channel.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use secp256k1::{Secp256k1,Signature};
use secp256k1;

use ln::msgs;
use ln::msgs::{DecodeError, OptionalField};
use ln::msgs::{DecodeError, OptionalField, LocalFeatures};
use ln::channelmonitor::ChannelMonitor;
use ln::channelmanager::{PendingHTLCStatus, HTLCSource, HTLCFailReason, HTLCFailureMsg, PendingForwardHTLCInfo, RAACommitmentOrder, PaymentPreimage, PaymentHash};
use ln::chan_utils::{TxCreationKeys,HTLCOutputInCommitment,HTLC_SUCCESS_TX_WEIGHT,HTLC_TIMEOUT_TX_WEIGHT};
Expand Down Expand Up @@ -522,7 +522,7 @@ impl Channel {

/// Creates a new channel from a remote sides' request for one.
/// Assumes chain_hash has already been checked and corresponds with what we expect!
pub fn new_from_req(fee_estimator: &FeeEstimator, keys_provider: &Arc<KeysInterface>, their_node_id: PublicKey, msg: &msgs::OpenChannel, user_id: u64, logger: Arc<Logger>, config: &UserConfig) -> Result<Channel, ChannelError> {
pub fn new_from_req(fee_estimator: &FeeEstimator, keys_provider: &Arc<KeysInterface>, their_node_id: PublicKey, their_local_features: LocalFeatures, msg: &msgs::OpenChannel, user_id: u64, logger: Arc<Logger>, config: &UserConfig) -> Result<Channel, ChannelError> {
let chan_keys = keys_provider.get_channel_keys(true);
let mut local_config = (*config).channel_options.clone();

Expand Down Expand Up @@ -625,6 +625,27 @@ impl Channel {
channel_monitor.set_their_base_keys(&msg.htlc_basepoint, &msg.delayed_payment_basepoint);
channel_monitor.set_their_to_self_delay(msg.to_self_delay);

let their_shutdown_scriptpubkey = if their_local_features.supports_upfront_shutdown_script() {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't we use the shutdown_scriptpubkey if it is Present (and not 0-length) even if they haven't set their local features? We would just reject the message if they did set local features (and we did too) and they didn't send a shutdown_scriptpubkey?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

On the first point, the spec says "otherwise:

MAY include a `shutdown_scriptpubkey`."

But that's an option offered to the sending node, not a requirement for the receiving node to enforce upfront_shutdown when the sending node doesn't signal it?

On the second point, I think a 0-length script is the correct opt-out mechanism, which exists exactly to avoid ambiguity. IMO, a node signaling upfront_shutdown should opt out explicitly, or we may assume it's a buggy one?

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oops, you're right, sorry, still reading the wrong side of the spec.

match &msg.shutdown_scriptpubkey {
&OptionalField::Present(ref script) => {
// Peer is signaling upfront_shutdown and has provided a non-accepted scriptpubkey format. We enforce it while receiving shutdown msg
if script.is_p2pkh() || script.is_p2sh() || script.is_v0_p2wsh() || script.is_v0_p2wpkh() {
Some(script.clone())
// Peer is signaling upfront_shutdown and has opt-out with a 0-length script. We don't enforce anything
} else if script.len() == 0 {
None
// Peer is signaling upfront_shutdown and has provided a non-accepted scriptpubkey format. Fail the channel
} else {
return Err(ChannelError::Close("Peer is signaling upfront_shutdown but has provided a non-accepted scriptpubkey format"));
}
},
// Peer is signaling upfront shutdown but don't opt-out with correct mechanism (a.k.a 0-length script). Peer looks buggy, we fail the channel
&OptionalField::Absent => {
return Err(ChannelError::Close("Peer is signaling upfront_shutdown but we don't get any script. Use 0-length script to opt-out"));
}
}
} else { None };

let mut chan = Channel {
user_id: user_id,
config: local_config,
Expand Down Expand Up @@ -692,7 +713,7 @@ impl Channel {
their_prev_commitment_point: None,
their_node_id: their_node_id,

their_shutdown_scriptpubkey: None,
their_shutdown_scriptpubkey,

channel_monitor: channel_monitor,

Expand Down Expand Up @@ -1341,7 +1362,7 @@ impl Channel {

// Message handlers:

pub fn accept_channel(&mut self, msg: &msgs::AcceptChannel, config: &UserConfig) -> Result<(), ChannelError> {
pub fn accept_channel(&mut self, msg: &msgs::AcceptChannel, config: &UserConfig, their_local_features: LocalFeatures) -> Result<(), ChannelError> {
// Check sanity of message fields:
if !self.channel_outbound {
return Err(ChannelError::Close("Got an accept_channel message from an inbound peer"));
Expand Down Expand Up @@ -1400,6 +1421,27 @@ impl Channel {
return Err(ChannelError::Close("We consider the minimum depth to be unreasonably large"));
}

let their_shutdown_scriptpubkey = if their_local_features.supports_upfront_shutdown_script() {
match &msg.shutdown_scriptpubkey {
&OptionalField::Present(ref script) => {
// Peer is signaling upfront_shutdown and has provided a non-accepted scriptpubkey format. We enforce it while receiving shutdown msg
if script.is_p2pkh() || script.is_p2sh() || script.is_v0_p2wsh() || script.is_v0_p2wpkh() {
Some(script.clone())
// Peer is signaling upfront_shutdown and has opt-out with a 0-length script. We don't enforce anything
} else if script.len() == 0 {
None
// Peer is signaling upfront_shutdown and has provided a non-accepted scriptpubkey format. Fail the channel
} else {
return Err(ChannelError::Close("Peer is signaling upfront_shutdown but has provided a non-accepted scriptpubkey format"));
}
},
// Peer is signaling upfront shutdown but don't opt-out with correct mechanism (a.k.a 0-length script). Peer looks buggy, we fail the channel
&OptionalField::Absent => {
return Err(ChannelError::Close("Peer is signaling upfront_shutdown but we don't get any script. Use 0-length script to opt-out"));
}
}
} else { None };

self.channel_monitor.set_their_base_keys(&msg.htlc_basepoint, &msg.delayed_payment_basepoint);

self.their_dust_limit_satoshis = msg.dust_limit_satoshis;
Expand All @@ -1415,6 +1457,7 @@ impl Channel {
self.their_delayed_payment_basepoint = Some(msg.delayed_payment_basepoint);
self.their_htlc_basepoint = Some(msg.htlc_basepoint);
self.their_cur_commitment_point = Some(msg.first_per_commitment_point);
self.their_shutdown_scriptpubkey = their_shutdown_scriptpubkey;

let obscure_factor = self.get_commitment_transaction_number_obscure_factor();
self.channel_monitor.set_commitment_obscure_factor(obscure_factor);
Expand Down Expand Up @@ -3038,7 +3081,7 @@ impl Channel {
htlc_basepoint: PublicKey::from_secret_key(&self.secp_ctx, &self.local_keys.htlc_base_key),
first_per_commitment_point: PublicKey::from_secret_key(&self.secp_ctx, &local_commitment_secret),
channel_flags: if self.config.announced_channel {1} else {0},
shutdown_scriptpubkey: OptionalField::Absent
shutdown_scriptpubkey: OptionalField::Present(if self.config.commit_upfront_shutdown_pubkey { self.get_closing_scriptpubkey() } else { Builder::new().into_script() })
}
}

Expand Down Expand Up @@ -3070,7 +3113,7 @@ impl Channel {
delayed_payment_basepoint: PublicKey::from_secret_key(&self.secp_ctx, &self.local_keys.delayed_payment_base_key),
htlc_basepoint: PublicKey::from_secret_key(&self.secp_ctx, &self.local_keys.htlc_base_key),
first_per_commitment_point: PublicKey::from_secret_key(&self.secp_ctx, &local_commitment_secret),
shutdown_scriptpubkey: OptionalField::Absent
shutdown_scriptpubkey: OptionalField::Present(if self.config.commit_upfront_shutdown_pubkey { self.get_closing_scriptpubkey() } else { Builder::new().into_script() })
}
}

Expand Down
Loading