diff --git a/lightning/src/chain/onchaintx.rs b/lightning/src/chain/onchaintx.rs
index 6c29d147f02..94d6aa35746 100644
--- a/lightning/src/chain/onchaintx.rs
+++ b/lightning/src/chain/onchaintx.rs
@@ -726,7 +726,10 @@ impl OnchainTxHandler
 		B::Target: BroadcasterInterface,
 		F::Target: FeeEstimator,
 	{
-		log_debug!(logger, "Updating claims view at height {} with {} claim requests", cur_height, requests.len());
+		if !requests.is_empty() {
+			log_debug!(logger, "Updating claims view at height {} with {} claim requests", cur_height, requests.len());
+		}
+
 		let mut preprocessed_requests = Vec::with_capacity(requests.len());
 		let mut aggregated_request = None;
 
@@ -772,6 +775,12 @@ impl OnchainTxHandler
 		// Claim everything up to and including `cur_height`
 		let remaining_locked_packages = self.locktimed_packages.split_off(&(cur_height + 1));
+		if !self.locktimed_packages.is_empty() {
+			log_debug!(logger,
+				"Updating claims view at height {} with {} locked packages available for claim",
+				cur_height,
+				self.locktimed_packages.len());
+		}
 		for (pop_height, mut entry) in self.locktimed_packages.iter_mut() {
 			log_trace!(logger, "Restoring delayed claim of package(s) at their timelock at {}.", pop_height);
 			preprocessed_requests.append(&mut entry);
 		}
@@ -852,8 +861,15 @@ impl OnchainTxHandler
 		B::Target: BroadcasterInterface,
 		F::Target: FeeEstimator,
 	{
-		log_debug!(logger, "Updating claims view at height {} with {} matched transactions in block {}", cur_height, txn_matched.len(), conf_height);
+		let mut have_logged_intro = false;
+		let mut maybe_log_intro = || {
+			if !have_logged_intro {
+				log_debug!(logger, "Updating claims view at height {} with {} matched transactions in block {}", cur_height, txn_matched.len(), conf_height);
+				have_logged_intro = true;
+			}
+		};
 		let mut bump_candidates = new_hash_map();
+		if !txn_matched.is_empty() { maybe_log_intro(); }
 		for tx in txn_matched {
 			// Scan all input to verify is one of the outpoint spent is of interest for us
 			let mut claimed_outputs_material = Vec::new();
@@ -946,6 +962,7 @@ impl OnchainTxHandler
 			self.onchain_events_awaiting_threshold_conf.drain(..).collect::<Vec<_>>();
 		for entry in onchain_events_awaiting_threshold_conf {
 			if entry.has_reached_confirmation_threshold(cur_height) {
+				maybe_log_intro();
 				match entry.event {
 					OnchainEvent::Claim { claim_id } => {
 						// We may remove a whole set of claim outpoints here, as these one may have
@@ -983,7 +1000,11 @@ impl OnchainTxHandler
 		}
 
 		// Build, bump and rebroadcast tx accordingly
-		log_trace!(logger, "Bumping {} candidates", bump_candidates.len());
+		if !bump_candidates.is_empty() {
+			maybe_log_intro();
+			log_trace!(logger, "Bumping {} candidates", bump_candidates.len());
+		}
+
 		for (claim_id, request) in bump_candidates.iter() {
 			if let Some((new_timer, new_feerate, bump_claim)) = self.generate_claim(
 				cur_height, &request, &FeerateStrategy::ForceBump, &*fee_estimator, &*logger,
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 243cf741822..55b2cbae441 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -4833,10 +4833,6 @@ where
			// If the feerate has decreased by less than half, don't bother
			if new_feerate <= chan.context.get_feerate_sat_per_1000_weight() &&
				new_feerate * 2 > chan.context.get_feerate_sat_per_1000_weight() {
-				if new_feerate != chan.context.get_feerate_sat_per_1000_weight() {
-					log_trace!(logger, "Channel {} does not qualify for a feerate change from {} to {}.",
-						chan_id, chan.context.get_feerate_sat_per_1000_weight(), new_feerate);
-				}
				return NotifyOption::SkipPersistNoEvents;
			}
			if !chan.context.is_live() {
diff --git a/lightning/src/ln/peer_handler.rs b/lightning/src/ln/peer_handler.rs
index 0f206117ec6..8d83763715c 100644
--- a/lightning/src/ln/peer_handler.rs
+++ b/lightning/src/ln/peer_handler.rs
@@ -33,7 +33,7 @@ use crate::onion_message::offers::{OffersMessage, OffersMessageHandler};
 use crate::onion_message::packet::OnionMessageContents;
 use crate::routing::gossip::{NodeId, NodeAlias};
 use crate::util::atomic_counter::AtomicCounter;
-use crate::util::logger::{Logger, WithContext};
+use crate::util::logger::{Level, Logger, WithContext};
 use crate::util::string::PrintableString;
 
 use crate::prelude::*;
@@ -1329,7 +1329,9 @@ impl
					},
					msgs::ErrorAction::IgnoreAndLog(level) => {
-						log_given_level!(logger, level, "Error handling message{}; ignoring: {}", OptionalFromDebugger(&peer_node_id), e.err);
+						log_given_level!(logger, level, "Error handling {}message{}; ignoring: {}",
+							if level == Level::Gossip { "gossip " } else { "" },
+							OptionalFromDebugger(&peer_node_id), e.err);
						continue
					},
					msgs::ErrorAction::IgnoreDuplicateGossip => continue, // Don't even bother logging these
diff --git a/lightning/src/routing/gossip.rs b/lightning/src/routing/gossip.rs
index 950ec79a1e9..a4938f72e8b 100644
--- a/lightning/src/routing/gossip.rs
+++ b/lightning/src/routing/gossip.rs
@@ -1925,7 +1925,10 @@ impl<L: Deref> NetworkGraph<L> where L::Target: Logger {
			None => {
				core::mem::drop(channels);
				self.pending_checks.check_hold_pending_channel_update(msg, full_msg)?;
-				return Err(LightningError{err: "Couldn't find channel for update".to_owned(), action: ErrorAction::IgnoreError});
+				return Err(LightningError {
+					err: "Couldn't find channel for update".to_owned(),
+					action: ErrorAction::IgnoreAndLog(Level::Gossip),
+				});
			},
			Some(channel) => {
				if msg.htlc_maximum_msat > MAX_VALUE_MSAT {
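
Reviewer note on the onchaintx.rs hunks: the core trick is a `FnMut` closure that mutably captures a `have_logged_intro` flag, so the per-block "Updating claims view…" header is emitted lazily and at most once, and only when the call actually has something to report. Below is a minimal standalone sketch of that pattern; the function and variable names are illustrative only (not LDK API), and `println!` stands in for LDK's `log_debug!`/`log_trace!` macros.

```rust
// Standalone sketch of the lazy "intro line" pattern from the onchaintx.rs hunks.
// `process_claims` and its arguments are hypothetical; `println!` stands in for
// LDK's logging macros.
fn process_claims(requests: &[u32], cur_height: u32) {
	let mut have_logged_intro = false;
	// FnMut closure: it mutably captures the flag, so no matter how many times it
	// is called, the header line is printed at most once.
	let mut maybe_log_intro = || {
		if !have_logged_intro {
			println!("Updating claims view at height {} with {} claim requests", cur_height, requests.len());
			have_logged_intro = true;
		}
	};

	// Only emit the header when there is actually work to report.
	if !requests.is_empty() {
		maybe_log_intro();
	}
	for req in requests {
		maybe_log_intro(); // no-op after the first call
		println!("Processing claim request {}", req);
	}
}

fn main() {
	process_claims(&[], 100);     // logs nothing at all
	process_claims(&[1, 2], 101); // logs the header exactly once, then each request
}
```

With this shape, block-connection calls that find nothing to claim, bump, or confirm stay completely silent, which is the point of the patch: the intro line only appears when a follow-up line will appear as well.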