diff --git a/fuzz/src/chanmon_consistency.rs b/fuzz/src/chanmon_consistency.rs index e0b038c30cd..f2c6b341dff 100644 --- a/fuzz/src/chanmon_consistency.rs +++ b/fuzz/src/chanmon_consistency.rs @@ -177,7 +177,7 @@ struct LatestMonitorState { } struct TestChainMonitor { - pub logger: Arc, + pub logger: Arc>, pub keys: Arc, pub persister: Arc, pub chain_monitor: Arc< @@ -186,7 +186,7 @@ struct TestChainMonitor { Arc, Arc, Arc, - Arc, + Arc>, Arc, >, >, @@ -194,8 +194,8 @@ struct TestChainMonitor { } impl TestChainMonitor { pub fn new( - broadcaster: Arc, logger: Arc, feeest: Arc, - persister: Arc, keys: Arc, + broadcaster: Arc, logger: Arc>, + feeest: Arc, persister: Arc, keys: Arc, ) -> Self { Self { chain_monitor: Arc::new(chainmonitor::ChainMonitor::new( @@ -445,7 +445,7 @@ type ChanMan<'a> = ChannelManager< Arc, &'a FuzzRouter, &'a FuzzRouter, - Arc, + Arc>, >; #[inline] @@ -621,7 +621,7 @@ pub fn do_test(data: &[u8], underlying_out: Out, anchors: bool) { macro_rules! make_node { ($node_id: expr, $fee_estimator: expr) => {{ - let logger: Arc = + let logger: Arc> = Arc::new(test_logger::TestLogger::new($node_id.to_string(), out.clone())); let node_secret = SecretKey::from_slice(&[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -677,7 +677,7 @@ pub fn do_test(data: &[u8], underlying_out: Out, anchors: bool) { macro_rules! reload_node { ($ser: expr, $node_id: expr, $old_monitors: expr, $keys_manager: expr, $fee_estimator: expr) => {{ let keys_manager = Arc::clone(&$keys_manager); - let logger: Arc = + let logger: Arc> = Arc::new(test_logger::TestLogger::new($node_id.to_string(), out.clone())); let chain_monitor = Arc::new(TestChainMonitor::new( broadcast.clone(), diff --git a/fuzz/src/full_stack.rs b/fuzz/src/full_stack.rs index 5281a933526..9b448a4566c 100644 --- a/fuzz/src/full_stack.rs +++ b/fuzz/src/full_stack.rs @@ -221,7 +221,7 @@ type ChannelMan<'a> = ChannelManager< Arc, Arc, Arc, - Arc, + Arc>, Arc, >, >, @@ -232,14 +232,20 @@ type ChannelMan<'a> = ChannelManager< Arc, &'a FuzzRouter, &'a FuzzRouter, - Arc, + Arc>, >; type PeerMan<'a> = PeerManager< Peer<'a>, Arc>, - Arc>>, Arc, Arc>>, + Arc< + P2PGossipSync< + Arc>>>, + Arc, + Arc>, + >, + >, IgnoringMessageHandler, - Arc, + Arc>, IgnoringMessageHandler, Arc, >; @@ -252,7 +258,7 @@ struct MoneyLossDetector<'a> { Arc, Arc, Arc, - Arc, + Arc>, Arc, >, >, @@ -276,7 +282,7 @@ impl<'a> MoneyLossDetector<'a> { Arc, Arc, Arc, - Arc, + Arc>, Arc, >, >, @@ -556,7 +562,7 @@ impl SignerProvider for KeyProvider { } #[inline] -pub fn do_test(mut data: &[u8], logger: &Arc) { +pub fn do_test(mut data: &[u8], logger: &Arc>) { if data.len() < 32 { return; } @@ -1060,13 +1066,14 @@ pub fn do_test(mut data: &[u8], logger: &Arc) { } pub fn full_stack_test(data: &[u8], out: Out) { - let logger: Arc = Arc::new(test_logger::TestLogger::new("".to_owned(), out)); + let logger: Arc> = + Arc::new(test_logger::TestLogger::new("".to_owned(), out)); do_test(data, &logger); } #[no_mangle] pub extern "C" fn full_stack_run(data: *const u8, datalen: usize) { - let logger: Arc = + let logger: Arc> = Arc::new(test_logger::TestLogger::new("".to_owned(), test_logger::DevNull {})); do_test(unsafe { std::slice::from_raw_parts(data, datalen) }, &logger); } @@ -1695,7 +1702,7 @@ pub fn write_fst_seeds(path: &str) { #[cfg(test)] mod tests { - use lightning::util::logger::{Logger, Record}; + use lightning::util::logger::{Logger, Record, Span}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; @@ -1704,6 +1711,8 @@ mod tests { 
pub lines: Mutex>, } impl Logger for TrackingLogger { + type UserSpan = (); + fn log(&self, record: Record) { *self .lines @@ -1720,6 +1729,8 @@ mod tests { record.args ); } + + fn start(&self, _span: Span, _parent: Option<&()>) -> () {} } #[test] @@ -1733,7 +1744,7 @@ mod tests { let test = super::two_peer_forwarding_seed(); let logger = Arc::new(TrackingLogger { lines: Mutex::new(HashMap::new()) }); - super::do_test(&test, &(Arc::clone(&logger) as Arc)); + super::do_test(&test, &(Arc::clone(&logger) as Arc>)); let log_entries = logger.lines.lock().unwrap(); // 1 @@ -1769,7 +1780,7 @@ mod tests { let test = super::gossip_exchange_seed(); let logger = Arc::new(TrackingLogger { lines: Mutex::new(HashMap::new()) }); - super::do_test(&test, &(Arc::clone(&logger) as Arc)); + super::do_test(&test, &(Arc::clone(&logger) as Arc>)); let log_entries = logger.lines.lock().unwrap(); assert_eq!(log_entries.get(&("lightning::ln::peer_handler".to_string(), "Sending message to all peers except Some(PublicKey(0000000000000000000000000000000000000000000000000000000000000002ff00000000000000000000000000000000000000000000000000000000000002)) or the announced channel's counterparties: ChannelAnnouncement { node_signature_1: 3026020200b202200303030303030303030303030303030303030303030303030303030303030303, node_signature_2: 3026020200b202200202020202020202020202020202020202020202020202020202020202020202, bitcoin_signature_1: 3026020200b202200303030303030303030303030303030303030303030303030303030303030303, bitcoin_signature_2: 3026020200b202200202020202020202020202020202020202020202020202020202020202020202, contents: UnsignedChannelAnnouncement { features: [], chain_hash: 6fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000, short_channel_id: 42, node_id_1: NodeId(030303030303030303030303030303030303030303030303030303030303030303), node_id_2: NodeId(020202020202020202020202020202020202020202020202020202020202020202), bitcoin_key_1: NodeId(030303030303030303030303030303030303030303030303030303030303030303), bitcoin_key_2: NodeId(020202020202020202020202020202020202020202020202020202020202020202), excess_data: [] } }".to_string())), Some(&1)); diff --git a/fuzz/src/onion_message.rs b/fuzz/src/onion_message.rs index a5782dacd42..49c392bdf60 100644 --- a/fuzz/src/onion_message.rs +++ b/fuzz/src/onion_message.rs @@ -276,7 +276,7 @@ impl SignerProvider for KeyProvider { #[cfg(test)] mod tests { use bitcoin::hex::FromHex; - use lightning::util::logger::{Logger, Record}; + use lightning::util::logger::{Logger, Record, Span}; use std::collections::HashMap; use std::sync::Mutex; @@ -285,6 +285,8 @@ mod tests { pub lines: Mutex>, } impl Logger for TrackingLogger { + type UserSpan = (); + fn log(&self, record: Record) { let mut lines_lock = self.lines.lock().unwrap(); let key = (record.module_path.to_string(), format!("{}", record.args)); @@ -298,6 +300,8 @@ mod tests { record.args ); } + + fn start(&self, _span: Span, _parent: Option<&()>) -> () {} } #[test] diff --git a/fuzz/src/process_onion_failure.rs b/fuzz/src/process_onion_failure.rs index 2b0d8c4c72b..980cba51d3b 100644 --- a/fuzz/src/process_onion_failure.rs +++ b/fuzz/src/process_onion_failure.rs @@ -63,7 +63,8 @@ fn do_test(data: &[u8], out: Out) { } let secp_ctx = Secp256k1::new(); - let logger: Arc = Arc::new(test_logger::TestLogger::new("".to_owned(), out)); + let logger: Arc> = + Arc::new(test_logger::TestLogger::new("".to_owned(), out)); let session_priv = SecretKey::from_slice(&usize_to_32_bytes(213127)).unwrap(); let payment_id = 
PaymentId(usize_to_32_bytes(232299)); diff --git a/fuzz/src/utils/test_logger.rs b/fuzz/src/utils/test_logger.rs index 6d9de02e387..4602a5154af 100644 --- a/fuzz/src/utils/test_logger.rs +++ b/fuzz/src/utils/test_logger.rs @@ -7,7 +7,7 @@ // You may not use this file except in accordance with one or both of these // licenses. -use lightning::util::logger::{Logger, Record}; +use lightning::util::logger::{Logger, Record, Span}; use std::io::Write; use std::sync::{Arc, Mutex}; @@ -58,6 +58,8 @@ impl<'a, Out: Output> Write for LockedWriteAdapter<'a, Out> { } impl Logger for TestLogger { + type UserSpan = (); + fn log(&self, record: Record) { write!( LockedWriteAdapter(&self.out), @@ -70,4 +72,6 @@ impl Logger for TestLogger { ) .unwrap(); } + + fn start(&self, _span: Span, _parent: Option<&()>) -> () {} } diff --git a/lightning-background-processor/src/lib.rs b/lightning-background-processor/src/lib.rs index 1f7147f3203..fee71cb2d36 100644 --- a/lightning-background-processor/src/lib.rs +++ b/lightning-background-processor/src/lib.rs @@ -633,7 +633,9 @@ use futures_util::{dummy_waker, OptionalSelector, Selector, SelectorOutput}; /// # use lightning_background_processor::{process_events_async, GossipSync}; /// # struct Logger {} /// # impl lightning::util::logger::Logger for Logger { +/// # type UserSpan = (); /// # fn log(&self, _record: lightning::util::logger::Record) {} +/// # fn start(&self, _span: lightning::util::logger::Span, _parent: Option<&()>) -> () {} /// # } /// # struct Store {} /// # impl lightning::util::persist::KVStore for Store { diff --git a/lightning-dns-resolver/src/lib.rs b/lightning-dns-resolver/src/lib.rs index 6de6b77be90..e6b3481c374 100644 --- a/lightning-dns-resolver/src/lib.rs +++ b/lightning-dns-resolver/src/lib.rs @@ -176,7 +176,7 @@ mod test { use lightning::sign::{KeysManager, NodeSigner, Recipient}; use lightning::types::features::InitFeatures; use lightning::types::payment::PaymentHash; - use lightning::util::logger::Logger; + use lightning::util::logger::{Logger, Span}; use lightning::{ commitment_signed_dance, expect_payment_claimed, expect_pending_htlcs_forwardable, @@ -191,9 +191,13 @@ mod test { node: &'static str, } impl Logger for TestLogger { + type UserSpan = (); + fn log(&self, record: lightning::util::logger::Record) { eprintln!("{}: {}", self.node, record.args); } + + fn start(&self, _span: Span, _parent: Option<&()>) -> () {} } impl Deref for TestLogger { type Target = TestLogger; diff --git a/lightning-net-tokio/src/lib.rs b/lightning-net-tokio/src/lib.rs index 95b83b105ac..0cfe075829a 100644 --- a/lightning-net-tokio/src/lib.rs +++ b/lightning-net-tokio/src/lib.rs @@ -640,6 +640,8 @@ mod tests { pub struct TestLogger(); impl lightning::util::logger::Logger for TestLogger { + type UserSpan = (); + fn log(&self, record: lightning::util::logger::Record) { println!( "{:<5} [{} : {}, {}] {}", @@ -650,6 +652,8 @@ mod tests { record.args ); } + + fn start(&self, _span: lightning::util::logger::Span, _parent: Option<&()>) -> () {} } struct MsgHandler { diff --git a/lightning-rapid-gossip-sync/src/lib.rs b/lightning-rapid-gossip-sync/src/lib.rs index 429a3560be0..ce0ed571399 100644 --- a/lightning-rapid-gossip-sync/src/lib.rs +++ b/lightning-rapid-gossip-sync/src/lib.rs @@ -51,10 +51,12 @@ //! use lightning::routing::gossip::NetworkGraph; //! use lightning_rapid_gossip_sync::RapidGossipSync; //! -//! # use lightning::util::logger::{Logger, Record}; +//! # use lightning::util::logger::{Logger, Record, Span}; //! # struct FakeLogger {} //! 
# impl Logger for FakeLogger { +//! # type UserSpan = (); //! # fn log(&self, record: Record) { } +//! # fn start(&self, _span: Span, parent: Option<&()>) -> () {} //! # } //! # let logger = FakeLogger {}; //! diff --git a/lightning/src/chain/channelmonitor.rs b/lightning/src/chain/channelmonitor.rs index 9d9e3382ffb..5aa18433784 100644 --- a/lightning/src/chain/channelmonitor.rs +++ b/lightning/src/chain/channelmonitor.rs @@ -48,7 +48,7 @@ use crate::sign::{ChannelDerivationParameters, HTLCDescriptor, SpendableOutputDe use crate::chain::onchaintx::{ClaimEvent, FeerateStrategy, OnchainTxHandler}; use crate::chain::package::{CounterpartyOfferedHTLCOutput, CounterpartyReceivedHTLCOutput, HolderFundingOutput, HolderHTLCOutput, PackageSolvingData, PackageTemplate, RevokedOutput, RevokedHTLCOutput}; use crate::chain::Filter; -use crate::util::logger::{Logger, Record}; +use crate::util::logger::{Logger, Record, Span}; use crate::util::persist::MonitorName; use crate::util::ser::{Readable, ReadableArgs, RequiredWrapper, MaybeReadable, UpgradableRequired, Writer, Writeable, U48}; use crate::util::byte_utils; @@ -1471,12 +1471,18 @@ pub(crate) struct WithChannelMonitor<'a, L: Deref> where L::Target: Logger { } impl<'a, L: Deref> Logger for WithChannelMonitor<'a, L> where L::Target: Logger { + type UserSpan = <::Target as Logger>::UserSpan; + fn log(&self, mut record: Record) { record.peer_id = self.peer_id; record.channel_id = self.channel_id; record.payment_hash = self.payment_hash; self.logger.log(record) } + + fn start(&self, span: Span, parent: Option<&Self::UserSpan>) -> Self::UserSpan { + self.logger.start(span, parent) + } } impl<'a, L: Deref> WithChannelMonitor<'a, L> where L::Target: Logger { diff --git a/lightning/src/ln/channel.rs b/lightning/src/ln/channel.rs index bc4d38beeea..d1c83aed116 100644 --- a/lightning/src/ln/channel.rs +++ b/lightning/src/ln/channel.rs @@ -61,7 +61,7 @@ use crate::events::{ClosureReason, Event}; use crate::events::bump_transaction::BASE_INPUT_WEIGHT; use crate::routing::gossip::NodeId; use crate::util::ser::{Readable, ReadableArgs, RequiredWrapper, TransactionU16LenLimited, Writeable, Writer}; -use crate::util::logger::{Logger, Record, WithContext}; +use crate::util::logger::{BoxedSpan, Logger, Record, Span, WithContext}; use crate::util::errors::APIError; use crate::util::config::{UserConfig, ChannelConfig, LegacyChannelConfig, ChannelHandshakeConfig, ChannelHandshakeLimits, MaxDustHTLCExposure}; use crate::util::scid_utils::scid_from_parts; @@ -258,6 +258,15 @@ impl InboundHTLCState { } struct InboundHTLCOutput { + htlc_id: u64, + amount_msat: u64, + cltv_expiry: u32, + payment_hash: PaymentHash, + state_wrapper: InboundHTLCStateWrapper, + span: BoxedSpan, +} + +struct InboundHTLCOutputParams { htlc_id: u64, amount_msat: u64, cltv_expiry: u32, @@ -265,7 +274,31 @@ struct InboundHTLCOutput { state: InboundHTLCState, } -impl InboundHTLCOutput { +impl InboundHTLCOutput where { + fn new( + channel_id: ChannelId, params: InboundHTLCOutputParams, logger: &L, + ) -> InboundHTLCOutput where L::Target: Logger { + let htlc_span = logger.start( + Span::InboundHTLC { + channel_id: channel_id, + htlc_id: params.htlc_id, + }, + None, + ); + InboundHTLCOutput { + htlc_id: params.htlc_id, + amount_msat: params.amount_msat, + cltv_expiry: params.cltv_expiry, + payment_hash: params.payment_hash, + state_wrapper: InboundHTLCStateWrapper::new( + params.state, + Some(&htlc_span), + logger, + ), + span: BoxedSpan::new(htlc_span), + } + } + fn is_dust(&self, local: bool, 
feerate_per_kw: u32, broadcaster_dust_limit_sat: u64, features: &ChannelTypeFeatures) -> bool { let htlc_tx_fee_sat = if features.supports_anchors_zero_fee_htlc_tx() { 0 @@ -281,6 +314,41 @@ impl InboundHTLCOutput { }; self.amount_msat / 1000 < broadcaster_dust_limit_sat + htlc_tx_fee_sat } + + fn state(&self) -> &InboundHTLCState { + &self.state_wrapper.state + } + + fn set_state(&mut self, state: InboundHTLCState, logger: &L) where L::Target: Logger { + mem::drop(self.state_wrapper.span.take()); + self.state_wrapper = InboundHTLCStateWrapper::new( + state, + self.span.as_user_span_ref::(), + logger, + ); + } +} + +struct InboundHTLCStateWrapper { + state: InboundHTLCState, + span: Option, +} + +impl InboundHTLCStateWrapper { + fn new( + state: InboundHTLCState, parent_span: Option<&<::Target as Logger>::UserSpan>, logger: &L, + ) -> InboundHTLCStateWrapper where L::Target: Logger { + let state_span = logger.start( + Span::InboundHTLCState { + state: (&state).into(), + }, + parent_span, + ); + InboundHTLCStateWrapper { + state, + span: Some(BoxedSpan::new(state_span)), + } + } } #[cfg_attr(test, derive(Clone, Debug, PartialEq))] @@ -394,6 +462,20 @@ impl<'a> Into> for &'a OutboundHTLCOutcome { #[cfg_attr(test, derive(Clone, Debug, PartialEq))] struct OutboundHTLCOutput { + htlc_id: u64, + amount_msat: u64, + cltv_expiry: u32, + payment_hash: PaymentHash, + state_wrapper: OutboundHTLCStateWrapper, + source: HTLCSource, + blinding_point: Option, + skimmed_fee_msat: Option, + send_timestamp: Option, + span: BoxedSpan, + _forward_span: Option, +} + +struct OutboundHTLCOutputParams { htlc_id: u64, amount_msat: u64, cltv_expiry: u32, @@ -406,6 +488,36 @@ struct OutboundHTLCOutput { } impl OutboundHTLCOutput { + fn new( + channel_id: ChannelId, params: OutboundHTLCOutputParams, + forward_span: Option, logger: &L, + ) -> OutboundHTLCOutput where L::Target: Logger { + let htlc_span = logger.start( + Span::OutboundHTLC { + channel_id: channel_id, + htlc_id: params.htlc_id, + }, + forward_span.as_ref().map(|s| s.as_user_span_ref::()).flatten(), + ); + OutboundHTLCOutput { + htlc_id: params.htlc_id, + amount_msat: params.amount_msat, + cltv_expiry: params.cltv_expiry, + payment_hash: params.payment_hash, + state_wrapper: OutboundHTLCStateWrapper::new( + params.state, + Some(&htlc_span), + logger, + ), + source: params.source, + blinding_point: params.blinding_point, + skimmed_fee_msat: params.skimmed_fee_msat, + send_timestamp: params.send_timestamp, + span: BoxedSpan::new(htlc_span), + _forward_span: forward_span, + } + } + fn is_dust(&self, local: bool, feerate_per_kw: u32, broadcaster_dust_limit_sat: u64, features: &ChannelTypeFeatures) -> bool { let htlc_tx_fee_sat = if features.supports_anchors_zero_fee_htlc_tx() { 0 @@ -421,6 +533,42 @@ impl OutboundHTLCOutput { }; self.amount_msat / 1000 < broadcaster_dust_limit_sat + htlc_tx_fee_sat } + + fn state(&self) -> &OutboundHTLCState { + &self.state_wrapper.state + } + + fn set_state(&mut self, state: OutboundHTLCState, logger: &L) where L::Target: Logger { + mem::drop(self.state_wrapper.span.take()); + self.state_wrapper = OutboundHTLCStateWrapper::new( + state, + self.span.as_user_span_ref::(), + logger, + ); + } +} + +#[cfg_attr(test, derive(Clone, Debug, PartialEq))] +struct OutboundHTLCStateWrapper { + state: OutboundHTLCState, + span: Option, +} + +impl OutboundHTLCStateWrapper { + fn new( + state: OutboundHTLCState, parent_span: Option<&<::Target as Logger>::UserSpan>, logger: &L, + ) -> OutboundHTLCStateWrapper where L::Target: Logger { + let 
state_span = logger.start( + Span::OutboundHTLCState { + state: (&state).into(), + }, + parent_span, + ); + OutboundHTLCStateWrapper { + state, + span: Some(BoxedSpan::new(state_span)), + } + } } /// See AwaitingRemoteRevoke ChannelState for more info @@ -436,6 +584,7 @@ enum HTLCUpdateAwaitingACK { // The extra fee we're skimming off the top of this HTLC. skimmed_fee_msat: Option, blinding_point: Option, + forward_span: Option, }, ClaimHTLC { payment_preimage: PaymentPreimage, @@ -885,12 +1034,18 @@ pub(super) struct WithChannelContext<'a, L: Deref> where L::Target: Logger { } impl<'a, L: Deref> Logger for WithChannelContext<'a, L> where L::Target: Logger { + type UserSpan = <::Target as Logger>::UserSpan; + fn log(&self, mut record: Record) { record.peer_id = self.peer_id; record.channel_id = self.channel_id; record.payment_hash = self.payment_hash; self.logger.log(record) } + + fn start(&self, span: Span, parent: Option<&Self::UserSpan>) -> Self::UserSpan { + self.logger.start(span, parent) + } } impl<'a, 'b, L: Deref> WithChannelContext<'a, L> @@ -1044,7 +1199,7 @@ pub(super) struct MonitorRestoreUpdates { pub accepted_htlcs: Vec<(PendingHTLCInfo, u64)>, pub failed_htlcs: Vec<(HTLCSource, PaymentHash, HTLCFailReason)>, pub finalized_claimed_htlcs: Vec, - pub pending_update_adds: Vec, + pub pending_update_adds: Vec<(msgs::UpdateAddHTLC, BoxedSpan)>, pub funding_broadcastable: Option, pub channel_ready: Option, pub announcement_sigs: Option, @@ -1973,7 +2128,7 @@ pub(super) struct ChannelContext where SP::Target: SignerProvider { monitor_pending_forwards: Vec<(PendingHTLCInfo, u64)>, monitor_pending_failures: Vec<(HTLCSource, PaymentHash, HTLCFailReason)>, monitor_pending_finalized_fulfills: Vec, - monitor_pending_update_adds: Vec, + monitor_pending_update_adds: Vec<(msgs::UpdateAddHTLC, BoxedSpan)>, monitor_pending_tx_signatures: Option, /// If we went to send a revoke_and_ack but our signer was unable to give us a signature, @@ -3338,7 +3493,7 @@ impl ChannelContext where SP::Target: SignerProvider { } if self.pending_inbound_htlcs.iter() - .any(|htlc| match htlc.state { + .any(|htlc| match htlc.state() { InboundHTLCState::Committed => false, // An HTLC removal from the local node is pending on the remote commitment. InboundHTLCState::LocalRemoved(_) => true, @@ -3352,7 +3507,7 @@ impl ChannelContext where SP::Target: SignerProvider { } self.pending_outbound_htlcs.iter() - .any(|htlc| match htlc.state { + .any(|htlc| match htlc.state() { OutboundHTLCState::Committed => false, // An HTLC add from the local node is pending on the remote commitment. 
OutboundHTLCState::LocalAnnounced(_) => true, @@ -3874,26 +4029,26 @@ impl ChannelContext where SP::Target: SignerProvider { } for htlc in self.pending_inbound_htlcs.iter() { - if htlc.state.included_in_commitment(generated_by_local) { + if htlc.state().included_in_commitment(generated_by_local) { if !htlc.is_dust(local, feerate_per_kw, broadcaster_dust_limit_sat, funding.get_channel_type()) { non_dust_htlc_count += 1; } remote_htlc_total_msat += htlc.amount_msat; } else { - if htlc.state.preimage().is_some() { + if htlc.state().preimage().is_some() { value_to_self_msat_offset += htlc.amount_msat as i64; } } }; for htlc in self.pending_outbound_htlcs.iter() { - if htlc.state.included_in_commitment(generated_by_local) { + if htlc.state().included_in_commitment(generated_by_local) { if !htlc.is_dust(local, feerate_per_kw, broadcaster_dust_limit_sat, funding.get_channel_type()) { non_dust_htlc_count += 1; } local_htlc_total_msat += htlc.amount_msat; } else { - if htlc.state.preimage().is_some() { + if htlc.state().preimage().is_some() { value_to_self_msat_offset -= htlc.amount_msat as i64; } } @@ -3993,9 +4148,9 @@ impl ChannelContext where SP::Target: SignerProvider { let htlc_in_tx = get_htlc_in_commitment!($htlc, $outbound == local); htlcs_included.push((htlc_in_tx.clone(), $source)); if $htlc.is_dust(local, feerate_per_kw, broadcaster_dust_limit_sat, funding.get_channel_type()) { - log_trace!(logger, " ...including {} {} dust HTLC {} (hash {}) with value {} due to dust limit", if $outbound { "outbound" } else { "inbound" }, $htlc.state, $htlc.htlc_id, $htlc.payment_hash, $htlc.amount_msat); + log_trace!(logger, " ...including {} {} dust HTLC {} (hash {}) with value {} due to dust limit", if $outbound { "outbound" } else { "inbound" }, $htlc.state(), $htlc.htlc_id, $htlc.payment_hash, $htlc.amount_msat); } else { - log_trace!(logger, " ...including {} {} HTLC {} (hash {}) with value {}", if $outbound { "outbound" } else { "inbound" }, $htlc.state, $htlc.htlc_id, $htlc.payment_hash, $htlc.amount_msat); + log_trace!(logger, " ...including {} {} HTLC {} (hash {}) with value {}", if $outbound { "outbound" } else { "inbound" }, $htlc.state(), $htlc.htlc_id, $htlc.payment_hash, $htlc.amount_msat); nondust_htlcs.push(htlc_in_tx); } } @@ -4005,24 +4160,24 @@ impl ChannelContext where SP::Target: SignerProvider { let mut outbound_htlc_preimages: Vec = Vec::new(); for htlc in self.pending_inbound_htlcs.iter() { - if htlc.state.included_in_commitment(generated_by_local) { + if htlc.state().included_in_commitment(generated_by_local) { add_htlc_output!(htlc, false, None); } else { - log_trace!(logger, " ...not including inbound HTLC {} (hash {}) with value {} due to state ({})", htlc.htlc_id, htlc.payment_hash, htlc.amount_msat, htlc.state); - if let Some(preimage) = htlc.state.preimage() { + log_trace!(logger, " ...not including inbound HTLC {} (hash {}) with value {} due to state ({})", htlc.htlc_id, htlc.payment_hash, htlc.amount_msat, htlc.state()); + if let Some(preimage) = htlc.state().preimage() { inbound_htlc_preimages.push(preimage); } } }; for htlc in self.pending_outbound_htlcs.iter() { - if let Some(preimage) = htlc.state.preimage() { + if let Some(preimage) = htlc.state().preimage() { outbound_htlc_preimages.push(preimage); } - if htlc.state.included_in_commitment(generated_by_local) { + if htlc.state().included_in_commitment(generated_by_local) { add_htlc_output!(htlc, true, Some(&htlc.source)); } else { - log_trace!(logger, " ...not including outbound HTLC {} (hash {}) with value {} due 
to state ({})", htlc.htlc_id, htlc.payment_hash, htlc.amount_msat, htlc.state); + log_trace!(logger, " ...not including outbound HTLC {} (hash {}) with value {} due to state ({})", htlc.htlc_id, htlc.payment_hash, htlc.amount_msat, htlc.state()); } }; @@ -4276,7 +4431,7 @@ impl ChannelContext where SP::Target: SignerProvider { }; let holder_dust_limit_success_sat = htlc_success_dust_limit + self.holder_dust_limit_satoshis; for htlc in self.pending_inbound_htlcs.iter() { - if let Some(state_details) = (&htlc.state).into() { + if let Some(state_details) = htlc.state().into() { inbound_details.push(InboundHTLCDetails{ htlc_id: htlc.htlc_id, amount_msat: htlc.amount_msat, @@ -4307,7 +4462,7 @@ impl ChannelContext where SP::Target: SignerProvider { cltv_expiry: htlc.cltv_expiry, payment_hash: htlc.payment_hash, skimmed_fee_msat: htlc.skimmed_fee_msat, - state: Some((&htlc.state).into()), + state: Some(htlc.state().into()), is_dust: htlc.amount_msat / 1000 < holder_dust_limit_timeout_sat, }); } @@ -4539,7 +4694,7 @@ impl ChannelContext where SP::Target: SignerProvider { if htlc.amount_msat / 1000 < real_dust_limit_timeout_sat { continue } - match htlc.state { + match htlc.state() { OutboundHTLCState::LocalAnnounced {..} => included_htlcs += 1, OutboundHTLCState::Committed => included_htlcs += 1, OutboundHTLCState::RemoteRemoved {..} => included_htlcs += 1, @@ -4652,7 +4807,7 @@ impl ChannelContext where SP::Target: SignerProvider { } // We only include outbound HTLCs if it will not be included in their next commitment_signed, // i.e. if they've responded to us with an RAA after announcement. - match htlc.state { + match htlc.state() { OutboundHTLCState::Committed => included_htlcs += 1, OutboundHTLCState::RemoteRemoved {..} => included_htlcs += 1, OutboundHTLCState::LocalAnnounced { .. 
} => included_htlcs += 1, @@ -5377,7 +5532,7 @@ impl FundedChannel where debug_assert_eq!(htlc.payment_hash, PaymentHash(Sha256::hash(&payment_preimage_arg.0[..]).to_byte_array())); log_debug!(logger, "Claiming inbound HTLC id {} with payment hash {} with preimage {}", htlc.htlc_id, htlc.payment_hash, payment_preimage_arg); - match htlc.state { + match htlc.state() { InboundHTLCState::Committed => {}, InboundHTLCState::LocalRemoved(ref reason) => { if let &InboundHTLCRemovalReason::Fulfill(_) = reason { @@ -5452,13 +5607,16 @@ impl FundedChannel where { let htlc = &mut self.context.pending_inbound_htlcs[pending_idx]; - if let InboundHTLCState::Committed = htlc.state { + if let InboundHTLCState::Committed = htlc.state() { } else { debug_assert!(false, "Have an inbound HTLC we tried to claim before it was fully committed to"); return UpdateFulfillFetch::NewClaim { monitor_update, htlc_value_msat, msg: None }; } log_trace!(logger, "Upgrading HTLC {} to LocalRemoved with a Fulfill in channel {}!", &htlc.payment_hash, &self.context.channel_id); - htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone())); + htlc.set_state( + InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone())), + logger, + ); } UpdateFulfillFetch::NewClaim { @@ -5549,7 +5707,7 @@ impl FundedChannel where let mut pending_idx = core::usize::MAX; for (idx, htlc) in self.context.pending_inbound_htlcs.iter().enumerate() { if htlc.htlc_id == htlc_id_arg { - match htlc.state { + match htlc.state() { InboundHTLCState::Committed => {}, InboundHTLCState::LocalRemoved(_) => { return Err(ChannelError::Ignore(format!("HTLC {} was already resolved", htlc.htlc_id))); @@ -5599,7 +5757,7 @@ impl FundedChannel where E::Message::name(), &self.context.channel_id()); { let htlc = &mut self.context.pending_inbound_htlcs[pending_idx]; - htlc.state = err_contents.clone().to_inbound_htlc_state(); + htlc.set_state(err_contents.clone().to_inbound_htlc_state(), logger); } Ok(Some(err_contents.to_message(htlc_id_arg, self.context.channel_id()))) @@ -5708,9 +5866,9 @@ impl FundedChannel where Ok(self.get_announcement_sigs(node_signer, chain_hash, user_config, best_block.height, logger)) } - pub fn update_add_htlc( - &mut self, msg: &msgs::UpdateAddHTLC, fee_estimator: &LowerBoundedFeeEstimator, - ) -> Result<(), ChannelError> where F::Target: FeeEstimator { + pub fn update_add_htlc( + &mut self, msg: &msgs::UpdateAddHTLC, fee_estimator: &LowerBoundedFeeEstimator, logger: &L, + ) -> Result<(), ChannelError> where F::Target: FeeEstimator, L::Target: Logger { if self.context.channel_state.is_remote_stfu_sent() || self.context.channel_state.is_quiescent() { return Err(ChannelError::WarnAndDisconnect("Got add HTLC message while quiescent".to_owned())); } @@ -5757,9 +5915,9 @@ impl FundedChannel where // transaction). 
let mut removed_outbound_total_msat = 0; for ref htlc in self.context.pending_outbound_htlcs.iter() { - if let OutboundHTLCState::AwaitingRemoteRevokeToRemove(OutboundHTLCOutcome::Success(_)) = htlc.state { + if let OutboundHTLCState::AwaitingRemoteRevokeToRemove(OutboundHTLCOutcome::Success(_)) = htlc.state() { removed_outbound_total_msat += htlc.amount_msat; - } else if let OutboundHTLCState::AwaitingRemovedRemoteRevoke(OutboundHTLCOutcome::Success(_)) = htlc.state { + } else if let OutboundHTLCState::AwaitingRemovedRemoteRevoke(OutboundHTLCOutcome::Success(_)) = htlc.state() { removed_outbound_total_msat += htlc.amount_msat; } } @@ -5814,21 +5972,28 @@ impl FundedChannel where // Now update local state: self.context.next_counterparty_htlc_id += 1; - self.context.pending_inbound_htlcs.push(InboundHTLCOutput { - htlc_id: msg.htlc_id, - amount_msat: msg.amount_msat, - payment_hash: msg.payment_hash, - cltv_expiry: msg.cltv_expiry, - state: InboundHTLCState::RemoteAnnounced(InboundHTLCResolution::Pending { - update_add_htlc: msg.clone(), - }), - }); + self.context.pending_inbound_htlcs.push(InboundHTLCOutput::new( + self.context.channel_id(), + InboundHTLCOutputParams { + htlc_id: msg.htlc_id, + amount_msat: msg.amount_msat, + cltv_expiry: msg.cltv_expiry, + payment_hash: msg.payment_hash, + state: InboundHTLCState::RemoteAnnounced(InboundHTLCResolution::Pending { + update_add_htlc: msg.clone(), + }), + }, + logger, + )); + Ok(()) } /// Marks an outbound HTLC which we have received update_fail/fulfill/malformed #[inline] - fn mark_outbound_htlc_removed(&mut self, htlc_id: u64, outcome: OutboundHTLCOutcome) -> Result<&OutboundHTLCOutput, ChannelError> { + fn mark_outbound_htlc_removed( + &mut self, htlc_id: u64, outcome: OutboundHTLCOutcome, logger: &L + ) -> Result<&OutboundHTLCOutput, ChannelError> where L::Target: Logger { for htlc in self.context.pending_outbound_htlcs.iter_mut() { if htlc.htlc_id == htlc_id { if let OutboundHTLCOutcome::Success(ref payment_preimage) = outcome { @@ -5837,11 +6002,11 @@ impl FundedChannel where return Err(ChannelError::close(format!("Remote tried to fulfill HTLC ({}) with an incorrect preimage", htlc_id))); } } - match htlc.state { + match htlc.state() { OutboundHTLCState::LocalAnnounced(_) => return Err(ChannelError::close(format!("Remote tried to fulfill/fail HTLC ({}) before it had been committed", htlc_id))), OutboundHTLCState::Committed => { - htlc.state = OutboundHTLCState::RemoteRemoved(outcome); + htlc.set_state(OutboundHTLCState::RemoteRemoved(outcome), logger); }, OutboundHTLCState::AwaitingRemoteRevokeToRemove(_) | OutboundHTLCState::AwaitingRemovedRemoteRevoke(_) | OutboundHTLCState::RemoteRemoved(_) => return Err(ChannelError::close(format!("Remote tried to fulfill/fail HTLC ({}) that they'd already fulfilled/failed", htlc_id))), @@ -5852,7 +6017,9 @@ impl FundedChannel where Err(ChannelError::close("Remote tried to fulfill/fail an HTLC we couldn't find".to_owned())) } - pub fn update_fulfill_htlc(&mut self, msg: &msgs::UpdateFulfillHTLC) -> Result<(HTLCSource, u64, Option), ChannelError> { + pub fn update_fulfill_htlc( + &mut self, msg: &msgs::UpdateFulfillHTLC, logger: &L + ) -> Result<(HTLCSource, u64, Option), ChannelError> where L::Target: Logger { if self.context.channel_state.is_remote_stfu_sent() || self.context.channel_state.is_quiescent() { return Err(ChannelError::WarnAndDisconnect("Got fulfill HTLC message while quiescent".to_owned())); } @@ -5863,10 +6030,12 @@ impl FundedChannel where return Err(ChannelError::close("Peer sent 
update_fulfill_htlc when we needed a channel_reestablish".to_owned())); } - self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Success(msg.payment_preimage)).map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat)) + self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Success(msg.payment_preimage), logger).map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat)) } - pub fn update_fail_htlc(&mut self, msg: &msgs::UpdateFailHTLC, fail_reason: HTLCFailReason) -> Result<(), ChannelError> { + pub fn update_fail_htlc( + &mut self, msg: &msgs::UpdateFailHTLC, fail_reason: HTLCFailReason, logger: &L + ) -> Result<(), ChannelError> where L::Target: Logger { if self.context.channel_state.is_remote_stfu_sent() || self.context.channel_state.is_quiescent() { return Err(ChannelError::WarnAndDisconnect("Got fail HTLC message while quiescent".to_owned())); } @@ -5877,11 +6046,13 @@ impl FundedChannel where return Err(ChannelError::close("Peer sent update_fail_htlc when we needed a channel_reestablish".to_owned())); } - self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Failure(fail_reason))?; + self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Failure(fail_reason), logger)?; Ok(()) } - pub fn update_fail_malformed_htlc(&mut self, msg: &msgs::UpdateFailMalformedHTLC, fail_reason: HTLCFailReason) -> Result<(), ChannelError> { + pub fn update_fail_malformed_htlc( + &mut self, msg: &msgs::UpdateFailMalformedHTLC, fail_reason: HTLCFailReason, logger: &L + ) -> Result<(), ChannelError> where L::Target: Logger { if self.context.channel_state.is_remote_stfu_sent() || self.context.channel_state.is_quiescent() { return Err(ChannelError::WarnAndDisconnect("Got fail malformed HTLC message while quiescent".to_owned())); } @@ -5892,7 +6063,7 @@ impl FundedChannel where return Err(ChannelError::close("Peer sent update_fail_malformed_htlc when we needed a channel_reestablish".to_owned())); } - self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Failure(fail_reason))?; + self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Failure(fail_reason), logger)?; Ok(()) } @@ -5937,9 +6108,9 @@ impl FundedChannel where Ok(channel_monitor) } - pub fn commitment_signed(&mut self, msg: &msgs::CommitmentSigned, logger: &L) -> Result, ChannelError> - where L::Target: Logger - { + pub fn commitment_signed( + &mut self, msg: &msgs::CommitmentSigned, logger: &L + ) -> Result, ChannelError> where L::Target: Logger { self.commitment_signed_check_state()?; let updates = self @@ -5954,9 +6125,9 @@ impl FundedChannel where self.commitment_signed_update_monitor(updates, logger) } - pub fn commitment_signed_batch(&mut self, batch: &BTreeMap, logger: &L) -> Result, ChannelError> - where L::Target: Logger - { + pub fn commitment_signed_batch( + &mut self, batch: &BTreeMap, logger: &L + ) -> Result, ChannelError> where L::Target: Logger { self.commitment_signed_check_state()?; // Any commitment_signed not associated with a FundingScope is ignored below if a @@ -5999,9 +6170,9 @@ impl FundedChannel where Ok(()) } - fn commitment_signed_update_monitor(&mut self, mut updates: Vec, logger: &L) -> Result, ChannelError> - where L::Target: Logger - { + fn commitment_signed_update_monitor( + &mut self, mut updates: Vec, logger: &L + ) -> Result, ChannelError> where L::Target: Logger { if self.holder_commitment_point.advance(&self.context.holder_signer, &self.context.secp_ctx, logger).is_err() { // We only fail to advance our commitment 
point/number if we're currently // waiting for our signer to unblock and provide a commitment point. @@ -6026,21 +6197,26 @@ impl FundedChannel where } for htlc in self.context.pending_inbound_htlcs.iter_mut() { - if let &InboundHTLCState::RemoteAnnounced(ref htlc_resolution) = &htlc.state { + if let InboundHTLCState::RemoteAnnounced(ref htlc_resolution) = htlc.state() { log_trace!(logger, "Updating HTLC {} to AwaitingRemoteRevokeToAnnounce due to commitment_signed in channel {}.", &htlc.payment_hash, &self.context.channel_id); - htlc.state = InboundHTLCState::AwaitingRemoteRevokeToAnnounce(htlc_resolution.clone()); + htlc.set_state( + InboundHTLCState::AwaitingRemoteRevokeToAnnounce(htlc_resolution.clone()), + logger, + ); need_commitment = true; } } let mut claimed_htlcs = Vec::new(); for htlc in self.context.pending_outbound_htlcs.iter_mut() { - if let &mut OutboundHTLCState::RemoteRemoved(ref mut outcome) = &mut htlc.state { + if let OutboundHTLCState::RemoteRemoved(_) = htlc.state() { log_trace!(logger, "Updating HTLC {} to AwaitingRemoteRevokeToRemove due to commitment_signed in channel {}.", &htlc.payment_hash, &self.context.channel_id); // Swap against a dummy variant to avoid a potentially expensive clone of `OutboundHTLCOutcome::Failure(HTLCFailReason)` let mut reason = OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])); - mem::swap(outcome, &mut reason); + if let &mut OutboundHTLCState::RemoteRemoved(ref mut outcome) = &mut htlc.state_wrapper.state { + mem::swap(outcome, &mut reason); + } if let OutboundHTLCOutcome::Success(preimage) = reason { // If a user (a) receives an HTLC claim using LDK 0.0.104 or before, then (b) // upgrades to LDK 0.0.114 or later before the HTLC is fully resolved, we could @@ -6050,7 +6226,7 @@ impl FundedChannel where // claim anyway. claimed_htlcs.push((SentHTLCId::from_source(&htlc.source), preimage)); } - htlc.state = OutboundHTLCState::AwaitingRemoteRevokeToRemove(reason); + htlc.set_state(OutboundHTLCState::AwaitingRemoteRevokeToRemove(reason), logger); need_commitment = true; } } @@ -6161,14 +6337,14 @@ impl FundedChannel where // the limit. In case it's less rare than I anticipate, we may want to revisit // handling this case better and maybe fulfilling some of the HTLCs while attempting // to rebalance channels. - let fail_htlc_res = match &htlc_update { - &HTLCUpdateAwaitingACK::AddHTLC { + let fail_htlc_res = match htlc_update { + HTLCUpdateAwaitingACK::AddHTLC { amount_msat, cltv_expiry, ref payment_hash, ref source, ref onion_routing_packet, - skimmed_fee_msat, blinding_point, .. + skimmed_fee_msat, blinding_point, forward_span, .. } => { match self.send_htlc( amount_msat, *payment_hash, cltv_expiry, source.clone(), onion_routing_packet.clone(), - false, skimmed_fee_msat, blinding_point, fee_estimator, logger + false, skimmed_fee_msat, blinding_point, fee_estimator, forward_span, logger ) { Ok(update_add_msg_opt) => { // `send_htlc` only returns `Ok(None)`, when an update goes into @@ -6192,7 +6368,7 @@ impl FundedChannel where } None }, - &HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, .. } => { + HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, .. 
} => { // If an HTLC claim was previously added to the holding cell (via // `get_update_fulfill_htlc`, then generating the claim message itself must // not fail - any in between attempts to claim the HTLC will have resulted @@ -6211,11 +6387,11 @@ impl FundedChannel where monitor_update.updates.append(&mut additional_monitor_update.updates); None }, - &HTLCUpdateAwaitingACK::FailHTLC { htlc_id, ref err_packet } => { + HTLCUpdateAwaitingACK::FailHTLC { htlc_id, ref err_packet } => { Some(self.fail_htlc(htlc_id, err_packet.clone(), false, logger) .map(|fail_msg_opt| fail_msg_opt.map(|_| ()))) }, - &HTLCUpdateAwaitingACK::FailMalformedHTLC { htlc_id, failure_code, sha256_of_onion } => { + HTLCUpdateAwaitingACK::FailMalformedHTLC { htlc_id, failure_code, sha256_of_onion } => { Some(self.fail_htlc(htlc_id, (sha256_of_onion, failure_code), false, logger) .map(|fail_msg_opt| fail_msg_opt.map(|_| ()))) } @@ -6352,7 +6528,7 @@ impl FundedChannel where log_trace!(logger, "Updating HTLCs on receipt of RAA in channel {}...", &self.context.channel_id()); let mut to_forward_infos = Vec::new(); - let mut pending_update_adds = Vec::new(); + let mut pending_update_adds = Vec::<(msgs::UpdateAddHTLC, BoxedSpan)>::new(); let mut revoked_htlcs = Vec::new(); let mut finalized_claimed_htlcs = Vec::new(); let mut update_fail_htlcs = Vec::new(); @@ -6368,7 +6544,7 @@ impl FundedChannel where // We really shouldnt have two passes here, but retain gives a non-mutable ref (Rust bug) pending_inbound_htlcs.retain(|htlc| { - if let &InboundHTLCState::LocalRemoved(ref reason) = &htlc.state { + if let &InboundHTLCState::LocalRemoved(ref reason) = htlc.state() { log_trace!(logger, " ...removing inbound LocalRemoved {}", &htlc.payment_hash); if let &InboundHTLCRemovalReason::Fulfill(_) = reason { value_to_self_msat_diff += htlc.amount_msat as i64; @@ -6379,7 +6555,7 @@ impl FundedChannel where }); let now = duration_since_epoch(); pending_outbound_htlcs.retain(|htlc| { - if let &OutboundHTLCState::AwaitingRemovedRemoteRevoke(ref outcome) = &htlc.state { + if let &OutboundHTLCState::AwaitingRemovedRemoteRevoke(ref outcome) = htlc.state() { log_trace!(logger, " ...removing outbound AwaitingRemovedRemoteRevoke {}", &htlc.payment_hash); if let OutboundHTLCOutcome::Failure(mut reason) = outcome.clone() { // We really want take() here, but, again, non-mut ref :( if let (Some(timestamp), Some(now)) = (htlc.send_timestamp, now) { @@ -6397,18 +6573,18 @@ impl FundedChannel where } else { true } }); for htlc in pending_inbound_htlcs.iter_mut() { - let swap = if let &InboundHTLCState::AwaitingRemoteRevokeToAnnounce(_) = &htlc.state { + let swap = if let &InboundHTLCState::AwaitingRemoteRevokeToAnnounce(_) = htlc.state() { true - } else if let &InboundHTLCState::AwaitingAnnouncedRemoteRevoke(_) = &htlc.state { + } else if let &InboundHTLCState::AwaitingAnnouncedRemoteRevoke(_) = htlc.state() { true } else { false }; if swap { let mut state = InboundHTLCState::Committed; - mem::swap(&mut state, &mut htlc.state); + mem::swap(&mut state, &mut htlc.state_wrapper.state); if let InboundHTLCState::AwaitingRemoteRevokeToAnnounce(resolution) = state { log_trace!(logger, " ...promoting inbound AwaitingRemoteRevokeToAnnounce {} to AwaitingAnnouncedRemoteRevoke", &htlc.payment_hash); - htlc.state = InboundHTLCState::AwaitingAnnouncedRemoteRevoke(resolution); + htlc.set_state(InboundHTLCState::AwaitingAnnouncedRemoteRevoke(resolution), logger); require_commitment = true; } else if let InboundHTLCState::AwaitingAnnouncedRemoteRevoke(resolution) = 
state { match resolution { @@ -6419,11 +6595,17 @@ impl FundedChannel where require_commitment = true; match fail_msg { HTLCFailureMsg::Relay(msg) => { - htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailRelay(msg.clone().into())); + htlc.set_state( + InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailRelay(msg.clone().into())), + logger, + ); update_fail_htlcs.push(msg) }, HTLCFailureMsg::Malformed(msg) => { - htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailMalformed((msg.sha256_of_onion, msg.failure_code))); + htlc.set_state( + InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailMalformed((msg.sha256_of_onion, msg.failure_code))), + logger, + ); update_fail_malformed_htlcs.push(msg) }, } @@ -6431,30 +6613,33 @@ impl FundedChannel where PendingHTLCStatus::Forward(forward_info) => { log_trace!(logger, " ...promoting inbound AwaitingAnnouncedRemoteRevoke {} to Committed, attempting to forward", &htlc.payment_hash); to_forward_infos.push((forward_info, htlc.htlc_id)); - htlc.state = InboundHTLCState::Committed; + htlc.set_state(InboundHTLCState::Committed, logger); } } InboundHTLCResolution::Pending { update_add_htlc } => { log_trace!(logger, " ...promoting inbound AwaitingAnnouncedRemoteRevoke {} to Committed", &htlc.payment_hash); - pending_update_adds.push(update_add_htlc); - htlc.state = InboundHTLCState::Committed; + htlc.set_state(InboundHTLCState::Committed, logger); + let forward_span = logger.start(Span::Forward, htlc.span.as_user_span_ref::()); + pending_update_adds.push((update_add_htlc, BoxedSpan::new(forward_span))); } } } } } for htlc in pending_outbound_htlcs.iter_mut() { - if let OutboundHTLCState::LocalAnnounced(_) = htlc.state { + if let OutboundHTLCState::LocalAnnounced(_) = htlc.state() { log_trace!(logger, " ...promoting outbound LocalAnnounced {} to Committed", &htlc.payment_hash); - htlc.state = OutboundHTLCState::Committed; + htlc.set_state(OutboundHTLCState::Committed, logger); *expecting_peer_commitment_signed = true; } - if let &mut OutboundHTLCState::AwaitingRemoteRevokeToRemove(ref mut outcome) = &mut htlc.state { + if let OutboundHTLCState::AwaitingRemoteRevokeToRemove(_) = htlc.state() { log_trace!(logger, " ...promoting outbound AwaitingRemoteRevokeToRemove {} to AwaitingRemovedRemoteRevoke", &htlc.payment_hash); // Swap against a dummy variant to avoid a potentially expensive clone of `OutboundHTLCOutcome::Failure(HTLCFailReason)` let mut reason = OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])); - mem::swap(outcome, &mut reason); - htlc.state = OutboundHTLCState::AwaitingRemovedRemoteRevoke(reason); + if let &mut OutboundHTLCState::AwaitingRemoteRevokeToRemove(ref mut outcome) = &mut htlc.state_wrapper.state { + mem::swap(outcome, &mut reason); + } + htlc.set_state(OutboundHTLCState::AwaitingRemovedRemoteRevoke(reason), logger); require_commitment = true; } } @@ -6737,7 +6922,7 @@ impl FundedChannel where let mut inbound_drop_count = 0; self.context.pending_inbound_htlcs.retain(|htlc| { - match htlc.state { + match htlc.state() { InboundHTLCState::RemoteAnnounced(_) => { // They sent us an update_add_htlc but we never got the commitment_signed. 
// We'll tell them what commitment_signed we're expecting next and they'll drop @@ -6771,11 +6956,11 @@ impl FundedChannel where } for htlc in self.context.pending_outbound_htlcs.iter_mut() { - if let OutboundHTLCState::RemoteRemoved(_) = htlc.state { + if let OutboundHTLCState::RemoteRemoved(_) = htlc.state() { // They sent us an update to remove this but haven't yet sent the corresponding // commitment_signed, we need to move it back to Committed and they can re-send // the update upon reconnection. - htlc.state = OutboundHTLCState::Committed; + htlc.set_state(OutboundHTLCState::Committed, logger); } } @@ -7110,7 +7295,7 @@ impl FundedChannel where let mut update_fail_malformed_htlcs = Vec::new(); for htlc in self.context.pending_outbound_htlcs.iter() { - if let &OutboundHTLCState::LocalAnnounced(ref onion_packet) = &htlc.state { + if let &OutboundHTLCState::LocalAnnounced(ref onion_packet) = htlc.state() { update_add_htlcs.push(msgs::UpdateAddHTLC { channel_id: self.context.channel_id(), htlc_id: htlc.htlc_id, @@ -7125,7 +7310,7 @@ impl FundedChannel where } for htlc in self.context.pending_inbound_htlcs.iter() { - if let &InboundHTLCState::LocalRemoved(ref reason) = &htlc.state { + if let &InboundHTLCState::LocalRemoved(ref reason) = htlc.state() { match reason { &InboundHTLCRemovalReason::FailRelay(ref err_packet) => { update_fail_htlcs.push(msgs::UpdateFailHTLC { @@ -7553,7 +7738,7 @@ impl FundedChannel where return Err(ChannelError::close("Peer sent shutdown pre-funding generation".to_owned())); } for htlc in self.context.pending_inbound_htlcs.iter() { - if let InboundHTLCState::RemoteAnnounced(_) = htlc.state { + if let InboundHTLCState::RemoteAnnounced(_) = htlc.state() { return Err(ChannelError::close("Got shutdown with remote pending HTLCs".to_owned())); } } @@ -7955,9 +8140,9 @@ impl FundedChannel where let mut removed_outbound_total_msat = 0; for ref htlc in self.context.pending_outbound_htlcs.iter() { - if let OutboundHTLCState::AwaitingRemoteRevokeToRemove(OutboundHTLCOutcome::Success(_)) = htlc.state { + if let OutboundHTLCState::AwaitingRemoteRevokeToRemove(OutboundHTLCOutcome::Success(_)) = htlc.state() { removed_outbound_total_msat += htlc.amount_msat; - } else if let OutboundHTLCState::AwaitingRemovedRemoteRevoke(OutboundHTLCOutcome::Success(_)) = htlc.state { + } else if let OutboundHTLCState::AwaitingRemovedRemoteRevoke(OutboundHTLCOutcome::Success(_)) = htlc.state() { removed_outbound_total_msat += htlc.amount_msat; } } @@ -8851,13 +9036,14 @@ impl FundedChannel where pub fn queue_add_htlc( &mut self, amount_msat: u64, payment_hash: PaymentHash, cltv_expiry: u32, source: HTLCSource, onion_routing_packet: msgs::OnionPacket, skimmed_fee_msat: Option, - blinding_point: Option, fee_estimator: &LowerBoundedFeeEstimator, logger: &L + blinding_point: Option, fee_estimator: &LowerBoundedFeeEstimator, + forward_span: Option, logger: &L ) -> Result<(), (LocalHTLCFailureReason, String)> where F::Target: FeeEstimator, L::Target: Logger { self .send_htlc(amount_msat, payment_hash, cltv_expiry, source, onion_routing_packet, true, - skimmed_fee_msat, blinding_point, fee_estimator, logger) + skimmed_fee_msat, blinding_point, fee_estimator, forward_span, logger) .map(|msg_opt| assert!(msg_opt.is_none(), "We forced holding cell?")) .map_err(|err| { debug_assert!(err.0.is_temporary(), "Queuing HTLC should return temporary error"); @@ -8885,7 +9071,8 @@ impl FundedChannel where &mut self, amount_msat: u64, payment_hash: PaymentHash, cltv_expiry: u32, source: HTLCSource, 
onion_routing_packet: msgs::OnionPacket, mut force_holding_cell: bool, skimmed_fee_msat: Option, blinding_point: Option, - fee_estimator: &LowerBoundedFeeEstimator, logger: &L + fee_estimator: &LowerBoundedFeeEstimator, + forward_span: Option, logger: &L ) -> Result, (LocalHTLCFailureReason, String)> where F::Target: FeeEstimator, L::Target: Logger { @@ -8949,6 +9136,7 @@ impl FundedChannel where onion_routing_packet, skimmed_fee_msat, blinding_point, + forward_span, }); return Ok(None); } @@ -8960,17 +9148,22 @@ impl FundedChannel where // that are simple to implement, and we do it on the outgoing side because then the failure message that encodes // the hold time still needs to be built in channel manager. let send_timestamp = duration_since_epoch(); - self.context.pending_outbound_htlcs.push(OutboundHTLCOutput { - htlc_id: self.context.next_holder_htlc_id, - amount_msat, - payment_hash: payment_hash.clone(), - cltv_expiry, - state: OutboundHTLCState::LocalAnnounced(Box::new(onion_routing_packet.clone())), - source, - blinding_point, - skimmed_fee_msat, - send_timestamp, - }); + self.context.pending_outbound_htlcs.push(OutboundHTLCOutput::new( + self.context.channel_id(), + OutboundHTLCOutputParams { + htlc_id: self.context.next_holder_htlc_id, + amount_msat, + cltv_expiry, + payment_hash: payment_hash.clone(), + state: OutboundHTLCState::LocalAnnounced(Box::new(onion_routing_packet.clone())), + source, + blinding_point, + skimmed_fee_msat, + send_timestamp, + }, + forward_span, + logger, + )); let res = msgs::UpdateAddHTLC { channel_id: self.context.channel_id, @@ -9013,21 +9206,23 @@ impl FundedChannel where // fail to generate this, we still are at least at a position where upgrading their status // is acceptable. for htlc in self.context.pending_inbound_htlcs.iter_mut() { - let new_state = if let &InboundHTLCState::AwaitingRemoteRevokeToAnnounce(ref forward_info) = &htlc.state { + let new_state = if let InboundHTLCState::AwaitingRemoteRevokeToAnnounce(ref forward_info) = htlc.state() { Some(InboundHTLCState::AwaitingAnnouncedRemoteRevoke(forward_info.clone())) } else { None }; if let Some(state) = new_state { log_trace!(logger, " ...promoting inbound AwaitingRemoteRevokeToAnnounce {} to AwaitingAnnouncedRemoteRevoke", &htlc.payment_hash); - htlc.state = state; + htlc.set_state(state, logger); } } for htlc in self.context.pending_outbound_htlcs.iter_mut() { - if let &mut OutboundHTLCState::AwaitingRemoteRevokeToRemove(ref mut outcome) = &mut htlc.state { + if let OutboundHTLCState::AwaitingRemoteRevokeToRemove(_) = htlc.state() { log_trace!(logger, " ...promoting outbound AwaitingRemoteRevokeToRemove {} to AwaitingRemovedRemoteRevoke", &htlc.payment_hash); // Swap against a dummy variant to avoid a potentially expensive clone of `OutboundHTLCOutcome::Failure(HTLCFailReason)` let mut reason = OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])); - mem::swap(outcome, &mut reason); - htlc.state = OutboundHTLCState::AwaitingRemovedRemoteRevoke(reason); + if let &mut OutboundHTLCState::AwaitingRemoteRevokeToRemove(ref mut outcome) = &mut htlc.state_wrapper.state { + mem::swap(outcome, &mut reason); + } + htlc.set_state(OutboundHTLCState::AwaitingRemovedRemoteRevoke(reason), logger); } } if let Some((feerate, update_state)) = self.context.pending_update_fee { @@ -9208,12 +9403,12 @@ impl FundedChannel where pub fn send_htlc_and_commit( &mut self, amount_msat: u64, payment_hash: PaymentHash, cltv_expiry: u32, source: HTLCSource, onion_routing_packet: msgs::OnionPacket, skimmed_fee_msat: 
Option, - fee_estimator: &LowerBoundedFeeEstimator, logger: &L + fee_estimator: &LowerBoundedFeeEstimator, forward_span: Option, logger: &L ) -> Result, ChannelError> where F::Target: FeeEstimator, L::Target: Logger { let send_res = self.send_htlc(amount_msat, payment_hash, cltv_expiry, source, - onion_routing_packet, false, skimmed_fee_msat, None, fee_estimator, logger); + onion_routing_packet, false, skimmed_fee_msat, None, fee_estimator, forward_span, logger); // All [`LocalHTLCFailureReason`] errors are temporary, so they are [`ChannelError::Ignore`]. match send_res.map_err(|(_, msg)| ChannelError::Ignore(msg))? { Some(_) => { @@ -9254,7 +9449,7 @@ impl FundedChannel where return Err(APIError::APIMisuseError { err: "Cannot begin shutdown while quiescent".to_owned() }); } for htlc in self.context.pending_outbound_htlcs.iter() { - if let OutboundHTLCState::LocalAnnounced(_) = htlc.state { + if let OutboundHTLCState::LocalAnnounced(_) = htlc.state() { return Err(APIError::APIMisuseError{err: "Cannot begin shutdown with pending HTLCs. Process pending events first".to_owned()}); } } @@ -10607,21 +10802,21 @@ impl Writeable for FundedChannel where SP::Target: SignerProvider let mut dropped_inbound_htlcs = 0; for htlc in self.context.pending_inbound_htlcs.iter() { - if let InboundHTLCState::RemoteAnnounced(_) = htlc.state { + if let InboundHTLCState::RemoteAnnounced(_) = htlc.state() { dropped_inbound_htlcs += 1; } } let mut removed_htlc_failure_attribution_data: Vec<&Option> = Vec::new(); (self.context.pending_inbound_htlcs.len() as u64 - dropped_inbound_htlcs).write(writer)?; for htlc in self.context.pending_inbound_htlcs.iter() { - if let &InboundHTLCState::RemoteAnnounced(_) = &htlc.state { + if let &InboundHTLCState::RemoteAnnounced(_) = htlc.state() { continue; // Drop } htlc.htlc_id.write(writer)?; htlc.amount_msat.write(writer)?; htlc.cltv_expiry.write(writer)?; htlc.payment_hash.write(writer)?; - match &htlc.state { + match htlc.state() { &InboundHTLCState::RemoteAnnounced(_) => unreachable!(), &InboundHTLCState::AwaitingRemoteRevokeToAnnounce(ref htlc_resolution) => { 1u8.write(writer)?; @@ -10668,7 +10863,7 @@ impl Writeable for FundedChannel where SP::Target: SignerProvider htlc.cltv_expiry.write(writer)?; htlc.payment_hash.write(writer)?; htlc.source.write(writer)?; - match &htlc.state { + match htlc.state() { &OutboundHTLCState::LocalAnnounced(ref onion_packet) => { 0u8.write(writer)?; onion_packet.write(writer)?; @@ -10713,7 +10908,7 @@ impl Writeable for FundedChannel where SP::Target: SignerProvider match update { &HTLCUpdateAwaitingACK::AddHTLC { ref amount_msat, ref cltv_expiry, ref payment_hash, ref source, ref onion_routing_packet, - blinding_point, skimmed_fee_msat, + blinding_point, skimmed_fee_msat, forward_span: _ } => { 0u8.write(writer)?; amount_msat.write(writer)?; @@ -10875,7 +11070,7 @@ impl Writeable for FundedChannel where SP::Target: SignerProvider let mut monitor_pending_update_adds = None; if !self.context.monitor_pending_update_adds.is_empty() { - monitor_pending_update_adds = Some(&self.context.monitor_pending_update_adds); + monitor_pending_update_adds = Some(self.context.monitor_pending_update_adds.iter().map(|a| &a.0).collect::>()); } let is_manual_broadcast = Some(self.context.is_manual_broadcast); @@ -10933,13 +11128,14 @@ impl Writeable for FundedChannel where SP::Target: SignerProvider } } -impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c ChannelTypeFeatures)> for FundedChannel +impl<'a, 'b, 'c, 'd, ES: Deref, SP: Deref, L: 
Deref> ReadableArgs<(&'a ES, &'b SP, &'c ChannelTypeFeatures, &'d L)> for FundedChannel where ES::Target: EntropySource, - SP::Target: SignerProvider + SP::Target: SignerProvider, + L::Target: Logger, { - fn read(reader: &mut R, args: (&'a ES, &'b SP, &'c ChannelTypeFeatures)) -> Result { - let (entropy_source, signer_provider, our_supported_features) = args; + fn read(reader: &mut R, args: (&'a ES, &'b SP, &'c ChannelTypeFeatures, &'d L)) -> Result { + let (entropy_source, signer_provider, our_supported_features, logger) = args; let ver = read_ver_prefix!(reader, SERIALIZATION_VERSION); if ver <= 2 { return Err(DecodeError::UnknownVersion); @@ -10977,91 +11173,111 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel let mut pending_inbound_htlcs = Vec::with_capacity(cmp::min(pending_inbound_htlc_count as usize, DEFAULT_MAX_HTLCS as usize)); for _ in 0..pending_inbound_htlc_count { - pending_inbound_htlcs.push(InboundHTLCOutput { - htlc_id: Readable::read(reader)?, - amount_msat: Readable::read(reader)?, - cltv_expiry: Readable::read(reader)?, - payment_hash: Readable::read(reader)?, - state: match ::read(reader)? { - 1 => { - let resolution = if ver <= 3 { - InboundHTLCResolution::Resolved { pending_htlc_status: Readable::read(reader)? } - } else { - Readable::read(reader)? - }; - InboundHTLCState::AwaitingRemoteRevokeToAnnounce(resolution) - }, - 2 => { - let resolution = if ver <= 3 { - InboundHTLCResolution::Resolved { pending_htlc_status: Readable::read(reader)? } - } else { - Readable::read(reader)? - }; - InboundHTLCState::AwaitingAnnouncedRemoteRevoke(resolution) - }, - 3 => InboundHTLCState::Committed, - 4 => { - let reason = match ::read(reader)? { - 0 => InboundHTLCRemovalReason::FailRelay(msgs::OnionErrorPacket { - data: Readable::read(reader)?, - attribution_data: None, - }), - 1 => InboundHTLCRemovalReason::FailMalformed(Readable::read(reader)?), - 2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?), - _ => return Err(DecodeError::InvalidValue), - }; - InboundHTLCState::LocalRemoved(reason) - }, - _ => return Err(DecodeError::InvalidValue), + let htlc_id = Readable::read(reader)?; + let amount_msat = Readable::read(reader)?; + let cltv_expiry = Readable::read(reader)?; + let payment_hash = Readable::read(reader)?; + let state = match ::read(reader)? { + 1 => { + let resolution = if ver <= 3 { + InboundHTLCResolution::Resolved { pending_htlc_status: Readable::read(reader)? } + } else { + Readable::read(reader)? + }; + InboundHTLCState::AwaitingRemoteRevokeToAnnounce(resolution) }, - }); + 2 => { + let resolution = if ver <= 3 { + InboundHTLCResolution::Resolved { pending_htlc_status: Readable::read(reader)? } + } else { + Readable::read(reader)? + }; + InboundHTLCState::AwaitingAnnouncedRemoteRevoke(resolution) + }, + 3 => InboundHTLCState::Committed, + 4 => { + let reason = match ::read(reader)? 
{ + 0 => InboundHTLCRemovalReason::FailRelay(msgs::OnionErrorPacket { + data: Readable::read(reader)?, + attribution_data: None, + }), + 1 => InboundHTLCRemovalReason::FailMalformed(Readable::read(reader)?), + 2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?), + _ => return Err(DecodeError::InvalidValue), + }; + InboundHTLCState::LocalRemoved(reason) + }, + _ => return Err(DecodeError::InvalidValue), + }; + pending_inbound_htlcs.push(InboundHTLCOutput::new( + channel_id, + InboundHTLCOutputParams { + htlc_id, + amount_msat, + cltv_expiry, + payment_hash, + state, + }, + logger, + )); } let pending_outbound_htlc_count: u64 = Readable::read(reader)?; let mut pending_outbound_htlcs = Vec::with_capacity(cmp::min(pending_outbound_htlc_count as usize, DEFAULT_MAX_HTLCS as usize)); for _ in 0..pending_outbound_htlc_count { - pending_outbound_htlcs.push(OutboundHTLCOutput { - htlc_id: Readable::read(reader)?, - amount_msat: Readable::read(reader)?, - cltv_expiry: Readable::read(reader)?, - payment_hash: Readable::read(reader)?, - source: Readable::read(reader)?, - state: match ::read(reader)? { - 0 => OutboundHTLCState::LocalAnnounced(Box::new(Readable::read(reader)?)), - 1 => OutboundHTLCState::Committed, - 2 => { - let option: Option = Readable::read(reader)?; - let outcome = match option { - Some(r) => OutboundHTLCOutcome::Failure(r), - // Initialize this variant with a dummy preimage, the actual preimage will be filled in further down - None => OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])), - }; - OutboundHTLCState::RemoteRemoved(outcome) - }, - 3 => { - let option: Option = Readable::read(reader)?; - let outcome = match option { - Some(r) => OutboundHTLCOutcome::Failure(r), - // Initialize this variant with a dummy preimage, the actual preimage will be filled in further down - None => OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])), - }; - OutboundHTLCState::AwaitingRemoteRevokeToRemove(outcome) - }, - 4 => { - let option: Option = Readable::read(reader)?; - let outcome = match option { - Some(r) => OutboundHTLCOutcome::Failure(r), - // Initialize this variant with a dummy preimage, the actual preimage will be filled in further down - None => OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])), - }; - OutboundHTLCState::AwaitingRemovedRemoteRevoke(outcome) - }, - _ => return Err(DecodeError::InvalidValue), + let htlc_id = Readable::read(reader)?; + let amount_msat = Readable::read(reader)?; + let cltv_expiry = Readable::read(reader)?; + let payment_hash = Readable::read(reader)?; + let source = Readable::read(reader)?; + let state = match ::read(reader)? 
{ + 0 => OutboundHTLCState::LocalAnnounced(Box::new(Readable::read(reader)?)), + 1 => OutboundHTLCState::Committed, + 2 => { + let option: Option = Readable::read(reader)?; + let outcome = match option { + Some(r) => OutboundHTLCOutcome::Failure(r), + // Initialize this variant with a dummy preimage, the actual preimage will be filled in further down + None => OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])), + }; + OutboundHTLCState::RemoteRemoved(outcome) }, - skimmed_fee_msat: None, - blinding_point: None, - send_timestamp: None, - }); + 3 => { + let option: Option = Readable::read(reader)?; + let outcome = match option { + Some(r) => OutboundHTLCOutcome::Failure(r), + // Initialize this variant with a dummy preimage, the actual preimage will be filled in further down + None => OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])), + }; + OutboundHTLCState::AwaitingRemoteRevokeToRemove(outcome) + }, + 4 => { + let option: Option = Readable::read(reader)?; + let outcome = match option { + Some(r) => OutboundHTLCOutcome::Failure(r), + // Initialize this variant with a dummy preimage, the actual preimage will be filled in further down + None => OutboundHTLCOutcome::Success(PaymentPreimage([0u8; 32])), + }; + OutboundHTLCState::AwaitingRemovedRemoteRevoke(outcome) + }, + _ => return Err(DecodeError::InvalidValue), + }; + pending_outbound_htlcs.push(OutboundHTLCOutput::new( + channel_id, + OutboundHTLCOutputParams { + htlc_id, + amount_msat, + cltv_expiry, + payment_hash, + state, + source, + blinding_point: None, + skimmed_fee_msat: None, + send_timestamp: None, + }, + None, + logger, + )); } let holding_cell_htlc_update_count: u64 = Readable::read(reader)?; @@ -11076,6 +11292,7 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel onion_routing_packet: Readable::read(reader)?, skimmed_fee_msat: None, blinding_point: None, + forward_span: None, }, 1 => HTLCUpdateAwaitingACK::ClaimHTLC { payment_preimage: Readable::read(reader)?, @@ -11287,7 +11504,7 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel let mut iter = preimages.into_iter(); for htlc in pending_outbound_htlcs.iter_mut() { - match &mut htlc.state { + match &mut htlc.state_wrapper.state { OutboundHTLCState::AwaitingRemoteRevokeToRemove(OutboundHTLCOutcome::Success(ref mut preimage)) => { // This variant was initialized like this further above debug_assert_eq!(preimage, &PaymentPreimage([0u8; 32])); @@ -11369,7 +11586,7 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel if let Some(attribution_data_list) = removed_htlc_failure_attribution_data { let mut removed_htlc_relay_failures = pending_inbound_htlcs.iter_mut().filter_map(|status| - if let InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailRelay(ref mut packet)) = &mut status.state { + if let InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailRelay(ref mut packet)) = &mut status.state_wrapper.state { Some(&mut packet.attribution_data) } else { None @@ -11439,6 +11656,11 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel &channel_parameters.counterparty_parameters.as_ref() .expect("Persisted channel must have counterparty parameters").pubkeys.revocation_basepoint); + let monitor_pending_update_adds = monitor_pending_update_adds.unwrap_or_default().into_iter().map(|msg| { + let span = BoxedSpan::new(logger.start(Span::Forward, None)); + (msg, span) + }).collect::>(); + Ok(FundedChannel { funding: FundingScope { 
value_to_self_msat, @@ -11497,7 +11719,7 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel monitor_pending_forwards, monitor_pending_failures, monitor_pending_finalized_fulfills: monitor_pending_finalized_fulfills.unwrap(), - monitor_pending_update_adds: monitor_pending_update_adds.unwrap_or_default(), + monitor_pending_update_adds, monitor_pending_tx_signatures: None, signer_pending_revoke_and_ack: false, @@ -11618,7 +11840,7 @@ mod tests { use crate::ln::channel_keys::{RevocationKey, RevocationBasepoint}; use crate::ln::channelmanager::{self, HTLCSource, PaymentId}; use crate::ln::channel::InitFeatures; - use crate::ln::channel::{AwaitingChannelReadyFlags, ChannelState, FundedChannel, InboundHTLCOutput, OutboundV1Channel, InboundV1Channel, OutboundHTLCOutput, InboundHTLCState, OutboundHTLCState, HTLCCandidate, HTLCInitiator, HTLCUpdateAwaitingACK, commit_tx_fee_sat}; + use crate::ln::channel::{AwaitingChannelReadyFlags, ChannelState, FundedChannel, InboundHTLCOutput, InboundHTLCOutputParams, OutboundV1Channel, InboundV1Channel, OutboundHTLCOutput, OutboundHTLCOutputParams, InboundHTLCState, OutboundHTLCState, HTLCCandidate, HTLCInitiator, HTLCUpdateAwaitingACK, commit_tx_fee_sat}; use crate::ln::channel::{MAX_FUNDING_SATOSHIS_NO_WUMBO, TOTAL_BITCOIN_SUPPLY_SATOSHIS, MIN_THEIR_CHAN_RESERVE_SATOSHIS}; use crate::types::features::{ChannelFeatures, ChannelTypeFeatures, NodeFeatures}; use crate::ln::msgs; @@ -11813,31 +12035,40 @@ mod tests { // Put some inbound and outbound HTLCs in A's channel. let htlc_amount_msat = 11_092_000; // put an amount below A's effective dust limit but above B's. - node_a_chan.context.pending_inbound_htlcs.push(InboundHTLCOutput { - htlc_id: 0, - amount_msat: htlc_amount_msat, - payment_hash: PaymentHash(Sha256::hash(&[42; 32]).to_byte_array()), - cltv_expiry: 300000000, - state: InboundHTLCState::Committed, - }); + node_a_chan.context.pending_inbound_htlcs.push(InboundHTLCOutput::new( + node_a_chan.context.channel_id(), + InboundHTLCOutputParams { + htlc_id: 0, + amount_msat: htlc_amount_msat, + cltv_expiry: 300000000, + payment_hash: PaymentHash(Sha256::hash(&[42; 32]).to_byte_array()), + state: InboundHTLCState::Committed, + }, + &&logger, + )); - node_a_chan.context.pending_outbound_htlcs.push(OutboundHTLCOutput { - htlc_id: 1, - amount_msat: htlc_amount_msat, // put an amount below A's dust amount but above B's. - payment_hash: PaymentHash(Sha256::hash(&[43; 32]).to_byte_array()), - cltv_expiry: 200000000, - state: OutboundHTLCState::Committed, - source: HTLCSource::OutboundRoute { - path: Path { hops: Vec::new(), blinded_tail: None }, - session_priv: SecretKey::from_slice(&>::from_hex("0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap()[..]).unwrap(), - first_hop_htlc_msat: 548, - payment_id: PaymentId([42; 32]), - bolt12_invoice: None, + node_a_chan.context.pending_outbound_htlcs.push(OutboundHTLCOutput::new( + node_a_chan.context.channel_id(), + OutboundHTLCOutputParams { + htlc_id: 1, + amount_msat: htlc_amount_msat, // put an amount below A's dust amount but above B's. 
+ payment_hash: PaymentHash(Sha256::hash(&[43; 32]).to_byte_array()), + cltv_expiry: 200000000, + state: OutboundHTLCState::Committed, + source: HTLCSource::OutboundRoute { + path: Path { hops: Vec::new(), blinded_tail: None }, + session_priv: SecretKey::from_slice(&>::from_hex("0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap()[..]).unwrap(), + first_hop_htlc_msat: 548, + payment_id: PaymentId([42; 32]), + bolt12_invoice: None, + }, + skimmed_fee_msat: None, + blinding_point: None, + send_timestamp: None, }, - skimmed_fee_msat: None, - blinding_point: None, - send_timestamp: None, - }); + None, + &&logger, + )); // Make sure when Node A calculates their local commitment transaction, none of the HTLCs pass // the dust limit check. @@ -12213,17 +12444,22 @@ mod tests { payment_id: PaymentId([42; 32]), bolt12_invoice: None, }; - let dummy_outbound_output = OutboundHTLCOutput { - htlc_id: 0, - amount_msat: 0, - payment_hash: PaymentHash([43; 32]), - cltv_expiry: 0, - state: OutboundHTLCState::Committed, - source: dummy_htlc_source.clone(), - skimmed_fee_msat: None, - blinding_point: None, - send_timestamp: None, - }; + let dummy_outbound_output = OutboundHTLCOutput::new( + chan.context.channel_id(), + OutboundHTLCOutputParams { + htlc_id: 0, + amount_msat: 0, + payment_hash: PaymentHash([43; 32]), + cltv_expiry: 0, + state: OutboundHTLCState::Committed, + source: dummy_htlc_source.clone(), + skimmed_fee_msat: None, + blinding_point: None, + send_timestamp: None, + }, + None, + &&logger, + ); let mut pending_outbound_htlcs = vec![dummy_outbound_output.clone(); 10]; for (idx, htlc) in pending_outbound_htlcs.iter_mut().enumerate() { if idx % 2 == 0 { @@ -12248,6 +12484,7 @@ mod tests { }, skimmed_fee_msat: None, blinding_point: None, + forward_span: None, }; let dummy_holding_cell_claim_htlc = HTLCUpdateAwaitingACK::ClaimHTLC { payment_preimage: PaymentPreimage([42; 32]), @@ -12288,7 +12525,7 @@ mod tests { let mut s = crate::io::Cursor::new(&encoded_chan); let mut reader = crate::util::ser::FixedLengthReader::new(&mut s, encoded_chan.len() as u64); let features = channelmanager::provided_channel_type_features(&config); - let decoded_chan = FundedChannel::read(&mut reader, (&&keys_provider, &&keys_provider, &features)).unwrap(); + let decoded_chan = FundedChannel::read(&mut reader, (&&keys_provider, &&keys_provider, &features, &&logger)).unwrap(); assert_eq!(decoded_chan.context.pending_outbound_htlcs, pending_outbound_htlcs); assert_eq!(decoded_chan.context.holding_cell_htlc_updates, holding_cell_htlc_updates); } diff --git a/lightning/src/ln/channel_state.rs b/lightning/src/ln/channel_state.rs index c941e0eb9d0..a499e641552 100644 --- a/lightning/src/ln/channel_state.rs +++ b/lightning/src/ln/channel_state.rs @@ -37,7 +37,7 @@ use core::ops::Deref; /// through the exchange of commitment_signed and revoke_and_ack messages. /// /// This can be used to inspect what next message an HTLC is waiting for to advance its state. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum InboundHTLCStateDetails { /// We have added this HTLC in our commitment transaction by receiving commitment_signed and /// returning revoke_and_ack. We are awaiting the appropriate revoke_and_ack's from the remote @@ -130,7 +130,7 @@ impl_writeable_tlv_based!(InboundHTLCDetails, { /// through the exchange of commitment_signed and revoke_and_ack messages. /// /// This can be used to inspect what next message an HTLC is waiting for to advance its state. 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub enum OutboundHTLCStateDetails {
 	/// We are awaiting the appropriate revoke_and_ack's from the remote before the HTLC is added
 	/// on the remote's commitment transaction after update_add_htlc.
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index edb0b7dd928..55b9936a7a6 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -85,7 +85,7 @@ use crate::util::wakers::{Future, Notifier};
 use crate::util::scid_utils::fake_scid;
 use crate::util::string::UntrustedString;
 use crate::util::ser::{BigSize, FixedLengthReader, LengthReadable, Readable, ReadableArgs, MaybeReadable, Writeable, Writer, VecWriter};
-use crate::util::logger::{Level, Logger, WithContext};
+use crate::util::logger::{BoxedSpan, Level, Logger, Span, WithContext};
 use crate::util::errors::APIError;
 #[cfg(async_payments)]
 use {
@@ -308,7 +308,6 @@ impl PendingHTLCRouting {
 
 /// Information about an incoming HTLC, including the [`PendingHTLCRouting`] describing where it
 /// should go next.
-#[derive(Clone)] // See FundedChannel::revoke_and_ack for why, tl;dr: Rust bug
 #[cfg_attr(test, derive(Debug, PartialEq))]
 pub struct PendingHTLCInfo {
 	/// Further routing details based on whether the HTLC is being forwarded or received.
@@ -349,6 +348,23 @@ pub struct PendingHTLCInfo {
 	/// This is used to allow LSPs to take fees as a part of payments, without the sender having to
 	/// shoulder them.
 	pub skimmed_fee_msat: Option<u64>,
+	pub(crate) forward_span: Option<BoxedSpan>,
+}
+
+// See FundedChannel::revoke_and_ack for why, tl;dr: Rust bug
+impl Clone for PendingHTLCInfo {
+	fn clone(&self) -> Self {
+		Self {
+			routing: self.routing.clone(),
+			incoming_shared_secret: self.incoming_shared_secret.clone(),
+			payment_hash: self.payment_hash.clone(),
+			incoming_amt_msat: self.incoming_amt_msat.clone(),
+			outgoing_amt_msat: self.outgoing_amt_msat,
+			outgoing_cltv_value: self.outgoing_cltv_value,
+			skimmed_fee_msat: self.skimmed_fee_msat.clone(),
+			forward_span: None,
+		}
+	}
 }
 
 #[derive(Clone)] // See FundedChannel::revoke_and_ack for why, tl;dr: Rust bug
@@ -2512,7 +2528,7 @@ where
 	/// `short_channel_id` here, nor the `channel_id` in `UpdateAddHTLC`!
 	///
 	/// See `ChannelManager` struct-level documentation for lock order requirements.
-	decode_update_add_htlcs: Mutex<HashMap<u64, Vec<msgs::UpdateAddHTLC>>>,
+	decode_update_add_htlcs: Mutex<HashMap<u64, Vec<(msgs::UpdateAddHTLC, BoxedSpan)>>>,
 
 	/// The sets of payments which are claimable or currently being claimed. See
 	/// [`ClaimablePayments`]' individual field docs for more info.
@@ -4533,7 +4549,7 @@ where
 	fn get_pending_htlc_info<'a>(
 		&self, msg: &msgs::UpdateAddHTLC, shared_secret: [u8; 32],
 		decoded_hop: onion_utils::Hop, allow_underpay: bool,
-		next_packet_pubkey_opt: Option<Result<PublicKey, secp256k1::Error>>,
+		next_packet_pubkey_opt: Option<Result<PublicKey, secp256k1::Error>>, forward_span: Option<BoxedSpan>,
	) -> Result<PendingHTLCInfo, InboundHTLCErr> {
 		match decoded_hop {
 			onion_utils::Hop::Receive { .. } | onion_utils::Hop::BlindedReceive { .. } |
@@ -4546,13 +4562,13 @@ where
 				let current_height: u32 = self.best_block.read().unwrap().height;
 				create_recv_pending_htlc_info(decoded_hop, shared_secret, msg.payment_hash,
 					msg.amount_msat, msg.cltv_expiry, None, allow_underpay, msg.skimmed_fee_msat,
-					current_height)
+					current_height, forward_span)
 			},
 			onion_utils::Hop::Forward { .. } | onion_utils::Hop::BlindedForward { ..
} => { - create_fwd_pending_htlc_info(msg, decoded_hop, shared_secret, next_packet_pubkey_opt) + create_fwd_pending_htlc_info(msg, decoded_hop, shared_secret, next_packet_pubkey_opt, forward_span) }, onion_utils::Hop::TrampolineForward { .. } | onion_utils::Hop::TrampolineBlindedForward { .. } => { - create_fwd_pending_htlc_info(msg, decoded_hop, shared_secret, next_packet_pubkey_opt) + create_fwd_pending_htlc_info(msg, decoded_hop, shared_secret, next_packet_pubkey_opt, forward_span) }, } } @@ -4694,7 +4710,7 @@ where first_hop_htlc_msat: htlc_msat, payment_id, bolt12_invoice: bolt12_invoice.cloned(), - }, onion_packet, None, &self.fee_estimator, &&logger); + }, onion_packet, None, &self.fee_estimator, None, &&logger); match break_channel_entry!(self, peer_state, send_res, chan_entry) { Some(monitor_update) => { match handle_new_monitor_update!(self, funding_txo, monitor_update, peer_state_lock, peer_state, per_peer_state, chan) { @@ -5790,7 +5806,7 @@ where let mut htlc_forwards = Vec::new(); let mut htlc_fails = Vec::new(); - for update_add_htlc in &update_add_htlcs { + for (update_add_htlc, forward_span) in update_add_htlcs { let (next_hop, next_packet_details_opt) = match decode_incoming_update_add_htlc_onion( &update_add_htlc, &*self.node_signer, &*self.logger, &self.secp_ctx ) { @@ -5815,7 +5831,7 @@ where match self.do_funded_channel_callback(incoming_scid, |chan: &mut FundedChannel| { let logger = WithChannelContext::from(&self.logger, &chan.context, Some(update_add_htlc.payment_hash)); chan.can_accept_incoming_htlc( - update_add_htlc, &self.fee_estimator, &logger, + &update_add_htlc, &self.fee_estimator, &logger, ) }) { Some(Ok(_)) => {}, @@ -5849,7 +5865,7 @@ where match self.get_pending_htlc_info( &update_add_htlc, shared_secret, next_hop, incoming_accept_underpaying_htlcs, - next_packet_details_opt.map(|d| d.next_packet_pubkey), + next_packet_details_opt.map(|d| d.next_packet_pubkey), Some(forward_span) ) { Ok(info) => htlc_forwards.push((info, update_add_htlc.htlc_id)), Err(inbound_err) => { @@ -5917,7 +5933,7 @@ where prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint, prev_user_channel_id, prev_counterparty_node_id, forward_info: PendingHTLCInfo { routing, incoming_shared_secret, payment_hash, outgoing_amt_msat, - outgoing_cltv_value, .. + outgoing_cltv_value, forward_span, .. } }) => { let cltv_expiry = routing.incoming_cltv_expiry(); @@ -5992,7 +6008,7 @@ where match create_recv_pending_htlc_info(next_hop, incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value, Some(phantom_shared_secret), false, None, - current_height) + current_height, forward_span) { Ok(info) => phantom_receives.push(( prev_short_channel_id, prev_counterparty_node_id, prev_funding_outpoint, @@ -6037,7 +6053,7 @@ where let mut peer_state_lock = peer_state_mutex_opt.unwrap().lock().unwrap(); let peer_state = &mut *peer_state_lock; let mut draining_pending_forwards = pending_forwards.drain(..); - while let Some(forward_info) = draining_pending_forwards.next() { + while let Some(mut forward_info) = draining_pending_forwards.next() { let queue_fail_htlc_res = match forward_info { HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo { prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint, @@ -6045,7 +6061,7 @@ where incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value, routing: PendingHTLCRouting::Forward { ref onion_packet, blinded, incoming_cltv_expiry, .. - }, skimmed_fee_msat, .. 
+ }, skimmed_fee_msat, ref mut forward_span, .. }, }) => { let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData { @@ -6113,9 +6129,11 @@ where }; log_trace!(logger, "Forwarding HTLC from SCID {} with payment_hash {} and next hop SCID {} over {} channel {} with corresponding peer {}", prev_short_channel_id, &payment_hash, short_chan_id, channel_description, optimal_channel.context.channel_id(), &counterparty_node_id); + let mut swapped_span = None; + mem::swap(forward_span, &mut swapped_span); if let Err((reason, msg)) = optimal_channel.queue_add_htlc(outgoing_amt_msat, payment_hash, outgoing_cltv_value, htlc_source.clone(), - onion_packet.clone(), skimmed_fee_msat, next_blinding_point, &self.fee_estimator, + onion_packet.clone(), skimmed_fee_msat, next_blinding_point, &self.fee_estimator, swapped_span, &&logger) { log_trace!(logger, "Failed to forward HTLC with payment_hash {} to peer {}: {}", &payment_hash, &counterparty_node_id, msg); @@ -7644,11 +7662,14 @@ This indicates a bug inside LDK. Please report this error at https://github.com/ fn handle_channel_resumption(&self, pending_msg_events: &mut Vec, channel: &mut FundedChannel, raa: Option, commitment_update: Option, order: RAACommitmentOrder, - pending_forwards: Vec<(PendingHTLCInfo, u64)>, pending_update_adds: Vec, + pending_forwards: Vec<(PendingHTLCInfo, u64)>, pending_update_adds: Vec<(msgs::UpdateAddHTLC, BoxedSpan)>, funding_broadcastable: Option, channel_ready: Option, announcement_sigs: Option, tx_signatures: Option - ) -> (Option<(u64, Option, OutPoint, ChannelId, u128, Vec<(PendingHTLCInfo, u64)>)>, Option<(u64, Vec)>) { + ) -> ( + Option<(u64, Option, OutPoint, ChannelId, u128, Vec<(PendingHTLCInfo, u64)>)>, + Option<(u64, Vec<(msgs::UpdateAddHTLC, BoxedSpan)>)> + ) { let logger = WithChannelContext::from(&self.logger, &channel.context, None); log_trace!(logger, "Handling channel resumption for channel {} with {} RAA, {} commitment update, {} pending forwards, {} pending update_add_htlcs, {}broadcasting funding, {} channel ready, {} announcement, {} tx_signatures", &channel.context.channel_id(), @@ -8864,7 +8885,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/ match peer_state.channel_by_id.entry(msg.channel_id) { hash_map::Entry::Occupied(mut chan_entry) => { if let Some(chan) = chan_entry.get_mut().as_funded_mut() { - try_channel_entry!(self, peer_state, chan.update_add_htlc(&msg, &self.fee_estimator), chan_entry); + try_channel_entry!(self, peer_state, chan.update_add_htlc(&msg, &self.fee_estimator, &self.logger), chan_entry); } else { return try_channel_entry!(self, peer_state, Err(ChannelError::close( "Got an update_add_htlc message for an unfunded channel!".into())), chan_entry); @@ -8890,9 +8911,9 @@ This indicates a bug inside LDK. 
Please report this error at https://github.com/ match peer_state.channel_by_id.entry(msg.channel_id) { hash_map::Entry::Occupied(mut chan_entry) => { if let Some(chan) = chan_entry.get_mut().as_funded_mut() { - let res = try_channel_entry!(self, peer_state, chan.update_fulfill_htlc(&msg), chan_entry); + let logger = WithChannelContext::from(&self.logger, &chan.context, None); + let res = try_channel_entry!(self, peer_state, chan.update_fulfill_htlc(&msg, &&logger), chan_entry); if let HTLCSource::PreviousHopData(prev_hop) = &res.0 { - let logger = WithChannelContext::from(&self.logger, &chan.context, None); log_trace!(logger, "Holding the next revoke_and_ack from {} until the preimage is durably persisted in the inbound edge's ChannelMonitor", msg.channel_id); @@ -8939,7 +8960,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/ match peer_state.channel_by_id.entry(msg.channel_id) { hash_map::Entry::Occupied(mut chan_entry) => { if let Some(chan) = chan_entry.get_mut().as_funded_mut() { - try_channel_entry!(self, peer_state, chan.update_fail_htlc(&msg, HTLCFailReason::from_msg(msg)), chan_entry); + let logger = WithChannelContext::from(&self.logger, &chan.context, None); + try_channel_entry!(self, peer_state, chan.update_fail_htlc(&msg, HTLCFailReason::from_msg(msg), &&logger), chan_entry); } else { return try_channel_entry!(self, peer_state, Err(ChannelError::close( "Got an update_fail_htlc message for an unfunded channel!".into())), chan_entry); @@ -8968,7 +8990,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/ try_channel_entry!(self, peer_state, Err(chan_err), chan_entry); } if let Some(chan) = chan_entry.get_mut().as_funded_mut() { - try_channel_entry!(self, peer_state, chan.update_fail_malformed_htlc(&msg, HTLCFailReason::reason(msg.failure_code.into(), msg.sha256_of_onion.to_vec())), chan_entry); + try_channel_entry!(self, peer_state, chan.update_fail_malformed_htlc(&msg, HTLCFailReason::reason(msg.failure_code.into(), msg.sha256_of_onion.to_vec()), &self.logger), chan_entry); } else { return try_channel_entry!(self, peer_state, Err(ChannelError::close( "Got an update_fail_malformed_htlc message for an unfunded channel!".into())), chan_entry); @@ -9057,7 +9079,7 @@ This indicates a bug inside LDK. 
Please report this error at https://github.com/ } } - fn push_decode_update_add_htlcs(&self, mut update_add_htlcs: (u64, Vec)) { + fn push_decode_update_add_htlcs(&self, mut update_add_htlcs: (u64, Vec<(msgs::UpdateAddHTLC, BoxedSpan)>)) { let mut push_forward_event = self.forward_htlcs.lock().unwrap().is_empty(); let mut decode_update_add_htlcs = self.decode_update_add_htlcs.lock().unwrap(); push_forward_event &= decode_update_add_htlcs.is_empty(); @@ -12975,6 +12997,7 @@ impl_writeable_tlv_based!(PendingHTLCInfo, { (8, outgoing_cltv_value, required), (9, incoming_amt_msat, option), (10, skimmed_fee_msat, option), + (_unused, forward_span, (static_value, None)) }); @@ -13384,10 +13407,14 @@ where } } - let mut decode_update_add_htlcs_opt = None; let decode_update_add_htlcs = self.decode_update_add_htlcs.lock().unwrap(); + let mut decode_update_add_htlcs_opt = None; if !decode_update_add_htlcs.is_empty() { - decode_update_add_htlcs_opt = Some(decode_update_add_htlcs); + let mut without_spans = new_hash_map(); + for (scid, htlcs) in decode_update_add_htlcs.iter() { + without_spans.insert(scid, htlcs.iter().map(|(msg, _span)| msg).collect::>()); + } + decode_update_add_htlcs_opt = Some(without_spans); } let claimable_payments = self.claimable_payments.lock().unwrap(); @@ -13796,7 +13823,7 @@ where let mut close_background_events = Vec::new(); for _ in 0..channel_count { let mut channel: FundedChannel = FundedChannel::read(reader, ( - &args.entropy_source, &args.signer_provider, &provided_channel_type_features(&args.default_config) + &args.entropy_source, &args.signer_provider, &provided_channel_type_features(&args.default_config), &args.logger, ))?; let logger = WithChannelContext::from(&args.logger, &channel.context, None); let channel_id = channel.context.channel_id(); @@ -14734,6 +14761,14 @@ where } } + let decode_update_add_htlcs = decode_update_add_htlcs.into_iter().map(|(scid, htlcs)| ( + scid, + htlcs.into_iter().map(|htlc| { + let span = BoxedSpan::new(args.logger.start(Span::Forward, None)); + (htlc, span) + }).collect::>() + )).collect::>(); + let channel_manager = ChannelManager { chain_hash, fee_estimator: bounded_fee_estimator, @@ -15989,7 +16024,7 @@ mod tests { if let Err(crate::ln::channelmanager::InboundHTLCErr { reason, .. 
}) = create_recv_pending_htlc_info(hop_data, [0; 32], PaymentHash([0; 32]), sender_intended_amt_msat - extra_fee_msat - 1, 42, None, true, Some(extra_fee_msat), - current_height) + current_height, None) { assert_eq!(reason, LocalHTLCFailureReason::FinalIncorrectHTLCAmount); } else { panic!(); } @@ -16012,7 +16047,7 @@ mod tests { let current_height: u32 = node[0].node.best_block.read().unwrap().height; assert!(create_recv_pending_htlc_info(hop_data, [0; 32], PaymentHash([0; 32]), sender_intended_amt_msat - extra_fee_msat, 42, None, true, Some(extra_fee_msat), - current_height).is_ok()); + current_height, None).is_ok()); } #[test] @@ -16036,7 +16071,7 @@ mod tests { custom_tlvs: Vec::new(), }, shared_secret: SharedSecret::from_bytes([0; 32]), - }, [0; 32], PaymentHash([0; 32]), 100, TEST_FINAL_CLTV + 1, None, true, None, current_height); + }, [0; 32], PaymentHash([0; 32]), 100, TEST_FINAL_CLTV + 1, None, true, None, current_height, None); // Should not return an error as this condition: // https://github.com/lightning/bolts/blob/4dcc377209509b13cf89a4b91fde7d478f5b46d8/04-onion-routing.md?plain=1#L334 diff --git a/lightning/src/ln/functional_tests.rs b/lightning/src/ln/functional_tests.rs index 2f8a9b53a5d..7d14f900477 100644 --- a/lightning/src/ln/functional_tests.rs +++ b/lightning/src/ln/functional_tests.rs @@ -26,6 +26,7 @@ use crate::types::payment::{PaymentPreimage, PaymentSecret, PaymentHash}; use crate::ln::channel::{get_holder_selected_channel_reserve_satoshis, Channel, InboundV1Channel, OutboundV1Channel, COINBASE_MATURITY, CONCURRENT_INBOUND_HTLC_FEE_BUFFER, FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE, MIN_AFFORDABLE_HTLC_COUNT}; use crate::ln::channelmanager::{self, PaymentId, RAACommitmentOrder, RecipientOnionFields, BREAKDOWN_TIMEOUT, ENABLE_GOSSIP_TICKS, DISABLE_GOSSIP_TICKS, MIN_CLTV_EXPIRY_DELTA}; use crate::ln::channel::{DISCONNECT_PEER_AWAITING_RESPONSE_TICKS, ChannelError, MIN_CHAN_DUST_LIMIT_SATOSHIS}; +use crate::ln::channel_state::{InboundHTLCStateDetails, OutboundHTLCStateDetails}; use crate::ln::{chan_utils, onion_utils}; use crate::ln::chan_utils::{commitment_tx_base_weight, COMMITMENT_TX_WEIGHT_PER_HTLC, OFFERED_HTLC_SCRIPT_WEIGHT, htlc_success_tx_weight, htlc_timeout_tx_weight, HTLCOutputInCommitment}; use crate::routing::gossip::{NetworkGraph, NetworkUpdate}; @@ -34,11 +35,12 @@ use crate::types::features::{ChannelFeatures, ChannelTypeFeatures, NodeFeatures} use crate::ln::msgs; use crate::ln::msgs::{AcceptChannel, BaseMessageHandler, ChannelMessageHandler, RoutingMessageHandler, ErrorAction, MessageSendEvent}; use crate::util::test_channel_signer::TestChannelSigner; -use crate::util::test_utils::{self, TestLogger, WatchtowerPersister}; +use crate::util::test_utils::{self, TestLogger, TestSpanBoundary, WatchtowerPersister}; use crate::util::errors::APIError; use crate::util::ser::{Writeable, ReadableArgs}; use crate::util::string::UntrustedString; use crate::util::config::{ChannelConfigOverrides, ChannelHandshakeConfigUpdate, ChannelConfigUpdate, MaxDustHTLCExposure, UserConfig}; +use crate::util::logger::Span; use crate::ln::onion_utils::AttributionData; use bitcoin::hash_types::BlockHash; @@ -12048,3 +12050,73 @@ pub fn test_funding_signed_event() { nodes[0].node.get_and_clear_pending_msg_events(); nodes[1].node.get_and_clear_pending_msg_events(); } + +#[xtest(feature = "_externalize_tests")] +pub fn test_payment_traces() { + let chanmon_cfgs = create_chanmon_cfgs(3); + let node_cfgs = create_node_cfgs(3, &chanmon_cfgs); + let node_chanmgrs = 
create_node_chanmgrs(3, &node_cfgs, &[None, None, None]); + let mut nodes = create_network(3, &node_cfgs, &node_chanmgrs); + let channel_id_1 = create_announced_chan_between_nodes(&nodes, 0, 1).2; + let channel_id_2 = create_announced_chan_between_nodes(&nodes, 1, 2).2; + send_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1_000_000); + + assert_eq!(node_cfgs[1].logger.span_boundaries.lock().unwrap().as_ref(), vec![ + TestSpanBoundary::Start(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }, None), + TestSpanBoundary::Start( + Span::InboundHTLCState { state: None }, + Some(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::InboundHTLCState { state: None }), + TestSpanBoundary::Start( + Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToAdd) }, + Some(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToAdd) }), + TestSpanBoundary::Start( + Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToAdd) }, + Some(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToAdd) }), + TestSpanBoundary::Start( + Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::Committed) }, + Some(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }) + ), + TestSpanBoundary::Start(Span::Forward, Some(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 })), + TestSpanBoundary::Start(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }, Some(Span::Forward)), + TestSpanBoundary::Start( + Span::OutboundHTLCState { state: OutboundHTLCStateDetails::AwaitingRemoteRevokeToAdd }, + Some(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::OutboundHTLCState { state: OutboundHTLCStateDetails::AwaitingRemoteRevokeToAdd }), + TestSpanBoundary::Start( + Span::OutboundHTLCState { state: OutboundHTLCStateDetails::Committed }, + Some(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::OutboundHTLCState { state: OutboundHTLCStateDetails::Committed }), + TestSpanBoundary::Start( + Span::OutboundHTLCState { state: OutboundHTLCStateDetails::Committed }, + Some(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::Committed) }), + TestSpanBoundary::Start( + Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFulfill) }, + Some(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::OutboundHTLCState { state: OutboundHTLCStateDetails::Committed }), + TestSpanBoundary::Start( + Span::OutboundHTLCState { state: OutboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveSuccess }, + Some(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::OutboundHTLCState { state: OutboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveSuccess }), + TestSpanBoundary::Start( + Span::OutboundHTLCState { state: OutboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveSuccess }, + Some(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }), + ), + TestSpanBoundary::End(Span::OutboundHTLCState { state: OutboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveSuccess }), + 
TestSpanBoundary::End(Span::OutboundHTLC { channel_id: channel_id_2, htlc_id: 0 }), + TestSpanBoundary::End(Span::Forward), + TestSpanBoundary::End(Span::InboundHTLCState { state: Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFulfill) }), + TestSpanBoundary::End(Span::InboundHTLC { channel_id: channel_id_1, htlc_id: 0 }) + ]); +} diff --git a/lightning/src/ln/invoice_utils.rs b/lightning/src/ln/invoice_utils.rs index a8eec347f1f..6508c145a4e 100644 --- a/lightning/src/ln/invoice_utils.rs +++ b/lightning/src/ln/invoice_utils.rs @@ -16,7 +16,7 @@ use crate::ln::inbound_payment::{create, create_from_hash}; use crate::routing::gossip::RoutingFees; use crate::routing::router::{RouteHint, RouteHintHop, Router}; use crate::onion_message::messenger::MessageRouter; -use crate::util::logger::{Logger, Record}; +use crate::util::logger::{Logger, Record, Span}; use bitcoin::secp256k1::PublicKey; use alloc::collections::{btree_map, BTreeMap}; use core::ops::Deref; @@ -689,11 +689,17 @@ struct WithChannelDetails<'a, 'b, L: Deref> where L::Target: Logger { } impl<'a, 'b, L: Deref> Logger for WithChannelDetails<'a, 'b, L> where L::Target: Logger { + type UserSpan = <::Target as Logger>::UserSpan; + fn log(&self, mut record: Record) { record.peer_id = Some(self.details.counterparty.node_id); record.channel_id = Some(self.details.channel_id); self.logger.log(record) } + + fn start(&self, span: Span, parent: Option<&Self::UserSpan>) -> Self::UserSpan { + self.logger.start(span, parent) + } } impl<'a, 'b, L: Deref> WithChannelDetails<'a, 'b, L> where L::Target: Logger { diff --git a/lightning/src/ln/onion_payment.rs b/lightning/src/ln/onion_payment.rs index 2519372f11b..ad415b5f9c1 100644 --- a/lightning/src/ln/onion_payment.rs +++ b/lightning/src/ln/onion_payment.rs @@ -18,7 +18,7 @@ use crate::ln::msgs; use crate::ln::onion_utils; use crate::ln::onion_utils::{HTLCFailReason, ONION_DATA_LEN, LocalHTLCFailureReason}; use crate::sign::{NodeSigner, Recipient}; -use crate::util::logger::Logger; +use crate::util::logger::{BoxedSpan, Logger}; #[allow(unused_imports)] use crate::prelude::*; @@ -87,7 +87,7 @@ enum RoutingInfo { pub(super) fn create_fwd_pending_htlc_info( msg: &msgs::UpdateAddHTLC, hop_data: onion_utils::Hop, shared_secret: [u8; 32], - next_packet_pubkey_opt: Option> + next_packet_pubkey_opt: Option>, forward_span: Option, ) -> Result { debug_assert!(next_packet_pubkey_opt.is_some()); @@ -234,13 +234,14 @@ pub(super) fn create_fwd_pending_htlc_info( outgoing_amt_msat: amt_to_forward, outgoing_cltv_value, skimmed_fee_msat: None, + forward_span, }) } pub(super) fn create_recv_pending_htlc_info( hop_data: onion_utils::Hop, shared_secret: [u8; 32], payment_hash: PaymentHash, amt_msat: u64, cltv_expiry: u32, phantom_shared_secret: Option<[u8; 32]>, allow_underpay: bool, - counterparty_skimmed_fee_msat: Option, current_height: u32 + counterparty_skimmed_fee_msat: Option, current_height: u32, forward_span: Option, ) -> Result { let ( payment_data, keysend_preimage, custom_tlvs, onion_amt_msat, onion_cltv_expiry, @@ -408,6 +409,7 @@ pub(super) fn create_recv_pending_htlc_info( outgoing_amt_msat: onion_amt_msat, outgoing_cltv_value: onion_cltv_expiry, skimmed_fee_msat: counterparty_skimmed_fee_msat, + forward_span, }) } @@ -465,13 +467,13 @@ where // TODO: If this is potentially a phantom payment we should decode the phantom payment // onion here and check it. - create_fwd_pending_htlc_info(msg, hop, shared_secret.secret_bytes(), Some(next_packet_pubkey))? 
+ create_fwd_pending_htlc_info(msg, hop, shared_secret.secret_bytes(), Some(next_packet_pubkey), None)? }, _ => { let shared_secret = hop.shared_secret().secret_bytes(); create_recv_pending_htlc_info( hop, shared_secret, msg.payment_hash, msg.amount_msat, msg.cltv_expiry, - None, allow_skimmed_fees, msg.skimmed_fee_msat, cur_height, + None, allow_skimmed_fees, msg.skimmed_fee_msat, cur_height, None )? } }) diff --git a/lightning/src/ln/peer_handler.rs b/lightning/src/ln/peer_handler.rs index 07e63cf576c..157ff692b9c 100644 --- a/lightning/src/ln/peer_handler.rs +++ b/lightning/src/ln/peer_handler.rs @@ -36,7 +36,7 @@ use crate::onion_message::offers::{OffersMessage, OffersMessageHandler}; use crate::onion_message::packet::OnionMessageContents; use crate::routing::gossip::{NodeId, NodeAlias}; use crate::util::atomic_counter::AtomicCounter; -use crate::util::logger::{Level, Logger, WithContext}; +use crate::util::logger::{BoxedSpan, Level, Logger, Span, WithContext}; use crate::util::string::PrintableString; #[allow(unused_imports)] @@ -607,6 +607,7 @@ struct Peer { msgs_sent_since_pong: usize, awaiting_pong_timer_tick_intervals: i64, + ping_pong_span: Option, received_message_since_timer_tick: bool, sent_gossip_timestamp_filter: bool, @@ -1151,6 +1152,7 @@ impl) -> () {} /// # } /// # struct FakeMessageRouter {} /// # impl MessageRouter for FakeMessageRouter { diff --git a/lightning/src/routing/router.rs b/lightning/src/routing/router.rs index d87edb4296d..378c77ed315 100644 --- a/lightning/src/routing/router.rs +++ b/lightning/src/routing/router.rs @@ -8978,14 +8978,16 @@ pub mod benches { use crate::routing::gossip::NetworkGraph; use crate::routing::scoring::{FixedPenaltyScorer, ProbabilisticScoringFeeParameters}; use crate::util::config::UserConfig; - use crate::util::logger::{Logger, Record}; + use crate::util::logger::{Logger, Record, Span}; use crate::util::test_utils::TestLogger; use criterion::Criterion; struct DummyLogger {} impl Logger for DummyLogger { + type UserSpan = (); fn log(&self, _record: Record) {} + fn start(&self, _span: Span, _parent: Option<&()>) -> () {} } pub fn generate_routes_with_zero_penalty_scorer(bench: &mut Criterion) { diff --git a/lightning/src/routing/scoring.rs b/lightning/src/routing/scoring.rs index 6403889df6d..4dbea09c5c3 100644 --- a/lightning/src/routing/scoring.rs +++ b/lightning/src/routing/scoring.rs @@ -21,12 +21,14 @@ //! # use lightning::routing::router::{RouteParameters, find_route}; //! # use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters, ProbabilisticScoringDecayParameters}; //! # use lightning::sign::KeysManager; -//! # use lightning::util::logger::{Logger, Record}; +//! # use lightning::util::logger::{Logger, Record, Span}; //! # use bitcoin::secp256k1::PublicKey; //! # //! # struct FakeLogger {}; //! # impl Logger for FakeLogger { +//! # type UserSpan = (); //! # fn log(&self, record: Record) { unimplemented!() } +//! # fn start(&self, _span: Span, parent: Option<&()>) -> () {} //! # } //! # fn find_scored_route(payer: PublicKey, route_params: RouteParameters, network_graph: NetworkGraph<&FakeLogger>) { //! 
# let logger = FakeLogger {}; diff --git a/lightning/src/util/logger.rs b/lightning/src/util/logger.rs index 283d3158144..34495e29d65 100644 --- a/lightning/src/util/logger.rs +++ b/lightning/src/util/logger.rs @@ -15,13 +15,14 @@ use bitcoin::secp256k1::PublicKey; +use core::any::Any; use core::cmp; use core::fmt; use core::ops::Deref; +use crate::ln::channel_state::{InboundHTLCStateDetails, OutboundHTLCStateDetails}; use crate::ln::types::ChannelId; -#[cfg(c_bindings)] -use crate::prelude::*; // Needed for String +use crate::prelude::*; use crate::types::payment::PaymentHash; static LOG_LEVEL_NAMES: [&'static str; 6] = ["GOSSIP", "TRACE", "DEBUG", "INFO", "WARN", "ERROR"]; @@ -160,8 +161,101 @@ impl_record!(, 'a); /// A trait encapsulating the operations required of a logger. pub trait Logger { + /// The user-defined type returned to capture a span with its lifetime. + #[cfg(feature = "std")] + type UserSpan: 'static + Send; + /// The user-defined type returned to capture a span with its lifetime. + #[cfg(not(feature = "std"))] + type UserSpan: 'static; + /// Logs the [`Record`]. fn log(&self, record: Record); + /// Indicates the start of a span of computation. + /// The returned object will be dropped when the span ends. + fn start(&self, span: Span, parent: Option<&Self::UserSpan>) -> Self::UserSpan; +} + +/// A span of computation in time. +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +pub enum Span { + /// Span representing the lifetime of an inbound HTLC. + InboundHTLC { + /// Channel ID. + channel_id: ChannelId, + /// HTLC ID. + htlc_id: u64, + }, + /// Span representing the lifetime of an outbound HTLC. + OutboundHTLC { + /// Channel ID. + channel_id: ChannelId, + /// HTLC ID. + htlc_id: u64, + }, + /// Span representing the downstream forward of an incoming HTLC. + Forward, + /// Span representing an inbound HTLC state in the commitment state machine. + InboundHTLCState { + /// The state. + state: Option, + }, + /// Span representing an outbound HTLC state in the commitment state machine. + OutboundHTLCState { + /// The state. + state: OutboundHTLCStateDetails, + }, + /// Span representing sending an outbound Ping and receiving an inbound Pong. + PingPong { + /// The node id of the counterparty. + node_id: PublicKey, + }, +} + +#[cfg(feature = "std")] +pub(crate) struct BoxedSpan(Box); +#[cfg(not(feature = "std"))] +pub(crate) struct BoxedSpan(Box); + +impl BoxedSpan { + #[cfg(feature = "std")] + pub fn new(s: S) -> Self { + BoxedSpan(Box::new(s)) + } + #[cfg(not(feature = "std"))] + pub fn new(s: S) -> Self { + BoxedSpan(Box::new(s)) + } + + pub fn as_user_span_ref(&self) -> Option<&<::Target as Logger>::UserSpan> + where + L::Target: Logger, + { + self.0.downcast_ref() + } +} + +// A set of implementations for tests ignoring spans. +// Note that cloning just creates a dummy span. + +#[cfg(test)] +impl core::fmt::Debug for BoxedSpan { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "boxed span") + } +} + +#[cfg(test)] +impl PartialEq for BoxedSpan { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +#[cfg(test)] +impl Clone for BoxedSpan { + fn clone(&self) -> Self { + BoxedSpan::new(()) + } } /// Adds relevant context to a [`Record`] before passing it to the wrapped [`Logger`]. 
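For implementors, the new `Logger` contract shown above boils down to returning a `UserSpan` value from `start` and treating its `Drop` as the end of the span. A minimal sketch of a span-aware logger under that contract -- the `TimingLogger`/`TimedSpan` names, the `println!` output, and the `std` dependency are illustrative assumptions, not part of this patch:

use lightning::util::logger::{Logger, Record, Span};
use std::time::Instant;

struct TimingLogger;

/// Held by LDK for the duration of the span; dropping it marks the span's end.
struct TimedSpan {
	span: Span,
	started_at: Instant,
}

impl Drop for TimedSpan {
	fn drop(&mut self) {
		println!("span {:?} ended after {:?}", self.span, self.started_at.elapsed());
	}
}

impl Logger for TimingLogger {
	type UserSpan = TimedSpan;

	fn log(&self, record: Record) {
		println!("{} {}", record.level, record.args);
	}

	fn start(&self, span: Span, parent: Option<&TimedSpan>) -> TimedSpan {
		// The parent, if any, lets an implementation build a trace tree; here it is only logged.
		if let Some(parent) = parent {
			println!("span {:?} started under {:?}", span, parent.span);
		} else {
			println!("span {:?} started", span);
		}
		TimedSpan { span, started_at: Instant::now() }
	}
}

Implementations that do not care about spans can instead use `type UserSpan = ();` with an empty `start`, as the `DummyLogger` and doc-example `FakeLogger` updates elsewhere in this patch do.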
@@ -186,6 +280,8 @@ impl<'a, L: Deref> Logger for WithContext<'a, L> where L::Target: Logger, { + type UserSpan = <::Target as Logger>::UserSpan; + fn log(&self, mut record: Record) { if self.peer_id.is_some() { record.peer_id = self.peer_id @@ -198,6 +294,10 @@ where } self.logger.log(record) } + + fn start(&self, span: Span, parent: Option<&Self::UserSpan>) -> Self::UserSpan { + self.logger.start(span, parent) + } } impl<'a, L: Deref> WithContext<'a, L> @@ -278,11 +378,11 @@ mod tests { } struct WrapperLog { - logger: Arc, + logger: Arc::UserSpan>>, } impl WrapperLog { - fn new(logger: Arc) -> WrapperLog { + fn new(logger: Arc::UserSpan>>) -> WrapperLog { WrapperLog { logger } } @@ -299,7 +399,7 @@ mod tests { #[test] fn test_logging_macros() { let logger = TestLogger::new(); - let logger: Arc = Arc::new(logger); + let logger: Arc::UserSpan>> = Arc::new(logger); let wrapper = WrapperLog::new(Arc::clone(&logger)); wrapper.call_macros(); } diff --git a/lightning/src/util/test_utils.rs b/lightning/src/util/test_utils.rs index f90bfb97ef7..45acea3e6fe 100644 --- a/lightning/src/util/test_utils.rs +++ b/lightning/src/util/test_utils.rs @@ -53,7 +53,7 @@ use crate::util::config::UserConfig; use crate::util::dyn_signer::{ DynKeysInterface, DynKeysInterfaceTrait, DynPhantomKeysInterface, DynSigner, }; -use crate::util::logger::{Logger, Record}; +use crate::util::logger::{Logger, Record, Span}; #[cfg(feature = "std")] use crate::util::mut_global::MutGlobal; use crate::util::persist::{KVStore, MonitorName}; @@ -1373,10 +1373,48 @@ impl BaseMessageHandler for TestRoutingMessageHandler { } } +pub struct TestUserSpan { + span: Span, + span_boundaries: Arc>>, +} + +impl TestUserSpan { + fn new( + span: Span, parent_span: Option<&TestUserSpan>, + span_boundaries: Arc>>, + ) -> Self { + { + let mut span_boundaries_guard = span_boundaries.lock().unwrap(); + span_boundaries_guard + .push(TestSpanBoundary::Start(span.clone(), parent_span.map(|s| s.span.clone()))); + core::mem::drop(span_boundaries_guard); + } + TestUserSpan { span, span_boundaries } + } +} + +impl Drop for TestUserSpan { + fn drop(&mut self) { + if std::thread::panicking() { + return; + } + let mut span_boundaries = self.span_boundaries.lock().unwrap(); + span_boundaries.push(TestSpanBoundary::End(self.span.clone())); + core::mem::drop(span_boundaries); + } +} + +#[derive(Debug, Eq, PartialEq)] +pub enum TestSpanBoundary { + Start(Span, Option), + End(Span), +} + pub struct TestLogger { pub(crate) id: String, pub lines: Mutex>, pub context: Mutex, Option), usize>>, + pub span_boundaries: Arc>>, } impl TestLogger { @@ -1386,7 +1424,8 @@ impl TestLogger { pub fn with_id(id: String) -> TestLogger { let lines = Mutex::new(new_hash_map()); let context = Mutex::new(new_hash_map()); - TestLogger { id, lines, context } + let span_boundaries = Arc::new(Mutex::new(Vec::new())); + TestLogger { id, lines, context, span_boundaries } } pub fn assert_log(&self, module: &str, line: String, count: usize) { let log_entries = self.lines.lock().unwrap(); @@ -1433,6 +1472,8 @@ impl TestLogger { } impl Logger for TestLogger { + type UserSpan = TestUserSpan; + fn log(&self, record: Record) { let s = format!( "{:<55} {}", @@ -1473,6 +1514,10 @@ impl Logger for TestLogger { println!("{}", s); } } + + fn start(&self, span: Span, parent: Option<&TestUserSpan>) -> TestUserSpan { + TestUserSpan::new(span, parent, self.span_boundaries.clone()) + } } pub struct TestNodeSigner {
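The `TestLogger` changes above record span boundaries so tests can assert on them; a downstream logger could instead hand the spans to an external tracer. A rough sketch of bridging into the `tracing` crate (0.1 API assumed) -- the `TracingLogger` name, the span/field names, and the fixed `INFO` level are illustrative assumptions, not part of this patch:

use lightning::util::logger::{Logger, Record, Span};

struct TracingLogger;

impl Logger for TracingLogger {
	type UserSpan = tracing::Span;

	fn log(&self, record: Record) {
		// Forward LDK records as `tracing` events; a real bridge would also map levels.
		tracing::info!(target: "ldk", "{}", record.args);
	}

	fn start(&self, span: Span, parent: Option<&Self::UserSpan>) -> Self::UserSpan {
		// LDK drops the returned handle when the computation finishes, which is when a
		// `tracing` subscriber observes the span closing.
		match parent {
			Some(parent) => tracing::span!(parent: parent, tracing::Level::INFO, "ldk_span", kind = ?span),
			None => tracing::span!(tracing::Level::INFO, "ldk_span", kind = ?span),
		}
	}
}

Because the returned `tracing::Span` is never entered, this only captures when each LDK span starts and ends; richer presentation (per-span timings, parent/child visualization) is left to the subscriber.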